diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 11576eb6..00000000 --- a/.coveragerc +++ /dev/null @@ -1,9 +0,0 @@ -[run] -branch = True -source = qinling -omit = - qinling/tests/* - qinling/db/sqlalchemy/migration/alembic_migrations/versions/* - -[report] -ignore_errors = True diff --git a/.gitignore b/.gitignore deleted file mode 100644 index a1dbcfcd..00000000 --- a/.gitignore +++ /dev/null @@ -1,62 +0,0 @@ -*.py[cod] -*.sqlite - -# C extensions -*.so - -# Packages -*.egg -*.eggs -*.egg-info -dist -build -.venv -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.stestr/* -.tox -nosetests.xml -cover/* -.testrepository/ -subunit.log -.qinling.conf -AUTHORS -ChangeLog - -# Translations -*.mo - -# Mr Developer -.mr.developer.cfg -.project -.pydevproject -.idea -.DS_Store -etc/*.conf -etc/qinling.conf.sample -*.patch - -#Linux swap file -*.swp - -# Files created by releasenotes build -releasenotes/build - -# Files created by doc build -doc/source/api - -# Files created by API build -api-ref/build/ diff --git a/.mailmap b/.mailmap deleted file mode 100644 index 516ae6fe..00000000 --- a/.mailmap +++ /dev/null @@ -1,3 +0,0 @@ -# Format is: -# -# diff --git a/.stestr.conf b/.stestr.conf deleted file mode 100644 index b27ce3de..00000000 --- a/.stestr.conf +++ /dev/null @@ -1,3 +0,0 @@ -[DEFAULT] -test_path=${TEST_PATH:-./qinling/tests} -top_dir=./ diff --git a/.zuul.yaml b/.zuul.yaml deleted file mode 100644 index d5a32fbd..00000000 --- a/.zuul.yaml +++ /dev/null @@ -1,89 +0,0 @@ -- job: - name: qinling-tempest - parent: devstack-tempest - timeout: 7800 - required-projects: - - openstack/devstack-gate - - openstack/python-qinlingclient - - openstack/qinling - - openstack/tempest - irrelevant-files: - - ^.*\.rst$ - - ^doc/.* - - ^qinling/tests/unit/.*$ - - ^releasenotes/.*$ - - ^api-ref/.*$ - pre-run: playbooks/pre.yaml - post-run: playbooks/post.yaml - 
vars: - devstack_services: - # Disable unneeded services. - # These services are enabled in the devstack base job and are not - # needed for qinling. - etcd3: false - peakmem_tracker: false - tls-proxy: false - g-api: false - g-reg: false - n-api: false - n-api-meta: false - n-cauth: false - n-cond: false - n-cpu: false - n-novnc: false - n-obj: false - n-sch: false - placement-api: false - q-agt: false - q-dhcp: false - q-l3: false - q-meta: false - q-metering: false - q-svc: false - s-account: false - s-container: false - s-object: false - s-proxy: false - c-api: false - c-bak: false - c-sch: false - c-vol: false - cinder: false - devstack_plugins: - qinling: https://opendev.org/openstack/qinling - devstack_localrc: - TEMPEST_PLUGINS: /opt/stack/qinling - USE_PYTHON3: True - tox_envlist: all - tox_extra_args: --sitepackages - tempest_test_regex: ^(qinling_tempest_plugin.) - # Qinling's default replicas number is 3, some test cases need - # 2 workers, set concurrency to 2 to avoid - # "Not enough workers available" error. 
- tempest_concurrency: 2 - -- job: - name: qinling-tempest-centos7 - parent: qinling-tempest - nodeset: devstack-single-node-centos-7 - -- project: - templates: - - openstack-cover-jobs - - openstack-lower-constraints-jobs - - openstack-python3-wallaby-jobs - - check-requirements - - publish-openstack-docs-pti - - release-notes-jobs-python3 - check: - jobs: - - qinling-tempest: - voting: false - gate: - queue: qinling - jobs: - - qinling-tempest: - voting: false - experimental: - jobs: - - qinling-tempest-centos7 diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index ed405d2a..00000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,41 +0,0 @@ -If you would like to contribute to the development of OpenStack, you must -follow the steps in this page: - - https://docs.openstack.org/infra/manual/developers.html - -If you already have a good understanding of how the system works and your -OpenStack accounts are set up, you can skip to the development workflow -section of this documentation to learn how changes to OpenStack should be -submitted for review via the Gerrit tool: - - https://docs.openstack.org/infra/manual/developers.html#development-workflow - -Pull requests submitted through GitHub will be ignored. - -How to file a bug -~~~~~~~~~~~~~~~~~ - -* Go to - `Qinling project storyboard `_, - click ``Add story``. -* Input the bug title and description, be aware the Markdown formatting is - supported, it's useful if you want to paste code or log. After completion, - click ``Save Changes``. -* In the created story page, add a tag named 'bug' in the ``Tags`` section. -* Now, you can see the new bug in - `Qinling project dashboard `_. - -How to propose a feature -~~~~~~~~~~~~~~~~~~~~~~~~ - -* Go to - `Qinling project storyboard `_, - click ``Add story``. -* Input the feature title and description, be aware the Markdown formatting is - supported, it's useful if you want to paste code or log. After completion, - click ``Save Changes``. 
-* In the created story page, add a tag named 'feature' in the ``Tags`` section. -* You can optionally break the story into different tasks so that the whole - story can be tracked properly in different patches. -* Now, you can see the new feature in - `Qinling project dashboard `_. diff --git a/HACKING.rst b/HACKING.rst deleted file mode 100644 index dd9c4ef4..00000000 --- a/HACKING.rst +++ /dev/null @@ -1,4 +0,0 @@ -qinling Style Commandments -=============================================== - -Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 68c771a0..00000000 --- a/LICENSE +++ /dev/null @@ -1,176 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - diff --git a/README.rst b/README.rst index 334b92e4..86e34d67 100644 --- a/README.rst +++ b/README.rst @@ -1,24 +1,10 @@ -======= -Qinling -======= +This project is no longer maintained. -.. note:: +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". - Qinling (is pronounced "tchinling") refers to Qinling Mountains in southern - Shaanxi Province in China. 
The mountains provide a natural boundary between - North and South China and support a huge variety of plant and wildlife, some - of which is found nowhere else on Earth. - -Qinling is Function as a Service for OpenStack. This project aims to provide a -platform to support serverless functions (like AWS Lambda). Qinling supports -different container orchestration platforms (Kubernetes/Swarm, etc.) and -different function package storage backends (local/Swift/S3) by nature using -plugin mechanism. - -* Free software: under the `Apache license `_ -* Documentation: https://docs.openstack.org/qinling/latest/ -* Source: https://opendev.org/openstack/qinling -* Features: https://storyboard.openstack.org/#!/project/927 -* Bug Track: https://storyboard.openstack.org/#!/project/927 -* Release notes: https://docs.openstack.org/releasenotes/qinling/ -* IRC channel on Freenode: #openstack-qinling +For any further questions, please email +openstack-discuss@lists.openstack.org or join #openstack-dev on +Freenode. diff --git a/api-ref/source/aliases.inc b/api-ref/source/aliases.inc deleted file mode 100644 index edc19101..00000000 --- a/api-ref/source/aliases.inc +++ /dev/null @@ -1,265 +0,0 @@ -.. -*- rst -*- - -======== - Aliases -======== - -Lists, creates, updates, shows details and deletes aliases. - -Function aliases are like pointers to the specific function versions. -By using aliases, you can access the specific version of a function an -alias is pointing to without having to know the specific version the alias -is pointing to. - - -Create an alias -=============== - -.. rest_method:: POST /v1/aliases - -Create ana alias. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: function_id - - version: alias_version - - description: alias_description - -Request Example ---------------- - -.. literalinclude:: samples/aliases/create-alias-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: alias_name - - description: alias_description - - function_id: function_id - - version_number: alias_version - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/aliases/create-alias-response.json - :language: javascript - - -List aliases -============ - -.. rest_method:: GET /v1/aliases - -List aliases. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: alias_name - - description: alias_description - - function_id: function_id - - version_number: alias_version - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/aliases/list-aliases-response.json - :language: javascript - - -Show an alias -============= - -.. rest_method:: GET /v1/aliases/{name} - -Show an alias. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: path_alias_name - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - name: alias_name - - description: alias_description - - function_id: function_id - - version_number: alias_version - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/aliases/show-alias-response.json - :language: javascript - - -Update an alias -=============== - -.. rest_method:: PUT /v1/aliases/{name} - -Update an alias. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: path_alias_name - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: alias_name - - description: alias_description - - function_id: function_id - - version_number: alias_version - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Request Example ---------------- - -.. literalinclude:: samples/aliases/update-alias-request.json - :language: javascript - -Response Example ----------------- - -.. literalinclude:: samples/aliases/update-alias-response.json - :language: text - - -Delete an alias -=============== - -.. rest_method:: DELETE /v1/aliases/{name} - -Delete an alias. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: path_alias_name - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/aliases/delete-alias-response.text - :language: text diff --git a/api-ref/source/api-versions.inc b/api-ref/source/api-versions.inc deleted file mode 100644 index 6759dd55..00000000 --- a/api-ref/source/api-versions.inc +++ /dev/null @@ -1,31 +0,0 @@ -============ -API Versions -============ - -Show all enabled API versions - -List all API versions -===================== - -.. rest_method:: GET / - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -No parameters needed - -Response Example ----------------- - -.. literalinclude:: samples/api-versions/get-versions-response.json - :language: javascript diff --git a/api-ref/source/conf.py b/api-ref/source/conf.py deleted file mode 100644 index 003cd971..00000000 --- a/api-ref/source/conf.py +++ /dev/null @@ -1,206 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# Qinling documentation build configuration file -# -# This file is execfile()d with the current directory set to -# its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
- -import os -import sys - -extensions = [ - 'os_api_ref', - 'openstackdocstheme', -] - -html_theme = 'openstackdocs' -html_theme_options = { - "sidebar_mode": "toc", -} - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('../../')) -sys.path.insert(0, os.path.abspath('../')) -sys.path.insert(0, os.path.abspath('./')) - -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -# -# source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -copyright = u'2010-present, OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# The reST default role (used for this markup: `text`) to use -# for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = False - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'native' - -# openstackdocstheme options -openstackdocs_repo_name = 'openstack/qinling' -openstackdocs_use_storyboard = True - -# -- Options for man page output ---------------------------------------------- - -# Grouping the document tree for man pages. -# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual' - - -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -# html_theme_path = ["."] -# html_theme = '_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = ['_static'] - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. 
-# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_use_modindex = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'qinlingdoc' - - -# -- Options for LaTeX output ------------------------------------------------- - -# The paper size ('letter' or 'a4'). -# latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -# latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', 'Qinling.tex', - u'OpenStack Function Engine API Documentation', - u'OpenStack Foundation', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# Additional stuff for the LaTeX preamble. -# latex_preamble = '' - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. 
-# latex_use_modindex = True diff --git a/api-ref/source/executions.inc b/api-ref/source/executions.inc deleted file mode 100644 index 155b105e..00000000 --- a/api-ref/source/executions.inc +++ /dev/null @@ -1,308 +0,0 @@ -.. -*- rst -*- - -=========== - Executions -=========== - -Lists, creates, shows details for, and deletes function executions. - -An execution runs the function and stores the return of the function. -The execution gets the returns and the prints. - - -Create an execution -==================== - -.. rest_method:: POST /v1/executions - -Create an execution. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: execution_function_id - - function_alias: execution_function_alias - - input: execution_input - - description: execution_description - - function_version: execution_function_version - - sync: execution_sync - -Request Example ---------------- - -.. literalinclude:: samples/executions/create-execution-request.json - :language: javascript - -.. literalinclude:: samples/executions/create-execution-alias-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - function_id: execution_function_id - - input: execution_input - - description: execution_description - - function_version: execution_function_version - - sync: execution_sync - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/executions/create-execution-response.json - :language: javascript - - -List executions -=============== - -.. rest_method:: GET /v1/executions - -List executions. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. 
rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - function_id: execution_function_id - - input: execution_input - - description: execution_description - - function_version: execution_function_version - - sync: execution_sync - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/executions/list-executions-response.json - :language: javascript - - -List executions by filters -========================== - -.. rest_method:: GET /v1/executions?{filter}={function_id} - -List executions by filters.. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - filter: path_execution_filter - - function_id: path_function_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - function_id: execution_function_id - - input: execution_input - - description: execution_description - - function_version: execution_function_version - - sync: execution_sync - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/executions/list-executions-filters-response.json - :language: javascript - - -Show an execution -================= - -.. rest_method:: GET /v1/executions/{execution_id} - -Show an execution. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - execution_id: path_execution_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - function_id: execution_function_id - - input: execution_input - - description: execution_description - - function_version: execution_function_version - - sync: execution_sync - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/executions/show-execution-response.json - :language: javascript - - -Show an execution log -===================== - -.. rest_method:: GET /v1/executions/{execution_id}/log - -Show an execution log. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - execution_id: path_execution_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. literalinclude:: samples/executions/show-execution-log-response.json - :language: javascript - - -Delete an execution -=================== - -.. rest_method:: DELETE /v1/executions/{execution_id} - -Delete an execution. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - execution_id: path_execution_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/executions/delete-execution-response.text - :language: text diff --git a/api-ref/source/functions.inc b/api-ref/source/functions.inc deleted file mode 100644 index 1fa18329..00000000 --- a/api-ref/source/functions.inc +++ /dev/null @@ -1,524 +0,0 @@ -.. -*- rst -*- - -========== - Functions -========== - -Lists, creates, shows details for, downloads, detaches, updates, -scales and deletes functions. - -Function contains the source code that will be executed into the runtime. -The source code langage within the function should match the runtime. - - -Create a function -================= - -.. rest_method:: POST /v1/functions - -Create a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: function_name - - description: function_description - - runtime_id: function_runtime_id - - code: function_code - - entry: function_entry - - cpu: function_cpu - - memory: function_memory - - timeout: function_timeout - - package: function_package - -Request Example ---------------- - -Package -####### - -.. literalinclude:: samples/functions/create-function-package-request.json - :language: javascript - -.. literalinclude:: samples/functions/create-function-package-curl-request.text - :language: text - -Swift -##### - -.. literalinclude:: samples/functions/create-function-swift-request.json - :language: javascript - -Image -##### - -.. literalinclude:: samples/functions/create-function-image-request.json - :language: javascript - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - name: function_name - - description: function_description - - runtime_id: function_runtime_id - - code: function_code - - entry: function_entry - - cpu: function_cpu - - memory: function_memory - - timeout: function_timeout - - package: function_package - - latest_version: function_latest_version - - count: function_count - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -Package -####### - -.. literalinclude:: samples/functions/create-function-package-response.json - :language: javascript - -Swift -##### - -.. literalinclude:: samples/functions/create-function-swift-response.json - :language: javascript - -Image -##### - -.. literalinclude:: samples/functions/create-function-image-response.json - :language: javascript - - -List functions -============== - -.. rest_method:: GET /v1/functions - -List functions. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: function_name - - description: function_description - - runtime_id: function_runtime_id - - code: function_code - - count: function_count - - cpu: function_cpu - - memory: function_memory - - entry: function_entry - - timeout: function_timeout - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/functions/list-functions-response.json - :language: javascript - - -Show a function -=============== - -.. rest_method:: GET /v1/functions/{function_id} - -Show a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. 
rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: function_name - - description: function_description - - runtime_id: function_runtime_id - - code: function_code - - count: function_count - - cpu: function_cpu - - memory: function_memory - - entry: function_entry - - timeout: function_timeout - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/functions/show-function-response.json - :language: javascript - - -Update a function -================= - -.. rest_method:: PUT /v1/functions/{function_id} - -Update a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - name: function_name_update - - description: function_description - - runtime_id: function_runtime_id_update - - code: function_code_update - - entry: function_entry_update - - cpu: function_cpu - - memory: function_memory - - timeout: function_timeout - - package: function_package - - count: function_count - -Request Example ---------------- - -Package -####### - -.. literalinclude:: samples/functions/update-function-package-request.json - :language: javascript - -.. literalinclude:: samples/functions/update-function-package-curl-request.text - :language: text - -Swift -##### - -.. literalinclude:: samples/functions/update-function-swift-request.json - :language: javascript - -Image -##### - -It is not possible to update an ``image`` function. - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - name: function_name - - description: function_description - - runtime_id: function_runtime_id - - code: function_code - - count: function_count - - cpu: function_cpu - - memory: function_memory - - entry: function_entry - - timeout: function_timeout - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -Package -####### - -.. literalinclude:: samples/functions/update-function-package-response.json - :language: javascript - -Swift -##### - -.. literalinclude:: samples/functions/update-function-swift-response.json - :language: javascript - -Image -##### - -None - - -Download a function -=================== - -.. rest_method:: GET /v1/functions/{function_id}?download=true - -Download a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. literalinclude:: samples/functions/download-function-response.text - :language: text - - -Detach a function from a worker -=============================== - -.. rest_method:: POST /v1/functions/{function_id}/detach - -Detach a function from a worker. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. literalinclude:: samples/functions/detach-function-response.text - :language: text - - -Scale up a function -=================== - -.. 
rest_method:: POST /v1/functions/{function_id}/scale_up - -Scale up a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - count: function_scale_count_up - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. literalinclude:: samples/functions/scaleup-function-response.text - :language: text - - -Scale down a function -===================== - -.. rest_method:: POST /v1/functions/{function_id}/scale_down - -Scale down a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - count: function_scale_count_down - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. literalinclude:: samples/functions/scaledown-function-response.text - :language: text - - -Delete a function -================= - -.. rest_method:: DELETE /v1/functions/{function_id} - -Delete a function. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/functions/delete-function-response.text - :language: text diff --git a/api-ref/source/index.rst b/api-ref/source/index.rst deleted file mode 100644 index e74460d8..00000000 --- a/api-ref/source/index.rst +++ /dev/null @@ -1,17 +0,0 @@ -:tocdepth: 2 - -==================== - Function Engine API -==================== - -.. rest_expand_all:: - -.. include:: urls.inc -.. include:: api-versions.inc -.. include:: runtimes.inc -.. include:: functions.inc -.. include:: executions.inc -.. include:: versions.inc -.. include:: aliases.inc -.. include:: webhooks.inc -.. include:: jobs.inc diff --git a/api-ref/source/jobs.inc b/api-ref/source/jobs.inc deleted file mode 100644 index ba449b47..00000000 --- a/api-ref/source/jobs.inc +++ /dev/null @@ -1,279 +0,0 @@ -.. -*- rst -*- - -===== - Jobs -===== - -Lists, creates, updates, shows details and deletes jobs. - -Jobs allows to run periodically functions based on time period. -Time period is based on cron syntax. - - -Create a job -============ - -.. rest_method:: POST /v1/jobs - -Create a job. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: job_name - - function_id: job_function_id - - function_alias: job_function_alias - - function_version: job_function_version - - function_input: job_input - - pattern: job_pattern - - count: job_count - -Request Example ---------------- - -.. literalinclude:: samples/jobs/create-job-request.json - :language: javascript - -.. literalinclude:: samples/jobs/create-job-alias-request.json - :language: javascript - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - name: job_name - - function_id: job_function_id - - function_version: job_function_version - - function_input: job_input - - pattern: job_pattern - - count: job_count - - first_execution_time: job_first_execution - - next_execution_time: job_next_execution - - status: status - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/jobs/create-job-response.json - :language: javascript - - -List jobs -========= - -.. rest_method:: GET /v1/jobs - -List jobs. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: job_name - - function_id: job_function_id - - function_version: job_function_version - - function_input: job_input - - pattern: job_pattern - - count: job_count - - first_execution_time: job_first_execution - - next_execution_time: job_next_execution - - status: status - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/jobs/list-jobs-response.json - :language: javascript - - -Show a job -========== - -.. rest_method:: GET /v1/jobs/{job_id} - -Show a job. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - job_id: path_job_id - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - name: job_name - - function_id: job_function_id - - function_version: job_function_version - - function_input: job_input - - pattern: job_pattern - - count: job_count - - first_execution_time: job_first_execution - - next_execution_time: job_next_execution - - status: status - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/jobs/show-job-response.json - :language: javascript - - -Update a job -============ - -.. rest_method:: PUT /v1/jobs/{job_id} - -Update a job. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - job_id: path_job_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: job_name - - function_input: job_input - - pattern: job_pattern_update - - next_execution_time: job_next_execution_update - - status: job_status_update - -Request Example ---------------- - -.. literalinclude:: samples/jobs/update-job-request.json - :language: javascript - -Response Example ----------------- - -.. literalinclude:: samples/jobs/update-job-response.json - :language: javascript - - -Delete a job -============ - -.. rest_method:: DELETE /v1/jobs/{job_id} - -Delete a job. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - job_id: path_job_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/jobs/delete-job-response.text - :language: text diff --git a/api-ref/source/parameters.yaml b/api-ref/source/parameters.yaml deleted file mode 100644 index 66acd901..00000000 --- a/api-ref/source/parameters.yaml +++ /dev/null @@ -1,517 +0,0 @@ -#################### -# Header Variables # -#################### - -x-auth-token: - description: | - Token used to identify the user from Keystone - in: header - required: false - type: string - -#################### -# Path Variables # -#################### - -path_alias_name: - description: | - Name of the alias - in: path - required: true - type: string - -path_execution_filter: - description: | - Execution filter, e.g. "``function_id``, ``function_version``, etc..." - in: path - required: true - type: string - -path_execution_id: - description: | - ID for the execution - in: path - required: true - type: uuid - -path_function_id: - description: | - ID for the function - in: path - required: true - type: uuid - -path_job_id: - description: | - ID for the job - in: path - required: true - type: uuid - -path_runtime_id: - description: | - ID for the runtime - in: path - required: true - type: uuid - -path_version_number: - description: | - Version number - in: path - required: true - type: integer - -path_webhook_id: - description: | - ID for the webhook - in: path - required: true - type: uuid - -#################### -# Body Variables # -#################### - -alias_description: - description: | - Description for the alias - in: body - required: false - type: string - -alias_name: - description: | - Name of the alias - in: body - required: true - type: string - -alias_version: - description: | - Version of the function to point the alias - in: body - required: true - type: integer - -created_at: - description: | - Date / Time when resource was created - in: body - required: false - type: datestamp - -execution_description: - description: | - Description for the execution - in: body - required: 
false - type: string - -execution_function_alias: - description: | - Alias name of the function (if an alias has been created) - in: body - required: false - type: string - -execution_function_id: - description: | - ID for the function, if an alias is used then ``function_id`` is - not required - in: body - required: true - type: uuid - -execution_function_version: - description: | - Version of the function to execute, default is ``0``. If an alias is - used then ``function_version`` is not required - in: body - required: false - type: integer - -execution_input: - description: | - Input data to pass to the function, e.g. ``{"name": "foo"}`` - in: body - required: false - type: JSON - -execution_sync: - description: | - Execution is sync or async, default is ``true`` - in: body - required: false - type: bool - -function_code: - description: | - Source code type, e.g. "``package``", "``swift``", "``image``" - in: body - required: true - type: JSON - -function_code_update: - description: | - Source code type, e.g. "``package``", "``swift``", "``image``" - - Package function can't have this value updated. - in: body - required: false - type: JSON - -function_count: - description: | - Total of time the function has been executed - in: body - required: true - type: integer - -function_cpu: - description: | - Max CPU usage during the execution - in: body - required: false - type: integer - -function_description: - description: | - Description for the function - in: body - required: false - type: string - -function_entry: - description: | - Function entry point, e.g. "``hello_world.main``" - - Not required if the source is "``image``" - in: body - required: true - type: string - -function_entry_update: - description: | - Function entry point, e.g. 
"``hello_world.main``" - in: body - required: false - type: string - -function_id: - description: | - ID for the function - in: body - required: true - type: uuid - -function_latest_version: - description: | - Latest version created based on this function - in: body - required: false - type: integer - -function_memory: - description: | - Max memory usage during the execution - in: body - required: false - type: integer - -function_name: - description: | - Function name - in: body - required: true - type: string - -function_name_update: - description: | - Function name - in: body - required: false - type: string - -function_package: - description: | - Package to upload, e.g. "``package=@./hello_world.zip``" - - Required only if '``"code": {"source": "package"}``' - in: body - required: false - type: string - -function_runtime_id: - description: | - Runtime ID used by the function - in: body - required: true - type: uuid - -function_runtime_id_update: - description: | - Runtime ID used by the function - in: body - required: false - type: uuid - -function_scale_count_down: - description: | - Number of workers to scale down a function, default is ``1`` - in: body - required: false - type: integer - -function_scale_count_up: - description: | - Number of workers to scale up a function, default is ``1`` - in: body - required: false - type: integer - -function_timeout: - description: | - Timeout in second before the execution fails - in: body - required: false - type: integer - -id: - description: | - ID for the resource - in: body - required: true - type: uuid - -job_count: - description: | - Number of time the function should be executed, default is ``unlimited`` - in: body - required: false - type: integer - -job_first_execution: - description: | - First time the function will be executed - in: body - required: true - type: string - -job_function_alias: - description: | - Alias name of the function (if an alias has been created) - in: body - required: false - type: 
string - -job_function_id: - description: | - ID for the function, if an alias is used then ``function_id`` is - not required - in: body - required: true - type: uuid - -job_function_version: - description: | - Version of the function to execute, default is ``0``. If an alias is - used then ``function_version`` is not required - in: body - required: false - type: integer - -job_input: - description: | - Input data to pass to the function, e.g. ``{"name": "foo"}`` - in: body - required: false - type: JSON - -job_name: - description: | - Name of the job - in: body - required: true - type: string - -job_next_execution: - description: | - Next time the function will be executed - in: body - required: true - type: string - -job_next_execution_update: - description: | - Next time the function will be executed, e.g. "``2019-06-07 22:41:00``" - in: body - required: true - type: string - -job_pattern: - description: | - Cron pattern, e.g. "``* * * * *``" is equal to every minutes - in: body - required: true - type: string - -job_pattern_update: - description: | - Cron pattern, e.g. "``* * * * *``" is equal to every minutes - in: body - required: false - type: string - -job_status_update: - description: | - Status of the job, e.g. "``paused``, ``running``, ``canceled``, ``done``" - in: body - required: false - type: string - -project_id: - description: | - ID for the project that owns the resource - in: body - required: true - type: uuid - -runtime_capacity: - description: | - Number of workers available and currently used - in: body - required: false - type: string - -runtime_description: - description: | - Description for the runtime - in: body - required: false - type: string - -runtime_id: - description: | - ID for the runtime - in: body - required: true - type: uuid - -runtime_image: - description: | - Docker image used to build the runtime, e.g. 
"openstackqinling/python3-runtime" - in: body - required: true - type: string - -runtime_image_update: - description: | - Docker image used to build the runtime, e.g. "openstackqinling/python3-runtime" - in: body - required: false - type: string - -runtime_is_public: - description: | - Make the runtime public or not, if public then the runtime will be - available for all the tenant. - The runtime is public by default. - in: body - required: false - type: bool - -runtime_name: - description: | - Runtime name - in: body - required: true - type: string - -runtime_name_update: - description: | - Runtime name - in: body - required: false - type: string - -runtime_trusted: - description: | - Define the runtime as trusted or not, depending the value Kubernetes - will apply some annotations like ``io.kubernetes.cri-o.TrustedSandbox`` - in the pod specification to choose the underlying container runtime. - The runtime is trusted by default. - in: body - required: false - type: bool - -status: - description: | - Status of the resource - in: body - required: true - type: enum - -updated_at: - description: | - Date / Time when resource last updated - in: body - required: false - type: datestamp - -version_count: - description: | - Total of time the version has been executed - in: body - required: false - type: integer - -version_description: - description: | - Description for the version - in: body - required: false - type: string - -version_number: - description: | - Version of the function - in: body - required: false - type: integer - -webhook_description: - description: | - Description for the webhook - in: body - required: false - type: string - -webhook_function_alias: - description: | - Alias name of the function (if an alias has been created) - in: body - required: false - type: string - -webhook_function_id: - description: | - ID for the function, if an alias is used then ``function_id`` is - not required - in: body - required: true - type: uuid - 
-webhook_function_version: - description: | - Version of the function - in: body - required: false - type: integer - -webhook_url: - description: | - Webhook URL - in: body - required: true - type: string diff --git a/api-ref/source/runtimes.inc b/api-ref/source/runtimes.inc deleted file mode 100644 index cc1c6336..00000000 --- a/api-ref/source/runtimes.inc +++ /dev/null @@ -1,317 +0,0 @@ -.. -*- rst -*- - -========= - Runtimes -========= - -Lists, creates, updates, shows details and deletes runtimes. - -The runtime is programming language support. Each runtime is encapsulated -in a container image which is used for creating the runtime container. -Qinling communicates with the container by sending HTTP requests and runs -user function inside. - - -Create a runtime -================ - -.. rest_method:: POST /v1/runtimes - -Create a runtime. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - name: runtime_name - - image: runtime_image - - description: runtime_description - - is_public: runtime_is_public - - trusted: runtime_trusted - -Request Example ---------------- - -.. literalinclude:: samples/runtimes/create-runtime-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: runtime_name - - image: runtime_image - - description: runtime_description - - is_public: runtime_is_public - - trusted: runtime_trusted - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/runtimes/create-runtime-response.json - :language: javascript - - -List runtimes -============= - -.. rest_method:: GET /v1/runtimes - -List runtimes. - -Response Codes --------------- - -.. 
rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: runtime_name - - image: runtime_image - - description: runtime_description - - is_public: runtime_is_public - - trusted: runtime_trusted - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/runtimes/list-runtimes-response.json - :language: javascript - - -Show a runtime -============== - -.. rest_method:: GET /v1/runtimes/{runtime_id} - -Show a runtime. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - runtime_id: path_runtime_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: runtime_name - - image: runtime_image - - description: runtime_description - - is_public: runtime_is_public - - trusted: runtime_trusted - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/runtimes/show-runtime-response.json - :language: javascript - - -Show a runtime pool -=================== - -.. rest_method:: GET /v1/runtimes/{runtime_id}/pool - -Show a runtime pool. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - runtime_id: runtime_id - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - name: id - - capacity: runtime_capacity - -Response Example ----------------- - -.. literalinclude:: samples/runtimes/show-runtime-pool-response.json - :language: javascript - - -Update a runtime -================ - -.. rest_method:: PUT /v1/runtimes/{runtime_id} - -Update a runtime. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - runtime_id: path_runtime_id - - name: runtime_name_update - - image: runtime_image_update - - description: runtime_description - -Request Example ---------------- - -.. literalinclude:: samples/runtimes/update-runtime-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - name: runtime_name - - image: runtime_image - - description: runtime_description - - is_public: runtime_is_public - - trusted: runtime_trusted - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/runtimes/update-runtime-response.json - :language: javascript - - -Delete a runtime -================ - -.. rest_method:: DELETE /v1/runtimes/{runtime_id} - -Delete a runtime. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - runtime_id: path_runtime_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/runtimes/delete-runtime-response.text - :language: text diff --git a/api-ref/source/samples/aliases/create-alias-request.json b/api-ref/source/samples/aliases/create-alias-request.json deleted file mode 100644 index 08fd7cf5..00000000 --- a/api-ref/source/samples/aliases/create-alias-request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "name": "hw-v2", - "function_version": 2, - "description": "Hello World, version 2" -} diff --git a/api-ref/source/samples/aliases/create-alias-response.json b/api-ref/source/samples/aliases/create-alias-response.json deleted file mode 100644 index 3a00779f..00000000 --- a/api-ref/source/samples/aliases/create-alias-response.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "name": "hw-v2", - "created_at": "2019-06-07 02:07:06.572982", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "function_version": 2, - "description": "Hello World, version 2" -} diff --git a/api-ref/source/samples/aliases/delete-alias-response.text b/api-ref/source/samples/aliases/delete-alias-response.text deleted file mode 100644 index a117b121..00000000 --- a/api-ref/source/samples/aliases/delete-alias-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete function_alias hw-v2 has been accepted. 
diff --git a/api-ref/source/samples/aliases/list-aliases-response.json b/api-ref/source/samples/aliases/list-aliases-response.json deleted file mode 100644 index 61dac25a..00000000 --- a/api-ref/source/samples/aliases/list-aliases-response.json +++ /dev/null @@ -1,20 +0,0 @@ -[ - { - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "name": "hw-v2", - "created_at": "2019-06-07 02:07:06", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "function_version": 2, - "description": "Hello World, version 2" - }, - { - "function_id": "ab003f7a-402a-4a2e-8f00-2f848bde800d", - "name": "hq-v1", - "created_at": "2019-06-07 02:15:16", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "function_version": 37, - "description": "Hello Qinling, version 37" - } -] diff --git a/api-ref/source/samples/aliases/show-alias-response.json b/api-ref/source/samples/aliases/show-alias-response.json deleted file mode 100644 index ff9800e4..00000000 --- a/api-ref/source/samples/aliases/show-alias-response.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "name": "hw-v2", - "created_at": "2019-06-07 02:07:06", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "function_version": 2, - "description": "Hello World, version 2" -} diff --git a/api-ref/source/samples/aliases/update-alias-request.json b/api-ref/source/samples/aliases/update-alias-request.json deleted file mode 100644 index 333f4a4e..00000000 --- a/api-ref/source/samples/aliases/update-alias-request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "function_id": "ab003f7a-402a-4a2e-8f00-2f848bde800d", - "name": "hq-v1", - "function_version": 38, - "description": "Hello Qinling, version 38" -} diff --git a/api-ref/source/samples/aliases/update-alias-response.json b/api-ref/source/samples/aliases/update-alias-response.json deleted file mode 100644 index b6148b4d..00000000 --- 
a/api-ref/source/samples/aliases/update-alias-response.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "function_id": "ab003f7a-402a-4a2e-8f00-2f848bde800d", - "name": "hq-v1", - "created_at": "2019-06-07 02:15:16", - "updated_at": "2019-06-07 02:32:14.097418", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "function_version": 38, - "description": "Hello Qinling, version 38" -} diff --git a/api-ref/source/samples/api-versions/get-versions-response.json b/api-ref/source/samples/api-versions/get-versions-response.json deleted file mode 100644 index 67bf31d2..00000000 --- a/api-ref/source/samples/api-versions/get-versions-response.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "versions": [ - { - "status": "CURRENT", - "id": "v1.0", - "links": [ - { - "href": "http://api-uat.ormuco.com:7070/v1", - "target": "v1", - "rel": "self" - } - ] - } - ] -} diff --git a/api-ref/source/samples/executions/create-execution-alias-request.json b/api-ref/source/samples/executions/create-execution-alias-request.json deleted file mode 100644 index f0d6bc4f..00000000 --- a/api-ref/source/samples/executions/create-execution-alias-request.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "description": "Execution of a function based on an alias", - "sync": true, - "input": "{\"name\": \"foo\"}", - "function_alias": "hq-production-function" -} diff --git a/api-ref/source/samples/executions/create-execution-request.json b/api-ref/source/samples/executions/create-execution-request.json deleted file mode 100644 index 1dc482f5..00000000 --- a/api-ref/source/samples/executions/create-execution-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "description": "The world should know Foo!", - "sync": true, - "function_version": 0, - "input": "{\"name\": \"foo\"}", - "function_id": "3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46" -} diff --git a/api-ref/source/samples/executions/create-execution-response.json b/api-ref/source/samples/executions/create-execution-response.json deleted file mode 100644 index 7a6a23a1..00000000 --- 
a/api-ref/source/samples/executions/create-execution-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": "The world should know Foo!", - "updated_at": "2019-06-06 23:13:37", - "created_at": "2019-06-06 23:13:37", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.044, \"output\": \"Hello, bar\"}", - "input": "{\"name\": \"foo\"}", - "function_id": "3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46", - "id": "62033613-6f02-4a6d-b569-7ba7fd18f8f7" -} diff --git a/api-ref/source/samples/executions/delete-execution-response.text b/api-ref/source/samples/executions/delete-execution-response.text deleted file mode 100644 index 97106e94..00000000 --- a/api-ref/source/samples/executions/delete-execution-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete execution 9b6d0e80-7c4b-49d1-bf11-f16999433337 has been accepted. diff --git a/api-ref/source/samples/executions/list-executions-filters-response.json b/api-ref/source/samples/executions/list-executions-filters-response.json deleted file mode 100644 index 8edb9065..00000000 --- a/api-ref/source/samples/executions/list-executions-filters-response.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": null, - "created_at": "2019-06-06 21:05:39", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.074, \"output\": \"Hello, World\"}", - "input": null, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "updated_at": "2019-06-06 21:05:41", - "id": "7dbb2ebc-9096-4e8e-9ded-0af8cd6cef60" - }, - { - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": null, - "created_at": "2019-06-06 23:50:41", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.183, \"output\": \"Hello, bar\"}", - "input": "{\"name\": \"bar\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", 
- "updated_at": "2019-06-06 23:50:42", - "id": "9b6d0e80-7c4b-49d1-bf11-f16999433337" - } -] diff --git a/api-ref/source/samples/executions/list-executions-response.json b/api-ref/source/samples/executions/list-executions-response.json deleted file mode 100644 index 66eebcbc..00000000 --- a/api-ref/source/samples/executions/list-executions-response.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": null, - "created_at": "2019-06-06 21:05:39", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.074, \"output\": \"Hello, World\"}", - "input": null, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "updated_at": "2019-06-06 21:05:41", - "id": "7dbb2ebc-9096-4e8e-9ded-0af8cd6cef60" - }, - { - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": "The world should know Foo!", - "created_at": "2019-06-06 23:15:30", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.032, \"output\": \"Hello, bar\"}", - "input": "{\"name\": \"bar\"}", - "function_id": "3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46", - "updated_at": "2019-06-06 23:15:30", - "id": "347bb366-95c9-49ab-8179-424c7b707124" - } -] diff --git a/api-ref/source/samples/executions/show-execution-log-response.json b/api-ref/source/samples/executions/show-execution-log-response.json deleted file mode 100644 index 2531fb28..00000000 --- a/api-ref/source/samples/executions/show-execution-log-response.json +++ /dev/null @@ -1,3 +0,0 @@ -Start execution: 347bb366-95c9-49ab-8179-424c7b707124 -Hello, World! 
-Finished execution: 347bb366-95c9-49ab-8179-424c7b707124 diff --git a/api-ref/source/samples/executions/show-execution-response.json b/api-ref/source/samples/executions/show-execution-response.json deleted file mode 100644 index 969a5cbd..00000000 --- a/api-ref/source/samples/executions/show-execution-response.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "status": "success", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": "The world should know Foo!", - "created_at": "2019-06-06 23:15:30", - "sync": true, - "function_version": 0, - "result": "{\"duration\": 0.032, \"output\": \"Hello, bar\"}", - "input": "{\"name\": \"bar\"}", - "function_id": "3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46", - "updated_at": "2019-06-06 23:15:30", - "id": "347bb366-95c9-49ab-8179-424c7b707124" -} diff --git a/api-ref/source/samples/functions/create-function-image-request.json b/api-ref/source/samples/functions/create-function-image-request.json deleted file mode 100644 index a0cfe105..00000000 --- a/api-ref/source/samples/functions/create-function-image-request.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "code": { - "source": "image", - "image": "docker-registry/hello-world" - }, - "description": "Hello World from Docker image", - "memory_size": 33554432, - "timeout": 15, - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/create-function-image-response.json b/api-ref/source/samples/functions/create-function-image-response.json deleted file mode 100644 index 6c0eba06..00000000 --- a/api-ref/source/samples/functions/create-function-image-response.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "count": 0, - "code": { - "source": "image", - "image": "docker-registry/hello-world" - }, - "description": "Hello World from Docker image", - "created_at": "2019-06-10 01:51:29.515424", - "updated_at": null, - "latest_version": 0, - "id": "28de9573-2397-4fa1-b627-60124395d1ff", - "memory_size": 33554432, 
- "timeout": 15, - "entry": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/create-function-package-curl-request.text b/api-ref/source/samples/functions/create-function-package-curl-request.text deleted file mode 100644 index 5c63cb7b..00000000 --- a/api-ref/source/samples/functions/create-function-package-curl-request.text +++ /dev/null @@ -1,11 +0,0 @@ -curl -X POST http://127.0.0.1:7070/v1/functions \ - -H "X-Auth-Token: gAAAAAB..." \ - -F name=hello_world \ - -F runtime_id=cbd9e85f-a1b2-4120-a2c7-57c90809ce7d \ - -F code='{"source": "package"}' \ - -F entry=hello_world.main \ - -F cpu=200 \ - -F memory=33000000 \ - -F description="Hello World function based on a package" \ - -F timeout=15 \ - -F package=@/home/dev/hello_world.zip diff --git a/api-ref/source/samples/functions/create-function-package-request.json b/api-ref/source/samples/functions/create-function-package-request.json deleted file mode 100644 index 055e70e2..00000000 --- a/api-ref/source/samples/functions/create-function-package-request.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "code": { - "source": "package" - }, - "description": "Hello World function from local package archive", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/create-function-package-response.json b/api-ref/source/samples/functions/create-function-package-response.json deleted file mode 100644 index 5a672db8..00000000 --- a/api-ref/source/samples/functions/create-function-package-response.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "count": 0, - "code": { - "source": "package", - "md5sum": "e439d00bab001d7ad7d1e0e451567699" - }, - "description": "Hello World function from local package archive", - "created_at": 
"2019-06-10 01:15:02.108208", - "updated_at": null, - "latest_version": 0, - "id": "00f3b8ef-e794-4897-8a34-fa6a489d4ede", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/create-function-swift-request.json b/api-ref/source/samples/functions/create-function-swift-request.json deleted file mode 100644 index 05ab5650..00000000 --- a/api-ref/source/samples/functions/create-function-swift-request.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "code": { - "source": "swift", - "swift": { - "object": "hello_world.py", - "container": "functions" - } - }, - "description": "Hello World function from Swift", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/create-function-swift-response.json b/api-ref/source/samples/functions/create-function-swift-response.json deleted file mode 100644 index 5caa6f3a..00000000 --- a/api-ref/source/samples/functions/create-function-swift-response.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "count": 0, - "code": { - "source": "swift", - "swift": { - "object": "hello_world.py", - "container": "functions" - } - }, - "description": "Hello World function from Swift", - "created_at": "2019-06-10 01:43:53.909146", - "updated_at": null, - "latest_version": 0, - "id": "1127cb1a-938e-4262-b22f-d88f182624f6", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/delete-function-response.text b/api-ref/source/samples/functions/delete-function-response.text deleted file 
mode 100644 index 480e0108..00000000 --- a/api-ref/source/samples/functions/delete-function-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete function ff64180b-d41b-420e-9846-ee706c1548b0 has been accepted. diff --git a/api-ref/source/samples/functions/detach-function-response.text b/api-ref/source/samples/functions/detach-function-response.text deleted file mode 100644 index 327cef8d..00000000 --- a/api-ref/source/samples/functions/detach-function-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to detach function f8e7357f-af4a-4448-87d8-6fed4c7db5b5 has been accepted. diff --git a/api-ref/source/samples/functions/download-function-response.text b/api-ref/source/samples/functions/download-function-response.text deleted file mode 100644 index 98b5969b..00000000 --- a/api-ref/source/samples/functions/download-function-response.text +++ /dev/null @@ -1 +0,0 @@ -Code package downloaded to /root/f8e7357f-af4a-4448-87d8-6fed4c7db5b5.zip diff --git a/api-ref/source/samples/functions/list-functions-response.json b/api-ref/source/samples/functions/list-functions-response.json deleted file mode 100644 index b0f03765..00000000 --- a/api-ref/source/samples/functions/list-functions-response.json +++ /dev/null @@ -1,32 +0,0 @@ -[ - { - "count": 7, - "code": "{\"source\": \"package\", \"md5sum\": \"648926b8a9705d3bd77cefdeca46a254\"}", - "description": "Function Hello World :)", - "created_at": "2019-06-06 18:59:02", - "timeout": 10, - "updated_at": null, - "cpu": 250, - "memory_size": 134217728, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "22f62e09-e361-4481-b2da-22e3a4ed9a43", - "name": "hello_world" - }, - { - "count": 0, - "code": "{\"source\": \"package\", \"md5sum\": \"e0309c12d355352cc92b5433bf293210\"}", - "description": "Function Goodbye World :(", - "created_at": "2019-06-06 19:04:30", - "timeout": 5, - "updated_at": "2019-06-06 19:04:30", - "cpu": 100, - 
"memory_size": 33554432, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "entry": "goodbye_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "2324a256-a249-4cec-968b-6ca7f99d805b", - "name": "goodbye_world" - } -] diff --git a/api-ref/source/samples/functions/scaledown-function-request.json b/api-ref/source/samples/functions/scaledown-function-request.json deleted file mode 100644 index 8f8b7682..00000000 --- a/api-ref/source/samples/functions/scaledown-function-request.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "count": 6 -} diff --git a/api-ref/source/samples/functions/scaledown-function-response.text b/api-ref/source/samples/functions/scaledown-function-response.text deleted file mode 100644 index 2aeb798c..00000000 --- a/api-ref/source/samples/functions/scaledown-function-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to scale down function 3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46 has been accepted. diff --git a/api-ref/source/samples/functions/scaleup-function-request.json b/api-ref/source/samples/functions/scaleup-function-request.json deleted file mode 100644 index 8f8b7682..00000000 --- a/api-ref/source/samples/functions/scaleup-function-request.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "count": 6 -} diff --git a/api-ref/source/samples/functions/scaleup-function-response.text b/api-ref/source/samples/functions/scaleup-function-response.text deleted file mode 100644 index 5d2066d8..00000000 --- a/api-ref/source/samples/functions/scaleup-function-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to scale up function 3e0e5bf8-ee7a-4fc8-9641-d1cbd9c60f46 has been accepted. 
diff --git a/api-ref/source/samples/functions/show-function-response.json b/api-ref/source/samples/functions/show-function-response.json deleted file mode 100644 index f1cdc2ed..00000000 --- a/api-ref/source/samples/functions/show-function-response.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "count": 7, - "code": "{\"source\": \"package\", \"md5sum\": \"648926b8a9705d3bd77cefdeca46a254\"}", - "description": "Function Hello World :)", - "created_at": "2019-06-06 18:59:02", - "timeout": 10, - "updated_at": null, - "cpu": 250, - "memory_size": 134217728, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "22f62e09-e361-4481-b2da-22e3a4ed9a43", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/update-function-package-curl-request.text b/api-ref/source/samples/functions/update-function-package-curl-request.text deleted file mode 100644 index 003baf01..00000000 --- a/api-ref/source/samples/functions/update-function-package-curl-request.text +++ /dev/null @@ -1,10 +0,0 @@ -curl -X PUT http://127.0.0.1:7070/v1/functions/{function_id} \ - -H "X-Auth-Token: gAAAAAB..." 
\ - -F name=hello_world \ - -F runtime_id=cbd9e85f-a1b2-4120-a2c7-57c90809ce7d \ - -F entry=hello_world.main \ - -F cpu=200 \ - -F memory=33000000 \ - -F description="Hello World function based on a package" \ - -F timeout=15 \ - -F package=@/home/dev/hello_world.zip diff --git a/api-ref/source/samples/functions/update-function-package-request.json b/api-ref/source/samples/functions/update-function-package-request.json deleted file mode 100644 index 7db2a32b..00000000 --- a/api-ref/source/samples/functions/update-function-package-request.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "description": "Hello World function from local package archive", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/update-function-package-response.json b/api-ref/source/samples/functions/update-function-package-response.json deleted file mode 100644 index de2f45c1..00000000 --- a/api-ref/source/samples/functions/update-function-package-response.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "count": 0, - "code": { - "source": "package", - "md5sum": "e439d00bab001d7ad7d1e0e451567699" - }, - "description": "Hello World function from local package archive", - "created_at": "2019-06-10 01:15:02.108208", - "updated_at": "2019-06-10 01:15:02.115438", - "latest_version": 0, - "id": "00f3b8ef-e794-4897-8a34-fa6a489d4ede", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/update-function-swift-request.json b/api-ref/source/samples/functions/update-function-swift-request.json deleted file mode 100644 index 05ab5650..00000000 --- a/api-ref/source/samples/functions/update-function-swift-request.json +++ /dev/null @@ -1,16 +0,0 
@@ -{ - "code": { - "source": "swift", - "swift": { - "object": "hello_world.py", - "container": "functions" - } - }, - "description": "Hello World function from Swift", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/functions/update-function-swift-response.json b/api-ref/source/samples/functions/update-function-swift-response.json deleted file mode 100644 index 717b2940..00000000 --- a/api-ref/source/samples/functions/update-function-swift-response.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "count": 0, - "code": { - "source": "swift", - "swift": { - "object": "hello_world.py", - "container": "functions" - } - }, - "description": "Hello World function from Swift", - "created_at": "2019-06-10 01:28:38", - "updated_at": "2019-06-10 02:22:27.663257", - "latest_version": 0, - "id": "ecdf3b0c-ac37-4ba8-a02c-5d60cf86cc8b", - "memory_size": 33554432, - "timeout": 15, - "entry": "hello_world.main", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "cpu": 200, - "runtime_id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "name": "hello_world" -} diff --git a/api-ref/source/samples/jobs/create-job-alias-request.json b/api-ref/source/samples/jobs/create-job-alias-request.json deleted file mode 100644 index 6959338b..00000000 --- a/api-ref/source/samples/jobs/create-job-alias-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "count": 5, - "name": "every-minute", - "pattern": "* * * * *", - "function_input": "{\"name\": \"Foo\"}", - "function_alias": "hq-production-function" -} diff --git a/api-ref/source/samples/jobs/create-job-request.json b/api-ref/source/samples/jobs/create-job-request.json deleted file mode 100644 index 2deefb88..00000000 --- a/api-ref/source/samples/jobs/create-job-request.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "count": 5, - "name": "every-minute", - "pattern": "* * * * *", - "function_version": 2, - 
"function_input": "{\"name\": \"Foo\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5" -} diff --git a/api-ref/source/samples/jobs/create-job-response.json b/api-ref/source/samples/jobs/create-job-response.json deleted file mode 100644 index 9016ffcb..00000000 --- a/api-ref/source/samples/jobs/create-job-response.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "count": 5, - "status": "running", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "name": "every-minute", - "pattern": "* * * * *", - "created_at": "2019-06-07 19:11:08.375646", - "updated_at": null, - "function_version": 2, - "first_execution_time": "2019-06-07 19:12:00", - "function_input": "{\"name\": \"Foo\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "id": "dc7ef571-1f8d-4dc8-a659-fc86f80dd44c", - "next_execution_time": "2019-06-07 19:12:00" -} diff --git a/api-ref/source/samples/jobs/delete-job-response.text b/api-ref/source/samples/jobs/delete-job-response.text deleted file mode 100644 index 5dcf4c61..00000000 --- a/api-ref/source/samples/jobs/delete-job-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete job 697546a1-090c-48de-8ecd-ef4cdb9da7a9 has been accepted. 
diff --git a/api-ref/source/samples/jobs/list-jobs-response.json b/api-ref/source/samples/jobs/list-jobs-response.json deleted file mode 100644 index a6e5f50d..00000000 --- a/api-ref/source/samples/jobs/list-jobs-response.json +++ /dev/null @@ -1,32 +0,0 @@ -[ - { - "count": null, - "status": "running", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "name": "every-minute", - "pattern": "0 0 * * SUN", - "created_at": "2019-06-07 19:18:50", - "updated_at": "2019-06-07 19:29:59", - "id": "697546a1-090c-48de-8ecd-ef4cdb9da7a9", - "first_execution_time": "2019-06-07 19:19:00", - "function_input": null, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "function_version": 2, - "next_execution_time": "2019-06-07 19:31:00" - }, - { - "count": 0, - "status": "done", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "name": "every-minute", - "pattern": "* * * * *", - "created_at": "2019-06-07 19:21:33", - "updated_at": "2019-06-07 19:25:59", - "id": "4e435c84-ab06-4ff5-8f05-b7ffe25c1224", - "first_execution_time": "2019-06-07 19:22:00", - "function_input": "{\"name\": \"Foo\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "function_version": 0, - "next_execution_time": "2019-06-07 19:26:00" - } -] diff --git a/api-ref/source/samples/jobs/show-job-response.json b/api-ref/source/samples/jobs/show-job-response.json deleted file mode 100644 index 880f58ef..00000000 --- a/api-ref/source/samples/jobs/show-job-response.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "count": 0, - "status": "done", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "name": "every-minute", - "pattern": "* * * * *", - "created_at": "2019-06-07 19:21:33", - "updated_at": "2019-06-07 19:25:59", - "id": "4e435c84-ab06-4ff5-8f05-b7ffe25c1224", - "first_execution_time": "2019-06-07 19:22:00", - "function_input": "{\"name\": \"Foo\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "function_version": 0, - "next_execution_time": "2019-06-07 19:26:00" -} diff --git 
a/api-ref/source/samples/jobs/update-job-request.json b/api-ref/source/samples/jobs/update-job-request.json deleted file mode 100644 index 55b0382c..00000000 --- a/api-ref/source/samples/jobs/update-job-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "status": "paused", - "name": "every-minute", - "pattern": "* * * * *", - "function_input": "{\"name\": \"Foo\"}", - "next_execution_time": "2019-06-07 22:41:00" -} diff --git a/api-ref/source/samples/jobs/update-job-response.json b/api-ref/source/samples/jobs/update-job-response.json deleted file mode 100644 index 0fe25936..00000000 --- a/api-ref/source/samples/jobs/update-job-response.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "count": null, - "status": "paused", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "name": "every-minute", - "pattern": "* * * * *", - "created_at": "2019-06-07 19:18:50", - "updated_at": "2019-06-07 19:41:20.108254", - "function_version": 2, - "first_execution_time": "2019-06-07 19:19:00", - "function_input": "{\"name\": \"Foo\"}", - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "id": "697546a1-090c-48de-8ecd-ef4cdb9da7a9", - "next_execution_time": "2019-06-07 22:41:00" -} diff --git a/api-ref/source/samples/runtimes/create-runtime-request.json b/api-ref/source/samples/runtimes/create-runtime-request.json deleted file mode 100644 index 2cf3e39d..00000000 --- a/api-ref/source/samples/runtimes/create-runtime-request.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "python3", - "description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime", - "is_public:": true, - "trusted": true -} diff --git a/api-ref/source/samples/runtimes/create-runtime-response.json b/api-ref/source/samples/runtimes/create-runtime-response.json deleted file mode 100644 index b0c71c05..00000000 --- a/api-ref/source/samples/runtimes/create-runtime-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "status": "creating", - "created_at": "2019-06-06 15:33:23.178645", - 
"description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime", - "updated_at": null, - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "b351d3b6-8ccb-4f5b-b70f-58c72298a2ac", - "trusted": true, - "name": "python3" -} diff --git a/api-ref/source/samples/runtimes/delete-runtime-response.text b/api-ref/source/samples/runtimes/delete-runtime-response.text deleted file mode 100644 index f3eabdc2..00000000 --- a/api-ref/source/samples/runtimes/delete-runtime-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete runtime 63f8f51d-7270-4545-abdd-f767ef8b1882 has been accepted. diff --git a/api-ref/source/samples/runtimes/list-runtimes-response.json b/api-ref/source/samples/runtimes/list-runtimes-response.json deleted file mode 100644 index 7444b767..00000000 --- a/api-ref/source/samples/runtimes/list-runtimes-response.json +++ /dev/null @@ -1,38 +0,0 @@ -[ - { - "status": "available", - "description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime", - "updated_at": "2019-05-30 14:40:50", - "created_at": "2019-05-30 14:40:41", - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "trusted": true, - "name": "python3" - }, - { - "status": "available", - "description": "Runtime Python2.7 ready for production.", - "image": "openstackqinling/python2-runtime", - "updated_at": "2019-06-03 19:01:37", - "created_at": "2019-06-03 19:00:33", - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "c4682a12-1a82-424d-b299-9b238fb6b694", - "trusted": true, - "name": "python2.7" - }, - { - "status": "available", - "description": "Runtime Node.js 8.10 ready for production.", - "image": "openstackqinling/nodejs-runtime", - "updated_at": "2019-06-03 19:30:31", - "created_at": "2019-06-03 19:30:06", - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": 
"d3780197-d3a8-4961-8228-354ac7592e48", - "trusted": true, - "name": "nodejs8.10" - } -] diff --git a/api-ref/source/samples/runtimes/show-runtime-pool-response.json b/api-ref/source/samples/runtimes/show-runtime-pool-response.json deleted file mode 100644 index 36e8e0b2..00000000 --- a/api-ref/source/samples/runtimes/show-runtime-pool-response.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "capacity": { - "available": 3, - "total": 3 - }, - "name": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d" -} diff --git a/api-ref/source/samples/runtimes/show-runtime-response.json b/api-ref/source/samples/runtimes/show-runtime-response.json deleted file mode 100644 index 21a2c6ee..00000000 --- a/api-ref/source/samples/runtimes/show-runtime-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "status": "available", - "description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime", - "updated_at": "2019-05-30 14:40:50", - "created_at": "2019-05-30 14:40:41", - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "trusted": true, - "name": "python3" -} diff --git a/api-ref/source/samples/runtimes/update-runtime-request.json b/api-ref/source/samples/runtimes/update-runtime-request.json deleted file mode 100644 index 7cd0e37a..00000000 --- a/api-ref/source/samples/runtimes/update-runtime-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "python3", - "description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime" -} diff --git a/api-ref/source/samples/runtimes/update-runtime-response.json b/api-ref/source/samples/runtimes/update-runtime-response.json deleted file mode 100644 index ea96575d..00000000 --- a/api-ref/source/samples/runtimes/update-runtime-response.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "status": "available", - "description": "Runtime Python3.7 ready for production.", - "image": "openstackqinling/python3-runtime", - "updated_at": 
"2019-05-30 14:40:50", - "created_at": "2019-06-06 16:40:31", - "is_public": true, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "cbd9e85f-a1b2-4120-a2c7-57c90809ce7d", - "trusted": true, - "name": "python3" -} diff --git a/api-ref/source/samples/versions/create-version-request.json b/api-ref/source/samples/versions/create-version-request.json deleted file mode 100644 index 00cb1df6..00000000 --- a/api-ref/source/samples/versions/create-version-request.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "description": "This version is ready for production" -} diff --git a/api-ref/source/samples/versions/create-version-response.json b/api-ref/source/samples/versions/create-version-response.json deleted file mode 100644 index d3eaf6a1..00000000 --- a/api-ref/source/samples/versions/create-version-response.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "count": 0, - "version_number": 1, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "This version is ready for production", - "created_at": "2019-06-07 01:23:16.278205", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "45ebdcbf-6e2f-46c9-bd58-7f6ae01ecfcc" -} diff --git a/api-ref/source/samples/versions/detach-version-response.text b/api-ref/source/samples/versions/detach-version-response.text deleted file mode 100644 index 698dce95..00000000 --- a/api-ref/source/samples/versions/detach-version-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to detach function f8e7357f-af4a-4448-87d8-6fed4c7db5b5(version 1) has been accepted. 
diff --git a/api-ref/source/samples/versions/list-versions-response.json b/api-ref/source/samples/versions/list-versions-response.json deleted file mode 100644 index 757f6749..00000000 --- a/api-ref/source/samples/versions/list-versions-response.json +++ /dev/null @@ -1,22 +0,0 @@ -[ - { - "count": 0, - "version_number": 1, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": "This version is ready for production", - "created_at": "2019-06-07 01:23:16", - "updated_at": null, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "id": "45ebdcbf-6e2f-46c9-bd58-7f6ae01ecfcc" - }, - { - "count": 0, - "version_number": 2, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "description": "This version has not been tested yet", - "created_at": "2019-06-07 01:34:10", - "updated_at": null, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "id": "9490f863-020b-4746-8f30-9324cff31678" - } -] diff --git a/api-ref/source/samples/versions/show-version-response.json b/api-ref/source/samples/versions/show-version-response.json deleted file mode 100644 index 4d15d119..00000000 --- a/api-ref/source/samples/versions/show-version-response.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "count": 0, - "version_number": 1, - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "This version is ready for production", - "created_at": "2019-06-07 01:23:16", - "updated_at": null, - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "45ebdcbf-6e2f-46c9-bd58-7f6ae01ecfcc" -} diff --git a/api-ref/source/samples/webhooks/create-webhook-alias-request.json b/api-ref/source/samples/webhooks/create-webhook-alias-request.json deleted file mode 100644 index 0e4dd86d..00000000 --- a/api-ref/source/samples/webhooks/create-webhook-alias-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "function_alias": "hq-production-function", - "description": "Webhook based on function alias triggered by Jenkins" -} diff --git 
a/api-ref/source/samples/webhooks/create-webhook-request.json b/api-ref/source/samples/webhooks/create-webhook-request.json deleted file mode 100644 index bfd93a16..00000000 --- a/api-ref/source/samples/webhooks/create-webhook-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Jenkins", - "function_version": 2 -} diff --git a/api-ref/source/samples/webhooks/create-webhook-response.json b/api-ref/source/samples/webhooks/create-webhook-response.json deleted file mode 100644 index d8ef6606..00000000 --- a/api-ref/source/samples/webhooks/create-webhook-response.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Jenkins", - "created_at": "2019-06-07 14:53:22.989283", - "updated_at": null, - "function_version": 2, - "webhook_url": "http://127.0.0.1:7070/v1/webhooks/2aac6d93-c4d3-4fc6-9d3b-8852be0c8c89/invoke", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "2aac6d93-c4d3-4fc6-9d3b-8852be0c8c89" -} diff --git a/api-ref/source/samples/webhooks/delete-webhook-response.text b/api-ref/source/samples/webhooks/delete-webhook-response.text deleted file mode 100644 index 9eb851a0..00000000 --- a/api-ref/source/samples/webhooks/delete-webhook-response.text +++ /dev/null @@ -1 +0,0 @@ -Request to delete webhook 2aac6d93-c4d3-4fc6-9d3b-8852be0c8c89 has been accepted. 
diff --git a/api-ref/source/samples/webhooks/list-webhooks-response.json b/api-ref/source/samples/webhooks/list-webhooks-response.json deleted file mode 100644 index 0aca0c54..00000000 --- a/api-ref/source/samples/webhooks/list-webhooks-response.json +++ /dev/null @@ -1,22 +0,0 @@ -[ - { - "function_id": "e47ca07c-b0cc-4501-901f-1c2e3df46724", - "description": "Webhook Hello World", - "created_at": "2019-06-07 02:48:16", - "updated_at": null, - "function_version": 0, - "webhook_url": "http://127.0.0.1:7070/v1/webhooks/726779f2-227f-43af-a891-4de85e8dadd5/invoke", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "726779f2-227f-43af-a891-4de85e8dadd5" - }, - { - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Jenkins", - "created_at": "2019-06-07 14:52:19", - "updated_at": null, - "function_version": 2, - "webhook_url": "http://127.0.0.1:7070/v1/webhooks/e3f34707-8fea-4d36-b558-e39b00b99105/invoke", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "e3f34707-8fea-4d36-b558-e39b00b99105" - } -] diff --git a/api-ref/source/samples/webhooks/show-webhook-response.json b/api-ref/source/samples/webhooks/show-webhook-response.json deleted file mode 100644 index 8818fdb4..00000000 --- a/api-ref/source/samples/webhooks/show-webhook-response.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Jenkins", - "created_at": "2019-06-07 14:52:19", - "updated_at": null, - "function_version": 2, - "webhook_url": "http://127.0.0.1:7070/v1/webhooks/e3f34707-8fea-4d36-b558-e39b00b99105/invoke", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "e3f34707-8fea-4d36-b558-e39b00b99105" -} diff --git a/api-ref/source/samples/webhooks/update-webhook-alias-request.json b/api-ref/source/samples/webhooks/update-webhook-alias-request.json deleted file mode 100644 index 43db3416..00000000 --- 
a/api-ref/source/samples/webhooks/update-webhook-alias-request.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "description": "Webhook based on function alias triggered by Gitlab", - "function_alias": "hq-staging-function" -} diff --git a/api-ref/source/samples/webhooks/update-webhook-request.json b/api-ref/source/samples/webhooks/update-webhook-request.json deleted file mode 100644 index f85eda1d..00000000 --- a/api-ref/source/samples/webhooks/update-webhook-request.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Gitlab", - "function_version": 2 -} diff --git a/api-ref/source/samples/webhooks/update-webhook-response.json b/api-ref/source/samples/webhooks/update-webhook-response.json deleted file mode 100644 index 7f88d254..00000000 --- a/api-ref/source/samples/webhooks/update-webhook-response.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "function_id": "f8e7357f-af4a-4448-87d8-6fed4c7db5b5", - "description": "Webhook triggered by Gitlab", - "created_at": "2019-06-07 14:53:22", - "updated_at": "2019-06-07 18:06:42", - "function_version": 2, - "webhook_url": "http://127.0.0.1:7070/v1/webhooks/2aac6d93-c4d3-4fc6-9d3b-8852be0c8c89/invoke", - "project_id": "5eeb5916ae4b43699f06ea422e581b83", - "id": "2aac6d93-c4d3-4fc6-9d3b-8852be0c8c89" -} diff --git a/api-ref/source/status.yaml b/api-ref/source/status.yaml deleted file mode 100644 index 68fdae79..00000000 --- a/api-ref/source/status.yaml +++ /dev/null @@ -1,61 +0,0 @@ -################# -# Success Codes # -################# -200: - default: | - Request was successful. -201: - default: | - Resource was created and is ready to use. -202: - default: | - Request was accepted for processing, but the processing has not been - completed. A 'location' header is included in the response which contains - a link to check the progress of the request. -204: - default: | - The server has fulfilled the request by deleting the resource. 
-300: - default: | - There are multiple choices for resources. The request has to be more - specific to successfully retrieve one of these resources. -302: - default: | - The response is about a redirection hint. The header of the response - usually contains a 'location' value where requesters can check to track - the real location of the resource. -################# -# Error Codes # -################# - -400: - default: | - Some content in the request was invalid. - resource_signal: | - The target resource doesn't support receiving a signal. -401: - default: | - User must authenticate before making a request. -403: - default: | - Policy does not allow current user to do this operation. -404: - default: | - The requested resource could not be found. -405: - default: | - Method is not valid for this endpoint. -409: - default: | - This operation conflicted with another operation on this resource. - duplicate_zone: | - There is already a zone with this name. -500: - default: | - Something went wrong inside the service. This should not happen usually. - If it does happen, it means the server has experienced some serious - problems. -503: - default: | - Service is not available. This is mostly caused by service configuration - errors which prevents the service from successful start up. diff --git a/api-ref/source/urls.inc b/api-ref/source/urls.inc deleted file mode 100644 index e568e9b7..00000000 --- a/api-ref/source/urls.inc +++ /dev/null @@ -1,31 +0,0 @@ -.. -*- rst -*- - -================== - Qinling Base URLs -================== - -All API calls through the rest of this document require authentication -with the OpenStack Identity service. They also required a ``url`` that -is extracted from the Identity token of type -``function-engine``. This will be the root url that every call below will be -added to build a full path. 
- -Note that if using OpenStack Identity service API v2, ``url`` can be -represented via ``adminURL``, ``internalURL`` or ``publicURL`` in endpoint -catalog. In Identity service API v3, ``url`` is represented with field -``interface`` including ``admin``, ``internal`` and ``public``. - -For instance, if the ``url`` is -``http://my-qinling-url.org/qinling/v1`` then the full API call for -``/functions`` is ``http://my-qinling-url.org/qinling/v1/functions``. - -Depending on the deployment the function engine url might be http or https, -a custom port, a custom path, and include your project id. The only way to -know the urls for your deployment is by using the service catalog. -The containers service URL should never be hard coded in applications, even -if they are only expected to work at a single site. It should always be -discovered from the Identity token. - -As such, for the rest of this document we will be using short hand -where ``GET /functions`` really means -``GET {your_qinling_url}/functions``. diff --git a/api-ref/source/versions.inc b/api-ref/source/versions.inc deleted file mode 100644 index c3b594e8..00000000 --- a/api-ref/source/versions.inc +++ /dev/null @@ -1,255 +0,0 @@ -.. -*- rst -*- - -========= - Versions -========= - -Lists, creates, detaches, shows details and deletes versions. - -Function versions are like git commits, theyā€™re snapshots of your -project history. Each version has a number that serves as its ID, -starting with 1 and incrementing up, and never reused. The code for -a published version of a function is immutable (i.e. cannot be changed). -So, a version number corresponds to a specific set of function code with -certainty. - - -Create a version -================ - -.. rest_method:: POST /v1/functions/{function_id}/versions - -Create a version. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - description: version_description - -Request Example ---------------- - -.. literalinclude:: samples/versions/create-version-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - description: version_description - - function_id: function_id - - count: version_count - - version_number: version_number - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - - -Response Example ----------------- - -.. literalinclude:: samples/versions/create-version-response.json - :language: javascript - - -List versions -============= - -.. rest_method:: GET /v1/functions/{function_id}/versions - -List versions. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - description: version_description - - function_id: function_id - - count: version_count - - version_number: version_number - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/versions/list-versions-response.json - :language: javascript - - -Show a version -============== - -.. rest_method:: GET /v1/functions/{function_id}/versions/{version_number} - -Show a version. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. 
rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - version_number: path_version_number - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - description: version_description - - function_id: function_id - - count: version_count - - version_number: version_number - - project_id: project_id - - status: status - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/versions/show-version-response.json - :language: javascript - - -Detach a version -================ - -.. rest_method:: POST /v1/functions/{function_id}/versions/{version_number}/detach - -Detach a version. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - version_number: path_version_number - -Response Parameters -------------------- - -None - -Response Example ----------------- - - -.. literalinclude:: samples/versions/detach-version-response.text - :language: text - - -Delete a version -================ - -.. rest_method:: DELETE /v1/functions/{function_id}/versions/{version_number} - -Delete a version. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: path_function_id - - version_number: path_version_number - -Response Parameters -------------------- - -None - -Response Example ----------------- - -None diff --git a/api-ref/source/webhooks.inc b/api-ref/source/webhooks.inc deleted file mode 100644 index 0c70df03..00000000 --- a/api-ref/source/webhooks.inc +++ /dev/null @@ -1,272 +0,0 @@ -.. 
-*- rst -*- - -========= - Webhooks -========= - -Lists, creates, updates, shows details and deletes webhooks. - -Webhooks are a low-effort way to invoke the functions. They do not -require a bot user or authentication to use. - - -Create a webhook -================ - -.. rest_method:: POST /v1/webhooks - -Create a webhook. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 201 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - function_id: webhook_function_id - - function_alias: webhook_function_alias - - function_version: webhook_function_version - - description: webhook_description - -Request Example ---------------- - -.. literalinclude:: samples/webhooks/create-webhook-request.json - :language: javascript - -.. literalinclude:: samples/webhooks/create-webhook-alias-request.json - :language: javascript - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - description: webhook_description - - function_id: webhook_function_id - - version_number: webhook_function_version - - webhook_url: webhook_url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - -Response Example ----------------- - -.. literalinclude:: samples/webhooks/create-webhook-response.json - :language: javascript - - -List webhooks -============= - -.. rest_method:: GET /v1/webhooks - -List webhooks. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - description: webhook_description - - function_id: webhook_function_id - - version_number: webhook_function_version - - webhook_url: webhook_url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - - -Response Example ----------------- - -.. literalinclude:: samples/webhooks/list-webhooks-response.json - :language: javascript - - -Show a webhook -============== - -.. rest_method:: GET /v1/webhooks/{webhook_id} - -Show a webhook. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 200 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - webhook_id: path_webhook_id - -Response Parameters -------------------- - -.. rest_parameters:: parameters.yaml - - - id: id - - description: webhook_description - - function_id: webhook_function_id - - version_number: webhook_function_version - - webhook_url: webhook_url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Response Example ----------------- - -.. literalinclude:: samples/webhooks/show-webhook-response.json - :language: javascript - - -Update a webhook -================ - -.. rest_method:: PUT /v1/webhooks/{webhook_id} - -Update a webhook. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 202 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - webhook_id: path_webhook_id - - function_id: webhook_function_id - - function_alias: webhook_function_alias - - function_version: webhook_function_version - - description: webhook_description - -Response Parameters -------------------- - -.. 
rest_parameters:: parameters.yaml - - - id: id - - description: webhook_description - - function_id: webhook_function_id - - version_number: webhook_function_version - - webhook_url: webhook_url - - project_id: project_id - - created_at: created_at - - updated_at: updated_at - -Request Example ---------------- - -.. literalinclude:: samples/webhooks/update-webhook-request.json - :language: javascript - -.. literalinclude:: samples/webhooks/update-webhook-alias-request.json - :language: javascript - -Response Example ----------------- - -.. literalinclude:: samples/webhooks/update-webhook-response.json - :language: javascript - - -Delete a webhook -================ - -.. rest_method:: DELETE /v1/webhooks/{webhook_id} - -Delete a webhook. - -Response Codes --------------- - -.. rest_status_code:: success status.yaml - - - 204 - -.. rest_status_code:: error status.yaml - - - 400 - - 401 - - 403 - -Request -------- - -.. rest_parameters:: parameters.yaml - - - x-auth-token: x-auth-token - - webhook_id: path_webhook_id - -Response Parameters -------------------- - -None - -Response Example ----------------- - -.. 
literalinclude:: samples/webhooks/delete-webhook-response.text - :language: text diff --git a/babel.cfg b/babel.cfg deleted file mode 100644 index 15cd6cb7..00000000 --- a/babel.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[python: **.py] - diff --git a/devstack/plugin.sh b/devstack/plugin.sh deleted file mode 100755 index 1c4ec3cf..00000000 --- a/devstack/plugin.sh +++ /dev/null @@ -1,226 +0,0 @@ -#!/usr/bin/env bash -# Save trace setting -XTRACE=$(set +o | grep xtrace) -set -o xtrace - - -function install_qinling { - git_clone $QINLING_REPO $QINLING_DIR $QINLING_BRANCH - setup_develop $QINLING_DIR -} - - -function install_qinlingclient { - if use_library_from_git "python-qinlingclient"; then - git_clone $QINLINGCLIENT_REPO $QINLINGCLIENT_DIR $QINLINGCLIENT_BRANCH - setup_develop $QINLINGCLIENT_DIR - else - pip_install python-qinlingclient - fi -} - - -function install_k8s { - pushd $QINLING_DIR - source tools/gate/kubeadm/setup_gate.sh - popd - - # Pre-fetch the docker images for runtimes and image function test. - for image in "$QINLING_PYTHON_RUNTIME_IMAGE" "$QINLING_NODEJS_RUNTIME_IMAGE" "$QINLING_SIDECAR_IMAGE" "openstackqinling/alpine-test" "lingxiankong/sleep" - do - sudo docker pull $image - done -} - - -function create_qinling_accounts { - create_service_user "qinling" "admin" - - local qinling_service=$(get_or_create_service "qinling" "function-engine" "Function Service") - qinling_api_url="$QINLING_SERVICE_PROTOCOL://$QINLING_SERVICE_HOST:$QINLING_SERVICE_PORT" - - get_or_create_endpoint $qinling_service \ - "$REGION_NAME" \ - "$qinling_api_url" \ - "$qinling_api_url" \ - "$qinling_api_url" - - # get or adds 'service' role to 'qinling' user on 'demo' project - get_or_add_user_project_role "service" "qinling" "demo" -} - - -function mkdir_chown_stack { - if [[ ! 
-d "$1" ]]; then - sudo mkdir -p "$1" - fi - sudo chown -R $STACK_USER:$STACK_USER "$1" -} - - -function configure_k8s_certificates { - pushd $QINLING_DIR - - mkdir_chown_stack "$QINLING_CONF_DIR"/pki - mkdir_chown_stack "$QINLING_CONF_DIR"/pki/kubernetes - - curl -L https://pkg.cfssl.org/R1.2/cfssl_linux-amd64 -o /tmp/cfssl - chmod +x /tmp/cfssl - curl -L https://pkg.cfssl.org/R1.2/cfssljson_linux-amd64 -o /tmp/cfssljson - chmod +x /tmp/cfssljson - - sudo /tmp/cfssl gencert -ca=/etc/kubernetes/pki/ca.crt -ca-key=/etc/kubernetes/pki/ca.key -config=example/kubernetes/cfssl-ca-config.json -profile=client example/kubernetes/cfssl-client-csr.json | /tmp/cfssljson -bare client - # The command above outputs client-key.pem, client.pem and client.csr - mv client-key.pem "$QINLING_CONF_DIR"/pki/kubernetes/qinling.key - mv client.pem "$QINLING_CONF_DIR"/pki/kubernetes/qinling.crt - rm -f client.csr - - cp /etc/kubernetes/pki/ca.crt "$QINLING_CONF_DIR"/pki/kubernetes/ca.crt - - popd -} - -function configure_etcd_certificates { - pushd $QINLING_DIR - - mkdir_chown_stack $QINLING_CONF_DIR/pki/etcd - sudo cp /etc/kubernetes/pki/etcd/ca.crt $QINLING_CONF_DIR/pki/etcd/ - - # Re-use k8s api server etcd client cert - sudo cp /etc/kubernetes/pki/apiserver-etcd-client.crt $QINLING_CONF_DIR/pki/etcd/qinling-etcd-client.crt - sudo cp /etc/kubernetes/pki/apiserver-etcd-client.key $QINLING_CONF_DIR/pki/etcd/qinling-etcd-client.key - - mkdir_chown_stack $QINLING_CONF_DIR/pki/etcd - # For the tempest user to read the key file when running tempest - chmod 644 $QINLING_CONF_DIR/pki/etcd/qinling-etcd-client.key - - popd -} - - -function configure_qinling { - mkdir_chown_stack "$QINLING_AUTH_CACHE_DIR" - rm -rf "$QINLING_AUTH_CACHE_DIR"/* - - mkdir_chown_stack "$QINLING_CONF_DIR" - rm -rf "$QINLING_CONF_DIR"/* - - mkdir_chown_stack "$QINLING_FUNCTION_STORAGE_DIR" - rm -rf "$QINLING_FUNCTION_STORAGE_DIR"/* - - cp $QINLING_DIR/etc/policy.json.sample $QINLING_POLICY_FILE - - # Generate Qinling 
configuration file and configure common parameters. - oslo-config-generator --config-file $QINLING_DIR/tools/config/config-generator.qinling.conf --output-file $QINLING_CONF_FILE - - iniset $QINLING_CONF_FILE oslo_policy policy_file $QINLING_POLICY_FILE - iniset $QINLING_CONF_FILE DEFAULT debug $QINLING_DEBUG - iniset $QINLING_CONF_FILE DEFAULT server all - iniset $QINLING_CONF_FILE DEFAULT logging_context_format_string "%(asctime)s %(process)d %(color)s %(levelname)s [%(request_id)s] %(message)s %(resource)s (%(name)s)" - iniset $QINLING_CONF_FILE storage file_system_dir $QINLING_FUNCTION_STORAGE_DIR - - # Setup keystone_authtoken section - configure_auth_token_middleware $QINLING_CONF_FILE qinling $QINLING_AUTH_CACHE_DIR - iniset $QINLING_CONF_FILE keystone_authtoken www_authenticate_uri $KEYSTONE_AUTH_URI_V3 - iniset $QINLING_CONF_FILE keystone_authtoken region_name "$REGION_NAME" - - # Setup RabbitMQ credentials - iniset_rpc_backend qinling $QINLING_CONF_FILE - - # Configure the database. - iniset $QINLING_CONF_FILE database connection `database_connection_url qinling` - - if [ "$QINLING_INSTALL_K8S" == "True" ]; then - # Configure Kubernetes API server certificates for qinling if required. 
- if [ "$QINLING_K8S_APISERVER_TLS" == "True" ]; then - iniset $QINLING_CONF_FILE kubernetes kube_host https://$(hostname -f):6443 - configure_k8s_certificates - sudo kubectl create -f $QINLING_DIR/example/kubernetes/k8s_qinling_role.yaml - else - iniset $QINLING_CONF_FILE kubernetes use_api_certificate False - fi - - # Config etcd TLS certs - configure_etcd_certificates - else - echo_summary "Skip k8s related configuration" - fi - - iniset $QINLING_CONF_FILE kubernetes replicas 5 -} - - -function init_qinling { - # (re)create qinling database - recreate_database qinling utf8 - - $QINLING_BIN_DIR/qinling-db-manage --config-file $QINLING_CONF_FILE upgrade head -} - - -function start_qinling { - run_process qinling-engine "$QINLING_BIN_DIR/qinling-engine --config-file $QINLING_CONF_FILE" - run_process qinling-api "$QINLING_BIN_DIR/qinling-api --config-file $QINLING_CONF_FILE" -} - - -function stop_qinling { - local serv - for serv in qinling-api qinling-engine; do - stop_process $serv - done -} - - -function cleanup_qinling { - sudo rm -rf $QINLING_AUTH_CACHE_DIR/* - sudo rm -rf $QINLING_CONF_DIR/* -} - - -# check for service enabled -if is_service_enabled qinling; then - if [[ "$1" == "stack" && "$2" == "install" ]]; then - # Perform installation of service source - echo_summary "Installing qinling" - install_qinling - echo_summary "Installing qinlingclient" - install_qinlingclient - - elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then - # Configure after the other layer 1 and 2 services have been configured - echo_summary "Configuring qinling" - if is_service_enabled key; then - create_qinling_accounts - fi - - if [ "$QINLING_INSTALL_K8S" == "True" ]; then - echo_summary "Installing kubernetes cluster" - install_k8s - else - echo_summary "Skip kubernetes cluster installation" - fi - - configure_qinling - - elif [[ "$1" == "stack" && "$2" == "extra" ]]; then - # Initialize and start the qinling service - echo_summary "Initializing qinling" - init_qinling - 
start_qinling - fi - - if [[ "$1" == "unstack" ]]; then - echo_summary "Shutting down qinling" - stop_qinling - fi - - if [[ "$1" == "clean" ]]; then - echo_summary "Cleaning qinling" - cleanup_qinling - fi -fi - - -# Restore xtrace -$XTRACE diff --git a/devstack/settings b/devstack/settings deleted file mode 100644 index 0e2424ee..00000000 --- a/devstack/settings +++ /dev/null @@ -1,30 +0,0 @@ -# Devstack settings -enable_service qinling qinling-api qinling-engine - - -# Set up default repos -QINLING_DIR=$DEST/qinling -QINLING_BIN_DIR=$(get_python_exec_prefix) -QINLING_REPO=${QINLING_REPO:-${GIT_BASE}/openstack/qinling.git} -QINLING_BRANCH=${QINLING_BRANCH:-master} - -QINLINGCLIENT_DIR=$DEST/python-qinlingclient -QINLINGCLIENT_REPO=${QINLINGCLIENT_REPO:-${GIT_BASE}/openstack/python-qinlingclient.git} -QINLINGCLIENT_BRANCH=${QINLINGCLIENT_BRANCH:-master} - -QINLING_SERVICE_HOST=${QINLING_SERVICE_HOST:-$SERVICE_HOST} -QINLING_SERVICE_PORT=${QINLING_SERVICE_PORT:-7070} -QINLING_SERVICE_PROTOCOL=${QINLING_SERVICE_PROTOCOL:-$SERVICE_PROTOCOL} - -QINLING_DEBUG=${QINLING_DEBUG:-True} -QINLING_CONF_DIR=${QINLING_CONF_DIR:-/etc/qinling} -QINLING_CONF_FILE=${QINLING_CONF_DIR}/qinling.conf -QINLING_POLICY_FILE=${QINLING_CONF_DIR}/policy.json -QINLING_AUTH_CACHE_DIR=${QINLING_AUTH_CACHE_DIR:-/var/cache/qinling} -QINLING_FUNCTION_STORAGE_DIR=${QINLING_FUNCTION_STORAGE_DIR:-/opt/qinling/function/packages} -QINLING_PYTHON_RUNTIME_IMAGE=${QINLING_PYTHON_RUNTIME_IMAGE:-openstackqinling/python3-runtime:0.0.2} -QINLING_NODEJS_RUNTIME_IMAGE=${QINLING_NODEJS_RUNTIME_IMAGE:-openstackqinling/nodejs-runtime:0.0.1} -QINLING_SIDECAR_IMAGE=${QINLING_SIDECAR_IMAGE:-openstackqinling/sidecar:0.0.2} - -QINLING_INSTALL_K8S=${QINLING_INSTALL_K8S:-True} -QINLING_K8S_APISERVER_TLS=${QINLING_K8S_APISERVER_TLS:-True} diff --git a/doc/README.rst b/doc/README.rst deleted file mode 100644 index 391a1468..00000000 --- a/doc/README.rst +++ /dev/null @@ -1,27 +0,0 @@ -======================== -Qinling 
Development Docs -======================== - -Files under this directory tree are used for generating the documentation -for the qinling source code. - -Developer documentation is built to: -https://docs.openstack.org/qinling/latest/ - -Tools -===== - -Sphinx - The Python Sphinx package is used to generate the documentation output. - Information on Sphinx, including formatting information for RST source - files, can be found in the `Sphinx online documentation - `_. - -Building Documentation -====================== - -Doc builds are performed using tox with the ``docs`` target:: - - % cd .. - % tox -e docs - diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index 073b1d63..00000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. -# this is required for the docs build jobs -sphinx>=2.0.0,!=2.1.0 # BSD -sphinxcontrib-apidoc>=0.2.0 # BSD -openstackdocstheme>=2.2.1 # Apache-2.0 -reno>=3.1.0 # Apache-2.0 -os-api-ref>=1.0.0 # Apache-2.0 diff --git a/doc/source/admin/index.rst b/doc/source/admin/index.rst deleted file mode 100644 index daaaaa28..00000000 --- a/doc/source/admin/index.rst +++ /dev/null @@ -1,32 +0,0 @@ -.. - Copyright 2010-2011 United States Government as represented by the - Administrator of the National Aeronautics and Space Administration. - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -Administration/Operation Guide -============================== - -.. toctree:: - :maxdepth: 1 - - installation - runtime - security - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/admin/install/config_kubernetes.rst b/doc/source/admin/install/config_kubernetes.rst deleted file mode 100644 index f11dcd52..00000000 --- a/doc/source/admin/install/config_kubernetes.rst +++ /dev/null @@ -1,215 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Config Qinling with existing kubernetes/etcd cluster -==================================================== - -In most cases, it's not ideal to set up a new dedicated kubernetes cluster for -Qinling. The component which works with kubernetes cluster in Qinling is the -``qinling-engine``. Follow the steps below to configure Qinling to work with -existing kubernetes/etcd cluster, and make Qinling access the kubernetes/etcd -service with authentication and authorization. - -Prerequisites -~~~~~~~~~~~~~ - -* You know the kubernetes API address and etcd service address, for example: - - .. code-block:: console - - export K8S_ADDRESS=10.0.0.5 - export ETCD_ADDRESS=10.0.0.6 - export QINLING_SERVICE_USER=qinling - - .. end - - Make sure the kubernetes and etcd services are both accessible to external. - -* You know the IP address that ``qinling-engine`` service talks to kubernetes, - for example: - - .. 
code-block:: console - - export QINLING_ENGINE_ADDRESS=10.0.0.7 - - .. end - -* You have CA certificates of the kubernetes and etcd respectively and store on - the host that ``qinling-engine`` is running. - - .. code-block:: console - - export K8S_CA_CERT=$HOME/ca.crt - export K8S_CA_KEY=$HOME/ca.key - export ETCD_CA_CERT=$HOME/etcd_ca.crt - export ETCD_CA_KEY=$HOME/etcd_ca.key - - .. end - -* This guide assumes - `RBAC `_ is enabled in - the kubernetes cluster. -* All the following commands are supposed to be executed under ``root`` - permission. - -Qinling configurations -~~~~~~~~~~~~~~~~~~~~~~ - -Below are the options and their default values that relate to accessing the -Kubernetes API and etcd in Qinling's configuration file. - -.. code-block:: ini - - [kubernetes] - kube_host = https://127.0.0.1:8001 - use_api_certificate = True - ssl_ca_cert = /etc/qinling/pki/kubernetes/ca.crt - cert_file = /etc/qinling/pki/kubernetes/qinling.crt - key_file = /etc/qinling/pki/kubernetes/qinling.key - - [etcd] - host = 127.0.0.1 - port = 2379 - protocol = https - ca_cert = /etc/qinling/pki/etcd/ca.crt - cert_file = /etc/qinling/pki/etcd/qinling-etcd-client.crt - cert_key = /etc/qinling/pki/etcd/qinling-etcd-client.key - -.. end - -First, change the kubernetes and etcd service addresses in the config file, and -add the addresses that ``qinling-engine`` uses to talk to kubernetes services -to the ``trusted_cidrs`` option. We will create all the related certificates in -the following steps. - -.. note:: - - If the ``qinling-engine`` service is running behind a NAT device, make sure - you get the correct IP address that talks to kubernetes. - -.. code-block:: ini - - [kubernetes] - kube_host = https://${K8S_ADDRESS}:6443 - trusted_cidrs = ${QINLING_ENGINE_ADDRESS}/32 - ... - [etcd] - host = ${ETCD_ADDRESS} - ... - -.. 
end - -Generate and config client certificates for Qinling -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -There are a lot of -`tools `_ -out there for certificate generation. We use ``cfssl`` as the example here. - -#) Download and prepare the command line tools as needed. - - .. code-block:: console - - curl -L https://pkg.cfssl.org/R1.2/cfssl_linux-amd64 -o /tmp/cfssl - chmod +x /tmp/cfssl - curl -L https://pkg.cfssl.org/R1.2/cfssljson_linux-amd64 -o /tmp/cfssljson - chmod +x /tmp/cfssljson - - .. end - -#) Generate the kubernetes and etcd client certificates for Qinling. - - .. code-block:: console - - mkdir -p /tmp/certs; cd /tmp/certs - curl -SL https://raw.githubusercontent.com/openstack/qinling/master/example/kubernetes/cfssl-ca-config.json -o /tmp/certs/cfssl-ca-config.json - curl -SL https://raw.githubusercontent.com/openstack/qinling/master/example/kubernetes/cfssl-client-csr.json -o /tmp/certs/cfssl-client-csr.json - /tmp/cfssl gencert -ca=${K8S_CA_CERT} \ - -ca-key=${K8S_CA_KEY} \ - -config=/tmp/certs/cfssl-ca-config.json \ - -profile=client \ - /tmp/certs/cfssl-client-csr.json | /tmp/cfssljson -bare k8s-client - /tmp/cfssl gencert -ca=${ETCD_CA_CERT} \ - -ca-key=${ETCD_CA_KEY} \ - -config=/tmp/certs/cfssl-ca-config.json \ - -profile=client \ - /tmp/certs/cfssl-client-csr.json | /tmp/cfssljson -bare etcd-client - - .. end - -#) Move the certificates to the pre-defined locations in the config file and - ensure the qinling service user has the permission to those locations. - - .. 
code-block:: console - - mkdir -p /etc/qinling/pki/{kubernetes,etcd} - cp k8s-client-key.pem /etc/qinling/pki/kubernetes/qinling.key - cp k8s-client.pem /etc/qinling/pki/kubernetes/qinling.crt - cp etcd-client-key.pem /etc/qinling/pki/etcd/qinling-etcd-client.key - cp etcd-client.pem /etc/qinling/pki/etcd/qinling-etcd-client.crt - cp ${K8S_CA_CERT} /etc/qinling/pki/kubernetes/ca.crt - cp ${ETCD_CA_CERT} /etc/qinling/pki/etcd/ca.crt - chown -R ${QINLING_SERVICE_USER}:${QINLING_SERVICE_USER} /etc/qinling/pki - cd -; rm -rf /tmp/certs - - .. end - -Create Role and RoleBinding in Kubernetes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -According to the least privilege principle, the operation permission of qinling -service user in kubernetes cluster should be restricted, this could be easily -achieved by applying the pre-defined authorization manifest file. The following -command is supposed to be executed with ``admin`` access of the kubernetes -cluster. - -.. code-block:: console - - curl -sSL https://raw.githubusercontent.com/openstack/qinling/master/example/kubernetes/k8s_qinling_role.yaml | kubectl apply -f - - -.. end - -Restart Qinlig services -~~~~~~~~~~~~~~~~~~~~~~~ - -Restart all the Qinling services. Now Qinling is accessing the Kubernetes API -and etcd service using TLS. The requests that Qinling makes to the Kubernetes -API are also authorized. - -.. code-block:: console - - systemctl restart devstack@qinling-*.service - -.. end - -Access the Kubernetes API Insecurely (For testing purpose ONLY) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Qinling can also connect to the Kubernetes API insecurely if the Kubernetes API -server serves for insecure connections. However, this is not recommended and -should be used for testing purpose only. 
- -In the configuration file, under the ``kubernetes`` section, set ``kube_host`` -to the URI which the Kubernetes API serves for insecure HTTP connections, for -example, ``kube_host = http://localhost:8080``, and set ``use_api_certificate`` -to ``False`` to disable Qinling using a client certificate to access the -Kubernetes API. - -.. code-block:: ini - - [kubernetes] - kube_host = http://localhost:8080 - use_api_certificate = False - -.. end diff --git a/doc/source/admin/install/install_devstack.rst b/doc/source/admin/install/install_devstack.rst deleted file mode 100644 index 5a93dc6e..00000000 --- a/doc/source/admin/install/install_devstack.rst +++ /dev/null @@ -1,19 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Install Qinling in DevStack -=========================== - -Refer to -`Developer Guide `_ diff --git a/doc/source/admin/install/install_ubuntu.rst b/doc/source/admin/install/install_ubuntu.rst deleted file mode 100644 index 2bc38d86..00000000 --- a/doc/source/admin/install/install_ubuntu.rst +++ /dev/null @@ -1,363 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -Install Qinling on Ubuntu 16.04 -=============================== - -This section describes how to install and configure the Function management -service, code-named qinling on the controller node that runs Ubuntu 16.04 (LTS). - -Prerequisites -------------- - -Before you install and configure Qinling, you must create a database, -service credentials, and API endpoints. - -#. To create the database, complete these steps: - - * Use the database access client to connect to the database - server as the ``root`` user: - - .. code-block:: console - - # mysql -u root -p - - * Create the ``qinling`` database: - - .. code-block:: console - - CREATE DATABASE qinling; - - * Grant proper access to the ``qinling`` database: - - .. code-block:: console - - GRANT ALL PRIVILEGES ON qinling.* TO 'qinling'@'localhost' \ - IDENTIFIED BY 'QINLING_DBPASS'; - GRANT ALL PRIVILEGES ON qinling.* TO 'qinling'@'%' \ - IDENTIFIED BY 'QINLING_DBPASS'; - - Replace ``QINLING_DBPASS`` with a suitable password. - - * Exit the database access client. - -#. Source the ``admin`` credentials to gain access to - admin-only CLI commands: - - .. code-block:: console - - $ . admin-openrc - -#. To create the service credentials, complete these steps: - - * Create the ``qinling`` user: - - .. code-block:: console - - $ openstack user create --domain default --password-prompt qinling - User Password: - Repeat User Password: - +---------------------+----------------------------------+ - | Field | Value | - +---------------------+----------------------------------+ - | domain_id | default | - | enabled | True | - | id | f77c97367087440da5f923bfcc66f68b | - | name | qinling | - | options | {} | - | password_expires_at | None | - +---------------------+----------------------------------+ - - * Add the ``admin`` role to the ``qinling`` user: - - .. 
code-block:: console - - $ openstack role add --project service --user qinling admin - - .. note:: - - This command provides no output. - - * Create the ``qinling`` service entities: - - .. code-block:: console - - $ openstack service create function-engine \ - --name qinling --description="Function Service" - +-------------+----------------------------------+ - | Field | Value | - +-------------+----------------------------------+ - | description | Function Service | - | enabled | True | - | id | 8811fab348b548e3adef6ff0b149edfb | - | name | qinling | - | type | function-engine | - +-------------+----------------------------------+ - -#. Create the Function engine service API endpoints: - - .. code-block:: console - - $ openstack endpoint create --region RegionOne \ - function-engine public http://controller:7070 - +--------------+----------------------------------+ - | Field | Value | - +--------------+----------------------------------+ - | enabled | True | - | id | 70937a84ed434256b11853b7e8a05d91 | - | interface | public | - | region | RegionOne | - | region_id | RegionOne | - | service_id | 8811fab348b548e3adef6ff0b149edfb | - | service_name | qinling | - | service_type | function-engine | - | url | http://controller:7070 | - +--------------+----------------------------------+ - $ openstack endpoint create --region RegionOne \ - function-engine internal http://controller:7070 - +--------------+----------------------------------+ - | Field | Value | - +--------------+----------------------------------+ - | enabled | True | - | id | 7249f13c00cf4ca788da3df3fac9cfe2 | - | interface | internal | - | region | RegionOne | - | region_id | RegionOne | - | service_id | 8811fab348b548e3adef6ff0b149edfb | - | service_name | qinling | - | service_type | function-engine | - | url | http://controller:7070 | - +--------------+----------------------------------+ - $ openstack endpoint create --region RegionOne \ - function-engine admin http://controller:7070 - 
+--------------+----------------------------------+ - | Field | Value | - +--------------+----------------------------------+ - | enabled | True | - | id | 7726669d928d47198388c599bfcd62a5 | - | interface | admin | - | region | RegionOne | - | region_id | RegionOne | - | service_id | 8811fab348b548e3adef6ff0b149edfb | - | service_name | qinling | - | service_type | function-engine | - | url | http://controller:7070 | - +--------------+----------------------------------+ - -Install and configure Qinling components ----------------------------------------- - -#. Create qinling user and necessary directories: - - * Create user: - - .. code-block:: console - - # groupadd --system qinling - # useradd --home-dir "/var/lib/qinling" \ - --create-home \ - --system \ - --shell /bin/false \ - -g qinling \ - qinling - - * Create directories: - - .. code-block:: console - - # mkdir -p /etc/qinling /var/lib/qinling/package - # chown -R qinling:qinling /etc/qinling /var/lib/qinling/package - -#. Clone and install qinling: - - .. code-block:: console - - # apt install -y python-pip - # cd /var/lib/qinling - # git clone https://opendev.org/openstack/qinling.git - # chown -R qinling:qinling qinling - # cd qinling - # pip install -e . - -#. Generate a sample configuration file: - - .. code-block:: console - - # su -s /bin/sh -c "oslo-config-generator \ - --config-file tools/config/config-generator.qinling.conf \ - --output-file etc/qinling.conf.sample" qinling - # su -s /bin/sh -c "cp etc/qinling.conf.sample \ - /etc/qinling/qinling.conf" qinling - # su -s /bin/sh -c "cp etc/policy.json.sample \ - /etc/qinling/policy.json" qinling - -#. Edit the ``/etc/qinling/qinling.conf``: - - * In the ``[DEFAULT]`` section, - configure ``RabbitMQ`` message queue access: - - .. code-block:: ini - - [DEFAULT] - ... - transport_url = rabbit://openstack:RABBIT_PASS@controller:5672/ - - Replace ``RABBIT_PASS`` with the password you chose for the - ``openstack`` account in ``RabbitMQ``. 
- - * In the ``[api]`` section, configure the IP address that Qinling API - server is going to listen: - - .. code-block:: ini - - [api] - ... - host = 10.0.0.9 - port = 7070 - - Replace ``10.0.0.9`` with the management interface IP address - of the controller node if different. - - * In the ``[database]`` section, configure database access: - - .. code-block:: ini - - [database] - ... - connection = mysql+pymysql://qinling:QINLING_DBPASS@controller/qinling?charset=utf8 - - Replace ``QINLING_DBPASS`` with the password you chose for - the qinling database. - - * In the ``[storage]`` section, configure function package storage path: - - .. code-block:: ini - - [storage] - ... - file_system_dir = /var/lib/qinling/package - - * In the ``[oslo_policy]`` section, configure the policy file path for - Qinling service: - - .. code-block:: ini - - [oslo_policy] - ... - policy_file = /etc/qinling/policy.json - - * In the ``[keystone_authtoken]`` section, configure - Identity service access: - - .. code-block:: ini - - [keystone_authtoken] - ... - memcached_servers = controller:11211 - www_authenticate_uri = http://controller:5000 - project_domain_name = default - project_name = service - user_domain_name = default - password = QINLING_PASS - username = qinling - auth_url = http://controller:5000 - auth_type = password - auth_version = v3 - - Replace QINLING_PASS with the password you chose for the qinling user in - the Identity service. - - .. note:: - - Make sure that ``/etc/qinling/qinling.conf`` still have the correct - permissions. You can set the permissions again with: - - # chown qinling:qinling /etc/qinling/qinling.conf - -#. Populate Qinling database: - - .. 
code-block:: console - - # su -s /bin/sh -c "qinling-db-manage --config-file \ - /etc/qinling/qinling.conf upgrade head" qinling - -Install and configure Kubernetes and etcd ------------------------------------------ - -Installing Kubernetes in not in the scope of this guide, you can refer to -`Kubernetes installation guide `_ for more -information. - -For etcd installation, you can refer to -`OpenStack Installation Guide `_. - -Qinling could also connect with existing kubernetes and etcd services, -`here `_ -is the guide for the detailed configuration. - -Finalize installation ---------------------- - -#. Create an upstart config for qinling-api, it could be named as - ``/etc/systemd/system/qinling-api.service``: - - .. code-block:: bash - - cat < /etc/systemd/system/qinling-api.service - [Unit] - Description = OpenStack Function Management Service API - - [Service] - ExecStart = /usr/local/bin/qinling-api - User = qinling - - [Install] - WantedBy = multi-user.target - EOF - -#. Create an upstart config for qinling-engine, it could be named as - ``/etc/systemd/system/qinling-engine.service``: - - .. code-block:: bash - - cat < /etc/systemd/system/qinling-engine.service - [Unit] - Description = OpenStack Function Management Service Engine - - [Service] - ExecStart = /usr/local/bin/qinling-engine - User = qinling - - [Install] - WantedBy = multi-user.target - EOF - -#. Enable and start qinling-api and qinling-engine: - - .. code-block:: console - - # systemctl enable qinling-api - # systemctl enable qinling-engine - - .. code-block:: console - - # systemctl start qinling-api - # systemctl start qinling-engine - -#. Verify that qinling-api and qinling-engine services are running: - - .. 
code-block:: console - - # systemctl status qinling-api - # systemctl status qinling-engine diff --git a/doc/source/admin/install/overview.rst b/doc/source/admin/install/overview.rst deleted file mode 100644 index 0de93b67..00000000 --- a/doc/source/admin/install/overview.rst +++ /dev/null @@ -1,49 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Service Overview -================ - -The Qinling project consists of the following components: - -qinling-api - A WSGI app that authenticates and routes requests to qinling-engine after - a preliminary handling for the request. - -qinling-engine - A standalone service whose purpose is to process operations such as runtime - maintenance, function execution operations, function autoscaling, etc. - -kubernetes - Qinling uses kubernetes as the default backend orchestrator, in order to - manage and maintain the underlying pods to run the functions. - -database - Qinling needs to interact with the database(usually MySQL) to store and - retrieve resource information. - -etcd - etcd is a distributed key-value store that provides fast read/write - operations for some specific internal resources in Qinling such as the - mapping from functions to the function services, mapping from function to the - workers, etc. In addition, etcd provides the locking mechanism in Qinling. - -Messaging queue - Routes information between the Qinling processes. 
- -Additionally, users can interact with Qinling service either by sending HTTP -request or using openstack CLI provided by -`python-qinlingclient `_. -Qinling Horizon dashboard is also available -`here `_. diff --git a/doc/source/admin/installation.rst b/doc/source/admin/installation.rst deleted file mode 100644 index 9a8f0644..00000000 --- a/doc/source/admin/installation.rst +++ /dev/null @@ -1,33 +0,0 @@ -.. - Copyright 2010-2011 United States Government as represented by the - Administrator of the National Aeronautics and Space Administration. - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Installation Guide -================== - -.. toctree:: - :maxdepth: 1 - - install/overview - install/install_devstack - install/install_ubuntu - install/config_kubernetes - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/admin/runtime.rst b/doc/source/admin/runtime.rst deleted file mode 100644 index ec9f5a26..00000000 --- a/doc/source/admin/runtime.rst +++ /dev/null @@ -1,129 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the - License for the specific language governing permissions and limitations - under the License. - -Create your own runtime -======================= - -Although there are several reference runtime implementations in-tree, it's very -easy to develop a new runtime for the preferred programming language not -implemented so far. - -.. note:: - - Actually, in the production environment(especially in the public cloud), - it's recommended that cloud providers provide their own runtime - implementation for security reasons. Knowing how the runtime is implemented - gives the malicious user the chance to attack the cloud environment. - -Qinling uses Kubernetes as the default container orchestrator, so this guide -will describe how the runtime containers working in the Kubernetes environment. - -There are two containers in a Kubernetes pod serving the runtime, one is called -"sidecar" which is responsible for downloading the function package if needed, -the other one is the actual runtime container that is also running as an HTTP -server. Once a Qinling runtime is created, there is a pool of such pods, when a -function is being executed, some pods(according to the autoscaling policy) are -chosen to run the function code. - -Usually, you only need to develop the runtime container and re-use the sidecar -container in the pod. There is only one public API that the runtime container -should provide. - -Public API provided by the runtime -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Request -------- - -Example request: - -.. code-block:: console - - POST /execute - -.. end - -Parameters provided by Qinling: - -1. The execution information. - - * **execution_id**: The Qinling execution UUID. - * **download_url**: The URL sent to Qinling to download the function - package. 
Here is an example for how to download function package in your - runtime implementation using ``requests`` python library, the request is - meant to send to the sidecar container, the final package should be put - in ``/var/qinling/packages/.zip`` if the request is - successfully handled by the sidecar: - - .. code-block:: console - - resp = requests.post( - 'http://localhost:9091/download', - json={ - 'download_url': download_url, - 'function_id': function_id, - 'token': params.get('token') - } - ) - - .. end - - * **function_id**: The Qinling function UUID. - * **entry**: The function entry that user defines when creating the - function, e.g. "hello_world.main" - * **input**: The dictionary of the function input that user defines when - creating the function execution. e.g. ``{"key": "value"}``. If the user - specifies the positional params when creating the function execution, the - input will be something like - ``{"__function_input": ("arg1", "arg2"), "key": "value"}`` - * **timeout**: The timeout in seconds user defines when creating the - function, the default value is 5. Your runtime implementation should take - this timeout value into account when executing the code. If the timeout - is reached, you should terminate the function execution and return an - appropriate error message. - * **cpu**: The CPU limit user defines when creating the function. Your - runtime is responsible for limiting the CPU resource usage when the - function is running. - * **memory_size**: The memory limit user defines when creating the - function. Your runtime is responsible for limiting the memory resource - usage when the function is running. - * **request_id**: The request UUID for the function execution which can be - used to track the execution for debugging purpose. - -2. The Information of the user who triggers the function execution. 
- - Most of that information is used for creating a Keystone session that could - be passed to the function, so it's easy to interact with the OpenStack - services in the function code. - - * **trust_id**: The trust UUID in Keystone. Please see for more information - in `Keystone official doc `__ - about Trust. - * **auth_url**: Identity service endpoint for authorization. - * **username**: Username for authentication. - * **password**: Password for authentication. - * **token**: Token for authentication. - -Response --------- - -Content in the response dictionary: - -* **output**: The return value of the function execution if it is successful, - otherwise the error message. -* **duration**: The execution duration in seconds. -* **logs**: The stdout content during the function execution. -* **success**: True or False. It should be False if the execution reaches - timeout, any exception raised inside user's function or the execution is - killed because of too much system resource consumed, etc. diff --git a/doc/source/admin/security.rst b/doc/source/admin/security.rst deleted file mode 100644 index 432da7eb..00000000 --- a/doc/source/admin/security.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Security Considerations -======================= - -This guide talks about the security considerations from the system administrator's perspective, how -to avoid to be affected by the "bad" function that the end user provides. 
- -Kubernetes -~~~~~~~~~~ - -Service account ---------------- - -Inter-Pods communication ------------------------- diff --git a/doc/source/cli/index.rst b/doc/source/cli/index.rst deleted file mode 100644 index a05bed41..00000000 --- a/doc/source/cli/index.rst +++ /dev/null @@ -1,11 +0,0 @@ -========================= -Qinling CLI Documentation -========================= - -In this section you will find information on Qinling's command line -interface. - -.. toctree:: - :maxdepth: 1 - - qinling-status diff --git a/doc/source/cli/qinling-status.rst b/doc/source/cli/qinling-status.rst deleted file mode 100644 index b237e1fa..00000000 --- a/doc/source/cli/qinling-status.rst +++ /dev/null @@ -1,83 +0,0 @@ -============== -qinling-status -============== - ------------------------------------------ -CLI interface for Qinling status commands ------------------------------------------ - -Synopsis -======== - -:: - - qinling-status [] - -Description -=========== - -:program:`qinling-status` is a tool that provides routines for checking the -status of a Qinling deployment. - -Options -======= - -The standard pattern for executing a :program:`qinling-status` command is:: - - qinling-status [] - -Run without arguments to see a list of available command categories:: - - qinling-status - -Categories are: - -* ``upgrade`` - -Detailed descriptions are below: - -You can also run with a category argument such as ``upgrade`` to see a list of -all commands in that category:: - - qinling-status upgrade - -These sections describe the available categories and arguments for -:program:`qinling-status`. - -Upgrade -~~~~~~~ - -.. _qinling-status-checks: - -``qinling-status upgrade check`` - Performs a release-specific readiness check before restarting services with - new code. For example, missing or changed configuration options, - incompatible object states, or other conditions that could lead to - failures while upgrading. - - **Return Codes** - - .. 
list-table:: - :widths: 20 80 - :header-rows: 1 - - * - Return code - - Description - * - 0 - - All upgrade readiness checks passed successfully and there is nothing - to do. - * - 1 - - At least one check encountered an issue and requires further - investigation. This is considered a warning but the upgrade may be OK. - * - 2 - - There was an upgrade status check failure that needs to be - investigated. This should be considered something that stops an - upgrade. - * - 255 - - An unexpected error occurred. - - **History of Checks** - - **2.0.0 (Stein)** - - * Sample check to be filled in with checks as they are added in Stein. diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100755 index 3b30dab5..00000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime as dt -import os -import sys - -sys.path.insert(0, os.path.abspath('../..')) -# -- General configuration ---------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [ - 'sphinx.ext.autodoc', - 'openstackdocstheme', -] - -# autodoc generation is a bit aggressive and a nuisance when doing heavy -# text edit cycles. -# execute "export SPHINX_DEBUG=1" in your terminal to disable - -# The suffix of source filenames. 
-source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -copyright = u"%d, OpenStack Foundation" % dt.datetime.now().year - -# If true, '()' will be appended to :func: etc. cross-reference text. -add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = True - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'native' - -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -html_theme_path = [] -html_theme = "openstackdocs" -html_static_path = [] - -# Output file base name for HTML help builder. -htmlhelp_basename = 'Qinlingdoc' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', - 'Qinling.tex', - u'Qinling Documentation', - u'OpenStack Foundation', 'manual'), -] - -# Example configuration for intersphinx: refer to the Python standard library. -#intersphinx_mapping = {'http://docs.python.org/': None} - -# openstackdocstheme options -openstackdocs_repo_name = 'openstack/qinling' -openstackdocs_use_storyboard = True diff --git a/doc/source/contributor/architecture.rst b/doc/source/contributor/architecture.rst deleted file mode 100644 index 064a7267..00000000 --- a/doc/source/contributor/architecture.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Qinling Architecture -==================== \ No newline at end of file diff --git a/doc/source/contributor/contributing.rst b/doc/source/contributor/contributing.rst deleted file mode 100644 index 1a214ad1..00000000 --- a/doc/source/contributor/contributing.rst +++ /dev/null @@ -1,79 +0,0 @@ -============================ -So You Want to Contribute... -============================ - -For general information on contributing to OpenStack, please check out the -`contributor guide `_ to get started. -It covers all the basics that are common to all OpenStack projects: the accounts -you need, the basics of interacting with our Gerrit review system, how we -communicate as a community, etc. - -Below will cover the more project specific information you need to get started -with Qinling. - -Communication -~~~~~~~~~~~~~~ -.. This would be a good place to put the channel you chat in as a project; when/ - where your meeting is, the tags you prepend to your ML threads, etc. - -- IRC channel: #openstack-qinling -- Mailing list's prefix: [qinling] -- Currently, we don't have team meeting given we have a small group of core - reviewers and their timezones, the situation may change in the future. - -Contacting the Core Team -~~~~~~~~~~~~~~~~~~~~~~~~~ -.. This section should list the core team, their irc nicks, emails, timezones etc. If - all this info is maintained elsewhere (i.e. a wiki), you can link to that instead of - enumerating everyone here. - -The list of current Qinling core reviewers is available on `gerrit -`_. - -New Feature Planning -~~~~~~~~~~~~~~~~~~~~ -.. 
This section is for talking about the process to get a new feature in. Some - projects use blueprints, some want specs, some want both! Some projects - stick to a strict schedule when selecting what new features will be reviewed - for a release. - -Qinling doesn't use launchpad or separate specs repo for feature requirement. -You only need to create a task in `Storyboard -`_. - -Task Tracking -~~~~~~~~~~~~~~ -.. This section is about where you track tasks- launchpad? storyboard? is there more - than one launchpad project? what's the name of the project group in storyboard? - -We track our tasks in `Storyboard -`_ - -If you're looking for some smaller, easier work item to pick up and get started -on, search for the 'low-hanging-fruit' tag. - -Reporting a Bug -~~~~~~~~~~~~~~~ -.. Pretty self explanatory section, link directly to where people should report bugs for - your project. - -You found an issue and want to make sure we are aware of it? You can do so -on `Storyboard `_. - -Getting Your Patch Merged -~~~~~~~~~~~~~~~~~~~~~~~~~ -.. This section should have info about what it takes to get something merged. Do - you require one or two +2's before +W? Do some of your repos require unit test - changes with all patches? etc. - -Due to the small number of core reviewers of the Qinling project, we only need -one +2 before ``Workflow +1``. - -Project Team Lead Duties ------------------------- -.. this section is where you can put PTL specific duties not already listed in - the common PTL guide (linked below) or if you already have them written - up elsewhere, you can link to that doc here. - -All common PTL duties are enumerated here in the `PTL guide -`_. \ No newline at end of file diff --git a/doc/source/contributor/development-environment-devstack.rst b/doc/source/contributor/development-environment-devstack.rst deleted file mode 100644 index 312cb451..00000000 --- a/doc/source/contributor/development-environment-devstack.rst +++ /dev/null @@ -1,140 +0,0 @@ -.. 
Copyright 2017 Catalyst IT Ltd - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at
code-block:: ini - - [[local|localrc]] - RECLONE=True - enable_plugin qinling https://github.com/openstack/qinling - - LIBS_FROM_GIT=python-qinlingclient - DATABASE_PASSWORD=password - ADMIN_PASSWORD=password - SERVICE_PASSWORD=password - SERVICE_TOKEN=password - RABBIT_PASSWORD=password - LOGFILE=$DEST/logs/stack.sh.log - LOG_COLOR=False - LOGDAYS=1 - - ENABLED_SERVICES=rabbit,mysql,key - -.. end - -Here are several things you could customize: - -* For multiple network cards, you need to specify the kubernetes API server's - advertise address manually. - - .. code-block:: console - - export EXTRA_KUBEADM_INIT_OPTS="--apiserver-advertise-address " - - .. end - -* Devstack will set up a new kubernetes cluster and re-use etcd service inside - the cluster for Qinling services, which means you don't need to add etcd to - the enabled services list in the ``local.conf`` file. -* If you already have an existing kubernetes/etcd cluster, add - ``QINLING_INSTALL_K8S=False`` to the ``local.conf`` file. You need to - manually config Qinling services after devstack completes, go to - `Config Qinling with existing Kubernetes cluster `_ - for more configuration details. -* If you want to interact with Qinling in Horizon dashboard, add the following - line to the ``local.conf`` file. - - .. code-block:: console - - enable_plugin qinling-dashboard https://opendev.org/openstack/qinling-dashboard - - .. end - -Running devstack ----------------- - -Run the ``stack.sh`` script: - -:: - - ./stack.sh - -After it completes, verify qinling service is installed properly: - -.. 
code-block:: console - - $ source openrc admin admin - $ openstack service list - +----------------------------------+----------+-----------------+ - | ID | Name | Type | - +----------------------------------+----------+-----------------+ - | 59be2ecc8b8d4e61af184ea3495bf207 | qinling | function-engine | - | e5891d41a929402384ef00ce7135a16d | keystone | identity | - +----------------------------------+----------+-----------------+ - $ openstack runtime list --print-empty - +----+------+-------+--------+-------------+------------+------------+------------+ - | Id | Name | Image | Status | Description | Project_id | Created_at | Updated_at | - +----+------+-------+--------+-------------+------------+------------+------------+ - +----+------+-------+--------+-------------+------------+------------+------------+ - -.. end - -Kubernetes Integration ----------------------- - -By default, Qinling uses Kubernetes as its orchestrator backend, so a k8s -all-in-one environment (and some other related tools, e.g. kubectl) is also -set up during devstack installation. - -Qinling devstack script uses `kubeadm `_ -for Kubernetes installation, refer to ``tools/gate/kubeadm/setup_gate.sh`` for -more detailed information about Qinling devstack installation. diff --git a/doc/source/contributor/index.rst b/doc/source/contributor/index.rst deleted file mode 100644 index 61974956..00000000 --- a/doc/source/contributor/index.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. - Copyright 2010-2011 United States Government as represented by the - Administrator of the National Aeronautics and Space Administration. - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Contributor/Developer Guide -=========================== - -In this section you will find information helpful for contributing to qinling -project. - - -.. toctree:: - :maxdepth: 1 - - contributing - development-environment-devstack - architecture - Setting up your development Apache mod_wsgi/uWSGI - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/contributor/mod-wsgi.rst b/doc/source/contributor/mod-wsgi.rst deleted file mode 100644 index 43e2fc37..00000000 --- a/doc/source/contributor/mod-wsgi.rst +++ /dev/null @@ -1,100 +0,0 @@ -.. - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - - -============================ - Installing the API via WSGI -============================ - -This document provides two WSGI methods as examples: - -* uWSGI -* Apache ``mod_wsgi`` - -.. 
seealso:: - - https://governance.openstack.org/tc/goals/pike/deploy-api-in-wsgi.html#uwsgi-vs-mod-wsgi - - -The "wsgi.py" file -================== - -``qinling/api/wsgi.py`` file sets up the API WSGI application, it has to be copied into ``/var/www/cgi-bin/qinling`` directory. - -.. code-block:: console - - mkdir -p /var/www/cgi-bin/qinling - cp qinling/api/wsgi.py /var/www/cgi-bin/qinling - chown qinling:qinling -R /var/www/cgi-bin/qinling - - -Running with Apache and mod_wsgi -================================ - -The ``etc/apache2/qinling-api.conf`` file contains an example -of Apache virtualhost configured with ``mod_wsgi``. - -.. literalinclude:: ../../../etc/apache2/qinling-api.conf - -1. On deb-based systems copy or symlink the file to - ``/etc/apache2/sites-available``. - - For rpm-based systems the file will go in - ``/etc/httpd/conf.d``. - -2. Modify the ``WSGIDaemonProcess`` directive to set the ``user`` and - ``group`` values to an appropriate user on your server. In many - installations ``qinling`` will be correct. Modify the ``WSGIScriptAlias`` - directive to set the path of the wsgi script. - - If you are using a virtualenv ``WSGIDaemonProcess`` requires - ``python-path`` parameter, the value should be - ``/lib/python/site-packages``. - -3. Enable the ``qinling-api`` virtualhost. - - On deb-based systems: - - .. code-block:: console - - a2ensite qinling-api - systemctl reload apache2 - - On rpm-based systems: - - .. code-block:: console - - systemctl reload httpd - - -Running with uWSGI -================== - -The ``etc/uwsgi/qinling-api.yaml`` file contains an example -of uWSGI configuration. - -1. Create the ``qinling-api.yaml`` file. - - .. literalinclude:: ../../../etc/uwsgi/qinling-api.yaml - -2. Then start the uWSGI server: - - .. code-block:: console - - uwsgi ./qinling-api.yaml - - Or start in background with: - - .. 
code-block:: console - - uwsgi -d ./qinling-api.yaml diff --git a/doc/source/features.rst b/doc/source/features.rst deleted file mode 100644 index 2fe079f9..00000000 --- a/doc/source/features.rst +++ /dev/null @@ -1,48 +0,0 @@ -.. - Copyright 2017 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Qinling Features -================ - -This section does not intend to give you an exhaustive feature list of Qinling, -but some features which make Qinling userful, powerful, scalable and highly -available. - -Auto Scaling -~~~~~~~~~~~~ - -With Qinling, the function invocation can be automatically scaled up and down -to meet the needs of your function. It's not necessary to monitor usage by -yourself, Qinling can scale up new workers if traffic ticks up, and scale -back down when it drops. - -To handle any burst in traffic, Qinling will immediately increase the workers -concurrently executing functions by a predetermined amount. After the increased -load is handled successfully, the workers will be released in a predefined -expiration time. - -Webhook -~~~~~~~ - -Webhooks are a low-effort way to invoke the functions in Qinling. They do -not require a bot user or authentication to use. - -Sync/Async Function Executions -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Qinling allows the functions to be executed either synchronously or -asynchronously. For synchronous functions, the caller will be blocked to wait -for the responses. 
Asynchronous functions will be executed at the same time -point and the response will be returned to the caller immediately; the caller -can check the result later on.
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Function execution monitoring -============================= - -Qinling automatically monitors functions on your behalf and records several -metrics in the database. These metrics include total requests, latency, and -error rates. - -You can query metrics for function and view the execution logs by using -qinling CLI, or the Restful API. The following procedures show you how to -access metrics using these different methods. diff --git a/doc/source/function_developer/openstack_integration.rst b/doc/source/function_developer/openstack_integration.rst deleted file mode 100644 index 92cf7063..00000000 --- a/doc/source/function_developer/openstack_integration.rst +++ /dev/null @@ -1,53 +0,0 @@ -.. - Copyright 2017 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Interact with other OpenStack services -====================================== - -It's very easy to interact with other OpenStack services in your function. -Let's take Python programming language and integration with Swift service for -an example. 
- -At the time you create a function, you specify an entry, which is a function -in your code module that Qinling can invoke when the service executes your -code. Use the following general syntax structure when creating a function in -Python which will interact with Swift service in OpenStack. - -.. code-block:: python - - import swiftclient - - def main(region_name, container, object, context=None, **kwargs): - conn = swiftclient.Connection( - session=context['os_session'], - os_options={'region_name': region_name}, - ) - - obj_info = conn.head_object(container, object) - return obj_info - -.. end - -In the above code, note the following: - -- Qinling supports most of OpenStack service clients, so you don't need to - install ``python-swiftclient`` in your code package. -- There is a parameter named ``context``, this is a parameter provided by - Qinling that is usually of the Python dict type. You can easily get a valid - Keystone session object from it. As a result, you don't need to pass any - sensitive data to Qinling in order to interact with OpenStack services. - -.. note:: - - Please avoid using ``context`` as your own parameter in the code. diff --git a/doc/source/function_developer/python_dev.rst b/doc/source/function_developer/python_dev.rst deleted file mode 100644 index eefc7628..00000000 --- a/doc/source/function_developer/python_dev.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. - Copyright 2017 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. 
- -Python developer guide -====================== diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst deleted file mode 100644 index 019c090d..00000000 --- a/doc/source/glossary.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Qinling Glossary -================ - -As the Qinling project evolves, it's important that people working on Qinling, -users using Qinling, and operators deploying Qinling use a common set of -terminology in order to avoid misunderstandings and confusion. To that end, we -are providing the following glossary of terms. - -Runtime -~~~~~~~ - -The runtime is programming language support. In Qinling, each runtime is -encapsulated in a container image which is used for creating the runtime -container. Qinling communicates with the container by sending HTTP requests and -runs user function inside. Currently, Qinling only supports Python 2 and -Python 3 officially and Node.JS in experimental. diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index b1ecf14f..00000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. qinling documentation master file, created by - sphinx-quickstart on Tue Jul 9 22:26:36 2013. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to Qinling's documentation! -=================================== - -.. 
note:: - - Qinling (is pronounced "tŹƒinliŋ") refers to Qinling Mountains in southern - Shaanxi Province in China. The mountains provide a natural boundary between - North and South China and support a huge variety of plant and wildlife, some - of which is found nowhere else on Earth. - -Qinling is an OpenStack project to provide "Function as a service". This -project aims to provide a platform to support serverless functions (like AWS -Lambda). Qinling supports different container orchestration platforms -(Kubernetes/Swarm, etc.) and different function package storage backends -(local/Swift/S3) by nature using plugin mechanism. - -With Qinling, you can run code without provisioning or managing servers. You -pay only for the compute time you consumeā€”there's no charge when your code -isn't running. You can run code for virtually any type of application or -backend serviceā€”all with zero administration. Just upload your code and Qinling -takes care of everything required to run and scale your code with high -availability. You can set up your code to automatically trigger from other -OpenStack services or call it directly from any web or mobile app. - -* Free software: Apache license -* Documentation: https://docs.openstack.org/qinling/latest/ -* Source: https://opendev.org/openstack/qinling -* Features: https://storyboard.openstack.org/#!/project/927 -* Bug Track: https://storyboard.openstack.org/#!/project/927 -* IRC channel on Freenode: #openstack-qinling - - -Overview --------- - -.. toctree:: - :maxdepth: 2 - - quick_start - glossary - features - videos - -Administration/Operation Guide ------------------------------- - -.. toctree:: - :maxdepth: 2 - - admin/index - -CLI Guide ---------- - -.. toctree:: - :maxdepth: 2 - - cli/index - -Contributor/Developer Guide ---------------------------- - -.. toctree:: - :maxdepth: 2 - - contributor/index - specs/index - -User Guide ----------- - -.. 
toctree:: - :maxdepth: 2 - - user/index - -Function Programming Guide --------------------------- - -.. toctree:: - :maxdepth: 2 - - function_developer/index - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/quick_start.rst b/doc/source/quick_start.rst deleted file mode 100644 index 651b8fe4..00000000 --- a/doc/source/quick_start.rst +++ /dev/null @@ -1,246 +0,0 @@ -Quick Start -=========== - -Installation -~~~~~~~~~~~~ - -A fast and simple way to try Qinling is to create a Devstack environment -including all related components and dependencies of Qinling service. Please -refer to `Setting up a development environment with devstack`_ for how to -install Qinling service in OpenStack devstack environment. - -Qinling is a FaaS implemented on top of container orchestration system such as -Kubernetes, Swarm, etc. Particularly, Kubernetes is a reference backend -considering its popularity. A kubernetes cluster and its command line tool -have been installed in the devstack environment. - -Qinling can work with OpenStack Keystone for authentication, or it can work -without authentication at all. By default, authentication is enabled, set -``auth_enable = False`` to disable authentication. - -.. _Setting up a development environment with devstack: https://docs.openstack.org/qinling/latest/contributor/development-environment-devstack.html - - -Getting started with Qinling -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. note:: - - Currently, you can interact with Qinling using python-qinlingclient or - sending RESTful API directly. Both ways are described in this guide. - ``httpie`` is a convenient tool to send HTTP request, it will be installed - during following steps. - -Log into the devstack host, we will create python runtime/function/execution -in the following steps. - -#. (Optional) Prepare a docker image for a specific programming language. 
For - your convenience, there is a pre-built image - ``openstackqinling/python-runtime`` that you could directly use to create a - Python runtime in Qinling. Refer to the - `image creation guide `_ - for how to build your own runtime images to be used in Qinling. - -#. Create Python runtime using admin credential. ``Runtime`` in Qinling is the - environment in which the function is actually running, ``runtime`` is - supposed to be created/deleted/updated only by cloud operator. After - creation, check the runtime status until it's ``available`` before invoking - any functions: - - .. code-block:: console - - $ pip install httpie - $ cd $DEVSTACK_DIR && source openrc admin admin - $ TOKEN=$(openstack token issue -f yaml -c id | awk '{print $2}') - $ http POST http://localhost:7070/v1/runtimes name=python2.7 \ - image=openstackqinling/python-runtime X-Auth-Token:$TOKEN - HTTP/1.1 201 Created - Connection: keep-alive - Content-Length: 246 - Content-Type: application/json - Date: Mon, 11 Dec 2017 22:35:08 GMT - - { - "created_at": "2017-12-11 22:35:08.660498", - "description": null, - "id": "601efeb8-3e41-4e5c-a12a-986dbda252e3", - "image": "openstackqinling/python-runtime", - "is_public": true, - "name": "python2.7", - "project_id": "ce157785ffb24b3c862720283be4dbc8", - "status": "creating", - "updated_at": null - } - $ http GET http://localhost:7070/v1/runtimes/601efeb8-3e41-4e5c-a12a-986dbda252e3 \ - X-Auth-Token:$TOKEN - HTTP/1.1 200 OK - Connection: keep-alive - Content-Length: 298 - Content-Type: application/json - Date: Mon, 11 Dec 2017 22:37:01 GMT - - { - "created_at": "2017-12-11 22:35:09", - "description": null, - "id": "601efeb8-3e41-4e5c-a12a-986dbda252e3", - "image": "openstackqinling/python-runtime", - "is_public": true, - "name": "python2.7", - "project_id": "ce157785ffb24b3c862720283be4dbc8", - "status": "available", - "updated_at": "2017-12-11 22:35:13" - } - - .. end - - Using CLI: - - .. 
code-block:: console - - $ cd $DEVSTACK_DIR && source openrc admin admin - $ openstack runtime create openstackqinling/python-runtime --name python2.7 - +-------------+--------------------------------------+ - | Field | Value | - +-------------+--------------------------------------+ - | id | 4866b566-2c9a-4f00-9665-7808f7d811f8 | - | name | python2.7 | - | image | openstackqinling/python-runtime | - | status | available | - | description | None | - | project_id | ce157785ffb24b3c862720283be4dbc8 | - | created_at | 2017-12-11 22:40:16 | - | updated_at | None | - +-------------+--------------------------------------+ - - .. end - - Record the runtime ID for the function invocation later on. - -#. Create a customized Python function package: - - .. code-block:: console - - $ mkdir ~/qinling_test - $ cat < ~/qinling_test/github_test.py - import requests - def main(*args, **kwargs): - r = requests.get('https://api.github.com/events') - return len(r.json()) - if __name__ == '__main__': - main() - EOF - $ cd ~/qinling_test && zip -r ~/qinling_test/github_test.zip ./* - - .. end - -#. Create function: - - .. 
code-block:: console - - $ cd $DEVSTACK_DIR && source openrc demo demo - $ runtime_id=601efeb8-3e41-4e5c-a12a-986dbda252e3 - $ TOKEN=$(openstack token issue -f yaml -c id | awk '{print $2}') - $ http -f POST http://localhost:7070/v1/functions name=github_test \ - runtime_id=$runtime_id \ - code='{"source": "package"}' \ - entry='github_test.main' \ - package@~/qinling_test/github_test.zip \ - X-Auth-Token:$TOKEN - HTTP/1.1 201 Created - Connection: keep-alive - Content-Length: 303 - Content-Type: application/json - Date: Mon, 11 Dec 2017 23:20:26 GMT - - { - "code": { - "source": "package" - }, - "count": 0, - "created_at": "2017-12-11 23:20:26.600054", - "description": null, - "entry": "github_test.main", - "id": "cdce13b0-55c9-4a06-a67a-1cd1fe1fb161", - "name": "github_test", - "project_id": "c2a457c46df64ed4adcb31fdc80052d4", - "runtime_id": "601efeb8-3e41-4e5c-a12a-986dbda252e3" - } - - .. end - - Using CLI: - - .. code-block:: console - - $ openstack function create --name github_test \ - --runtime $runtime_id \ - --entry github_test.main \ - --package ~/qinling_test/github_test.zip - +-------------+--------------------------------------+ - | Field | Value | - +-------------+--------------------------------------+ - | id | c9195311-9aa7-4748-bd4b-1b0f9c28d858 | - | name | github_test | - | description | None | - | count | 0 | - | code | {u'source': u'package'} | - | runtime_id | 601efeb8-3e41-4e5c-a12a-986dbda252e3 | - | entry | github_test.main | - | created_at | 2017-12-11 23:21:21 | - | updated_at | None | - +-------------+--------------------------------------+ - - .. end - -#. Invoke the function by specifying ``function_id``: - - .. 
code-block:: console - - $ http POST http://localhost:7070/v1/executions \ - function_id=c9195311-9aa7-4748-bd4b-1b0f9c28d858 \ - X-Auth-Token:$TOKEN - HTTP/1.1 201 Created - Connection: keep-alive - Content-Length: 347 - Content-Type: application/json - Date: Mon, 11 Dec 2017 23:26:11 GMT - - { - "created_at": "2017-12-11 23:26:09", - "description": null, - "function_id": "c9195311-9aa7-4748-bd4b-1b0f9c28d858", - "id": "c3d61744-254a-4f41-8e6d-9e7dc1eb6a24", - "input": null, - "result": "{\"duration\": 1.299, \"output\": 30}", - "project_id": "c2a457c46df64ed4adcb31fdc80052d4", - "status": "success", - "sync": true, - "updated_at": "2017-12-11 23:26:12" - } - - - .. end - - Using CLI: - - .. code-block:: console - - $ openstack function execution create --function c9195311-9aa7-4748-bd4b-1b0f9c28d858 - +-------------+--------------------------------------+ - | Field | Value | - +-------------+--------------------------------------+ - | id | b7ffdd3a-a0a8-441b-874d-3b6dcf7446d9 | - | function_id | c9195311-9aa7-4748-bd4b-1b0f9c28d858 | - | description | None | - | input | {} | - | result | {"duration": 1.483, "output": 30} | - | status | success | - | sync | True | - | created_at | 2017-12-11 23:27:04 | - | updated_at | 2017-12-11 23:27:05 | - +-------------+--------------------------------------+ - - .. end - -Now, you have defined your first Qinling function and have it invoked -on-demand. Have fun with Qinling! diff --git a/doc/source/specs/customize_resource.rst b/doc/source/specs/customize_resource.rst deleted file mode 100644 index 98220758..00000000 --- a/doc/source/specs/customize_resource.rst +++ /dev/null @@ -1,165 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. 
- - http://creativecommons.org/licenses/by/3.0/legalcode - -========================================== -Add customized memory and cpu for function -========================================== - -https://storyboard.openstack.org/#!/story/2001586 - -Only users know the amount of resources that their functions may need, so -qinling should provide cpu and memory_size options for users to customize -resources. When creating function, allow user to specify cpu and memory_size -resources for the function so that qinling can allocate and manage resources -of cpu and memory more reasonably in this way. - - -Problem description -=================== - -- For deployers, they may want to manage system resources reasonably and - safely by restricting functions' resource occupancy so that more executions - could share the fixed amount of resources and non-interfering with each - other. - -- For users, applying for resources according to actual needs can help them - prevent the waste of resources, which will make sense when users need to pay - for the resources they use. - - -Proposed change -=============== - -For different types of functions, the ways of limiting resources are -distinguishing. - -- For image type functions, we could config the resource limitation in pod - definition, and then kubernetes will help us limit the whole pod, which - means the total amount of resources used by the function inside the pod - could never exceed the pod's limit. - -- For package/swift type function, it turns to be the runtime server's - responsibility to set resource limits for subprocess that function runs in. - -Some details are as following: - -First of all, make sure the values of memory_size and cpu saved in the function -database are valid. 
In ``api.controllers.v1.function.post``, we do type and -size check for cpu and memory_size params so that they must be integers and -within the range set in config.py, and qinling will supply default values for -them if users do not input anything. The default values are set to be the -minimum in this range. - -Besides, in ``api.controllers.v1.function.put``, we add cpu and memory_size to -``UPDATE_ALLOWED``. If user wants to update them, we will do type and size -check for cpu and memory_size params before updating function database. - -When creating execution, pass cpu and memory_size values saved in function -database to ``qinling.engine.default_engine.create_execution``. Here we will -do the check for cpu and memory_size values again in case the limited scope of -resources set in config.py have been reset. - -Then for image type function, pass both of them to -``orchestrator.kubernetes.manager._create_pod``, and we can get a pod with -limited cpu and memory resources. For package/swift type function, pass both of -them to the worker pod in the k8s deployment by using -``qinling.engine.utils.url_request``. In the selected worker pod, cpu and -memory_size values will be used to limit the total amount of resources that -function process and its subprocesses can use. - -We are considering using 'cgroup' in linux to limit cpu and memory because by -'cgroup' we can add pids of function process and its subprocesses to the same -'control group' and limit the total amount of resources. But we need to use -different users to set resource files in 'cgroup' and run functions because -function should not be granted permission to modify the resource setting files -in 'cgroup'. We use root to create a 'control group', and qinling can only -write to the 'tasks' file. - -- For cpu resource limitation, 'cpu.cfs_quota_us' and 'cpu.cfs_period_us' - files in 'cgroup' will be used to convert millicpu value. 
- -- For memory resource limitation, only 'memory.limit_in_bytes' file will be - used to limit RAM because now in k8s source code, the kubelet does not - support running with swap enabled. Although it also provides a workaround - '--fail-swap-on=false' to allow swap on, which may cause some performance - impacts, we would better to disable swap now. For more details about swap, - please see the references. - -Data model impact ------------------ - -Add a cpu column for function database to save cpu value specified by user. - -REST API impact ---------------- - -* Add cpu and memory_size options for function creation. - -* Allow to update cpu and memory_size values saved in function database. - -End user impact ---------------- - -When using python-qinlingclient to create/update function, the CLI may look -like 'openstack function create/update --cpu xxx --memory_size xxx ...'. - -Performance Impact ------------------- - -None. - -Deployer impact ---------------- - -The config options for min/max size of cpu and memory_size will be provided in -``qinling.config``. The unit of cpu is 'millicpu' and the unit of memory_size -is 'bytes'. - -Alternatives ------------- - -We have considered using resource.RLIMIT_AS to limit memory resource that -function can use. However if function forks other child processes, the child -processes will inherits its parent's resource limits, instead of sharing the -limits. - - -Implementation -============== - -Assignee(s) ------------ - -Jiangyuan - - -Dependencies -============ - -None. - - -Testing -======= - -None. 
- - -References -========== - -* Resource model - https://docs.python.org/2.7/library/resource.html#resource-limits -* Patch for image type function's resource limit - https://review.openstack.org/#/c/553947 -* IRC discussions - http://eavesdrop.openstack.org/irclogs/%23openstack-qinling/%23openstack-qinling.2018-03-23.log.html -* K8s source code about swap - https://github.com/kubernetes/kubernetes/blob/master/pkg/kubelet/cm/container_manager_linux.go#L203 -* An open issue in k8s about swap - https://github.com/kubernetes/kubernetes/issues/53533 -* Some discussions about why disable swap - https://serverfault.com/questions/881517/why-disable-swap-on-kubernetes diff --git a/doc/source/specs/function_aliases.rst b/doc/source/specs/function_aliases.rst deleted file mode 100644 index b62e1048..00000000 --- a/doc/source/specs/function_aliases.rst +++ /dev/null @@ -1,145 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -================================ -Support Qinling function aliases -================================ - -https://storyboard.openstack.org/#!/story/2001588 - -Function aliases are like pointers to the specific function versions. By using -aliases, you can access the specific version of a function an alias is pointing -to (for example, to invoke the function) without having to know the specific -version the alias is pointing to. Function aliases enable the following use -cases: - -- Easier support for promotion of new versions of functions and rollback when - needed. - -- Simplify management of event source mappings. - - -Problem description -=================== - -- As a function developer, you want to create an alias that points to function - version, and remapping of aliases to different function versions. 
- -- As an application developer who relies on the Qinling functions, I want a - safe and sustainable way to call the functions without changing the - applications after the function is updated. - - -Proposed change -=============== - -Data model impact ------------------ - -A new database table needs to be created to store the mappings from alias to -function and function version. - -REST API impact ---------------- - -* Create function alias that points to the specified function version. After - creation, Qinling returns the function alias information, including - function id, version id, alias name, description - and timestamps. - - * POST ``/aliases`` - * Parameters: function_id - * Parameters: description - * Parameters: function_version - * Parameters: name - - * the 'name' must be unique within the project - -* Update function alias. Update the function id and version to which the alias - points and alias description. - - * PUT ``/aliases/`` - * Parameters: function_id - * Parameters: description - * Parameters: function version - -* Get the specified function alias information. - - * GET ``/aliases/`` - -* List the aliases. - - * GET ``/aliases/`` - -* Delete specific function alias. When deleting alias, need to check if there is - any webhook/running job using the alias. - - * DELETE ``/aliases/`` - -* Create execution. Create execution with alias, so the execution will be - created with the function id and version number the alias points to. - -* Create job. Create job with alias, so the job will be created with the - function id and version number the alias points to. - -* Create webhook. Create webhook with alias, so the webhook will be - created with the function id and version number the alias points to. - -* Delete function. Qinling needs to check if there is any alias using that - function, if there is alias associated with the function, the function - deletion will fail. - -* Delete function version. 
Qinling needs to check if there is any alias using that - function version, if there is alias associated with the function version, - the function version deleteion will fail. - -End user impact ---------------- - -All the API changes should be supported in CLI. - -Performance Impact ------------------- - -None - -Deployer impact ---------------- - -Database migration script is provided. - -Alternatives ------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Dong Ma - - -Dependencies -============ - -None - - -Testing -======= - -Pay attention to the notes written in ``REST API impact`` section. - - -References -========== - -* Introduction to AWS Lambda Aliases - https://docs.aws.amazon.com/lambda/latest/dg/aliases-intro.html diff --git a/doc/source/specs/function_versioning.rst b/doc/source/specs/function_versioning.rst deleted file mode 100644 index 40900c4d..00000000 --- a/doc/source/specs/function_versioning.rst +++ /dev/null @@ -1,151 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -=================================== -Support Qinling function versioning -=================================== - -https://storyboard.openstack.org/#!/story/2001587 - -Function versions are like git commits, they're snapshots of your project -history. Each version has a number that serves as its ID, starting with 1 and -incrementing up, and never reused. The code for a published version of a -function is immutable (i.e. cannot be changed). So, a version number -corresponds to a specific set of function code with certainty. With function -versioning, users can get the following benefits: - -- Update the function code without breaking the existing applications that rely - on the function. - -- Easy to backup/restore different versions of code for the same function. 
- - -Problem description -=================== - -- As a function developer, I want to keep the existing code when I update the - function so that it is easy to restore when something is wrong during the - testing. - -- As an application developer who relies on the Qinling functions, I want a - safe and sustainable way to call the functions without changing the - applications after the function is updated. - - -Proposed change -=============== - -Data model impact ------------------ - -A new database table needs to be created to store the mappings from function -to its versions and locations. There may be different function versions that -stored in the same location, e.g. when user creates a new version but without -any function code change, the previous function version location will be -reused to save the storage space. - -A new field is added to execution table, job table and webhook table denoting -which function version the execution or job is using, 0 means using the latest -version. - -REST API impact ---------------- - -* Create function version. After creation, Qinling returns the function version - information, including function id, version uuid, version sequence - number(starting from 1), description and timestamps. - - * POST ``/functions//versions`` - * Parameters: description - -* Update function version. Only updating description is allowed for now. - - * PUT ``/functions//versions/`` - * Parameters: description - -* Get function versions. - - * GET ``/functions//versions/`` - -* Get/Download specific function version. We use version sequence number - instead of version uuid because it makes much more sense to end user. The - version uuid may be used internally. - - * GET ``/functions//versions/`` - * GET ``/functions//versions/?download=true`` - -* Delete specific function version. Function version can be deleted only if - there is no corresponding execution running and no association with running - jobs and webhooks. 
If the function version code location is shared with - others, do not delete the function version package. - - * DELETE ``/functions//versions/`` - -* Create execution. Version sequence number needs to be specified when - executing the function, latest version is used by default. - -* Create job. Version sequence number needs to be specified when creating the - job, latest version is used by default. - -* Create webhook. Version sequence number needs to be specified when creating - the webhook, latest version is used by default. - -* Delete function. Qinling needs to support to delete the function with all its - versions. - -* Download function package API is still support, but only for latest function - version. - -End user impact ---------------- - -All the API changes should be supported in CLI. - -Performance Impact ------------------- - -None - -Deployer impact ---------------- - -Database migration script is provided. - -Alternatives ------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Lingxian Kong - - -Dependencies -============ - -None - - -Testing -======= - -Pay attention to the notes written in ``REST API impact`` section. - - -References -========== - -* Introduction to AWS Lambda Versioning - https://docs.aws.amazon.com/lambda/latest/dg/versioning-intro.html - -* AWS Lambda Versioning Strategies - https://medium.com/@kevinng/aws-lambda-versioning-strategies-5ef877efd0be diff --git a/doc/source/specs/index.rst b/doc/source/specs/index.rst deleted file mode 100644 index 636cdaf6..00000000 --- a/doc/source/specs/index.rst +++ /dev/null @@ -1,47 +0,0 @@ -.. - Copyright 2010-2011 United States Government as represented by the - Administrator of the National Aeronautics and Space Administration. - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Qinling Feature Specifications -============================== - -This section contains detailed specification documents for different features -of Qinling. - - -Specs ------ - -.. toctree:: - :maxdepth: 1 - - function_versioning - customize_resource - function_aliases - -Template --------- - -.. toctree:: - :maxdepth: 1 - - template - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/specs/template.rst b/doc/source/specs/template.rst deleted file mode 100644 index 2c7f92c2..00000000 --- a/doc/source/specs/template.rst +++ /dev/null @@ -1,237 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -======================================== -Example Spec - The title of your feature -======================================== - -Include the URL of the feature in storyboard: - -https://storyboard.openstack.org/#!/story/XXXXXX - -Introduction paragraph -- why are we doing anything? A single paragraph of -prose that operators can understand. The title and this first paragraph -should be used as the subject line and body of the commit message respectively. - -Some notes about the usage of specs folder: - -* Not all blueprints need a spec. If a new feature is straightforward enough - that it doesn't need any design discussion, then no spec is required. It - should be decided on IRC meeting within the whole core team. 
- -* The aim of this document is first to define the problem we need to solve, - and second agree the overall approach to solve that problem. - -* This is not intended to be extensive documentation for a new feature. - For example, there is no need to specify the exact configuration changes, - nor the exact details of any DB model changes. But you should still define - that such changes are required, and be clear on how that will affect - upgrades. - -* You should aim to get your spec approved before writing your code. - While you are free to write prototypes and code before getting your spec - approved, it's possible that the outcome of the spec review process leads - you towards a fundamentally different solution than you first envisaged. - -* But, API changes are held to a much higher level of scrutiny. - As soon as an API change merges, we must assume it could be in production - somewhere, and as such, we then need to support that API change forever. - To avoid getting that wrong, we do want lots of details about API changes - upfront. - -Some notes about using this template: - -* Your spec should be in ReSTructured text, like this template. - -* Please wrap text at 79 columns. - -* The filename in the git repository should be similar to the title in - storyboard but in lower case combined with underscore. - -* Please do not delete any of the sections in this template. If you have - nothing to say for a whole section, just write: None. - -* For help with syntax, see http://sphinx-doc.org/rest.html - - -Problem description -=================== - -A detailed description of the problem. What problem is this feature -addressing? - - -Proposed change -=============== - -Here is where you cover the change you propose to make in detail. How do you -propose to solve this problem? - -If this is one part of a larger effort make it clear where this piece ends. In -other words, what's the scope of this effort? 
- -Data model impact ------------------ - -This section is optional. - -Changes which require modifications to the data model often have a wider impact -on the system. The community often has strong opinions on how the data model -should be evolved, from both a functional and performance perspective. It is -therefore important to capture and gain agreement as early as possible on any -proposed changes to the data model. - -Questions which need to be addressed by this section include: - -* What new database schema changes is this going to require? - -* What database migrations will accompany this change. - -* How will the initial set of new data objects be generated, for example if you - need to take into account existing workflow/execution, or modify other - existing data, please describe how that will work. - -REST API impact ---------------- - -This section is optional. - -Each API method which is either added or changed should have the following: - -* Specification for the method. - - * A description of the added or changed method. - - * Method type (POST/PUT/GET/DELETE). - - * Normal http response code(s). - - * Expected error http response code(s). - - * URL for the resource. - - * Parameters which can be passed via the url. - -* Example use case including typical API samples for both data supplied - by the caller and the response. - -End user impact ---------------- - -This section is optional. - -Aside from the API, are there other ways a user will interact with this -feature? - -* Does this change have an impact on python-qinlingclient? What does the user - interface there look like? - -Performance Impact ------------------- - -This section is optional. - -Describe any potential performance impact on the system, for example -how often will new code be called, and is there a major change to the calling -pattern of existing code. 
- -Examples of things to consider here include: - -* A small change in a utility function or a commonly used decorator can have a - large impacts on performance. - -* Calls which result in a database queries can have a profound impact on - performance when called in critical sections of the code. - -* Will the change include any locking, and if so what considerations are there - on holding the lock? - -Deployer impact ---------------- - -This section is optional. - -Discuss things that will affect how you deploy and configure OpenStack -that have not already been mentioned, such as: - -* What config options are being added? Are the default values ones which will - work well in real deployments? - -* Is this a change that takes immediate effect after its merged, or is it - something that has to be explicitly enabled? - -* If this change is a new binary, how would it be deployed? - -* Please state anything that those doing continuous deployment, or those - upgrading from the previous release, need to be aware of. Also describe - any plans to deprecate configuration values or features. - -Alternatives ------------- - -This section is optional. - -What other ways could we do this thing? Why aren't we using those? This doesn't -have to be a full literature review, but it should demonstrate that thought has -been put into why the proposed solution is an appropriate one. - - -Implementation -============== - -Assignee(s) ------------ - -Who is leading the writing of the code? Or is this a blueprint where you're -throwing it out there to see who picks it up? - -If more than one person is working on the implementation, please designate the -primary author and contact. - -Primary assignee: - - -Other contributors: - - - -Dependencies -============ - -This section is optional. - -* Include specific references to specs and/or features in Qinling, or in - other projects, that this one either depends on or is related to. 
- -* Does this feature require any new library dependencies or code otherwise not - included in Qinling? Or does it depend on a specific version of library? - - -Testing -======= - -This section is optional. - -Please discuss the important scenarios needed to test here, as well as -specific edge cases we should be ensuring work correctly. - - -References -========== - -This section is optional. - -Please add any useful references here. You are not required to have any -reference. Moreover, this specification should still make sense when your -references are unavailable. Examples of what you could include are: - -* Links to mailing list or IRC discussions - -* Links to notes from a summit session - -* Links to relevant research, if appropriate - -* Anything else you feel it is worthwhile to refer to \ No newline at end of file diff --git a/doc/source/user/aodh.rst b/doc/source/user/aodh.rst deleted file mode 100644 index e604f6cb..00000000 --- a/doc/source/user/aodh.rst +++ /dev/null @@ -1,224 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -How to trigger Qinling function in Aodh -======================================= - -`Aodh `_ is the Alarming service -project in OpenStack that enables the ability to trigger actions based on -defined rules against metric or event data collected by -`Ceilometer `_ or -`Gnocchi `_. - -We can use Aodh alarm to trigger Qinling functions by some cloud events, e.g. -when an instance is created in this guide. 
- -Step1: Create webhook for the function in Qinling -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Webhook in Qinling allows to trigger the function without providing OpenStack -credentials, the webhook URL is only known by the function owner. - -Suppose we have a simple Python function that prints out a string: - -.. code-block:: console - - $ cat ~/functions/hello_world.py - def main(name='World', **kwargs): - ret = 'Hello, %s' % name - return ret - -.. end - -Create a function using the Python script: - -.. code-block:: console - - $ RUNTIME_ID=bd516a15-3787-4652-938b-e25665c376e6 - $ openstack function create --runtime $RUNTIME_ID \ - --entry hello_world.main \ - --file ~/functions/hello_world.py - +-------------+-------------------------------------------------------------------------+ - | Field | Value | - +-------------+-------------------------------------------------------------------------+ - | id | 5b25e0ea-7f8d-487a-bce6-f6a2556a1e3f | - | name | None | - | description | None | - | count | 0 | - | code | {u'source': u'package', u'md5sum': u'9bad2959cafc9d89684fe7a336de9927'} | - | runtime_id | bd516a15-3787-4652-938b-e25665c376e6 | - | entry | hello_world.main | - | project_id | 360d69d06890407eab1a44573c1f3776 | - | created_at | 2018-05-13 06:58:13.208421 | - | updated_at | None | - +-------------+-------------------------------------------------------------------------+ - -.. end - -Create a webhook for the function: - -.. 
code-block:: console - - $ function_id=5b25e0ea-7f8d-487a-bce6-f6a2556a1e3f - $ openstack webhook create $function_id - +-------------+-------------------------------------------------------------------------------+ - | Field | Value | - +-------------+-------------------------------------------------------------------------------+ - | id | a5f82898-a4c3-4104-ad2d-40fbafbe8857 | - | function_id | 5b25e0ea-7f8d-487a-bce6-f6a2556a1e3f | - | description | None | - | project_id | 360d69d06890407eab1a44573c1f3776 | - | created_at | 2018-05-13 06:59:20.616092 | - | updated_at | None | - | webhook_url | http://10.0.0.14:7070/v1/webhooks/a5f82898-a4c3-4104-ad2d-40fbafbe8857/invoke | - +-------------+-------------------------------------------------------------------------------+ - -.. end - -The ``webhook_url`` could be used to trigger the function without any -authentication. Make note of this URL, as it will be used as the alarm action -when we create the alarm in Aodh. - -Step2: Create an event alarm in Aodh -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In Aodh, we only need to define an alarm that will be triggered by the event -``compute.instance.create`` (Nova will emit the event when there is a new VM -created), using the webhook URL from Step1. - -.. 
code-block:: console - - $ webhook_url=http://10.0.0.14:7070/v1/webhooks/a5f82898-a4c3-4104-ad2d-40fbafbe8857/invoke - $ aodh alarm create --name qinling-alarm \ - --type event --alarm-action $webhook_url \ - --repeat-actions false --event-type compute.instance.create - +---------------------------+------------------------------------------------------------------------------------+ - | Field | Value | - +---------------------------+------------------------------------------------------------------------------------+ - | alarm_actions | [u'http://10.0.0.14:7070/v1/webhooks/a5f82898-a4c3-4104-ad2d-40fbafbe8857/invoke'] | - | alarm_id | 1f85edea-a8a6-47ba-b1f5-9e3ac7ee61dc | - | description | Alarm when compute.instance.create event occurred. | - | enabled | True | - | event_type | compute.instance.create | - | insufficient_data_actions | [] | - | name | qinling-alarm | - | ok_actions | [] | - | project_id | 360d69d06890407eab1a44573c1f3776 | - | query | | - | repeat_actions | False | - | severity | low | - | state | insufficient data | - | state_reason | Not evaluated yet | - | state_timestamp | 2018-05-13T07:13:03.631059 | - | time_constraints | [] | - | timestamp | 2018-05-13T07:13:03.631059 | - | type | event | - | user_id | 26d9ec1da7fc4756b1940e69292565c2 | - +---------------------------+------------------------------------------------------------------------------------+ - -.. end - -Step3: Simulate an event trigger -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -For testing purpose, I wrote a Python script to generate an event for this Aodh -alarm so that we don't need to install and config Nova and Ceilometer service. -The script sends a notification with ``event_type`` as -``compute.instance.create`` to Aodh service directly. The script was tested -in a default Devstack environment. - -First, create a config file for the script: - -.. 
code-block:: console - - $ mkdir -p /etc/lingxian - $ cat < /etc/lingxian/lingxian.conf - [oslo_messaging_rabbit] - rabbit_userid = stackrabbit - rabbit_password = password - EOF - -.. end - -Download the script, modify the ``conf_file`` and ``project_id``, the -``project_id`` should be the same with the project who created the alarm. Run -the script: - -.. code-block:: console - - $ curl -sSO https://raw.githubusercontent.com/lingxiankong/qinling_utils/master/aodh_notifier_simulator.py - $ python aodh_notifier_simulator.py - -.. end - -Now the alarm should be triggered, and the webhook is invoked. Check the alarm -history, we could see the alarm state transition: - -.. code-block:: console - - $ alarm_id=1f85edea-a8a6-47ba-b1f5-9e3ac7ee61dc - $ aodh alarm-history show $alarm_id -f yaml - - detail: '{"transition_reason": "Event - hits the query .", "state": "alarm"}' - event_id: 3250eed6-edaf-41e8-bfa6-42b060f96e75 - timestamp: '2018-05-13T08:34:47.977951' - type: state transition - - detail: '{"state_reason": "Not evaluated yet", "user_id": "26d9ec1da7fc4756b1940e69292565c2", - "name": "qinling-alarm", "state": "insufficient data", "timestamp": "2018-05-13T07:13:03.631059", - "description": "Alarm when compute.instance.create event occurred.", "enabled": - true, "state_timestamp": "2018-05-13T07:13:03.631059", "rule": {"query": [], "event_type": - "compute.instance.create"}, "alarm_id": "1f85edea-a8a6-47ba-b1f5-9e3ac7ee61dc", - "time_constraints": [], "insufficient_data_actions": [], "repeat_actions": false, - "ok_actions": [], "project_id": "360d69d06890407eab1a44573c1f3776", "type": "event", - "alarm_actions": ["http://10.0.0.14:7070/v1/webhooks/a5f82898-a4c3-4104-ad2d-40fbafbe8857/invoke"], - "severity": "low"}' - event_id: 231ca53e-5d74-4191-8136-b332d2d91f1a - timestamp: '2018-05-13T07:13:03.631059' - type: creation - -.. end - -Check the function execution in Qinling: - -.. 
code-block:: console - - $ function_id=5b25e0ea-7f8d-487a-bce6-f6a2556a1e3f - $ openstack function execution list --filter function_id=$function_id -f yaml - - Created_at: '2018-05-13 08:34:49' - Description: Created by Webhook a5f82898-a4c3-4104-ad2d-40fbafbe8857 - Function_id: 5b25e0ea-7f8d-487a-bce6-f6a2556a1e3f - Id: 41b351fa-a96b-4d86-ba77-33f7bca3dad1 - Input: '{"current": "alarm", "alarm_id": "1f85edea-a8a6-47ba-b1f5-9e3ac7ee61dc", - "reason": "Event - hits the query .", "severity": "low", "reason_data": {"type": "event", - "event": {"event_type": "compute.instance.create", "traits": [["project_id", 1, - "360d69d06890407eab1a44573c1f3776"], ["service", 1, "nova"], ["vm_name", 1, "new_instance"], - ["vm_id", 1, "ba2b30a0-1b14-4ad4-9a66-f24ece912cad"]], "message_signature": "bcfb59e386d5375dbb7ded9910900a98536f168d377f52ae7ffd89159c0019f5", - "raw": {}, "generated": "2017-10-03T10:02:38.305378", "message_id": "ac6ce4ae-546a-47cc-a0cb-ad1bae44ca61"}}, - "alarm_name": "qinling-alarm", "previous": "insufficient data"}' - Project_id: 360d69d06890407eab1a44573c1f3776 - Result: '{"duration": 0.084, "output": "Hello, World"}' - Status: success - Sync: false - Updated_at: '2018-05-13 08:34:53' - -.. end - -Conclusion -~~~~~~~~~~ - -Although a Qinling function can be invoked on demand, trigger the function -according to the cloud events automatically(i.e. event-driven) can bring more -power to your function and make your whole application more efficient and cost -effective. diff --git a/doc/source/user/cookbook_function.rst b/doc/source/user/cookbook_function.rst deleted file mode 100644 index e7fa9f3c..00000000 --- a/doc/source/user/cookbook_function.rst +++ /dev/null @@ -1,647 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Function Cookbook -================= - -Introduction -~~~~~~~~~~~~ - -Qinling function lets you execute your code in a serverless environment without -having to first create a VM or container. This cookbook contains several -examples for how to create functions in Qinling. - -Examples -~~~~~~~~ - -Create Python function with libraries in a package --------------------------------------------------- - -This guide describes how to create a python function with libraries in a -package and how to invoke the function in a Python runtime(the steps assume -there is already a Python 2.7 runtime available in the deployment). - -The function resizes an image which stores in Swift and uploads the resized -image to a new container with a same object name. For the function to work, a -python library called ``Pillow`` needs to be installed together with the -function code, the ``python-swiftclient`` doesn't need to be installed because -Qinling supports it as a built-in library in Qinling's default Python 2.7 -runtime implementation. - -The function needs two positional parameters: - -* ``container_name``: The container name in Swift that the original image file - is stored in. -* ``object_name``: The object name in the container. - -There is no output for the function itself, but you can check the function -execution log to see the whole process. - -.. note:: - - The following process has been tested in a Devstack environment in which - Swift is also installed. - -#. Create a directory, for example ``~/qinling_test`` - - .. code-block:: console - - mkdir ~/qinling_test - - .. 
end - -#. Write a custom python code for resizing an image at the root level of the - directory created above. - - .. code-block:: console - - cat < ~/qinling_test/resize_image.py - import os - - from PIL import Image - import swiftclient - from swiftclient.exceptions import ClientException - - - def resize_image(image_path, resized_path): - with Image.open(image_path) as image: - image.thumbnail(tuple(x / 4 for x in image.size)) - image.save(resized_path) - - - def main(context, container_name, object_name): - conn = swiftclient.Connection( - session=context['os_session'], - os_options={'region_name': 'RegionOne'}, - ) - - # Download original image - image_path = os.path.abspath('./%s' % object_name) - _, obj_contents = conn.get_object(container_name, object_name) - with open(image_path, 'w') as local: - local.write(obj_contents) - - print('Downloaded object %s from container %s' % (object_name, container_name)) - - thumb_path = os.path.abspath('./%s_resized.png' % object_name) - resize_image(image_path, thumb_path) - - print('Resized.') - - # Create new container if needed - new_container_name = '%s_resized' % container_name - try: - conn.head_container(new_container_name) - except ClientException: - conn.put_container(new_container_name) - print("New container %s created." % new_container_name) - - # Upload resized image - with open(thumb_path, 'r') as new_local: - conn.put_object( - new_container_name, - object_name, - contents=new_local, - content_type='text/plain' - ) - os.remove(image_path) - os.remove(thumb_path) - - print('Uploaded object %s to container %s' % (object_name, new_container_name)) - EOF - - .. end - -#. Install the python libraries necessary for the program execution using - ``pip``. The libraries need to be installed at the root level of the - directory. - - .. code-block:: console - - pip install module-name -t path/to/dir - - .. end - - In this example, we would install the library ``Pillow`` in the project - directory. - - .. 
code-block:: console - - pip install Pillow -t ~/qinling_test - - .. end - - .. note:: - - Qinling's default Python runtime includes most of the OpenStack project - SDKs, so you don't need to include python-swiftclient in your function - code package, but you can optionally include it for your local testing. - -#. Add the contents of the whole directory to a zip file which is now your - function code package. Make sure you zip the contents of the directory and - not the directory itself. - - .. code-block:: console - - cd ~/qinling_test; zip -r9 ~/qinling_test/resize_image.zip . - - .. end - -#. Create function and get the function ID, replace the ``runtime_id`` with - the one in your deployment. - - .. code-block:: console - - runtime_id=601efeb8-3e41-4e5c-a12a-986dbda252e3 - openstack function create --name resize_image \ - --runtime $runtime_id \ - --entry resize_image.main \ - --package ~/qinling_test/resize_image.zip - +-------------+-------------------------------------------------------------------------+ - | Field | Value | - +-------------+-------------------------------------------------------------------------+ - | id | f8b18de6-1751-46d6-8c0d-0f1ecf943d12 | - | name | resize_test | - | description | None | - | count | 0 | - | code | {u'source': u'package', u'md5sum': u'ae7ad9ae450a8c5c31dca8e96f42247c'} | - | runtime_id | 685c1e6c-e175-4b32-9ec4-244d39c1077e | - | entry | resize_image.main | - | project_id | a1e58c83923a4e2ca9370df6007c7fe6 | - | created_at | 2018-07-03 04:38:50.147277 | - | updated_at | None | - | cpu | 100 | - | memory_size | 33554432 | - +-------------+-------------------------------------------------------------------------+ - function_id=f8b18de6-1751-46d6-8c0d-0f1ecf943d12 - - .. end - -#. Upload an image to Swift. - - .. 
code-block:: console - - curl -SL https://docs.openstack.org/arch-design/_images/osog_0001.png -o ~/origin.jpg - openstack container create origin_folder - +---------------------------------------+---------------+------------------------------------+ - | account | container | x-trans-id | - +---------------------------------------+---------------+------------------------------------+ - | AUTH_a1e58c83923a4e2ca9370df6007c7fe6 | origin_folder | tx664a23a4a6e345b6af30d-005b3b6127 | - +---------------------------------------+---------------+------------------------------------+ - openstack object create origin_folder ~/origin.jpg --name image - +--------+---------------+----------------------------------+ - | object | container | etag | - +--------+---------------+----------------------------------+ - | image | origin_folder | 07855978284adfcbbf76954a7c654a74 | - +--------+---------------+----------------------------------+ - openstack object show origin_folder image - +----------------+---------------------------------------+ - | Field | Value | - +----------------+---------------------------------------+ - | account | AUTH_a1e58c83923a4e2ca9370df6007c7fe6 | - | container | origin_folder | - | content-length | 45957 | - | content-type | application/octet-stream | - | etag | 07855978284adfcbbf76954a7c654a74 | - | last-modified | Tue, 03 Jul 2018 11:44:33 GMT | - | object | image | - +----------------+---------------------------------------+ - - .. end - -#. Invoke the function by specifying function_id and the function inputs as - well. - - .. 
code-block:: console - - openstack function execution create $function_id --input '{"container_name": "origin_folder", "object_name": "image"}' - +------------------+-------------------------------------------------------------+ - | Field | Value | - +------------------+-------------------------------------------------------------+ - | id | 04c60ae7-08c9-454c-9b2c-0bbf36391159 | - | function_id | d3de49fc-7488-4635-aa48-84e754881eb8 | - | function_version | 0 | - | description | None | - | input | {"object_name": "image", "container_name": "origin_folder"} | - | result | {"duration": 2.74, "output": null} | - | status | success | - | sync | True | - | project_id | a1e58c83923a4e2ca9370df6007c7fe6 | - | created_at | 2018-07-03 09:12:12 | - | updated_at | 2018-07-03 09:12:16 | - +------------------+-------------------------------------------------------------+ - - .. end - -#. Check the function execution log. - - .. code-block:: console - - openstack function execution log show 04c60ae7-08c9-454c-9b2c-0bbf36391159 - Start execution: 04c60ae7-08c9-454c-9b2c-0bbf36391159 - Downloaded object image from container origin_folder - Resized. - New container origin_folder_resized created. - Uploaded object image to container origin_folder_resized - Finished execution: 04c60ae7-08c9-454c-9b2c-0bbf36391159 - - .. end - -#. Verify that a new object of smaller size was created in a new container in - Swift. - - .. 
code-block:: console - - openstack container list - +-----------------------+ - | Name | - +-----------------------+ - | origin_folder | - | origin_folder_resized | - +-----------------------+ - openstack object list origin_folder_resized - +-------+ - | Name | - +-------+ - | image | - +-------+ - openstack object show origin_folder_resized image - +----------------+---------------------------------------+ - | Field | Value | - +----------------+---------------------------------------+ - | account | AUTH_a1e58c83923a4e2ca9370df6007c7fe6 | - | container | origin_folder_resized | - | content-length | 31779 | - | content-type | text/plain | - | etag | f737cc7f0fe5c15d8a6897c8fe159c02 | - | last-modified | Tue, 03 Jul 2018 11:46:40 GMT | - | object | image | - +----------------+---------------------------------------+ - - .. end - - Pay attention to the object ``content-length`` value which is smaller than - the original object. - -Create a function stored in OpenStack Swift -------------------------------------------- - -OpenStack object storage service, swift can be integrated with Qinling to -create functions. You can upload your function package to swift and create -the function by specifying the container name and object name in Swift. In this -example the function would return ``"Hello, World"`` by default, you can -replace the string with the function input. The steps assume there is already -a Python 2.7 runtime available in the deployment. - -#. Create a function deployment package. - - .. code-block:: console - - mkdir ~/qinling_swift_test - cd ~/qinling_swift_test - cat < hello_world.py - def main(name='World',**kwargs): - ret = 'Hello, %s' % name - return ret - EOF - - cd ~/qinling_swift_test && zip -r ~/qinling_swift_test/hello_world.zip ./* - - .. end - -#. Upload the file to swift - - .. 
code-block:: console - - openstack container create functions - - +---------------------------------------+------------------+------------------------------------+ - | account | container | x-trans-id | - +---------------------------------------+------------------+------------------------------------+ - | AUTH_6ae7142bff0542d8a8f3859ffa184236 | functions | 9b45bef5ab2658acb9b72ee32f39dbc8 | - +---------------------------------------+------------------+------------------------------------+ - - openstack object create functions hello_world.zip - - +-----------------+-----------+----------------------------------+ - | object | container | etag | - +-----------------+-----------+----------------------------------+ - | hello_world.zip | functions | 9b45bef5ab2658acb9b72ee32f39dbc8 | - +-----------------+-----------+----------------------------------+ - - openstack object show functions hello_world.zip - - +----------------+---------------------------------------+ - | Field | Value | - +----------------+---------------------------------------+ - | account | AUTH_6ae7142bff0542d8a8f3859ffa184236 | - | container | functions | - | content-length | 246 | - | content-type | application/zip | - | etag | 9b45bef5ab2658acb9b72ee32f39dbc8 | - | last-modified | Wed, 18 Jul 2018 17:45:23 GMT | - | object | hello_world.zip | - +----------------+---------------------------------------+ - - .. end - -#. Create a function and get the function ID, replace the - ``runtime_id`` with the one in your deployment. Also, specify swift - container and object name. - - .. 
code-block:: console - - openstack function create --name hello_world \ - --runtime $runtime_id \ - --entry hello_world.main \ - --container functions \ - --object hello_world.zip - - +-------------+----------------------------------------------------------------------------------------------+ - | Field | Value | - +-------------+----------------------------------------------------------------------------------------------+ - | id | f1102bca-fbb4-4baf-874d-ed33bf8251f7 | - | name | hello_world | - | description | None | - | count | 0 | - | code | {u'source': u'swift', u'swift': {u'object': u'hello_world.zip', u'container': u'functions'}} | - | runtime_id | 0d8bcf73-910b-4fec-86b1-38ace8bd0766 | - | entry | hello_world.main | - | project_id | 6ae7142bff0542d8a8f3859ffa184236 | - | created_at | 2018-07-18 17:46:29.974506 | - | updated_at | None | - | cpu | 100 | - | memory_size | 33554432 | - +-------------+----------------------------------------------------------------------------------------------+ - - .. end - -#. Invoke the function by specifying function_id - - .. code-block:: console - - function_id=f1102bca-fbb4-4baf-874d-ed33bf8251f7 - openstack function execution create $function_id - - +------------------+-----------------------------------------------+ - | Field | Value | - +------------------+-----------------------------------------------+ - | id | 3451393d-60c6-4172-bbdf-c681929fae07 | - | function_id | f1102bca-fbb4-4baf-874d-ed33bf8251f7 | - | function_version | 0 | - | description | None | - | input | None | - | result | {"duration": 0.031, "output": "Hello, World"} | - | status | success | - | sync | True | - | project_id | 6ae7142bff0542d8a8f3859ffa184236 | - | created_at | 2018-07-18 17:49:46 | - | updated_at | 2018-07-18 17:49:48 | - +------------------+-----------------------------------------------+ - - .. end - - It is very easy and simple to use Qinling with swift. We have successfully created and - invoked a function using OpenStack Swift. 
- -Create image(docker) type function ----------------------------------- - -With the help of Docker Hub you would be able to create image type functions in -Qinling. As a prerequisite, you need to have a Docker Hub account. In the -following instructions replace ``DOCKER_USER`` with your own docker hub -username. - -#. In this tutorial we would be create docker image with latest Python3 - installed. We will create a python script which would be included in the image. - Finally we create a Dockerfile to build the image. - - .. code-block:: console - - mkdir ~/qinling_test - cd ~/qinling_test - cat < ~/qinling_test/hello.py - import sys - import time - - def main(): - print('Hello', sys.argv[1]) - time.sleep(3) - - if __name__ == '__main__': - main() - EOF - - cat < ~/qinling_test/Dockerfile - FROM python:3.7.0-alpine3.7 - COPY . /qinling_test - WORKDIR /qinling_test - ENTRYPOINT [ "python", "./hello.py" ] - CMD ["Qinling"] - EOF - - .. end - -#. You need first run docker login to authenticate, build the image and push - to Docker Hub. - - .. code-block:: console - - docker login - docker build -t DOCKER_USER/qinling_test . - docker push DOCKER_USER/qinlng_test - - .. end - -#. Create an image type function by providing the docker image name. - - .. 
code-block:: console - - $ openstack function create --name docker_test --image DOCKER_USER/qinling_test - +-------------+--------------------------------------------------------------+ - | Field | Value | - +-------------+--------------------------------------------------------------+ - | id | 6fa6932d-ee43-41d4-891c-77a96b52c697 | - | name | docker_test | - | description | None | - | count | 0 | - | code | {u'source': u'image', u'image': u'DOCKER_USER/qinling_test'} | - | runtime_id | None | - | entry | None | - | project_id | 6ae7142bff0542d8a8f3859ffa184236 | - | created_at | 2018-08-05 00:37:07.336918 | - | updated_at | None | - | cpu | 100 | - | memory_size | 33554432 | - +-------------+--------------------------------------------------------------+ - - .. end - -#. Invoke the function by specifying the function name or ID. - - .. code-block:: console - - $ openstack function execution create docker_test - +------------------+--------------------------------------+ - | Field | Value | - +------------------+--------------------------------------+ - | id | 8fe0e2e9-2133-4abb-8cd4-f2f14935cab4 | - | function_id | 6fa6932d-ee43-41d4-891c-77a96b52c697 | - | function_version | 0 | - | description | None | - | input | None | - | result | {"duration": 3} | - | status | success | - | sync | True | - | project_id | 6ae7142bff0542d8a8f3859ffa184236 | - | created_at | 2018-08-05 00:37:25 | - | updated_at | 2018-08-05 00:37:29 | - +------------------+--------------------------------------+ - - .. end - -#. Check the execution log. - - .. code-block:: console - - $ openstack function execution log show 8fe0e2e9-2133-4abb-8cd4-f2f14935cab4 - Hello Qinling - - .. end - -Config timeout for the function -------------------------------- - -In the cloud, you need to pay for the cloud resources that are used to run your -Qinling function. To prevent your function from running indefinitely, you -specify a timeout. 
When the specified timeout is reached, Qinling terminates -execution of the function. We recommend you set this value based on your -expected execution time. The default is 5 seconds and you can set it up to 300 -seconds. - -.. note:: - - This guide assumes you already have a Python2 or Python3 runtime available - in the deployment - -#. Create a Python function that simply sleeps for 10 seconds to simulate a - long-running function. - - .. code-block:: console - - mkdir ~/qinling_test && cd ~/qinling_test - cat < test_sleep.py - import time - def main(seconds=10, **kwargs): - time.sleep(seconds) - EOF - - .. end - -#. Create the Qinling function. - - .. code-block:: console - - $ openstack function create --runtime $runtime_id --entry test_sleep.main --file ~/qinling_test/test_sleep.py --name test_sleep - +-------------+-------------------------------------------------------------------------+ - | Field | Value | - +-------------+-------------------------------------------------------------------------+ - | id | 6c2cb248-5065-4a0a-9b7a-06818693358c | - | name | test_sleep | - | description | None | - | count | 0 | - | code | {u'source': u'package', u'md5sum': u'c0830d40dbef48b11af9e63a653799ac'} | - | runtime_id | ba429da0-b800-4f27-96ea-eb527bd68004 | - | entry | test_sleep.main | - | project_id | d256a42b9f8e4d66805d91655b36a318 | - | created_at | 2018-09-10 01:43:06.250137 | - | updated_at | None | - | cpu | 100 | - | memory_size | 33554432 | - | timeout | 5 | - +-------------+-------------------------------------------------------------------------+ - - .. end - -#. Invoke the function. You will see the execution is terminated after about 5 - seconds(the default timeout). - - .. 
code-block:: console - - $ openstack function execution create test_sleep - +------------------+--------------------------------------------------------------+ - | Field | Value | - +------------------+--------------------------------------------------------------+ - | id | e096f4b3-85a7-4356-93e9-5f583e802aa2 | - | function_id | 6c2cb248-5065-4a0a-9b7a-06818693358c | - | function_version | 0 | - | description | None | - | input | None | - | result | {"duration": 5.097, "output": "Function execution timeout."} | - | status | failed | - | sync | True | - | project_id | d256a42b9f8e4d66805d91655b36a318 | - | created_at | 2018-09-10 01:44:46 | - | updated_at | 2018-09-10 01:44:55 | - +------------------+--------------------------------------------------------------+ - - .. end - -#. Update the function by setting a longer timeout value. - - .. code-block:: console - - $ openstack function update test_sleep --timeout 15 - +-------------+-------------------------------------------------------------------------+ - | Field | Value | - +-------------+-------------------------------------------------------------------------+ - | id | 6c2cb248-5065-4a0a-9b7a-06818693358c | - | name | test_sleep | - | description | None | - | count | 1 | - | code | {u'source': u'package', u'md5sum': u'c0830d40dbef48b11af9e63a653799ac'} | - | runtime_id | ba429da0-b800-4f27-96ea-eb527bd68004 | - | entry | test_sleep.main | - | project_id | d256a42b9f8e4d66805d91655b36a318 | - | created_at | 2018-09-10 01:43:06 | - | updated_at | 2018-09-10 02:01:38.510319 | - | cpu | 100 | - | memory_size | 33554432 | - | timeout | 15 | - +-------------+-------------------------------------------------------------------------+ - - .. end - -#. Invoke the function again to verify the function is successfully executed. - - .. 
code-block:: console - - $ openstack function execution create test_sleep - +------------------+--------------------------------------+ - | Field | Value | - +------------------+--------------------------------------+ - | id | 6dd91e1d-df91-4e19-92b6-3bec474ee09a | - | function_id | 6c2cb248-5065-4a0a-9b7a-06818693358c | - | function_version | 0 | - | description | None | - | input | None | - | result | {"duration": 10.143, "output": null} | - | status | success | - | sync | True | - | project_id | d256a42b9f8e4d66805d91655b36a318 | - | created_at | 2018-09-10 02:03:56 | - | updated_at | 2018-09-10 02:04:06 | - +------------------+--------------------------------------+ - - .. end diff --git a/doc/source/user/cookbook_function_version.rst b/doc/source/user/cookbook_function_version.rst deleted file mode 100644 index 0dff95db..00000000 --- a/doc/source/user/cookbook_function_version.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Function version Cookbook -========================= - -Introduction -~~~~~~~~~~~~ - -Examples -~~~~~~~~ \ No newline at end of file diff --git a/doc/source/user/cookbook_job.rst b/doc/source/user/cookbook_job.rst deleted file mode 100644 index ec31bd65..00000000 --- a/doc/source/user/cookbook_job.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Job Cookbook -============ - -Introduction -~~~~~~~~~~~~ - -Examples -~~~~~~~~ \ No newline at end of file diff --git a/doc/source/user/cookbook_webhook.rst b/doc/source/user/cookbook_webhook.rst deleted file mode 100644 index 940ee3e9..00000000 --- a/doc/source/user/cookbook_webhook.rst +++ /dev/null @@ -1,22 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Webhook Cookbook -================ - -Introduction -~~~~~~~~~~~~ - -Examples -~~~~~~~~ \ No newline at end of file diff --git a/doc/source/user/index.rst b/doc/source/user/index.rst deleted file mode 100644 index 33d49b6b..00000000 --- a/doc/source/user/index.rst +++ /dev/null @@ -1,52 +0,0 @@ -.. - Copyright 2010-2011 United States Government as represented by the - Administrator of the National Aeronautics and Space Administration. - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. 
You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -User Guide -========== - -Demos ------ - -* `(2017.10) Trigger Qinling function based on object upload event in Swift `_ -* `(2018.05) Qinling function autoscaling `_ -* `(2018.05) Qinling function versioning `_ -* `(2018.08) Qinling horizon demo `_ - -Cookbooks ---------- - -.. toctree:: - :maxdepth: 1 - - cookbook_function - cookbook_function_version - cookbook_webhook - cookbook_job - -Integration with other services -------------------------------- - -.. toctree:: - :maxdepth: 1 - - aodh - -Indices and tables ------------------- - -* :ref:`genindex` -* :ref:`search` diff --git a/doc/source/videos.rst b/doc/source/videos.rst deleted file mode 100644 index a76a4b70..00000000 --- a/doc/source/videos.rst +++ /dev/null @@ -1,28 +0,0 @@ -.. - Copyright 2018 Catalyst IT Ltd - All Rights Reserved. - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. - -Qinling Videos -============== - -This page contains Qinling related videos including the OpenStack Summit -presentations, the project introduction, etc. 
- -* 2017.10, - `Qinling introduction demo `_ -* 2017.11, OpenStack Sydney Summit, - `Make your application "serverless" `_ -* 2018.05, OpenStack Vancouver Summit, - `How to implement FaaS in openstack `_ -* 2018.05, OpenStack Vancouver Summit, - `Qinling project update `_ diff --git a/etc/apache2/qinling-api.conf b/etc/apache2/qinling-api.conf deleted file mode 100644 index e39dcee5..00000000 --- a/etc/apache2/qinling-api.conf +++ /dev/null @@ -1,48 +0,0 @@ -# Qinling API port -Listen 7070 - -################# -# Hardening 1/2 # -################# -ServerSignature Off -ServerTokens Prod -TraceEnable off - - - - DocumentRoot "/var/www/cgi-bin/qinling" - - # Avoid this issue: https://bugs.launchpad.net/charm-heat/+bug/1717615 - AllowEncodedSlashes On - - ########### - # Logging # - ########### - # Paths have to be changed to fit your deployment - ErrorLog "/var/log/apache2/qinling_wsgi_error.log" - LogFormat "%{X-Forwarded-For}i %l %u %t \"%r\" %>s %b %D \"%{Referer}i\" \"%{User-Agent}i\"" logformat - CustomLog "/var/log/apache2/qinling_wsgi_access.log" logformat - - ###################### - # WSGI configuration # - ###################### - WSGIApplicationGroup %{GLOBAL} - # Paths and user/group have to be changed to fit your deployment - # Here Python 3.6 is issued. 
- WSGIDaemonProcess qinling group=qinling processes=5 threads=1 user=qinling python-path=/var/lib/kolla/venv/lib/python3.6/site-packages - WSGIProcessGroup qinling - WSGIScriptAlias / "/var/www/cgi-bin/qinling/wsgi.py" - WSGIPassAuthorization On - - ################# - # Hardening 2/2 # - ################# - # Paths have to be changed to fit your deployment - - - Options Indexes FollowSymLinks MultiViews - Require all granted - - - - diff --git a/etc/policy.json.sample b/etc/policy.json.sample deleted file mode 100644 index 2a4f22c1..00000000 --- a/etc/policy.json.sample +++ /dev/null @@ -1,27 +0,0 @@ -{ - "context_is_admin": "role:admin or is_admin:1", - "owner" : "project_id:%(project_id)s", - "admin_or_owner": "rule:context_is_admin or rule:owner", - "default": "rule:admin_or_owner", - - "runtime:create": "rule:context_is_admin", - "runtime:update": "rule:context_is_admin", - "runtime:delete": "rule:context_is_admin", - "runtime_pool:get_all": "rule:context_is_admin", - - "function:get_all:all_projects": "rule:context_is_admin", - "function_worker:get_all": "rule:context_is_admin", - "function:scale_up": "rule:context_is_admin", - "function:scale_down": "rule:context_is_admin", - "function:detach": "rule:context_is_admin", - - "function_version:scale_up": "rule:context_is_admin", - "function_version:scale_down": "rule:context_is_admin", - "function_version:detach": "rule:context_is_admin", - - "execution:get_all:all_projects": "rule:context_is_admin", - - "webhook:get_all:all_projects": "rule:context_is_admin", - - "job:get_all:all_projects": "rule:context_is_admin", -} diff --git a/etc/uwsgi/qinling-api.yaml b/etc/uwsgi/qinling-api.yaml deleted file mode 100644 index 561554e5..00000000 --- a/etc/uwsgi/qinling-api.yaml +++ /dev/null @@ -1,29 +0,0 @@ -uwsgi: - http-socket: 0.0.0.0:7070 - # Paths have to be changed to fit your deployment - wsgi-file: /var/www/cgi-bin/qinling/wsgi.py - chdir: /var/lib/kolla/venv/lib/python3.6/site-packages - pythonpath: 
/var/lib/kolla/venv/lib/python3.6/site-packages - virtualenv: /var/lib/kolla/venv - - plugins: python3 - # Set uid and gip to a appropriate user on your server. In many - # installations qinling will be correct - uid: qinling - gid: qinling - - processes: 5 - threads: 1 - vacuum: true - harakiri: 20 - buffer-size: 65535 - post-buffering: 8192 - # Set die-on-term and exit-on-reload so that uWSGI shuts down - die-on-term: true - exit-on-reload: true - master: true - enable-threads: true - # uWSGI recommends this to prevent thundering herd on accept - thunder-lock: true - honour-stdin: true - memory-report: false diff --git a/example/functions/python/openstack/create_thumbnail.py b/example/functions/python/openstack/create_thumbnail.py deleted file mode 100644 index bf4c6898..00000000 --- a/example/functions/python/openstack/create_thumbnail.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import os - -from PIL import Image -import swiftclient -from swiftclient.exceptions import ClientException - - -def resize_image(image_path, resized_path): - with Image.open(image_path) as image: - image.thumbnail(tuple(x / 4 for x in image.size)) - image.save(resized_path) - - -def main(context, container_name, object_name): - conn = swiftclient.Connection( - session=context['os_session'], - os_options={'region_name': 'RegionOne'}, - ) - - # Download original image - image_path = os.path.abspath('./%s' % object_name) - _, obj_contents = conn.get_object(container_name, object_name) - with open(image_path, 'w') as local: - local.write(obj_contents) - - print('Downloaded object %s from container %s' % - (object_name, container_name)) - - thumb_path = os.path.abspath('./%s_resized.png' % object_name) - resize_image(image_path, thumb_path) - - print('Resized.') - - # Create new container if needed - new_container_name = '%s_resized' % container_name - try: - conn.head_container(new_container_name) - except ClientException: - conn.put_container(new_container_name) - print("New container %s created." % new_container_name) - - # Upload resized image - with open(thumb_path, 'r') as new_local: - conn.put_object( - new_container_name, - object_name, - contents=new_local, - content_type='text/plain' - ) - os.remove(image_path) - os.remove(thumb_path) - - print('Uploaded object %s to container %s' % - (object_name, new_container_name)) diff --git a/example/functions/python/openstack/get_swift_object.py b/example/functions/python/openstack/get_swift_object.py deleted file mode 100644 index 1b60499d..00000000 --- a/example/functions/python/openstack/get_swift_object.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import swiftclient - - -def stat_object(context, container, object): - conn = swiftclient.Connection( - session=context['os_session'], - os_options={'region_name': 'RegionOne'}, - ) - - obj_header = conn.head_object(container, object) - - return obj_header diff --git a/example/functions/python/openstack/send_zaqar_message.py b/example/functions/python/openstack/send_zaqar_message.py deleted file mode 100644 index 4fa56a49..00000000 --- a/example/functions/python/openstack/send_zaqar_message.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import requests -from zaqarclient.queues import client - - -def _send_message(z_client, queue_name, status, server=''): - queue_name = queue_name or 'test_queue' - queue = z_client.queue(queue_name) - queue.post({"body": {'status': status, 'server': server}}) - print 'message posted.' 
- - -def check_and_trigger(context, **kwargs): - file_name = 'count.txt' - r = requests.get('http://httpbin.org/status/500') - - if r.status_code != requests.codes.ok: - if not os.path.isfile(file_name): - count = 1 - with open(file_name, 'w') as f: - f.write(str(count)) - else: - with open(file_name, 'r+') as f: - count = int(f.readline()) - count += 1 - if count == 3: - # Send message and stop trigger after 3 checks - z_client = client.Client( - session=context['os_session'], - version=2, - ) - _send_message(z_client, kwargs.get('queue'), r.status_code, - 'api1.production.catalyst.co.nz') - - f.seek(0) - f.write(str(count)) - f.truncate() - - print('new count: %s' % count) - else: - try: - os.remove(file_name) - except OSError: - pass diff --git a/example/kubernetes/cfssl-ca-config.json b/example/kubernetes/cfssl-ca-config.json deleted file mode 100644 index c5fc98be..00000000 --- a/example/kubernetes/cfssl-ca-config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "signing": { - "default": { - "expiry": "168h" - }, - "profiles": { - "client": { - "expiry": "8760h", - "usages": [ - "signing", - "key encipherment", - "client auth" - ] - } - } - } -} diff --git a/example/kubernetes/cfssl-client-csr.json b/example/kubernetes/cfssl-client-csr.json deleted file mode 100644 index 6626637d..00000000 --- a/example/kubernetes/cfssl-client-csr.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "CN": "qinling", - "key": { - "algo": "rsa", - "size": 2048 - } -} diff --git a/example/kubernetes/k8s_qinling_role.yaml b/example/kubernetes/k8s_qinling_role.yaml deleted file mode 100644 index 94cd8756..00000000 --- a/example/kubernetes/k8s_qinling_role.yaml +++ /dev/null @@ -1,77 +0,0 @@ ---- -kind: ClusterRole -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: qinling -rules: - - apiGroups: [""] - resources: ["nodes", "namespaces"] - verbs: ["list"] - - apiGroups: [""] - resources: ["namespaces"] - resourceNames: ["qinling"] - verbs: ["create"] ---- -kind: ClusterRoleBinding -apiVersion: 
rbac.authorization.k8s.io/v1 -metadata: - name: qinling -subjects: -- kind: User - name: qinling - apiGroup: rbac.authorization.k8s.io -roleRef: - kind: ClusterRole - name: qinling - apiGroup: rbac.authorization.k8s.io ---- -# The qinling namespace should be created for the role and rolebinding -apiVersion: v1 -kind: Namespace -metadata: - name: qinling ---- -kind: Role -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: qinling - namespace: qinling -rules: -- apiGroups: [""] - resources: ["services"] - verbs: ["list", "get", "create", "delete"] -- apiGroups: [""] - resources: ["pods"] - verbs: ["list", "get", "create", "patch", "delete", "deletecollection"] -- apiGroups: [""] - resources: ["pods/log"] - verbs: ["get"] -- apiGroups: ["extensions"] - resources: ["deployments"] - verbs: ["get", "create", "patch", "deletecollection"] -- apiGroups: ["extensions"] - resources: ["deployments/rollback"] - verbs: ["create"] -- apiGroups: ["extensions"] - resources: ["deployments/status"] - verbs: ["get"] -- apiGroups: ["extensions"] - resources: ["replicasets"] - verbs: ["deletecollection"] -- apiGroups: ["extensions"] - resources: ["networkpolicies"] - verbs: ["list", "get", "create", "delete"] ---- -kind: RoleBinding -apiVersion: rbac.authorization.k8s.io/v1 -metadata: - name: qinling - namespace: qinling -subjects: -- kind: User - name: qinling - apiGroup: rbac.authorization.k8s.io -roleRef: - kind: Role - name: qinling - apiGroup: rbac.authorization.k8s.io diff --git a/lower-constraints.txt b/lower-constraints.txt deleted file mode 100644 index ea72d25a..00000000 --- a/lower-constraints.txt +++ /dev/null @@ -1,126 +0,0 @@ -alembic==0.9.8 -amqp==2.2.2 -appdirs==1.4.3 -asn1crypto==0.24.0 -Babel==2.3.4 -bcrypt==3.1.4 -beautifulsoup4==4.6.0 -cachetools==2.0.1 -certifi==2018.1.18 -cffi==1.11.5 -chardet==3.0.4 -cliff==2.11.0 -cmd2==0.8.1 -contextlib2==0.5.5 -cotyledon==1.3.0 -coverage==4.0 -croniter==0.3.4 -cryptography==2.1.4 -debtcollector==1.19.0 
-decorator==4.2.1 -dogpile.cache==0.6.5 -enum-compat==0.0.2 -etcd3gw==0.2.3 -eventlet==0.20.0 -extras==1.0.0 -fasteners==0.14.1 -fixtures==3.0.0 -future==0.16.0 -futurist==1.2.0 -google-auth==1.4.1 -greenlet==0.4.13 -idna==2.6 -ipaddress==1.0.19 -iso8601==0.1.12 -Jinja2==2.10 -jsonschema==2.6.0 -keystoneauth1==3.4.0 -keystonemiddleware==4.17.0 -kombu==4.1.0 -kubernetes==6.0.0 -linecache2==1.0.0 -logutils==0.3.5 -Mako==1.0.7 -MarkupSafe==1.0 -monotonic==1.4 -mox3==0.25.0 -msgpack==0.5.6 -netaddr==0.7.19 -netifaces==0.10.6 -oauthlib==2.0.6 -os-api-ref==1.4.0 -os-client-config==1.29.0 -oslo.cache==1.29.0 -oslo.concurrency==3.26.0 -oslo.config==5.2.0 -oslo.context==2.20.0 -oslo.db==4.27.0 -oslo.i18n==3.20.0 -oslo.log==3.36.0 -oslo.messaging==5.29.0 -oslo.middleware==3.35.0 -oslo.policy==1.30.0 -oslo.serialization==2.18.0 -oslo.service==1.24.0 -oslo.upgradecheck==0.1.0 -oslo.utils==3.33.0 -oslotest==3.2.0 -paramiko==2.4.1 -Paste==2.0.3 -PasteDeploy==1.5.2 -pbr==2.0.0 -pecan==1.0.0 -pika==0.10.0 -pika-pool==0.1.3 -prettytable==0.7.2 -pyasn1==0.4.2 -pyasn1-modules==0.2.1 -pycadf==2.7.0 -pycparser==2.18 -pyinotify==0.9.6 -PyMySQL==0.7.6 -PyNaCl==1.2.1 -pyparsing==2.2.0 -pyperclip==1.6.0 -python-dateutil==2.5.3 -python-editor==1.0.3 -python-keystoneclient==3.15.0 -python-mimeparse==1.6.0 -python-subunit==1.2.0 -python-swiftclient==3.2.0 -pytz==2018.3 -PyYAML==3.12 -repoze.lru==0.7 -requests==2.18.4 -requests-oauthlib==0.8.0 -requestsexceptions==1.4.0 -rfc3986==1.1.0 -Routes==2.4.1 -rsa==3.4.2 -setproctitle==1.1.10 -setuptools==21.0.0 -simplegeneric==0.8.1 -SQLAlchemy==1.0.10 -sqlalchemy-migrate==0.11.0 -sqlparse==0.2.4 -statsd==3.2.2 -stestr==2.0.0 -stevedore==1.20.0 -tempest==17.1.0 -Tempita==0.5.2 -tenacity==4.4.0 -testrepository==0.0.18 -testresources==2.0.1 -testscenarios==0.4 -testtools==2.2.0 -traceback2==1.4.0 -unittest2==1.1.0 -urllib3==1.22 -vine==1.1.4 -voluptuous==0.11.1 -waitress==1.1.0 -WebOb==1.7.4 -websocket-client==0.47.0 -WebTest==2.0.29 -wrapt==1.10.11 
-WSME==0.8.0 diff --git a/playbooks/post.yaml b/playbooks/post.yaml deleted file mode 100644 index 0a77fe74..00000000 --- a/playbooks/post.yaml +++ /dev/null @@ -1,8 +0,0 @@ -- hosts: all - tasks: - - name: cleanup the Neutron config dir - become: true - file: - path: /etc/neutron - state: absent - diff --git a/playbooks/pre.yaml b/playbooks/pre.yaml deleted file mode 100644 index 35f77559..00000000 --- a/playbooks/pre.yaml +++ /dev/null @@ -1,10 +0,0 @@ -- hosts: all - tasks: - - name: create Neutron config dir for devstack job - become: true - file: - path: /etc/neutron - owner: stack - recurse: yes - state: directory - diff --git a/qinling/__init__.py b/qinling/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/api/__init__.py b/qinling/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/api/access_control.py b/qinling/api/access_control.py deleted file mode 100644 index 82c40ffd..00000000 --- a/qinling/api/access_control.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Access Control API server.""" - -from keystonemiddleware import auth_token -from oslo_config import cfg -from oslo_policy import policy - -from qinling import exceptions as exc - -_ENFORCER = None - - -def setup(app): - if cfg.CONF.pecan.auth_enable: - conf = dict(cfg.CONF.keystone_authtoken) - - # Change auth decisions of requests to the app itself. 
- conf.update({'delay_auth_decision': True}) - - _ensure_enforcer_initialization() - - return auth_token.AuthProtocol(app, conf) - else: - return app - - -def enforce(action, context, target=None, do_raise=True, - exc=exc.NotAllowedException): - """Verifies that the action is valid on the target in this context. - - :param action: String, representing the action to be checked. - This should be colon separated for clarity. - i.e. ``workflows:create`` - :param context: Qinling context. - :param target: Dictionary, representing the object of the action. - For object creation, this should be a dictionary - representing the location of the object. - e.g. ``{'project_id': context.project_id}`` - :param do_raise: if True (the default), raises specified exception. - :param exc: Exception to be raised if not authorized. Default is - qinling.exceptions.NotAllowedException. - - :return: returns True if authorized and False if not authorized and - do_raise is False. - """ - if not cfg.CONF.pecan.auth_enable: - return - - ctx_dict = context.to_policy_values() - - target_obj = { - 'project_id': ctx_dict['project_id'], - 'user_id': ctx_dict['user_id'], - } - - target_obj.update(target or {}) - _ensure_enforcer_initialization() - - return _ENFORCER.enforce( - action, - target_obj, - ctx_dict, - do_raise=do_raise, - exc=exc - ) - - -def _ensure_enforcer_initialization(): - global _ENFORCER - if not _ENFORCER: - _ENFORCER = policy.Enforcer(cfg.CONF) - _ENFORCER.load_rules() diff --git a/qinling/api/app.py b/qinling/api/app.py deleted file mode 100644 index 4dd7052e..00000000 --- a/qinling/api/app.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_config import cfg -from oslo_log import log as logging -import oslo_middleware.cors as cors_middleware -import oslo_middleware.http_proxy_to_wsgi as http_proxy_to_wsgi_middleware -import pecan - -from qinling.api import access_control -from qinling import config as q_config -from qinling import context as ctx -from qinling.db import api as db_api -from qinling.services import periodics - -LOG = logging.getLogger(__name__) - - -def get_pecan_config(): - # Set up the pecan configuration. - opts = cfg.CONF.pecan - - cfg_dict = { - "app": { - "root": opts.root, - "modules": opts.modules, - "debug": opts.debug, - "auth_enable": opts.auth_enable - } - } - - return pecan.configuration.conf_from_dict(cfg_dict) - - -def setup_app(config=None): - if not config: - config = get_pecan_config() - - q_config.set_config_defaults() - - app_conf = dict(config.app) - - db_api.setup_db() - - if cfg.CONF.api.enable_job_handler: - LOG.info('Starting periodic tasks...') - periodics.start_job_handler() - - app = pecan.make_app( - app_conf.pop('root'), - hooks=lambda: [ctx.ContextHook(), ctx.AuthHook()], - logging=getattr(config, 'logging', {}), - **app_conf - ) - - # Set up access control. - app = access_control.setup(app) - - # Create HTTPProxyToWSGI wrapper - app = http_proxy_to_wsgi_middleware.HTTPProxyToWSGI(app, cfg.CONF) - - # Create a CORS wrapper, and attach mistral-specific defaults that must be - # included in all CORS responses. 
- return cors_middleware.CORS(app, cfg.CONF) - - -def init_wsgi(): - # By default, oslo.config parses the CLI args if no args is provided. - # As a result, invoking this wsgi script from gunicorn leads to the error - # with argparse complaining that the CLI options have already been parsed. - q_config.parse_args(args=[]) - - return setup_app() diff --git a/qinling/api/controllers/__init__.py b/qinling/api/controllers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/api/controllers/root.py b/qinling/api/controllers/root.py deleted file mode 100644 index 4cd9da96..00000000 --- a/qinling/api/controllers/root.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2013 - Mirantis, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_log import log as logging -import pecan -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import root as v1_root - -LOG = logging.getLogger(__name__) - -API_STATUS = wtypes.Enum(str, 'SUPPORTED', 'CURRENT', 'DEPRECATED') - - -class APIVersion(resources.Resource): - """An API Version.""" - - id = wtypes.text - "The version identifier." - - status = API_STATUS - "The status of the API (SUPPORTED, CURRENT or DEPRECATED)." - - links = wtypes.ArrayType(resources.Link) - "The link to the versioned API." 
- - @classmethod - def sample(cls): - return cls( - id='v1.0', - status='CURRENT', - links=[ - resources.Link(target_name='v1', rel="self", - href='http://example.com:7070/v1') - ] - ) - - -class APIVersions(resources.Resource): - """API Versions.""" - versions = wtypes.ArrayType(APIVersion) - - @classmethod - def sample(cls): - v1 = APIVersion(id='v1.0', status='CURRENT', rel="self", - href='http://example.com:7070/v1') - return cls(versions=[v1]) - - -class RootController(object): - v1 = v1_root.Controller() - - @wsme_pecan.wsexpose(APIVersions) - def index(self): - LOG.info("Fetching API versions.") - - host_url_v1 = '%s/%s' % (pecan.request.application_url, 'v1') - api_v1 = APIVersion( - id='v1.0', - status='CURRENT', - links=[resources.Link(href=host_url_v1, target='v1', - rel="self", )] - ) - - return APIVersions(versions=[api_v1]) diff --git a/qinling/api/controllers/v1/__init__.py b/qinling/api/controllers/v1/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/api/controllers/v1/execution.py b/qinling/api/controllers/v1/execution.py deleted file mode 100644 index 644a328e..00000000 --- a/qinling/api/controllers/v1/execution.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from oslo_log import log as logging -import pecan -from pecan import rest -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import rpc -from qinling.utils import executions -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) - - -class ExecutionLogController(rest.RestController): - @rest_utils.wrap_pecan_controller_exception - @pecan.expose(content_type='text/plain') - def get_all(self, execution_id): - LOG.info("Get logs for execution %s.", execution_id) - execution_db = db_api.get_execution(execution_id) - - return execution_db.logs - - -class ExecutionsController(rest.RestController): - log = ExecutionLogController() - - def __init__(self, *args, **kwargs): - self.engine_client = rpc.get_engine_client() - self.type = 'execution' - - super(ExecutionsController, self).__init__(*args, **kwargs) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Execution, - body=resources.Execution, - status_code=201 - ) - def post(self, body): - ctx = context.get_ctx() - acl.enforce('execution:create', ctx) - - params = body.to_dict() - if not (params.get("function_id") or params.get("function_alias")): - raise exc.InputException( - 'Either function_alias or function_id must be provided.' - ) - - LOG.info("Creating %s. 
[params=%s]", self.type, params) - - db_model = executions.create_execution(self.engine_client, params) - - return resources.Execution.from_db_obj(db_model) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Executions, wtypes.text, bool, wtypes.text, - wtypes.text, wtypes.text) - def get_all(self, function_id=None, all_projects=False, project_id=None, - status=None, description=None): - """Return a list of executions. - - :param function_id: Optional. Filtering executions by function_id. - :param project_id: Optional. Admin user can query other projects - resources, the param is ignored for normal user. - :param all_projects: Optional. Get resources of all projects. - :param status: Optional. Filter by execution status. - :param description: Optional. Filter by description. - """ - project_id, all_projects = rest_utils.get_project_params( - project_id, all_projects - ) - if all_projects: - acl.enforce('execution:get_all:all_projects', context.get_ctx()) - - filters = rest_utils.get_filters( - function_id=function_id, - project_id=project_id, - status=status, - description=description - ) - LOG.info("Get all %ss. 
filters=%s", self.type, filters) - - db_execs = db_api.get_executions(insecure=all_projects, **filters) - executions = [resources.Execution.from_db_obj(db_model) - for db_model in db_execs] - - return resources.Executions(executions=executions) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Execution, types.uuid) - def get(self, id): - LOG.info("Get resource.", resource={'type': self.type, 'id': id}) - - execution_db = db_api.get_execution(id) - - return resources.Execution.from_db_obj(execution_db) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=204) - def delete(self, id): - """Delete the specified Execution.""" - LOG.info("Delete resource.", resource={'type': self.type, 'id': id}) - - return db_api.delete_execution(id) diff --git a/qinling/api/controllers/v1/function.py b/qinling/api/controllers/v1/function.py deleted file mode 100644 index d88a882d..00000000 --- a/qinling/api/controllers/v1/function.py +++ /dev/null @@ -1,546 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import collections -import json - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import strutils -import pecan -from pecan import rest -from webob.static import FileIter -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import function_version -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import rpc -from qinling.storage import base as storage_base -from qinling.utils import common -from qinling.utils import constants -from qinling.utils import etcd_util -from qinling.utils.openstack import keystone as keystone_util -from qinling.utils.openstack import swift as swift_util -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - -POST_REQUIRED = set(['code']) -CODE_SOURCE = set(['package', 'swift', 'image']) -UPDATE_ALLOWED = set(['name', 'description', 'code', 'package', 'entry', - 'cpu', 'memory_size', 'timeout']) - - -class FunctionWorkerController(rest.RestController): - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.FunctionWorkers, types.uuid, int) - def get_all(self, function_id, function_version=0): - acl.enforce('function_worker:get_all', context.get_ctx()) - - LOG.info("Getting workers for function %s(version %s).", function_id, - function_version) - - workers = etcd_util.get_workers(function_id, version=function_version) - workers = [ - resources.FunctionWorker.from_dict( - { - 'function_id': function_id, - 'function_version': function_version, - 'worker_name': w - } - ) for w in workers - ] - - return resources.FunctionWorkers(workers=workers) - - -class FunctionsController(rest.RestController): - workers = FunctionWorkerController() - versions = 
function_version.FunctionVersionsController() - - _custom_actions = { - 'scale_up': ['POST'], - 'scale_down': ['POST'], - 'detach': ['POST'], - } - - def __init__(self, *args, **kwargs): - self.storage_provider = storage_base.load_storage_provider(CONF) - self.engine_client = rpc.get_engine_client() - - super(FunctionsController, self).__init__(*args, **kwargs) - - def _check_swift(self, container, object): - # Auth needs to be enabled because qinling needs to check swift - # object using user's credential. - if not CONF.pecan.auth_enable: - raise exc.InputException('Swift object not supported.') - - if not swift_util.check_object(container, object): - raise exc.InputException('Failed to validate object in Swift.') - - @rest_utils.wrap_pecan_controller_exception - @pecan.expose(content_type='application/zip') - @pecan.expose('json') - def get(self, id): - """Get function information or download function package. - - This method can support HTTP request using either - 'Accept:application/json' or no 'Accept' header. - """ - ctx = context.get_ctx() - acl.enforce('function:get', ctx) - - download = strutils.bool_from_string( - pecan.request.GET.get('download', False) - ) - func_db = db_api.get_function(id) - - if not download: - LOG.info("Getting function %s.", id) - pecan.override_template('json') - return resources.Function.from_db_obj(func_db).to_dict() - - LOG.info("Downloading function %s", id) - source = func_db.code['source'] - - if source == constants.PACKAGE_FUNCTION: - f = self.storage_provider.retrieve(func_db.project_id, id, - func_db.code['md5sum']) - elif source == constants.SWIFT_FUNCTION: - container = func_db.code['swift']['container'] - obj = func_db.code['swift']['object'] - f = swift_util.download_object(container, obj) - else: - msg = 'Download image function is not allowed.' 
- pecan.abort( - status_code=405, - detail=msg, - headers={'Server-Error-Message': msg} - ) - - pecan.response.app_iter = (f if isinstance(f, collections.Iterable) - else FileIter(f)) - pecan.response.headers['Content-Disposition'] = ( - 'attachment; filename="%s"' % id - ) - - @rest_utils.wrap_pecan_controller_exception - @pecan.expose('json') - def post(self, **kwargs): - # When using image to create function, runtime_id is not a required - # param. - if not POST_REQUIRED.issubset(set(kwargs.keys())): - raise exc.InputException( - 'Required param is missing. Required: %s' % POST_REQUIRED - ) - LOG.info("Creating function, params: %s", kwargs) - - values = { - 'name': kwargs.get('name'), - 'description': kwargs.get('description'), - 'runtime_id': kwargs.get('runtime_id'), - 'code': json.loads(kwargs['code']), - 'entry': kwargs.get('entry', 'main.main'), - 'cpu': kwargs.get('cpu', CONF.resource_limits.default_cpu), - 'memory_size': kwargs.get( - 'memory_size', CONF.resource_limits.default_memory - ), - 'timeout': kwargs.get( - 'timeout', CONF.resource_limits.default_timeout - ), - } - - common.validate_int_in_range( - 'timeout', values['timeout'], CONF.resource_limits.min_timeout, - CONF.resource_limits.max_timeout - ) - common.validate_int_in_range( - 'cpu', values['cpu'], CONF.resource_limits.min_cpu, - CONF.resource_limits.max_cpu - ) - common.validate_int_in_range( - 'memory', values['memory_size'], CONF.resource_limits.min_memory, - CONF.resource_limits.max_memory - ) - - source = values['code'].get('source') - if not source or source not in CODE_SOURCE: - raise exc.InputException( - 'Invalid code source specified, available sources: %s' % - ', '.join(CODE_SOURCE) - ) - - if source != constants.IMAGE_FUNCTION: - if not kwargs.get('runtime_id'): - raise exc.InputException('"runtime_id" must be specified.') - - runtime = db_api.get_runtime(kwargs['runtime_id']) - if runtime.status != 'available': - raise exc.InputException( - 'Runtime %s is not available.' 
% kwargs['runtime_id'] - ) - - store = False - create_trust = True - if source == constants.PACKAGE_FUNCTION: - store = True - md5sum = values['code'].get('md5sum') - data = kwargs['package'].file.read() - elif source == constants.SWIFT_FUNCTION: - swift_info = values['code'].get('swift', {}) - - if not (swift_info.get('container') and swift_info.get('object')): - raise exc.InputException("Both container and object must be " - "provided for swift type function.") - - self._check_swift( - swift_info.get('container'), - swift_info.get('object') - ) - else: - create_trust = False - values['entry'] = None - - if cfg.CONF.pecan.auth_enable and create_trust: - try: - values['trust_id'] = keystone_util.create_trust().id - LOG.debug('Trust %s created', values['trust_id']) - except Exception: - raise exc.TrustFailedException( - 'Trust creation failed for function.' - ) - - # Create function and store the package data inside a db transaction so - # that the function won't be created if any error happened during - # package store. - with db_api.transaction(): - func_db = db_api.create_function(values) - if store: - try: - ctx = context.get_ctx() - _, actual_md5 = self.storage_provider.store( - ctx.projectid, func_db.id, data, md5sum=md5sum - ) - values['code'].update({"md5sum": actual_md5}) - func_db = db_api.update_function(func_db.id, values) - except Exception as e: - LOG.exception("Failed to store function package.") - keystone_util.delete_trust(values['trust_id']) - raise e - - pecan.response.status = 201 - return resources.Function.from_db_obj(func_db).to_dict() - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Functions, bool, wtypes.text) - def get_all(self, all_projects=False, project_id=None): - """Return a list of functions. - - :param project_id: Optional. Admin user can query other projects - resources, the param is ignored for normal user. - :param all_projects: Optional. Get resources of all projects. 
- """ - project_id, all_projects = rest_utils.get_project_params( - project_id, all_projects - ) - if all_projects: - acl.enforce('function:get_all:all_projects', context.get_ctx()) - - filters = rest_utils.get_filters( - project_id=project_id, - ) - LOG.info("Get all functions. filters=%s", filters) - db_functions = db_api.get_functions(insecure=all_projects, **filters) - functions = [resources.Function.from_db_obj(db_model) - for db_model in db_functions] - - return resources.Functions(functions=functions) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=204) - def delete(self, id): - """Delete the specified function. - - Delete function will also delete all its versions. - """ - LOG.info("Delete function %s.", id) - - with db_api.transaction(): - func_db = db_api.get_function(id) - if len(func_db.jobs) > 0: - raise exc.NotAllowedException( - 'The function is still associated with running job(s).' - ) - if len(func_db.webhooks) > 0: - raise exc.NotAllowedException( - 'The function is still associated with webhook(s).' - ) - if len(func_db.aliases) > 0: - raise exc.NotAllowedException( - 'The function is still associated with function alias(es).' - ) - - # Even admin user can not delete other project's function because - # the trust associated can only be removed by function owner. - if func_db.project_id != context.get_ctx().projectid: - raise exc.NotAllowedException( - 'Function can only be deleted by its owner.' - ) - - # Delete trust if needed - if func_db.trust_id: - keystone_util.delete_trust(func_db.trust_id) - - for version_db in func_db.versions: - # Delete all resources created by orchestrator asynchronously. - self.engine_client.delete_function( - id, - version=version_db.version_number - ) - # Delete etcd keys - etcd_util.delete_function( - id, - version=version_db.version_number - ) - # Delete function version packages. Versions is only supported - # for package type function. 
- self.storage_provider.delete( - func_db.project_id, - id, - None, - version=version_db.version_number - ) - - # Delete resources for function version 0(func_db.versions==[]) - self.engine_client.delete_function(id) - etcd_util.delete_function(id) - - source = func_db.code['source'] - if source == constants.PACKAGE_FUNCTION: - self.storage_provider.delete(func_db.project_id, id, - func_db.code['md5sum']) - - # This will also delete function service mapping and function - # versions as well. - db_api.delete_function(id) - - @rest_utils.wrap_pecan_controller_exception - @pecan.expose('json') - def put(self, id, **kwargs): - """Update function. - - - Function can not being used by job. - - Function can not being executed. - - (TODO)Function status should be changed so no execution will create - when function is updating. - """ - values = {} - - try: - for key in UPDATE_ALLOWED: - if kwargs.get(key) is not None: - if key == "code": - kwargs[key] = json.loads(kwargs[key]) - values.update({key: kwargs[key]}) - except Exception as e: - raise exc.InputException("Invalid input, %s" % str(e)) - - LOG.info('Update function %s, params: %s', id, values) - ctx = context.get_ctx() - - if values.get('timeout'): - common.validate_int_in_range( - 'timeout', values['timeout'], CONF.resource_limits.min_timeout, - CONF.resource_limits.max_timeout - ) - - db_update_only = set(['name', 'description', 'timeout']) - if set(values.keys()).issubset(db_update_only): - func_db = db_api.update_function(id, values) - else: - source = values.get('code', {}).get('source') - md5sum = values.get('code', {}).get('md5sum') - cpu = values.get('cpu') - memory_size = values.get('memory_size') - - # Check cpu and memory_size values when updating. 
- if cpu is not None: - common.validate_int_in_range( - 'cpu', values['cpu'], CONF.resource_limits.min_cpu, - CONF.resource_limits.max_cpu - ) - if memory_size is not None: - common.validate_int_in_range( - 'memory', values['memory_size'], - CONF.resource_limits.min_memory, - CONF.resource_limits.max_memory - ) - - with db_api.transaction(): - pre_func = db_api.get_function(id) - - if len(pre_func.jobs) > 0: - raise exc.NotAllowedException( - 'The function is still associated with running job(s).' - ) - - pre_source = pre_func.code['source'] - pre_md5sum = pre_func.code.get('md5sum') - - if source and source != pre_source: - raise exc.InputException( - "The function code type can not be changed." - ) - - if pre_source == constants.IMAGE_FUNCTION: - raise exc.InputException( - "The image type function code can not be changed." - ) - - # Package type function. 'code' and 'entry' make sense only if - # 'package' is provided - package_updated = False - if (pre_source == constants.PACKAGE_FUNCTION and - values.get('package') is not None): - if md5sum and md5sum == pre_md5sum: - raise exc.InputException( - "The function code checksum is not changed." - ) - - # Update the package data. - data = values['package'].file.read() - package_updated, md5sum = self.storage_provider.store( - ctx.projectid, - id, - data, - md5sum=md5sum - ) - values.setdefault('code', {}).update( - {"md5sum": md5sum, "source": pre_source} - ) - values.pop('package') - - # Swift type function - if (pre_source == constants.SWIFT_FUNCTION and - "swift" in values.get('code', {})): - swift_info = values['code']["swift"] - - if not (swift_info.get('container') or - swift_info.get('object')): - raise exc.InputException( - "Either container or object must be provided for " - "swift type function update." 
- ) - - new_swift_info = pre_func.code['swift'] - new_swift_info.update(swift_info) - - self._check_swift( - new_swift_info.get('container'), - new_swift_info.get('object') - ) - - values['code'] = { - "source": pre_source, - "swift": new_swift_info - } - - # Delete allocated resources in orchestrator and etcd. - self.engine_client.delete_function(id) - etcd_util.delete_function(id) - - func_db = db_api.update_function(id, values) - - # Delete the old function package if needed - if package_updated: - self.storage_provider.delete(ctx.projectid, id, pre_md5sum) - - pecan.response.status = 200 - return resources.Function.from_db_obj(func_db).to_dict() - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - None, - types.uuid, - body=resources.ScaleInfo, - status_code=202 - ) - def scale_up(self, id, scale): - """Scale up the containers for function execution. - - This is admin only operation. The load monitoring of function execution - depends on the monitoring solution of underlying orchestrator. - """ - acl.enforce('function:scale_up', context.get_ctx()) - - func_db = db_api.get_function(id) - params = scale.to_dict() - - LOG.info('Starting to scale up function %s, params: %s', id, params) - - self.engine_client.scaleup_function( - id, - runtime_id=func_db.runtime_id, - count=params['count'] - ) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - None, - types.uuid, - body=resources.ScaleInfo, - status_code=202 - ) - def scale_down(self, id, scale): - """Scale down the containers for function execution. - - This is admin only operation. The load monitoring of function execution - depends on the monitoring solution of underlying orchestrator. 
- """ - acl.enforce('function:scale_down', context.get_ctx()) - - db_api.get_function(id) - workers = etcd_util.get_workers(id) - params = scale.to_dict() - if len(workers) <= 1: - LOG.info('No need to scale down function %s', id) - return - - LOG.info('Starting to scale down function %s, params: %s', id, params) - self.engine_client.scaledown_function(id, count=params['count']) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=202) - def detach(self, id): - """Detach the function from its underlying workers. - - This is admin only operation, which gives admin user a safe way to - clean up the underlying resources allocated for the function. - """ - acl.enforce('function:detach', context.get_ctx()) - - db_api.get_function(id) - LOG.info('Starting to detach function %s', id) - - # Delete allocated resources in orchestrator and etcd keys. - self.engine_client.delete_function(id) - etcd_util.delete_function(id) diff --git a/qinling/api/controllers/v1/function_alias.py b/qinling/api/controllers/v1/function_alias.py deleted file mode 100644 index d0915110..00000000 --- a/qinling/api/controllers/v1/function_alias.py +++ /dev/null @@ -1,156 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from oslo_config import cfg -from oslo_log import log as logging -from pecan import rest -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - -POST_REQUIRED = set(['name', 'function_id']) -UPDATE_ALLOWED = set(['function_id', 'function_version', 'description']) - - -class FunctionAliasesController(rest.RestController): - def __init__(self, *args, **kwargs): - self.type = 'function_alias' - - super(FunctionAliasesController, self).__init__(*args, **kwargs) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.FunctionAlias, - body=resources.FunctionAlias, - status_code=201 - ) - def post(self, body): - """Create a new alias for the specified function. - - The supported body params: - - function_id: Required. Function id the alias points to. - - name: Required. Alias name, must be unique within the project. - - function_version: Optional. Version number the alias points to. - - description: Optional. The description of the new alias. - """ - ctx = context.get_ctx() - acl.enforce('function_alias:create', ctx) - - params = body.to_dict() - if not POST_REQUIRED.issubset(set(params.keys())): - raise exc.InputException( - 'Required param is missing. 
Required: %s' % POST_REQUIRED - ) - LOG.info("Creating Alias, params: %s", params) - - values = { - 'function_id': params.get('function_id'), - 'name': params.get('name'), - 'function_version': params.get('function_version'), - 'description': params.get('description'), - } - - alias = db_api.create_function_alias(**values) - - LOG.info("New alias created.") - return resources.FunctionAlias.from_db_obj(alias) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.FunctionAlias, wtypes.text) - def get(self, alias_name): - acl.enforce('function_alias:get', context.get_ctx()) - LOG.info("Getting function alias %s.", alias_name) - - alias = db_api.get_function_alias(alias_name) - - return resources.FunctionAlias.from_db_obj(alias) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.FunctionAliases, bool, wtypes.text) - def get_all(self, all_projects=False, project_id=None): - """Get all the function aliases. - - :param project_id: Optional. Admin user can query other projects - resources, the param is ignored for normal user. - :param all_projects: Optional. Get resources of all projects. - """ - ctx = context.get_ctx() - project_id, all_projects = rest_utils.get_project_params( - project_id, all_projects - ) - if all_projects: - acl.enforce('function_version:get_all:all_projects', ctx) - - filters = rest_utils.get_filters(project_id=project_id) - - LOG.info("Get all function aliases. filters=%s", filters) - - db_aliases = db_api.get_function_aliases( - insecure=all_projects, **filters) - aliases = [resources.FunctionAlias.from_db_obj(db_model) - for db_model in db_aliases] - - return resources.FunctionAliases(function_aliases=aliases) - - @wsme_pecan.wsexpose(None, wtypes.text, status_code=204) - def delete(self, alias_name): - """Delete a specific alias. 
- - """ - ctx = context.get_ctx() - acl.enforce('function_alias:delete', ctx) - LOG.info("Deleting alias %s.", alias_name) - - db_api.delete_function_alias(alias_name) - - LOG.info("Alias %s deleted.", alias_name) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.FunctionAlias, - wtypes.text, - body=resources.FunctionAlias, - ) - def put(self, alias_name, body): - """Update alias for the specified function. - - The supported body params: - - function_id: Optional. Function id the alias point to. - - function_version: Optional. Version number the alias point to. - - description: Optional. The description of the alias. - """ - ctx = context.get_ctx() - acl.enforce('function_alias:update', ctx) - - params = body.to_dict() - values = {} - for key in UPDATE_ALLOWED: - if params.get(key) is not None: - values.update({key: params[key]}) - LOG.info("Updating Alias %s, params: %s", alias_name, values) - - alias = db_api.update_function_alias(alias_name, **values) - - LOG.info("Alias %s updated.", alias_name) - return resources.FunctionAlias.from_db_obj(alias) diff --git a/qinling/api/controllers/v1/function_version.py b/qinling/api/controllers/v1/function_version.py deleted file mode 100644 index a4077366..00000000 --- a/qinling/api/controllers/v1/function_version.py +++ /dev/null @@ -1,361 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import collections - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import strutils -import pecan -from pecan import rest -import tenacity -from webob.static import FileIter -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import rpc -from qinling.storage import base as storage_base -from qinling.utils import constants -from qinling.utils import etcd_util -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -class FunctionVersionsController(rest.RestController): - _custom_actions = { - 'scale_up': ['POST'], - 'scale_down': ['POST'], - 'detach': ['POST'], - } - - def __init__(self, *args, **kwargs): - self.type = 'function_version' - self.storage_provider = storage_base.load_storage_provider(CONF) - self.engine_client = rpc.get_engine_client() - - super(FunctionVersionsController, self).__init__(*args, **kwargs) - - @tenacity.retry( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(30), - reraise=True, - retry=tenacity.retry_if_exception_type(exc.EtcdLockException) - ) - def _create_function_version(self, project_id, function_id, **kwargs): - with etcd_util.get_function_version_lock(function_id) as lock: - if not lock.is_acquired(): - raise exc.EtcdLockException( - "Etcd: failed to acquire version lock for function %s." % - function_id - ) - - with db_api.transaction(): - # Get latest function package md5 and version number - func_db = db_api.get_function(function_id, insecure=False) - if func_db.code['source'] != constants.PACKAGE_FUNCTION: - raise exc.NotAllowedException( - "Function versioning only allowed for %s type " - "function." 
% - constants.PACKAGE_FUNCTION - ) - - l_md5 = func_db.code['md5sum'] - l_version = func_db.latest_version - - if len(func_db.versions) >= constants.MAX_VERSION_NUMBER: - raise exc.NotAllowedException( - 'Can not exceed maximum number(%s) of versions' % - constants.MAX_VERSION_NUMBER - ) - - # Check if the latest package changed since last version - changed = self.storage_provider.changed_since(project_id, - function_id, - l_md5, - l_version) - if not changed: - raise exc.NotAllowedException( - 'Function package not changed since the latest ' - 'version %s.' % l_version - ) - - LOG.info("Creating %s, function_id: %s, old_version: %d", - self.type, function_id, l_version) - - # Create new version and copy package. - self.storage_provider.copy(project_id, function_id, l_md5, - l_version) - version = db_api.increase_function_version(function_id, - l_version, - **kwargs) - func_db.latest_version = l_version + 1 - - LOG.info("New version %d for function %s created.", l_version + 1, - function_id) - return version - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.FunctionVersion, - types.uuid, - body=resources.FunctionVersion, - status_code=201 - ) - def post(self, function_id, body): - """Create a new version for the function. - - Only allow to create version for package type function. - - The supported boy params: - - description: Optional. The description of the new version. - """ - ctx = context.get_ctx() - acl.enforce('function_version:create', ctx) - - params = body.to_dict() - values = { - 'description': params.get('description'), - } - - # Try to create a new function version within lock and db transaction - try: - version = self._create_function_version( - ctx.project_id, function_id, **values - ) - except exc.EtcdLockException as e: - LOG.exception(str(e)) - # Reraise a generic exception as the end users should not know - # the underlying details. 
- raise exc.QinlingException('Internal server error.') - - return resources.FunctionVersion.from_db_obj(version) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.FunctionVersions, types.uuid) - def get_all(self, function_id): - """Get all the versions of the given function. - - Admin user can get all versions for the normal user's function. - """ - acl.enforce('function_version:get_all', context.get_ctx()) - LOG.info("Getting versions for function %s.", function_id) - - # Getting function and versions needs to happen in a db transaction - with db_api.transaction(): - func_db = db_api.get_function(function_id) - db_versions = func_db.versions - - versions = [resources.FunctionVersion.from_db_obj(v) - for v in db_versions] - - return resources.FunctionVersions(function_versions=versions) - - @rest_utils.wrap_pecan_controller_exception - @pecan.expose() - @pecan.expose('json') - def get(self, function_id, version): - """Get function version or download function version package. - - This method can support HTTP request using either - 'Accept:application/json' or no 'Accept' header. 
- """ - ctx = context.get_ctx() - acl.enforce('function_version:get', ctx) - - download = strutils.bool_from_string( - pecan.request.GET.get('download', False) - ) - version = int(version) - - version_db = db_api.get_function_version(function_id, version) - - if not download: - LOG.info("Getting version %s for function %s.", version, - function_id) - pecan.override_template('json') - return resources.FunctionVersion.from_db_obj(version_db).to_dict() - - LOG.info("Downloading version %s for function %s.", version, - function_id) - - f = self.storage_provider.retrieve(version_db.project_id, function_id, - None, version=version) - - if isinstance(f, collections.Iterable): - pecan.response.app_iter = f - else: - pecan.response.app_iter = FileIter(f) - pecan.response.headers['Content-Type'] = 'application/zip' - pecan.response.headers['Content-Disposition'] = ( - 'attachment; filename="%s_%s"' % (function_id, version) - ) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, int, status_code=204) - def delete(self, function_id, version): - """Delete a specific function version. - - - The version should not being used by any job - - The version should not being used by any webhook - - Admin user can not delete normal user's version - """ - ctx = context.get_ctx() - acl.enforce('function_version:delete', ctx) - LOG.info("Deleting version %s of function %s.", version, function_id) - - with db_api.transaction(): - version_db = db_api.get_function_version(function_id, version, - insecure=False) - latest_version = version_db.function.latest_version - - version_jobs = db_api.get_jobs( - function_id=version_db.function_id, - function_version=version_db.version_number, - status={'nin': ['done', 'cancelled']} - ) - if len(version_jobs) > 0: - raise exc.NotAllowedException( - 'The function version is still associated with running ' - 'job(s).' 
- ) - - version_webhook = db_api.get_webhooks( - function_id=version_db.function_id, - function_version=version_db.version_number, - ) - if len(version_webhook) > 0: - raise exc.NotAllowedException( - 'The function version is still associated with webhook.' - ) - - filters = rest_utils.get_filters( - function_id=version_db.function_id, - function_version=version_db.version_number - ) - version_aliases = db_api.get_function_aliases(**filters) - if len(version_aliases) > 0: - raise exc.NotAllowedException( - 'The function version is still associated with alias.' - ) - - # Delete resources for function version - self.engine_client.delete_function(function_id, version=version) - etcd_util.delete_function(function_id, version=version) - - self.storage_provider.delete(ctx.projectid, function_id, None, - version=version) - - db_api.delete_function_version(function_id, version) - - if latest_version == version: - version_db.function.latest_version = latest_version - 1 - - LOG.info("Version %s of function %s deleted.", version, function_id) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - None, - types.uuid, - int, - body=resources.ScaleInfo, - status_code=202 - ) - def scale_up(self, function_id, version, scale): - """Scale up the workers for function version execution. - - This is admin only operation. The load monitoring of execution - depends on the monitoring solution of underlying orchestrator. 
- """ - acl.enforce('function_version:scale_up', context.get_ctx()) - - func_db = db_api.get_function(function_id) - - # If version=0, it's equivalent to /functions//scale_up - if version > 0: - db_api.get_function_version(function_id, version) - - params = scale.to_dict() - - LOG.info('Starting to scale up function %s(version %s), params: %s', - function_id, version, params) - - self.engine_client.scaleup_function( - function_id, - runtime_id=func_db.runtime_id, - version=version, - count=params['count'] - ) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - None, - types.uuid, - int, - body=resources.ScaleInfo, - status_code=202 - ) - def scale_down(self, function_id, version, scale): - """Scale down the workers for function version execution. - - This is admin only operation. The load monitoring of execution - depends on the monitoring solution of underlying orchestrator. - """ - acl.enforce('function_version:scale_down', context.get_ctx()) - - db_api.get_function(function_id) - params = scale.to_dict() - - # If version=0, it's equivalent to /functions//scale_down - if version > 0: - db_api.get_function_version(function_id, version) - - workers = etcd_util.get_workers(function_id, version=version) - if len(workers) <= 1: - LOG.info('No need to scale down function %s(version %s)', - function_id, version) - return - - LOG.info('Starting to scale down function %s(version %s), params: %s', - function_id, version, params) - self.engine_client.scaledown_function(function_id, version=version, - count=params['count']) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, int, status_code=202) - def detach(self, function_id, version): - """Detach the function version from its underlying workers. - - This is admin only operation, which gives admin user a safe way to - clean up the underlying resources allocated for the function version. 
- """ - acl.enforce('function_version:detach', context.get_ctx()) - - db_api.get_function(function_id) - # If version=0, it's equivalent to /functions//detach - if version > 0: - db_api.get_function_version(function_id, version) - - LOG.info('Starting to detach function %s(version %s)', function_id, - version) - - # Delete allocated resources in orchestrator and etcd keys. - self.engine_client.delete_function(function_id, version=version) - etcd_util.delete_function(function_id, version=version) diff --git a/qinling/api/controllers/v1/job.py b/qinling/api/controllers/v1/job.py deleted file mode 100644 index 09acae7f..00000000 --- a/qinling/api/controllers/v1/job.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime - -import croniter -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import timeutils -from pecan import rest -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import status -from qinling.utils import jobs -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - -UPDATE_ALLOWED = set(['name', 'function_input', 'status', 'pattern', - 'next_execution_time']) - - -class JobsController(rest.RestController): - type = 'job' - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Job, - body=resources.Job, - status_code=201 - ) - def post(self, job): - """Creates a new job.""" - params = job.to_dict() - if not (params.get("function_id") or params.get("function_alias")): - raise exc.InputException( - 'Either function_alias or function_id must be provided.' - ) - - LOG.info("Creating %s, params: %s", self.type, params) - - # Check the input params. - first_time, next_time, count = jobs.validate_job(params) - - version = params.get('function_version', 0) - function_alias = params.get('function_alias') - - if function_alias: - # Check if the alias exists. - db_api.get_function_alias(function_alias) - else: - # Check the function(version) exists. - db_api.get_function(params['function_id']) - if version > 0: - # Check if the version exists. 
- db_api.get_function_version(params['function_id'], version) - - values = { - 'name': params.get('name'), - 'pattern': params.get('pattern'), - 'first_execution_time': first_time, - 'next_execution_time': next_time, - 'count': count, - 'function_alias': function_alias, - 'function_id': params.get("function_id"), - 'function_version': version, - 'function_input': params.get('function_input'), - 'status': status.RUNNING - } - db_job = db_api.create_job(values) - - return resources.Job.from_db_obj(db_job) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=204) - def delete(self, id): - LOG.info("Delete resource.", resource={'type': self.type, 'id': id}) - return db_api.delete_job(id) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Job, types.uuid) - def get(self, id): - LOG.info("Get resource.", resource={'type': self.type, 'id': id}) - job_db = db_api.get_job(id) - - return resources.Job.from_db_obj(job_db) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Jobs, bool, wtypes.text) - def get_all(self, all_projects=False, project_id=None): - project_id, all_projects = rest_utils.get_project_params( - project_id, all_projects - ) - if all_projects: - acl.enforce('job:get_all:all_projects', context.get_ctx()) - - filters = rest_utils.get_filters( - project_id=project_id, - ) - LOG.info("Get all %ss. filters=%s", self.type, filters) - db_jobs = db_api.get_jobs(insecure=all_projects, **filters) - jobs = [resources.Job.from_db_obj(db_model) - for db_model in db_jobs] - - return resources.Jobs(jobs=jobs) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Job, - types.uuid, - body=resources.Job - ) - def put(self, id, job): - """Update job definition. - - 1. Can not update a finished job. - 2. Can not change job type. - 3. Allow to pause a one-shot job and resume before its first execution - time. 
- """ - values = {} - for key in UPDATE_ALLOWED: - if job.to_dict().get(key) is not None: - values.update({key: job.to_dict()[key]}) - - LOG.info('Update resource, params: %s', values, - resource={'type': self.type, 'id': id}) - - new_status = values.get('status') - pattern = values.get('pattern') - next_execution_time = values.get('next_execution_time') - - job_db = db_api.get_job(id) - - if job_db.status in [status.DONE, status.CANCELLED]: - raise exc.InputException('Can not update a finished job.') - - if pattern: - if not job_db.pattern: - raise exc.InputException('Can not change job type.') - jobs.validate_pattern(pattern) - elif pattern == '' and job_db.pattern: - raise exc.InputException('Can not change job type.') - - valid_states = [status.RUNNING, status.CANCELLED, status.PAUSED] - if new_status and new_status not in valid_states: - raise exc.InputException('Invalid status.') - - if next_execution_time: - values['next_execution_time'] = jobs.validate_next_time( - next_execution_time - ) - elif (job_db.status == status.PAUSED and - new_status == status.RUNNING): - p = job_db.pattern or pattern - - if not p: - # Check if the next execution time for one-shot job is still - # valid. - jobs.validate_next_time(job_db.next_execution_time) - else: - # Update next_execution_time for recurring job. - values['next_execution_time'] = croniter.croniter( - p, timeutils.utcnow() - ).get_next(datetime.datetime) - - updated_job = db_api.update_job(id, values) - return resources.Job.from_db_obj(updated_job) diff --git a/qinling/api/controllers/v1/resources.py b/qinling/api/controllers/v1/resources.py deleted file mode 100644 index 0ac17ee3..00000000 --- a/qinling/api/controllers/v1/resources.py +++ /dev/null @@ -1,362 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json - -import wsme -from wsme import types as wtypes - -from qinling.api.controllers.v1 import types - - -class Resource(wtypes.Base): - """REST API Resource.""" - - _wsme_attributes = [] - - def to_dict(self): - d = {} - - for attr in self._wsme_attributes: - attr_val = getattr(self, attr.name) - if not isinstance(attr_val, wtypes.UnsetType): - d[attr.name] = attr_val - - return d - - @classmethod - def from_dict(cls, d): - obj = cls() - - for key, val in d.items(): - if hasattr(obj, key): - setattr(obj, key, val) - - return obj - - @classmethod - def from_db_obj(cls, db_obj): - return cls.from_dict(db_obj.to_dict()) - - def __str__(self): - """WSME based implementation of __str__.""" - - res = "%s [" % type(self).__name__ - - first = True - for attr in self._wsme_attributes: - if not first: - res += ', ' - else: - first = False - - res += "%s='%s'" % (attr.name, getattr(self, attr.name)) - - return res + "]" - - def to_json(self): - return json.dumps(self.to_dict()) - - @classmethod - def get_fields(cls): - obj = cls() - - return [attr.name for attr in obj._wsme_attributes] - - -class ResourceList(Resource): - """Resource containing the list of other resources.""" - - next = wtypes.text - """A link to retrieve the next subset of the resource list""" - - @property - def collection(self): - return getattr(self, self._type) - - @classmethod - def convert_with_links(cls, resources, limit, url=None, fields=None, - **kwargs): - resource_collection = cls() - - setattr(resource_collection, resource_collection._type, resources) - - 
resource_collection.next = resource_collection.get_next( - limit, - url=url, - fields=fields, - **kwargs - ) - - return resource_collection - - def has_next(self, limit): - """Return whether resources has more items.""" - return len(self.collection) and len(self.collection) == limit - - def get_next(self, limit, url=None, fields=None, **kwargs): - """Return a link to the next subset of the resources.""" - if not self.has_next(limit): - return wtypes.Unset - - q_args = ''.join( - ['%s=%s&' % (key, value) for key, value in kwargs.items()] - ) - - resource_args = ( - '?%(args)slimit=%(limit)d&marker=%(marker)s' % - { - 'args': q_args, - 'limit': limit, - 'marker': self.collection[-1].id - } - ) - - # Fields is handled specially here, we can move it above when it's - # supported by all resources query. - if fields: - resource_args += '&fields=%s' % fields - - next_link = "%(host_url)s/v2/%(resource)s%(args)s" % { - 'host_url': url, - 'resource': self._type, - 'args': resource_args - } - - return next_link - - def to_dict(self): - d = {} - - for attr in self._wsme_attributes: - attr_val = getattr(self, attr.name) - - if isinstance(attr_val, list): - if isinstance(attr_val[0], Resource): - d[attr.name] = [v.to_dict() for v in attr_val] - elif not isinstance(attr_val, wtypes.UnsetType): - d[attr.name] = attr_val - - return d - - -class Link(Resource): - """Web link.""" - - href = wtypes.text - target = wtypes.text - rel = wtypes.text - - -class Function(Resource): - id = wtypes.text - name = wtypes.text - description = wtypes.text - cpu = int - memory_size = int - timeout = int - runtime_id = wsme.wsattr(types.uuid, readonly=True) - code = types.jsontype - entry = wtypes.text - count = wsme.wsattr(int, readonly=True) - latest_version = wsme.wsattr(int, readonly=True) - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wtypes.text - updated_at = wtypes.text - - -class Functions(ResourceList): - functions = [Function] - - def __init__(self, **kwargs): - 
self._type = 'functions' - - super(Functions, self).__init__(**kwargs) - - -class FunctionWorker(Resource): - function_id = wsme.wsattr(types.uuid, readonly=True) - function_version = wsme.wsattr(int, readonly=True) - worker_name = wsme.wsattr(wtypes.text, readonly=True) - - -class FunctionWorkers(ResourceList): - workers = [FunctionWorker] - - def __init__(self, **kwargs): - self._type = 'workers' - super(FunctionWorkers, self).__init__(**kwargs) - - -class Runtime(Resource): - id = wtypes.text - name = wtypes.text - image = wtypes.text - description = wtypes.text - is_public = wsme.wsattr(bool, default=True) - trusted = bool - status = wsme.wsattr(wtypes.text, readonly=True) - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - - -class Runtimes(ResourceList): - runtimes = [Runtime] - - def __init__(self, **kwargs): - self._type = 'environments' - - super(Runtimes, self).__init__(**kwargs) - - -class RuntimePoolCapacity(Resource): - total = wsme.wsattr(int, readonly=True) - available = wsme.wsattr(int, readonly=True) - - -class RuntimePool(Resource): - name = wsme.wsattr(wtypes.text, readonly=True) - capacity = wsme.wsattr(RuntimePoolCapacity, readonly=True) - - -class Execution(Resource): - id = types.uuid - function_id = wsme.wsattr(types.uuid) - function_version = wsme.wsattr(int, default=0) - function_alias = wsme.wsattr(wtypes.text) - description = wtypes.text - status = wsme.wsattr(wtypes.text, readonly=True) - sync = bool - input = wtypes.text - result = wsme.wsattr(types.jsontype, readonly=True) - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - - @classmethod - def from_dict(cls, d): - obj = cls() - - for key, val in d.items(): - if key == 'input' and val is not None: - if val.get('__function_input'): - setattr(obj, key, 
val.get('__function_input')) - else: - setattr(obj, key, json.dumps(val)) - continue - if hasattr(obj, key): - setattr(obj, key, val) - - return obj - - -class Executions(ResourceList): - executions = [Execution] - - def __init__(self, **kwargs): - self._type = 'executions' - - super(Executions, self).__init__(**kwargs) - - -class Job(Resource): - id = types.uuid - name = wtypes.text - function_id = types.uuid - function_alias = wtypes.text - function_version = wsme.wsattr(int, default=0) - function_input = wtypes.text - status = wtypes.text - pattern = wtypes.text - count = int - first_execution_time = wtypes.text - next_execution_time = wtypes.text - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - - -class Jobs(ResourceList): - jobs = [Job] - - def __init__(self, **kwargs): - self._type = 'jobs' - - super(Jobs, self).__init__(**kwargs) - - -class ScaleInfo(Resource): - count = wtypes.IntegerType(minimum=1) - - -class Webhook(Resource): - id = types.uuid - function_id = types.uuid - function_alias = wtypes.text - function_version = wsme.wsattr(int) - description = wtypes.text - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - webhook_url = wsme.wsattr(wtypes.text, readonly=True) - - -class Webhooks(ResourceList): - webhooks = [Webhook] - - def __init__(self, **kwargs): - self._type = 'webhooks' - - super(Webhooks, self).__init__(**kwargs) - - -class FunctionVersion(Resource): - id = types.uuid - description = wtypes.text - function_id = wsme.wsattr(types.uuid, readonly=True) - version_number = wsme.wsattr(int, readonly=True) - count = wsme.wsattr(int, readonly=True) - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - - 
-class FunctionVersions(ResourceList): - function_versions = [FunctionVersion] - - def __init__(self, **kwargs): - self._type = 'function_versions' - - super(FunctionVersions, self).__init__(**kwargs) - - -class FunctionAlias(Resource): - id = types.uuid - name = wtypes.text - description = wtypes.text - function_id = types.uuid - function_version = wsme.wsattr(int) - project_id = wsme.wsattr(wtypes.text, readonly=True) - created_at = wsme.wsattr(wtypes.text, readonly=True) - updated_at = wsme.wsattr(wtypes.text, readonly=True) - - -class FunctionAliases(ResourceList): - function_aliases = [FunctionAlias] - - def __init__(self, **kwargs): - self._type = 'function_aliases' - - super(FunctionAliases, self).__init__(**kwargs) diff --git a/qinling/api/controllers/v1/root.py b/qinling/api/controllers/v1/root.py deleted file mode 100644 index 5708835c..00000000 --- a/qinling/api/controllers/v1/root.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import pecan -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api.controllers.v1 import execution -from qinling.api.controllers.v1 import function -from qinling.api.controllers.v1 import function_alias -from qinling.api.controllers.v1 import job -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import runtime -from qinling.api.controllers.v1 import webhook - - -class RootResource(resources.Resource): - """Root resource for API version 1. - - It references all other resources belonging to the API. - """ - uri = wtypes.text - - -class Controller(object): - """API root controller for version 1.""" - functions = function.FunctionsController() - runtimes = runtime.RuntimesController() - executions = execution.ExecutionsController() - jobs = job.JobsController() - webhooks = webhook.WebhooksController() - aliases = function_alias.FunctionAliasesController() - - @wsme_pecan.wsexpose(RootResource) - def index(self): - return RootResource(uri='%s/%s' % (pecan.request.application_url, - 'v1')) diff --git a/qinling/api/controllers/v1/runtime.py b/qinling/api/controllers/v1/runtime.py deleted file mode 100644 index d305bf92..00000000 --- a/qinling/api/controllers/v1/runtime.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from oslo_log import log as logging -from pecan import rest -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import rpc -from qinling import status -from qinling.utils import etcd_util -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) - -POST_REQUIRED = set(['image']) -UPDATE_ALLOWED = set(['name', 'description', 'image']) - - -class RuntimesController(rest.RestController): - _custom_actions = {'pool': ['GET']} - - def __init__(self, *args, **kwargs): - self.engine_client = rpc.get_engine_client() - self.type = 'runtime' - - super(RuntimesController, self).__init__(*args, **kwargs) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Runtime, types.uuid) - def get(self, id): - LOG.info("Get resource.", resource={'type': self.type, 'id': id}) - - runtime_db = db_api.get_runtime(id) - - return resources.Runtime.from_db_obj(runtime_db) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Runtimes) - def get_all(self): - LOG.info("Get all %ss.", self.type) - - runtimes = [resources.Runtime.from_db_obj(db_model) - for db_model in db_api.get_runtimes()] - - return resources.Runtimes(runtimes=runtimes) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Runtime, - body=resources.Runtime, - status_code=201 - ) - def post(self, runtime): - acl.enforce('runtime:create', context.get_ctx()) - - params = runtime.to_dict() - if 'trusted' not in params: - params['trusted'] = True - - if not POST_REQUIRED.issubset(set(params.keys())): - raise exc.InputException( - 'Required param is missing. 
Required: %s' % POST_REQUIRED - ) - - LOG.info("Creating %s, params: %s", self.type, params) - - params.update({'status': status.CREATING}) - - db_model = db_api.create_runtime(params) - self.engine_client.create_runtime(db_model.id) - - return resources.Runtime.from_db_obj(db_model) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=204) - def delete(self, id): - acl.enforce('runtime:delete', context.get_ctx()) - - LOG.info("Delete resource.", resource={'type': self.type, 'id': id}) - - with db_api.transaction(): - runtime_db = db_api.get_runtime(id) - - # Runtime can not be deleted if still associate with functions. - funcs = db_api.get_functions(insecure=True, runtime_id={'eq': id}) - if len(funcs): - raise exc.NotAllowedException( - 'Runtime %s is still in use.' % id - ) - - runtime_db.status = status.DELETING - - # Clean related resources asynchronously - self.engine_client.delete_runtime(id) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Runtime, - types.uuid, - body=resources.Runtime - ) - def put(self, id, runtime): - """Update runtime. - - Currently, we support update name, description, image. When - updating image, send message to engine for asynchronous - handling. - """ - acl.enforce('runtime:update', context.get_ctx()) - - values = {} - for key in UPDATE_ALLOWED: - if runtime.to_dict().get(key) is not None: - values.update({key: runtime.to_dict()[key]}) - - LOG.info('Update resource, params: %s', values, - resource={'type': self.type, 'id': id}) - - image = values.get('image') - - with db_api.transaction(): - if image is not None: - pre_runtime = db_api.get_runtime(id) - if pre_runtime.status != status.AVAILABLE: - raise exc.RuntimeNotAvailableException( - 'Runtime %s is not available.' % id - ) - - pre_image = pre_runtime.image - if pre_image != image: - # Ensure there is no function running in the runtime. 
- db_funcs = db_api.get_functions( - insecure=True, fields=['id'], runtime_id=id - ) - func_ids = [func.id for func in db_funcs] - - for id in func_ids: - if etcd_util.get_service_url(id): - raise exc.NotAllowedException( - 'Runtime %s is still in use by functions.' % id - ) - - values['status'] = status.UPGRADING - self.engine_client.update_runtime( - id, - image=image, - pre_image=pre_image, - ) - - runtime_db = db_api.update_runtime(id, values) - - return resources.Runtime.from_db_obj(runtime_db) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.RuntimePool, types.uuid) - def pool(self, id): - """Get the pool information for the runtime. - - This operation should be admin only. - - We don't check the runtime existence, because this function - also helps us to check the underlying pool even after the runtime - is already deleted. - """ - acl.enforce('runtime_pool:get_all', context.get_ctx()) - - LOG.info("Getting pool information for runtime %s.", id) - capacity = self.engine_client.get_runtime_pool(id) - pool_capacity = resources.RuntimePoolCapacity.from_dict(capacity) - - return resources.RuntimePool.from_dict( - {"name": id, "capacity": pool_capacity} - ) diff --git a/qinling/api/controllers/v1/types.py b/qinling/api/controllers/v1/types.py deleted file mode 100644 index 9333374e..00000000 --- a/qinling/api/controllers/v1/types.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -import json - -from oslo_utils import uuidutils -from wsme import types as wtypes - -from qinling import exceptions as exc - - -class ListType(wtypes.UserType): - """A simple list type.""" - - basetype = wtypes.text - name = 'list' - - @staticmethod - def validate(value): - """Validate and convert the input to a ListType. - - :param value: A comma separated string of values - :returns: A list of values. - """ - items = [v.strip().lower() for v in str(value).split(',')] - - # remove empty items. - return [x for x in items if x] - - @staticmethod - def frombasetype(value): - return ListType.validate(value) if value is not None else None - - -class UniqueListType(ListType): - """A simple list type with no duplicate items.""" - - name = 'uniquelist' - - @staticmethod - def validate(value): - """Validate and convert the input to a UniqueListType. - - :param value: A comma separated string of values. - :returns: A list with no duplicate items. - """ - items = ListType.validate(value) - - seen = set() - - return [x for x in items if not (x in seen or seen.add(x))] - - @staticmethod - def frombasetype(value): - return UniqueListType.validate(value) if value is not None else None - - -class UuidType(wtypes.UserType): - """A simple UUID type. - - The builtin UuidType class in wsme.types doesn't work properly with pecan. - """ - - basetype = wtypes.text - name = 'uuid' - - @staticmethod - def validate(value): - if not uuidutils.is_uuid_like(value): - raise exc.InputException( - "Expected a uuid but received %s." 
% value - ) - - return value - - @staticmethod - def frombasetype(value): - return UuidType.validate(value) if value is not None else None - - -class JsonType(wtypes.UserType): - """A simple JSON type.""" - - basetype = wtypes.text - name = 'json' - - def validate(self, value): - if not value: - return {} - - if not isinstance(value, dict): - raise exc.InputException( - 'JsonType field value must be a dictionary [actual=%s]' % value - ) - - return value - - def frombasetype(self, value): - if isinstance(value, dict): - return value - try: - return json.loads(value) if value is not None else None - except TypeError as e: - raise ValueError(e) - - def tobasetype(self, value): - # Value must be a dict. - return json.dumps(value) if value is not None else None - - -uuid = UuidType() -list = ListType() -uniquelist = UniqueListType() -jsontype = JsonType() diff --git a/qinling/api/controllers/v1/webhook.py b/qinling/api/controllers/v1/webhook.py deleted file mode 100644 index 30edbeb9..00000000 --- a/qinling/api/controllers/v1/webhook.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import copy -import json - -from oslo_log import log as logging -import pecan -from pecan import rest -from wsme import types as wtypes -import wsmeext.pecan as wsme_pecan - -from qinling.api import access_control as acl -from qinling.api.controllers.v1 import resources -from qinling.api.controllers.v1 import types -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import rpc -from qinling.utils import constants -from qinling.utils import executions -from qinling.utils.openstack import keystone as keystone_utils -from qinling.utils import rest_utils - -LOG = logging.getLogger(__name__) - -UPDATE_ALLOWED = set(['function_id', 'function_version', 'description', - 'function_alias']) - - -class WebhooksController(rest.RestController): - _custom_actions = { - 'invoke': ['POST'], - } - - def __init__(self, *args, **kwargs): - self.type = 'webhook' - self.engine_client = rpc.get_engine_client() - self.qinling_endpoint = keystone_utils.get_qinling_endpoint() - - super(WebhooksController, self).__init__(*args, **kwargs) - - def _add_webhook_url(self, id, webhook): - """Add webhook_url attribute for webhook. - - We generate the url dynamically in case the service url is changing. 
- """ - res = copy.deepcopy(webhook) - url = '/'.join( - [self.qinling_endpoint.strip('/'), constants.CURRENT_VERSION, - 'webhooks', id, 'invoke'] - ) - res.update({'webhook_url': url}) - return res - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Webhook, types.uuid) - def get(self, id): - LOG.info("Get %s %s.", self.type, id) - webhook = db_api.get_webhook(id).to_dict() - return resources.Webhook.from_dict(self._add_webhook_url(id, webhook)) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(resources.Webhooks, bool, wtypes.text) - def get_all(self, all_projects=False, project_id=None): - project_id, all_projects = rest_utils.get_project_params( - project_id, all_projects - ) - if all_projects: - acl.enforce('webhook:get_all:all_projects', context.get_ctx()) - - filters = rest_utils.get_filters( - project_id=project_id, - ) - - LOG.info("Get all %ss. filters=%s", self.type, filters) - webhooks = [] - for i in db_api.get_webhooks(insecure=all_projects, **filters): - webhooks.append( - resources.Webhook.from_dict( - self._add_webhook_url(i.id, i.to_dict()) - ) - ) - - return resources.Webhooks(webhooks=webhooks) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Webhook, - body=resources.Webhook, - status_code=201 - ) - def post(self, webhook): - acl.enforce('webhook:create', context.get_ctx()) - - params = webhook.to_dict() - if not (params.get("function_id") or params.get("function_alias")): - raise exc.InputException( - 'Either function_alias or function_id must be provided.' - ) - - function_id = params.get('function_id', "") - version = params.get('function_version', 0) - function_alias = params.get('function_alias', "") - - if function_alias: - alias_db = db_api.get_function_alias(function_alias) - function_id = alias_db.function_id - version = alias_db.function_version - # If function_alias is provided, we don't store either functin id - # or function version. 
- params.update({'function_id': None, - 'function_version': None}) - - LOG.info("Creating %s, params: %s", self.type, params) - - # Even admin user can not expose normal user's function - db_api.get_function(function_id, insecure=False) - if version > 0: - db_api.get_function_version(function_id, version) - - webhook_d = db_api.create_webhook(params).to_dict() - - return resources.Webhook.from_dict( - self._add_webhook_url(webhook_d['id'], webhook_d) - ) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose(None, types.uuid, status_code=204) - def delete(self, id): - acl.enforce('webhook:delete', context.get_ctx()) - LOG.info("Delete %s %s.", self.type, id) - db_api.delete_webhook(id) - - @rest_utils.wrap_wsme_controller_exception - @wsme_pecan.wsexpose( - resources.Webhook, - types.uuid, - body=resources.Webhook - ) - def put(self, id, webhook): - acl.enforce('webhook:update', context.get_ctx()) - - values = {} - for key in UPDATE_ALLOWED: - if webhook.to_dict().get(key) is not None: - values.update({key: webhook.to_dict()[key]}) - - LOG.info('Update %s %s, params: %s', self.type, id, values) - - # Even admin user can not expose normal user's function - webhook_db = db_api.get_webhook(id, insecure=False) - pre_alias = webhook_db.function_alias - pre_function_id = webhook_db.function_id - pre_version = webhook_db.function_version - - new_alias = values.get("function_alias") - new_function_id = values.get("function_id", pre_function_id) - new_version = values.get("function_version", pre_version) - - function_id = pre_function_id - version = pre_version - if new_alias and new_alias != pre_alias: - alias_db = db_api.get_function_alias(new_alias) - function_id = alias_db.function_id - version = alias_db.function_version - # If function_alias is provided, we don't store either functin id - # or function version. 
- values.update({'function_id': None, - 'function_version': None}) - elif new_function_id != pre_function_id or new_version != pre_version: - function_id = new_function_id - version = new_version - values.update({"function_alias": None}) - - db_api.get_function(function_id, insecure=False) - if version and version > 0: - db_api.get_function_version(function_id, version) - - webhook = db_api.update_webhook(id, values).to_dict() - return resources.Webhook.from_dict(self._add_webhook_url(id, webhook)) - - @rest_utils.wrap_pecan_controller_exception - @pecan.expose('json') - def invoke(self, id, **kwargs): - with db_api.transaction(): - # The webhook url can be accessed without authentication, so - # insecure is used here - webhook_db = db_api.get_webhook(id, insecure=True) - function_alias = webhook_db.function_alias - - if function_alias: - alias = db_api.get_function_alias(function_alias, - insecure=True) - function_id = alias.function_id - function_version = alias.function_version - function_db = db_api.get_function(function_id, insecure=True) - else: - function_db = webhook_db.function - function_id = webhook_db.function_id - function_version = webhook_db.function_version - - trust_id = function_db.trust_id - project_id = function_db.project_id - - LOG.info( - 'Invoking function %s(version %s) by webhook %s', - function_id, function_version, id - ) - - # Setup user context - ctx = keystone_utils.create_trust_context(trust_id, project_id) - context.set_ctx(ctx) - - params = { - 'function_id': function_id, - 'function_version': function_version, - 'sync': False, - 'input': json.dumps(kwargs), - 'description': constants.EXECUTION_BY_WEBHOOK % id - } - execution = executions.create_execution(self.engine_client, params) - pecan.response.status = 202 - - return {'execution_id': execution.id} diff --git a/qinling/api/service.py b/qinling/api/service.py deleted file mode 100644 index f645dfac..00000000 --- a/qinling/api/service.py +++ /dev/null @@ -1,61 +0,0 @@ -# 
Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_concurrency import processutils -from oslo_config import cfg -from oslo_log import log as logging -from oslo_service import service -from oslo_service import wsgi - -from qinling.api import app - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -class WSGIService(service.ServiceBase): - """Provides ability to launch Qinling API from wsgi app.""" - - def __init__(self): - self.app = app.setup_app() - - self.workers = CONF.api.api_workers - if self.workers is not None and self.workers < 1: - LOG.warning( - "Value of config option api_workers must be integer " - "greater than 1. Input value ignored." - ) - self.workers = None - self.workers = self.workers or processutils.get_worker_count() - - self.server = wsgi.Server( - cfg.CONF, - "qinling_api", - self.app, - host=cfg.CONF.api.host, - port=cfg.CONF.api.port, - use_ssl=cfg.CONF.api.enable_ssl_api - ) - - def start(self): - self.server.start() - - def stop(self): - self.server.stop() - - def wait(self): - self.server.wait() - - def reset(self): - self.server.reset() diff --git a/qinling/api/wsgi.py b/qinling/api/wsgi.py deleted file mode 100644 index 110d7bcf..00000000 --- a/qinling/api/wsgi.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2019 - Ormuco, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from qinling.api import app - -application = app.init_wsgi() diff --git a/qinling/cmd/__init__.py b/qinling/cmd/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/cmd/api.py b/qinling/cmd/api.py deleted file mode 100644 index 65a03890..00000000 --- a/qinling/cmd/api.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2017 - Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import eventlet -eventlet.monkey_patch() - -import sys - -from oslo_config import cfg -from oslo_log import log as logging -from oslo_service import service - -from qinling.api import service as api_service -from qinling import config -from qinling import rpc -from qinling.utils import common - -CONF = cfg.CONF - - -def main(): - try: - config.parse_args(args=common.get_properly_ordered_parameters()) - common.print_server_info("api") - logging.setup(CONF, 'qinling') - # Initialize RPC configuration. 
- rpc.get_transport() - - api_server = api_service.WSGIService() - launcher = service.launch(CONF, api_server, workers=api_server.workers, - restart_method='mutate') - launcher.wait() - except RuntimeError as excp: - sys.stderr.write("ERROR: %s\n" % excp) - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/qinling/cmd/engine.py b/qinling/cmd/engine.py deleted file mode 100644 index 3f054177..00000000 --- a/qinling/cmd/engine.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2017 - Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys - -import cotyledon -from cotyledon import oslo_config_glue -from oslo_config import cfg -from oslo_log import log as logging - -from qinling import config -from qinling.engine import service as eng_service -from qinling import rpc -from qinling.utils import common - -CONF = cfg.CONF - - -def main(): - try: - config.parse_args(args=common.get_properly_ordered_parameters()) - common.print_server_info("engine") - logging.setup(CONF, 'qinling') - # Initialize RPC configuration. 
- rpc.get_transport() - - sm = cotyledon.ServiceManager() - sm.add( - eng_service.EngineService, - workers=1, - ) - oslo_config_glue.setup(sm, CONF, reload_method="mutate") - sm.run() - except RuntimeError as excp: - sys.stderr.write("ERROR: %s\n" % excp) - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/qinling/cmd/status.py b/qinling/cmd/status.py deleted file mode 100644 index 80e0e6e3..00000000 --- a/qinling/cmd/status.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright (c) 2018 NEC, Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from oslo_config import cfg -from oslo_upgradecheck import upgradecheck - -CONF = cfg.CONF - - -class Checks(upgradecheck.UpgradeCommands): - - """Contains upgrade checks - - Various upgrade checks should be added as separate methods in this class - and added to _upgrade_checks tuple. - """ - - def _sample_check(self): - """This is sample check added to test the upgrade check framework - - It needs to be removed after adding any real upgrade check - """ - return upgradecheck.Result(upgradecheck.Code.SUCCESS, 'Sample detail') - - _upgrade_checks = ( - # Sample check added for now. 
- # Whereas in future real checks must be added here in tuple - ('Sample Check', _sample_check), - ) - - -def main(): - return upgradecheck.main( - CONF, project='qinling', upgrade_command=Checks()) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/qinling/config.py b/qinling/config.py deleted file mode 100644 index 372f1125..00000000 --- a/qinling/config.py +++ /dev/null @@ -1,369 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from keystoneauth1 import loading -from keystonemiddleware import auth_token -from oslo_concurrency import processutils -from oslo_config import cfg -from oslo_log import log -from oslo_middleware import cors - -from qinling import version - -CONF = cfg.CONF - -launch_opt = cfg.ListOpt( - 'server', - default=['all'], - help='Specifies which qinling server to start by the launch script.' -) - -default_opts = [ - cfg.StrOpt( - 'qinling_endpoint', - help='Qinling service endpoint.' - ), -] - -API_GROUP = 'api' -api_opts = [ - cfg.StrOpt('host', default='0.0.0.0', help='Qinling API server host.'), - cfg.PortOpt('port', default=7070, help='Qinling API server port.'), - cfg.BoolOpt( - 'enable_ssl_api', - default=False, - help='Enable the integrated stand-alone API to service requests' - 'via HTTPS instead of HTTP.' 
- ), - cfg.IntOpt( - 'api_workers', - default=processutils.get_worker_count(), - help='Number of workers for Qinling API service ' - 'default is equal to the number of CPUs available if that can ' - 'be determined, else a default worker count of 1 is returned.' - ), - cfg.BoolOpt( - 'enable_job_handler', - default=True, - help='Enable job handler.' - ), -] - -PECAN_GROUP = 'pecan' -pecan_opts = [ - cfg.StrOpt( - 'root', - default='qinling.api.controllers.root.RootController', - help='Pecan root controller' - ), - cfg.ListOpt( - 'modules', - default=["qinling.api"], - help='A list of modules where pecan will search for applications.' - ), - cfg.BoolOpt( - 'debug', - default=False, - help='Enables the ability to display tracebacks in the browser and' - ' interactively debug during development.' - ), - cfg.BoolOpt( - 'auth_enable', - default=True, - help='Enables user authentication in pecan.' - ) -] - -ENGINE_GROUP = 'engine' -engine_opts = [ - cfg.StrOpt( - 'host', - default='0.0.0.0', - help='Name of the engine node. This can be an opaque ' - 'identifier. It is not necessarily a hostname, ' - 'FQDN, or IP address.' - ), - cfg.StrOpt( - 'topic', - default='qinling_engine', - help='The message topic that the engine listens on.' - ), - cfg.StrOpt( - 'orchestrator', - default='kubernetes', - choices=['kubernetes', 'swarm'], - help='The container orchestrator.' - ), - cfg.IntOpt( - 'function_service_expiration', - default=3600, - help='Maximum service time in seconds for function in orchestrator.' - ), - cfg.IntOpt( - 'function_concurrency', - default=3, - help='Maximum number of concurrent executions per function.' - ), - cfg.StrOpt( - 'sidecar_image', - default='openstackqinling/sidecar:0.0.2', - help='The sidecar image being used together with the worker.' - ), -] - -STORAGE_GROUP = 'storage' -storage_opts = [ - cfg.StrOpt( - 'file_system_dir', - help='Directory to store function packages.' 
- ), - cfg.StrOpt( - 'provider', - default='local', - choices=['local', 'swift'], - help='Storage provider for function code package.' - ), -] - -KUBERNETES_GROUP = 'kubernetes' -kubernetes_opts = [ - cfg.StrOpt( - 'namespace', - default='qinling', - help='Resources scope created by Qinling.' - ), - cfg.IntOpt( - 'replicas', - default=3, - help='Number of desired replicas in deployment.' - ), - cfg.StrOpt( - 'kube_host', - default='http://127.0.0.1:8001', - help='Kubernetes server address, e.g. you can start a proxy to the ' - 'Kubernetes API server by using "kubectl proxy" command.' - ), - cfg.BoolOpt( - 'use_api_certificate', - default=True, - help='Whether to use client certificates to connect to the ' - 'Kubernetes API server.' - ), - cfg.StrOpt( - 'ssl_ca_cert', - default='/etc/qinling/pki/kubernetes/ca.crt', - help='Path to the CA certificate for qinling to use to connect to ' - 'the Kubernetes API server.' - ), - cfg.StrOpt( - 'cert_file', - default='/etc/qinling/pki/kubernetes/qinling.crt', - help='Path to the client certificate for qinling to use to ' - 'connect to the Kubernetes API server.' - ), - cfg.StrOpt( - 'key_file', - default='/etc/qinling/pki/kubernetes/qinling.key', - help='Path to the client certificate key file for qinling to use to ' - 'connect to the Kubernetes API server.' - ), - cfg.StrOpt( - 'log_devel', - default='INFO', - choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], - help='Log level for kubernetes operations.' - ), - cfg.ListOpt( - 'trusted_cidrs', - deprecated_for_removal=True, - item_type=cfg.types.String(), - default=[], - help='List of CIDR that have access to the services in ' - 'Kubernetes, e.g. trusted_cidrs=127.0.0.1/32,198.72.124.109/32. ' - 'If it is empty list, the default value is the host IP address ' - 'that the qinling-engine service is running on.' - ) -] - -ETCD_GROUP = 'etcd' -etcd_opts = [ - cfg.StrOpt( - 'host', - default='127.0.0.1', - help='Etcd service host address.' 
- ), - cfg.PortOpt( - 'port', - default=2379, - help='Etcd service port.' - ), - cfg.StrOpt( - 'protocol', - default='https', - choices=['http', 'https'], - help='Etcd connection protocol.' - ), - cfg.StrOpt( - 'ca_cert', - default='/etc/qinling/pki/etcd/ca.crt', - help='Path to CA certificate file to use to securely ' - 'connect to etcd server.' - ), - cfg.StrOpt( - 'cert_file', - default='/etc/qinling/pki/etcd/qinling-etcd-client.crt', - help='Path to client certificate file to use to securely ' - 'connect to etcd server.' - ), - cfg.StrOpt( - 'cert_key', - default='/etc/qinling/pki/etcd/qinling-etcd-client.key', - help='Path to client certificate key file to use to securely ' - 'connect to etcd server.' - ), -] - -RLIMITS_GROUP = 'resource_limits' -rlimits_opts = [ - cfg.IntOpt( - 'default_cpu', - default=100, - help='Default cpu resource(unit: millicpu).' - ), - cfg.IntOpt( - 'min_cpu', - default=100, - help='Minimum cpu resource(unit: millicpu).' - ), - cfg.IntOpt( - 'max_cpu', - default=300, - help='Maximum cpu resource(unit: millicpu).' - ), - cfg.IntOpt( - 'default_memory', - default=33554432, - help='Default memory resource(unit: bytes).' - ), - cfg.IntOpt( - 'min_memory', - default=33554432, - help='Minimum memory resource(unit: bytes).' - ), - cfg.IntOpt( - 'max_memory', - default=134217728, - help='Maximum memory resource(unit: bytes).' - ), - cfg.IntOpt( - 'default_timeout', - default=5, - help='Default function execution timeout(unit: seconds)' - ), - cfg.IntOpt( - 'min_timeout', - default=1, - help='Minimum function execution timeout(unit: seconds).' - ), - cfg.IntOpt( - 'max_timeout', - default=300, - help='Maximum function execution timeout(unit: seconds).' 
- ), -] - - -def list_opts(): - keystone_middleware_opts = auth_token.list_opts() - keystone_loading_opts = [( - 'keystone_authtoken', loading.get_auth_plugin_conf_options('password') - )] - - qinling_opts = [ - (API_GROUP, api_opts), - (PECAN_GROUP, pecan_opts), - (ENGINE_GROUP, engine_opts), - (STORAGE_GROUP, storage_opts), - (KUBERNETES_GROUP, kubernetes_opts), - (ETCD_GROUP, etcd_opts), - (RLIMITS_GROUP, rlimits_opts), - (None, [launch_opt]), - (None, default_opts), - ] - - return keystone_middleware_opts + keystone_loading_opts + qinling_opts - - -def parse_args(args=None, usage=None, default_config_files=None): - CLI_OPTS = [launch_opt] - CONF.register_cli_opts(CLI_OPTS) - - for group, options in list_opts(): - CONF.register_opts(list(options), group) - - _DEFAULT_LOG_LEVELS = [ - 'eventlet.wsgi.server=WARN', - 'oslo_service.periodic_task=INFO', - 'oslo_service.loopingcall=INFO', - 'oslo_db=WARN', - 'oslo_concurrency.lockutils=WARN', - 'kubernetes.client.rest=%s' % CONF.kubernetes.log_devel, - 'keystoneclient=INFO', - 'requests.packages.urllib3.connectionpool=CRITICAL', - 'urllib3.connectionpool=CRITICAL', - 'cotyledon=INFO', - 'futurist.periodics=WARN' - ] - default_log_levels = log.get_default_log_levels() - default_log_levels.extend(_DEFAULT_LOG_LEVELS) - log.set_defaults(default_log_levels=default_log_levels) - log.register_options(CONF) - - CONF( - args=args, - project='qinling', - version=version, - usage=usage, - default_config_files=default_config_files - ) - - -def set_config_defaults(): - """This method updates all configuration default values.""" - set_cors_middleware_defaults() - - -def set_cors_middleware_defaults(): - """Update default configuration options for oslo.middleware.""" - cors.set_defaults( - allow_headers=['X-Auth-Token', - 'X-Identity-Status', - 'X-Roles', - 'X-Service-Catalog', - 'X-User-Id', - 'X-Tenant-Id', - 'X-Project-Id', - 'X-User-Name', - 'X-Project-Name'], - allow_methods=['GET', - 'PUT', - 'POST', - 'DELETE', - 'PATCH'], - 
expose_headers=['X-Auth-Token', - 'X-Subject-Token', - 'X-Service-Token', - 'X-Project-Id', - 'X-User-Name', - 'X-Project-Name'] - ) diff --git a/qinling/context.py b/qinling/context.py deleted file mode 100644 index a56020ca..00000000 --- a/qinling/context.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import re - -from oslo_config import cfg -from oslo_context import context as oslo_context -import pecan -from pecan import hooks - -from qinling import exceptions as exc -from qinling.utils import thread_local - -CONF = cfg.CONF -ALLOWED_WITHOUT_AUTH = ['/', '/v1/'] -WEBHOOK_REG = '^/v1/webhooks/[a-f0-9-]+/invoke$' -CTX_THREAD_LOCAL_NAME = "QINLING_APP_CTX_THREAD_LOCAL" -DEFAULT_PROJECT_ID = "default" - - -def authenticate(req): - # Refer to: - # https://docs.openstack.org/keystonemiddleware/latest/middlewarearchitecture.html#exchanging-user-information - identity_status = req.headers.get('X-Identity-Status') - service_identity_status = req.headers.get('X-Service-Identity-Status') - - if (identity_status == 'Confirmed' or - service_identity_status == 'Confirmed'): - return - - if req.headers.get('X-Auth-Token'): - msg = 'Auth token is invalid: %s' % req.headers['X-Auth-Token'] - else: - msg = 'Authentication required' - - raise exc.UnauthorizedException(msg) - - -class AuthHook(hooks.PecanHook): - def before(self, state): - if not CONF.pecan.auth_enable: - return - if state.request.path in 
ALLOWED_WITHOUT_AUTH: - return - if re.search(WEBHOOK_REG, state.request.path): - return - - try: - authenticate(state.request) - except Exception as e: - msg = "Failed to validate access token: %s" % str(e) - - pecan.abort( - status_code=401, - detail=msg, - headers={'Server-Error-Message': msg} - ) - - -def has_ctx(): - return thread_local.has_thread_local(CTX_THREAD_LOCAL_NAME) - - -def get_ctx(): - if not has_ctx(): - raise exc.ApplicationContextNotFoundException() - - return thread_local.get_thread_local(CTX_THREAD_LOCAL_NAME) - - -def set_ctx(new_ctx): - thread_local.set_thread_local(CTX_THREAD_LOCAL_NAME, new_ctx) - - -class Context(oslo_context.RequestContext): - def __init__(self, is_trust_scoped=False, trust_id=None, is_admin=False, - **kwargs): - self.is_trust_scoped = is_trust_scoped - self.trust_id = trust_id - - super(Context, self).__init__(is_admin=is_admin, **kwargs) - - @property - def projectid(self): - if CONF.pecan.auth_enable: - return self.project_id - else: - return DEFAULT_PROJECT_ID - - def convert_to_dict(self): - """Return a dictionary of context attributes. - - Use get_logging_values() instead of to_dict() from parent class to get - more information from the context. This method is not named "to_dict" - to avoid recursive calling. 
- """ - ctx_dict = self.get_logging_values() - ctx_dict.update( - { - 'is_trust_scoped': self.is_trust_scoped, - 'trust_id': self.trust_id, - 'auth_token': self.auth_token, - } - ) - - return ctx_dict - - @classmethod - def from_dict(cls, values, **kwargs): - """Construct a context object from a provided dictionary.""" - kwargs.setdefault( - 'is_trust_scoped', values.get('is_trust_scoped', False) - ) - kwargs.setdefault('trust_id', values.get('trust_id')) - - return super(Context, cls).from_dict(values, **kwargs) - - @classmethod - def from_environ(cls, env): - context = super(Context, cls).from_environ(env) - context.is_admin = True if 'admin' in context.roles else False - - return context - - -class ContextHook(hooks.PecanHook): - def before(self, state): - context_obj = Context.from_environ(state.request.environ) - set_ctx(context_obj) - - def after(self, state): - set_ctx(None) diff --git a/qinling/db/__init__.py b/qinling/db/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/db/api.py b/qinling/db/api.py deleted file mode 100644 index 7d3f6681..00000000 --- a/qinling/db/api.py +++ /dev/null @@ -1,251 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import contextlib - -from oslo_db import api as db_api - - -_BACKEND_MAPPING = { - 'sqlalchemy': 'qinling.db.sqlalchemy.api', -} - -IMPL = db_api.DBAPI('sqlalchemy', backend_mapping=_BACKEND_MAPPING) - - -def setup_db(): - IMPL.setup_db() - - -def drop_db(): - IMPL.drop_db() - - -def start_tx(): - IMPL.start_tx() - - -def commit_tx(): - IMPL.commit_tx() - - -def rollback_tx(): - IMPL.rollback_tx() - - -def end_tx(): - IMPL.end_tx() - - -@contextlib.contextmanager -def transaction(): - with IMPL.transaction(): - yield - - -def delete_all(): - """A helper function for testing.""" - delete_jobs(insecure=True) - delete_webhooks(insecure=True) - delete_executions(insecure=True) - delete_function_aliases(insecure=True) - delete_functions(insecure=True) - delete_runtimes(insecure=True) - - -def conditional_update(model, values, expected_values, **kwargs): - return IMPL.conditional_update(model, values, expected_values, **kwargs) - - -def get_function(id, insecure=None): - """Get function from db. - - 'insecure' param is needed for job handler and webhook. 
- """ - return IMPL.get_function(id, insecure=insecure) - - -def get_functions(limit=None, marker=None, sort_keys=None, - sort_dirs=None, fields=None, **kwargs): - return IMPL.get_functions( - limit=limit, - marker=marker, - sort_keys=sort_keys, - sort_dirs=sort_dirs, - fields=fields, - **kwargs - ) - - -def create_function(values): - return IMPL.create_function(values) - - -def update_function(id, values): - return IMPL.update_function(id, values) - - -def delete_function(id): - return IMPL.delete_function(id) - - -def delete_functions(**kwargs): - return IMPL.delete_functions(**kwargs) - - -def create_runtime(values): - return IMPL.create_runtime(values) - - -def get_runtime(id): - return IMPL.get_runtime(id) - - -def get_runtimes(): - return IMPL.get_runtimes() - - -def delete_runtime(id): - return IMPL.delete_runtime(id) - - -def update_runtime(id, values): - return IMPL.update_runtime(id, values) - - -def delete_runtimes(**kwargs): - return IMPL.delete_runtimes(**kwargs) - - -def create_execution(values): - return IMPL.create_execution(values) - - -def get_execution(id): - return IMPL.get_execution(id) - - -def get_executions(**filters): - return IMPL.get_executions(**filters) - - -def delete_execution(id): - return IMPL.delete_execution(id) - - -def update_execution(id, values): - return IMPL.update_execution(id, values) - - -def delete_executions(**kwargs): - return IMPL.delete_executions(**kwargs) - - -def create_job(values): - return IMPL.create_job(values) - - -def get_job(id): - return IMPL.get_job(id) - - -def get_next_jobs(before): - return IMPL.get_next_jobs(before) - - -def delete_job(id): - return IMPL.delete_job(id) - - -def update_job(id, values): - return IMPL.update_job(id, values) - - -def get_jobs(**kwargs): - return IMPL.get_jobs(**kwargs) - - -def delete_jobs(**kwargs): - return IMPL.delete_jobs(**kwargs) - - -def create_webhook(values): - return IMPL.create_webhook(values) - - -def get_webhook(id, insecure=None): - return 
IMPL.get_webhook(id, insecure=insecure) - - -def get_webhooks(**kwargs): - return IMPL.get_webhooks(**kwargs) - - -def delete_webhook(id): - return IMPL.delete_webhook(id) - - -def update_webhook(id, values): - return IMPL.update_webhook(id, values) - - -def delete_webhooks(**kwargs): - return IMPL.delete_webhooks(**kwargs) - - -def increase_function_version(function_id, old_version, **kwargs): - """This function is meant to be invoked within locking section.""" - return IMPL.increase_function_version(function_id, old_version, **kwargs) - - -def get_function_version(function_id, version, **kwargs): - return IMPL.get_function_version(function_id, version, **kwargs) - - -# This function is only used in unit test. -def update_function_version(function_id, version, **kwargs): - return IMPL.update_function_version(function_id, version, **kwargs) - - -def delete_function_version(function_id, version): - return IMPL.delete_function_version(function_id, version) - - -def get_function_versions(**kwargs): - return IMPL.get_function_versions(**kwargs) - - -def create_function_alias(**kwargs): - return IMPL.create_function_alias(**kwargs) - - -def get_function_alias(name, **kwargs): - return IMPL.get_function_alias(name, **kwargs) - - -def get_function_aliases(**kwargs): - return IMPL.get_function_aliases(**kwargs) - - -def update_function_alias(name, **kwargs): - return IMPL.update_function_alias(name, **kwargs) - - -def delete_function_alias(name, **kwargs): - return IMPL.delete_function_alias(name, **kwargs) - - -# For unit test -def delete_function_aliases(**kwargs): - return IMPL.delete_function_aliases(**kwargs) diff --git a/qinling/db/base.py b/qinling/db/base.py deleted file mode 100644 index 92138da4..00000000 --- a/qinling/db/base.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools - -from oslo_config import cfg -from oslo_db import options as db_options -from oslo_db.sqlalchemy import session as db_session - -from qinling import context -from qinling import exceptions as exc -from qinling.utils import thread_local - -# Note(dzimine): sqlite only works for basic testing. -db_options.set_defaults(cfg.CONF, connection="sqlite:///qinling.sqlite") -_FACADE = None -_DB_SESSION_THREAD_LOCAL_NAME = "db_sql_alchemy_session" - - -def _get_facade(): - global _FACADE - if _FACADE is None: - _FACADE = db_session.EngineFacade.from_config(cfg.CONF, sqlite_fk=True) - return _FACADE - - -def get_session(expire_on_commit=False, autocommit=False): - """Helper method to grab session.""" - facade = _get_facade() - return facade.get_session(expire_on_commit=expire_on_commit, - autocommit=autocommit) - - -def get_engine(): - facade = _get_facade() - return facade.get_engine() - - -def _get_thread_local_session(): - return thread_local.get_thread_local(_DB_SESSION_THREAD_LOCAL_NAME) - - -def _get_or_create_thread_local_session(): - ses = _get_thread_local_session() - - if ses: - return ses, False - - ses = get_session() - _set_thread_local_session(ses) - - return ses, True - - -def _set_thread_local_session(session): - thread_local.set_thread_local(_DB_SESSION_THREAD_LOCAL_NAME, session) - - -def start_tx(): - """Starts transaction. - - Opens new database session and starts new transaction assuming - there wasn't any opened sessions within the same thread. 
- """ - if _get_thread_local_session(): - raise exc.DBError( - "Database transaction has already been started." - ) - - _set_thread_local_session(get_session()) - - -def commit_tx(): - """Commits previously started database transaction.""" - ses = _get_thread_local_session() - - if not ses: - raise exc.DBError( - "Nothing to commit. Database transaction" - " has not been previously started." - ) - - ses.commit() - - -def rollback_tx(): - """Rolls back previously started database transaction.""" - ses = _get_thread_local_session() - - if not ses: - raise exc.DBError( - "Nothing to roll back. Database transaction has not been started." - ) - - ses.rollback() - - -def end_tx(): - """Ends transaction. - - Ends current database transaction. - It rolls back all uncommitted changes and closes database session. - """ - ses = _get_thread_local_session() - - if not ses: - raise exc.DBError( - "Database transaction has not been started." - ) - - if ses.dirty: - rollback_tx() - - ses.close() - _set_thread_local_session(None) - - -def session_aware(): - """Decorator for methods working within db session.""" - - def _decorator(func): - @functools.wraps(func) - def _within_session(*args, **kw): - ses, created = _get_or_create_thread_local_session() - - try: - kw['session'] = ses - - result = func(*args, **kw) - - if created: - ses.commit() - - return result - except Exception: - if created: - ses.rollback() - raise - finally: - if created: - _set_thread_local_session(None) - ses.close() - - return _within_session - - return _decorator - - -def insecure_aware(): - """Decorator for methods working within insecure db query or not.""" - - def _decorator(func): - @functools.wraps(func) - def _with_insecure(*args, **kw): - if kw.get('insecure') is None: - insecure = context.get_ctx().is_admin - kw['insecure'] = insecure - return func(*args, **kw) - - return _with_insecure - - return _decorator - - -@session_aware() -def model_query(model, columns=(), session=None): - """Query helper. 
- - :param model: Base model to query. - :param columns: Optional. Which columns to be queried. - """ - if columns: - return session.query(*columns) - - return session.query(model) diff --git a/qinling/db/sqlalchemy/__init__.py b/qinling/db/sqlalchemy/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/db/sqlalchemy/api.py b/qinling/db/sqlalchemy/api.py deleted file mode 100644 index 2b93a466..00000000 --- a/qinling/db/sqlalchemy/api.py +++ /dev/null @@ -1,619 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import contextlib -import sys -import threading - -from oslo_config import cfg -from oslo_db import exception as oslo_db_exc -from oslo_db.sqlalchemy import utils as db_utils -from oslo_log import log as logging -import sqlalchemy as sa - -from qinling import context -from qinling.db import base as db_base -from qinling.db.sqlalchemy import filters as db_filters -from qinling.db.sqlalchemy import model_base -from qinling.db.sqlalchemy import models -from qinling import exceptions as exc -from qinling import status - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - -_SCHEMA_LOCK = threading.RLock() -_initialized = False - - -def get_backend(): - """Consumed by openstack common code. - - The backend is this module itself. - :return: Name of db backend. 
- """ - return sys.modules[__name__] - - -def setup_db(): - global _initialized - - with _SCHEMA_LOCK: - if _initialized: - return - - try: - models.Function.metadata.create_all(db_base.get_engine()) - - _initialized = True - except sa.exc.OperationalError as e: - raise exc.DBError("Failed to setup database: %s" % str(e)) - - -def drop_db(): - global _initialized - - with _SCHEMA_LOCK: - if not _initialized: - return - - try: - models.Function.metadata.drop_all(db_base.get_engine()) - - _initialized = False - except Exception as e: - raise exc.DBError("Failed to drop database: %s" % str(e)) - - -def start_tx(): - db_base.start_tx() - - -def commit_tx(): - db_base.commit_tx() - - -def rollback_tx(): - db_base.rollback_tx() - - -def end_tx(): - db_base.end_tx() - - -@contextlib.contextmanager -def transaction(): - start_tx() - - try: - yield - commit_tx() - finally: - end_tx() - - -def _secure_query(model, *columns): - query = db_base.model_query(model, columns) - - if not issubclass(model, model_base.QinlingSecureModelBase): - return query - - if model == models.Runtime: - query_criterion = sa.or_( - model.project_id == context.get_ctx().projectid, - model.is_public - ) - else: - query_criterion = model.project_id == context.get_ctx().projectid - - query = query.filter(query_criterion) - - return query - - -def _paginate_query(model, limit=None, marker=None, sort_keys=None, - sort_dirs=None, query=None): - if not query: - query = _secure_query(model) - - sort_keys = sort_keys if sort_keys else [] - - if 'id' not in sort_keys: - sort_keys.append('id') - sort_dirs.append('asc') if sort_dirs else None - - query = db_utils.paginate_query( - query, - model, - limit, - sort_keys, - marker=marker, - sort_dirs=sort_dirs - ) - - return query - - -def _get_collection(model, insecure=False, limit=None, marker=None, - sort_keys=None, sort_dirs=None, fields=None, **filters): - columns = ( - tuple([getattr(model, f) for f in fields if hasattr(model, f)]) - if fields else () - ) - 
- query = (db_base.model_query(model, columns) if insecure - else _secure_query(model, *columns)) - query = db_filters.apply_filters(query, model, **filters) - query = _paginate_query( - model, - limit, - marker, - sort_keys, - sort_dirs, - query - ) - - try: - return query.all() - except Exception as e: - raise exc.DBError( - "Failed when querying database, error type: %s, " - "error message: %s" % (e.__class__.__name__, str(e)) - ) - - -def _get_collection_sorted_by_time(model, insecure=False, fields=None, - sort_keys=['created_at'], **kwargs): - return _get_collection( - model=model, - insecure=insecure, - sort_keys=sort_keys, - fields=fields, - **kwargs - ) - - -@db_base.insecure_aware() -def _get_db_object_by_id(model, id, insecure=None): - query = db_base.model_query(model) if insecure else _secure_query(model) - - return query.filter_by(id=id).first() - - -@db_base.insecure_aware() -def _delete_all(model, insecure=None, **kwargs): - # NOTE(kong): If we use 'in_' operator in _secure_query(), delete() - # method will raise error with default parameter. 
Please refer to - # http://docs.sqlalchemy.org/en/rel_1_0/orm/query.html#sqlalchemy.orm.query.Query.delete - query = db_base.model_query(model) if insecure else _secure_query(model) - query.filter_by(**kwargs).delete(synchronize_session="fetch") - - -@db_base.insecure_aware() -def _get_db_object_by_name(model, name, insecure=None): - query = db_base.model_query(model) if insecure else _secure_query(model) - - return query.filter_by(name=name).first() - - -@db_base.session_aware() -def conditional_update(model, values, expected_values, insecure=False, - filters=None, session=None): - """Compare-and-swap conditional update SQLAlchemy implementation.""" - filters = filters or {} - filters.update(expected_values) - query = (db_base.model_query(model) if insecure else _secure_query(model)) - query = db_filters.apply_filters(query, model, **filters) - update_args = {'synchronize_session': False} - - # Return True if we were able to change any DB entry, False otherwise - result = query.update(values, **update_args) - - return 0 != result - - -@db_base.insecure_aware() -@db_base.session_aware() -def get_function(id, insecure=None, session=None): - function = _get_db_object_by_id(models.Function, id, insecure=insecure) - - if not function: - raise exc.DBEntityNotFoundError("Function not found [id=%s]" % id) - - return function - - -@db_base.session_aware() -def get_functions(session=None, **kwargs): - return _get_collection_sorted_by_time(models.Function, **kwargs) - - -@db_base.session_aware() -def create_function(values, session=None): - func = models.Function() - func.update(values.copy()) - - try: - func.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for Function: %s" % e.columns - ) - - return func - - -@db_base.session_aware() -def update_function(id, values, session=None): - function = get_function(id) - function.update(values.copy()) - - return function - - -@db_base.session_aware() -def delete_function(id, 
session=None): - function = get_function(id) - - session.delete(function) - - -@db_base.session_aware() -def delete_functions(session=None, **kwargs): - return _delete_all(models.Function, **kwargs) - - -@db_base.session_aware() -def create_runtime(values, session=None): - runtime = models.Runtime() - runtime.update(values.copy()) - - try: - runtime.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for Runtime: %s" % e.columns - ) - - return runtime - - -@db_base.session_aware() -def get_runtime(id, session=None): - model = models.Runtime - filters = sa.and_( - model.id == id, - sa.or_(model.project_id == context.get_ctx().projectid, - model.is_public), - ) - runtime = db_base.model_query(model).filter(filters).first() - - if not runtime: - raise exc.DBEntityNotFoundError("Runtime not found [id=%s]" % id) - - return runtime - - -@db_base.session_aware() -def get_runtimes(session=None, **kwargs): - return _get_collection_sorted_by_time(models.Runtime, **kwargs) - - -@db_base.session_aware() -def delete_runtime(id, session=None): - # Because we don't allow normal user to delete runtime in api layer, so it - # is safe to get runtime here - runtime = get_runtime(id) - session.delete(runtime) - - -@db_base.session_aware() -def update_runtime(id, values, session=None): - # Because we don't allow normal user to update runtime in api layer, so it - # is safe to get runtime here - runtime = get_runtime(id) - runtime.update(values.copy()) - - return runtime - - -@db_base.insecure_aware() -@db_base.session_aware() -def delete_runtimes(session=None, insecure=None, **kwargs): - return _delete_all(models.Runtime, insecure=insecure, **kwargs) - - -@db_base.session_aware() -def create_execution(values, session=None): - execution = models.Execution() - execution.update(values.copy()) - - try: - execution.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for Execution: %s" 
% e.columns - ) - - return execution - - -@db_base.insecure_aware() -@db_base.session_aware() -def get_execution(id, insecure=None, session=None): - execution = _get_db_object_by_id(models.Execution, id, insecure=insecure) - - if not execution: - raise exc.DBEntityNotFoundError("Execution not found [id=%s]" % id) - - return execution - - -@db_base.session_aware() -def update_execution(id, values, session=None): - execution = get_execution(id) - execution.update(values.copy()) - - return execution - - -@db_base.session_aware() -def get_executions(session=None, **kwargs): - return _get_collection_sorted_by_time(models.Execution, **kwargs) - - -@db_base.session_aware() -def delete_execution(id, session=None): - execution = get_execution(id) - - session.delete(execution) - - -@db_base.insecure_aware() -@db_base.session_aware() -def delete_executions(session=None, insecure=None, **kwargs): - return _delete_all(models.Execution, insecure=insecure, **kwargs) - - -@db_base.session_aware() -def create_job(values, session=None): - job = models.Job() - job.update(values) - - try: - job.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for Job: %s" % e.columns - ) - - return job - - -@db_base.session_aware() -def get_job(id, session=None): - job = _get_db_object_by_id(models.Job, id) - if not job: - raise exc.DBEntityNotFoundError("Job not found [id=%s]" % id) - - return job - - -@db_base.session_aware() -def delete_job(id, session=None): - get_job(id) - - # Delete the job by ID and get the affected row count. 
- table = models.Job.__table__ - result = session.execute(table.delete().where(table.c.id == id)) - - return result.rowcount - - -@db_base.session_aware() -def update_job(id, values, session=None): - job = get_job(id) - job.update(values.copy()) - - return job - - -@db_base.session_aware() -def get_next_jobs(before, session=None): - return _get_collection( - models.Job, insecure=True, sort_keys=['next_execution_time'], - sort_dirs=['asc'], next_execution_time={'lt': before}, - status=status.RUNNING - ) - - -@db_base.session_aware() -def get_jobs(session=None, **kwargs): - return _get_collection_sorted_by_time(models.Job, **kwargs) - - -@db_base.insecure_aware() -@db_base.session_aware() -def delete_jobs(session=None, insecure=None, **kwargs): - return _delete_all(models.Job, insecure=insecure, **kwargs) - - -@db_base.session_aware() -def create_webhook(values, session=None): - webhook = models.Webhook() - webhook.update(values.copy()) - - try: - webhook.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for webhook: %s" % e.columns - ) - - return webhook - - -@db_base.insecure_aware() -@db_base.session_aware() -def get_webhook(id, insecure=None, session=None): - webhook = _get_db_object_by_id(models.Webhook, id, insecure=insecure) - - if not webhook: - raise exc.DBEntityNotFoundError("Webhook not found [id=%s]" % id) - - return webhook - - -@db_base.session_aware() -def get_webhooks(session=None, **kwargs): - return _get_collection_sorted_by_time(models.Webhook, **kwargs) - - -@db_base.session_aware() -def delete_webhook(id, session=None): - webhook = get_webhook(id) - session.delete(webhook) - - -@db_base.session_aware() -def update_webhook(id, values, session=None): - webhook = get_webhook(id) - webhook.update(values.copy()) - - return webhook - - -@db_base.insecure_aware() -@db_base.session_aware() -def delete_webhooks(session=None, insecure=None, **kwargs): - return _delete_all(models.Webhook, 
insecure=insecure, **kwargs) - - -@db_base.session_aware() -def increase_function_version(function_id, old_version, session=None, - **kwargs): - """This function is supposed to be invoked within locking section.""" - version = models.FunctionVersion() - kwargs.update( - { - "function_id": function_id, - "version_number": old_version + 1 - } - ) - version.update(kwargs.copy()) - - try: - version.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for function_versions: %s" % e.columns - ) - - return version - - -@db_base.insecure_aware() -@db_base.session_aware() -def get_function_version(function_id, version, session=None, insecure=None): - if insecure: - query = db_base.model_query(models.FunctionVersion) - else: - query = _secure_query(models.FunctionVersion) - - version_db = query.filter_by( - function_id=function_id, version_number=version - ).first() - - if not version_db: - raise exc.DBEntityNotFoundError( - "FunctionVersion not found [function_id=%s, version_number=%s]" % - (function_id, version) - ) - - return version_db - - -# This function is only used in unit test. 
-@db_base.session_aware() -def update_function_version(function_id, version, session=None, **kwargs): - version_db = get_function_version(function_id, version, session=session) - version_db.update(kwargs.copy()) - - return version_db - - -@db_base.session_aware() -def delete_function_version(function_id, version, session=None): - version_db = get_function_version(function_id, version) - session.delete(version_db) - - -@db_base.session_aware() -def get_function_versions(session=None, **kwargs): - return _get_collection_sorted_by_time(models.FunctionVersion, **kwargs) - - -@db_base.session_aware() -def create_function_alias(session=None, **kwargs): - alias = models.FunctionAlias() - alias.update(kwargs.copy()) - - try: - alias.save(session=session) - except oslo_db_exc.DBDuplicateEntry as e: - raise exc.DBError( - "Duplicate entry for function_aliases: %s" % e.columns - ) - - return alias - - -@db_base.insecure_aware() -@db_base.session_aware() -def get_function_alias(name, session=None, insecure=None): - alias = _get_db_object_by_name(models.FunctionAlias, - name, - insecure=insecure) - - if not alias: - raise exc.DBEntityNotFoundError( - "FunctionAlias not found [name=%s]" % - (name) - ) - - return alias - - -@db_base.session_aware() -def get_function_aliases(session=None, **kwargs): - return _get_collection_sorted_by_time(models.FunctionAlias, **kwargs) - - -@db_base.session_aware() -def update_function_alias(name, session=None, **kwargs): - alias_db = get_function_alias(name, session=session) - alias_db.update(kwargs.copy()) - - return alias_db - - -@db_base.session_aware() -def delete_function_alias(name, session=None): - alias_db = get_function_alias(name) - session.delete(alias_db) - - -@db_base.session_aware() -def delete_function_aliases(session=None, **kwargs): - return _delete_all(models.FunctionAlias, **kwargs) diff --git a/qinling/db/sqlalchemy/filters.py b/qinling/db/sqlalchemy/filters.py deleted file mode 100644 index 029c29fd..00000000 --- 
a/qinling/db/sqlalchemy/filters.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2016 NEC Corporation. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -def apply_filters(query, model, **filters): - filter_dict = {} - - for key, value in filters.items(): - column_attr = getattr(model, key) - - if isinstance(value, dict): - if 'in' in value: - query = query.filter(column_attr.in_(value['in'])) - elif 'nin' in value: - query = query.filter(~column_attr.in_(value['nin'])) - elif 'neq' in value: - query = query.filter(column_attr != value['neq']) - elif 'gt' in value: - query = query.filter(column_attr > value['gt']) - elif 'gte' in value: - query = query.filter(column_attr >= value['gte']) - elif 'lt' in value: - query = query.filter(column_attr < value['lt']) - elif 'lte' in value: - query = query.filter(column_attr <= value['lte']) - elif 'eq' in value: - query = query.filter(column_attr == value['eq']) - elif 'has' in value: - like_pattern = '%{0}%'.format(value['has']) - - query = query.filter(column_attr.like(like_pattern)) - else: - filter_dict[key] = value - - if filter_dict: - query = query.filter_by(**filter_dict) - - return query diff --git a/qinling/db/sqlalchemy/migration/__init__.py b/qinling/db/sqlalchemy/migration/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/db/sqlalchemy/migration/alembic.ini b/qinling/db/sqlalchemy/migration/alembic.ini deleted file mode 100644 index 7a00cabc..00000000 --- 
a/qinling/db/sqlalchemy/migration/alembic.ini +++ /dev/null @@ -1,58 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = qinling/db/sqlalchemy/migration/alembic_migrations - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -sqlalchemy.url = - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S \ No newline at end of file diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/README.md b/qinling/db/sqlalchemy/migration/alembic_migrations/README.md deleted file mode 100644 index 09e9e442..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/README.md +++ /dev/null @@ -1,61 +0,0 @@ -The migrations in `alembic_migrations/versions` contain the changes needed to migrate -between Qinling database revisions. A migration occurs by executing a script that -details the changes needed to upgrade the database. The migration scripts -are ordered so that multiple scripts can run sequentially. The scripts are executed by -Qinling's migration wrapper which uses the Alembic library to manage the migration. 
Qinling -supports migration from Pike or later. - -You can upgrade to the latest database version via: -``` -qinling-db-manage --config-file /path/to/qinling.conf upgrade head -``` - -To check the current database version: -``` -qinling-db-manage --config-file /path/to/qinling.conf current -``` - -To create a script to run the migration offline: -``` -qinling-db-manage --config-file /path/to/qinling.conf upgrade head --sql -``` - -To run the offline migration between specific migration versions: -``` -qinling-db-manage --config-file /path/to/qinling.conf upgrade : --sql -``` - -Upgrade the database incrementally: -``` -qinling-db-manage --config-file /path/to/qinling.conf upgrade --delta <# of revs> -``` - -Or, upgrade the database to one newer revision: -``` -qinling-db-manage --config-file /path/to/qinling.conf upgrade +1 -``` - -Create new revision: -``` -qinling-db-manage --config-file /path/to/qinling.conf revision -m "description of revision" --autogenerate -``` - -Create a blank file: -``` -qinling-db-manage --config-file /path/to/qinling.conf revision -m "description of revision" -``` - -This command does not perform any migrations, it only sets the revision. -Revision may be any existing revision. Use this command carefully. 
-``` -qinling-db-manage --config-file /path/to/qinling.conf stamp -``` - -To verify that the timeline does branch, you can run this command: -``` -qinling-db-manage --config-file /path/to/qinling.conf check_migration -``` - -If the migration path has branch, you can find the branch point via: -``` -qinling-db-manage --config-file /path/to/qinling.conf history \ No newline at end of file diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/__init__.py b/qinling/db/sqlalchemy/migration/alembic_migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/env.py b/qinling/db/sqlalchemy/migration/alembic_migrations/env.py deleted file mode 100644 index d5a421c2..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/env.py +++ /dev/null @@ -1,82 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from alembic import context -from logging import config as c -from oslo_utils import importutils -from sqlalchemy import create_engine -from sqlalchemy import pool - -from qinling.db.sqlalchemy import model_base - - -importutils.try_import('qinling.db.sqlalchemy.models') - -# This is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config -qinling_config = config.qinling_config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-c.fileConfig(config.config_file_name) - -# Add your model's MetaData object here for 'autogenerate' support. -target_metadata = model_base.QinlingSecureModelBase.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - context.configure(url=qinling_config.database.connection) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = create_engine( - qinling_config.database.connection, - poolclass=pool.NullPool - ) - - connection = engine.connect() - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - try: - with context.begin_transaction(): - context.run_migrations() - finally: - connection.close() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/script.py.mako b/qinling/db/sqlalchemy/migration/alembic_migrations/script.py.mako deleted file mode 100644 index efbb28a3..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/script.py.mako +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright ${create_date.year} OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - - -def upgrade(): - ${upgrades if upgrades else "pass"} \ No newline at end of file diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/001_pike.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/001_pike.py deleted file mode 100644 index 3730823c..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/001_pike.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2017 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Pike release - -Revision ID: 001 -Revises: None -Create Date: 2017-05-03 12:02:51.935368 - -""" - -# revision identifiers, used by Alembic. 
-revision = '001' -down_revision = None - -import re - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.schema import CreateTable - -from qinling.db.sqlalchemy import types as st - - -@compiles(CreateTable) -def _add_if_not_exists(element, compiler, **kw): - output = compiler.visit_create_table(element, **kw) - if element.element.info.get("check_ifexists"): - output = re.sub( - "^\s*CREATE TABLE", "CREATE TABLE IF NOT EXISTS", output, re.S) - return output - - -def upgrade(): - op.create_table( - 'runtimes', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('image', sa.String(length=255), nullable=False), - sa.Column('status', sa.String(length=32), nullable=False), - sa.Column('is_public', sa.BOOLEAN, nullable=False), - sa.PrimaryKeyConstraint('id'), - info={"check_ifexists": True} - ) - - op.create_table( - 'functions', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('runtime_id', sa.String(length=36), nullable=True), - sa.Column('memory_size', sa.Integer, nullable=True), - sa.Column('timeout', sa.Integer, nullable=True), - sa.Column('code', st.JsonLongDictType(), nullable=False), - sa.Column('entry', sa.String(length=80), nullable=True), - sa.Column('count', sa.Integer, nullable=False), - sa.Column('trust_id', sa.String(length=80), nullable=True), - 
sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['runtime_id'], [u'runtimes.id']), - info={"check_ifexists": True} - ) - - op.create_table( - 'executions', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('function_id', sa.String(length=36), nullable=False), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('status', sa.String(length=32), nullable=False), - sa.Column('sync', sa.BOOLEAN, nullable=False), - sa.Column('input', st.JsonLongDictType(), nullable=True), - sa.Column('result', st.JsonLongDictType(), nullable=True), - sa.Column('logs', sa.Text(), nullable=True), - sa.PrimaryKeyConstraint('id'), - info={"check_ifexists": True} - ) - - op.create_table( - 'jobs', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('function_id', sa.String(length=36), nullable=False), - sa.Column('function_input', sa.String(length=255), nullable=True), - sa.Column('status', sa.String(length=32), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), - sa.Column('pattern', sa.String(length=32), nullable=True), - sa.Column('first_execution_time', sa.DateTime(), nullable=True), - sa.Column('next_execution_time', sa.DateTime(), nullable=False), - sa.Column('count', sa.Integer(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['function_id'], [u'functions.id']), - info={"check_ifexists": True} - ) - - op.create_table( - 'webhooks', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - 
sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('function_id', sa.String(length=36), nullable=False), - sa.PrimaryKeyConstraint('id'), - info={"check_ifexists": True} - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/002_add_function_version_support.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/002_add_function_version_support.py deleted file mode 100644 index 571aedc6..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/002_add_function_version_support.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""add function version support - -Revision ID: 002 -Revises: 001 -Create Date: 2018-04-12 00:12:45.461970 - -""" - -# revision identifiers, used by Alembic. 
-revision = '002' -down_revision = '001' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.create_table( - 'function_versions', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('function_id', sa.String(length=36), nullable=False), - sa.Column('description', sa.String(length=255), nullable=True), - sa.Column('version_number', sa.Integer, nullable=False), - sa.Column('count', sa.Integer, nullable=False), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['function_id'], [u'functions.id']), - sa.UniqueConstraint('function_id', 'version_number', 'project_id'), - sa.Index( - 'function_versions_project_id_function_id_version_number', - 'project_id', 'function_id', 'version_number' - ) - ) - - op.add_column( - 'functions', - sa.Column('latest_version', sa.Integer, nullable=False), - ) - - op.add_column( - 'executions', - sa.Column('function_version', sa.Integer, nullable=False), - ) - - op.add_column( - 'jobs', - sa.Column('function_version', sa.Integer, nullable=False), - ) - - op.add_column( - 'webhooks', - sa.Column('function_version', sa.Integer, nullable=False), - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/003_add_cpu_column.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/003_add_cpu_column.py deleted file mode 100644 index b7953768..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/003_add_cpu_column.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""add cpu column for functions - -Revision ID: 003 -Revises: 002 -Create Date: 2018-04-19 06:03:46.687706 - -""" - -# revision identifiers, used by Alembic. -revision = '003' -down_revision = '002' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column( - 'functions', - sa.Column('cpu', sa.Integer(), nullable=False) - ) - - op.alter_column( - 'functions', - 'memory_size', - existing_type=sa.Integer(), - nullable=False - ) - # ### end Alembic commands ### diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/004_add_function_aliases_support.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/004_add_function_aliases_support.py deleted file mode 100644 index 5e5b6286..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/004_add_function_aliases_support.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""add function aliases support - -Revision ID: 004 -Revises: 003 -Create Date: 2018-05-17 03:09:04.888969 - -""" - -# revision identifiers, used by Alembic. -revision = '004' -down_revision = '003' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.create_table( - 'function_aliases', - sa.Column('created_at', sa.DateTime(), nullable=True), - sa.Column('updated_at', sa.DateTime(), nullable=True), - sa.Column('project_id', sa.String(length=80), nullable=False), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('function_id', sa.String(length=36), nullable=False), - sa.Column('function_version', sa.Integer, nullable=False), - sa.Column('description', sa.String(length=255), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['function_id'], [u'functions.id']), - sa.UniqueConstraint('function_id', 'function_version', 'project_id'), - sa.UniqueConstraint('name', 'project_id'), - sa.Index( - 'function_aliases_project_id_function_id_function_version', - 'project_id', 'function_id', 'function_version' - ), - sa.Index( - 'function_aliases_project_id_name', - 'project_id', 'name' - ) - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/005_add_trusted_for_runtime.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/005_add_trusted_for_runtime.py deleted file mode 100644 index 95b3ab63..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/005_add_trusted_for_runtime.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""add trusted field for runtimes table - -Revision ID: 005 -Revises: 004 -Create Date: 2018-07-24 12:00:00.888969 - -""" - -# revision identifiers, used by Alembic. -revision = '005' -down_revision = '004' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.add_column( - 'runtimes', - sa.Column('trusted', sa.BOOLEAN, nullable=False, default=True, - server_default="1") - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/006_function_alias_for_job.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/006_function_alias_for_job.py deleted file mode 100644 index eec6bcbe..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/006_function_alias_for_job.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2019 Catalyst Cloud Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""add function_alias field for jobs table - -Revision ID: 006 -Revises: 005 -""" - -revision = '006' -down_revision = '005' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.add_column( - 'jobs', - sa.Column('function_alias', sa.String(length=255), nullable=True) - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/007_function_id_for_job.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/007_function_id_for_job.py deleted file mode 100644 index ec3e2650..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/007_function_id_for_job.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 Catalyst Cloud Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Make function id nullable for jobs table - -Revision ID: 007 -Revises: 006 -""" - -revision = '007' -down_revision = '006' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.alter_column( - 'jobs', - 'function_id', - existing_type=sa.String(length=36), - nullable=True - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/008_function_alias_for_execution.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/008_function_alias_for_execution.py deleted file mode 100644 index b830ad72..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/008_function_alias_for_execution.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2019 - Ormuco Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""add function_alias field for executions table -Revision ID: 008 -Revises: 007 -""" - -revision = '008' -down_revision = '007' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.add_column( - 'executions', - sa.Column('function_alias', sa.String(length=255), nullable=True) - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/009_function_alias_for_webhook.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/009_function_alias_for_webhook.py deleted file mode 100644 index c6609ffb..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/009_function_alias_for_webhook.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2019 - Ormuco Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""add function_alias field for webhooks table -Revision ID: 009 -Revises: 008 -""" - -revision = '009' -down_revision = '008' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.add_column( - 'webhooks', - sa.Column('function_alias', sa.String(length=255), nullable=True) - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/010_function_id_for_execution.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/010_function_id_for_execution.py deleted file mode 100644 index 471625c7..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/010_function_id_for_execution.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2019 - Ormuco Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Make function id nullable for executions table - -Revision ID: 010 -Revises: 009 -""" - -revision = '010' -down_revision = '009' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.alter_column( - 'executions', - 'function_id', - existing_type=sa.String(length=36), - nullable=True - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/011_function_id_for_webhook.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/011_function_id_for_webhook.py deleted file mode 100644 index c676b4bc..00000000 --- a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/011_function_id_for_webhook.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 - Ormuco Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Make function id nullable for webhooks table - -Revision ID: 011 -Revises: 010 -""" - -revision = '011' -down_revision = '010' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - op.alter_column( - 'webhooks', - 'function_id', - existing_type=sa.String(length=36), - nullable=True - ) - op.alter_column( - 'webhooks', - 'function_version', - existing_type=sa.Integer, - nullable=True - ) diff --git a/qinling/db/sqlalchemy/migration/alembic_migrations/versions/__init__.py b/qinling/db/sqlalchemy/migration/alembic_migrations/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/db/sqlalchemy/migration/cli.py b/qinling/db/sqlalchemy/migration/cli.py deleted file mode 100644 index 67365a28..00000000 --- a/qinling/db/sqlalchemy/migration/cli.py +++ /dev/null @@ -1,120 +0,0 @@ -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Starter script for qinling-db-manage.""" - -import os -import sys - -from alembic import command as alembic_cmd -from alembic import config as alembic_cfg -from alembic import util as alembic_u -from oslo_config import cfg -from oslo_utils import importutils - -# We need to import qinling.api.app to -# make sure we register all needed options. -importutils.try_import('qinling.api.app') - -CONF = cfg.CONF - - -def do_alembic_command(config, cmd, *args, **kwargs): - try: - getattr(alembic_cmd, cmd)(config, *args, **kwargs) - except alembic_u.CommandError as e: - alembic_u.err(str(e)) - - -def do_check_migration(config, _cmd): - do_alembic_command(config, 'branches') - - -def do_upgrade(config, cmd): - if not CONF.command.revision and not CONF.command.delta: - raise SystemExit('You must provide a revision or relative delta') - - revision = CONF.command.revision - - if CONF.command.delta: - sign = '+' if CONF.command.name == 'upgrade' else '-' - revision = sign + str(CONF.command.delta) - - do_alembic_command(config, cmd, revision, sql=CONF.command.sql) - - -def do_stamp(config, cmd): - do_alembic_command( - config, cmd, - CONF.command.revision, - sql=CONF.command.sql - ) - - -def do_revision(config, cmd): - do_alembic_command( - config, cmd, - message=CONF.command.message, - autogenerate=CONF.command.autogenerate, - sql=CONF.command.sql - ) - - -def add_command_parsers(subparsers): - for name in ['current', 'history', 'branches']: - parser = subparsers.add_parser(name) - parser.set_defaults(func=do_alembic_command) - - parser = subparsers.add_parser('upgrade') - parser.add_argument('--delta', type=int) - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision', nargs='?') - parser.set_defaults(func=do_upgrade) - - parser = subparsers.add_parser('stamp') - parser.add_argument('--sql', action='store_true') - parser.add_argument('revision', nargs='?') - parser.set_defaults(func=do_stamp) - - parser = subparsers.add_parser('revision') - 
parser.add_argument('-m', '--message') - parser.add_argument('--autogenerate', action='store_true') - parser.add_argument('--sql', action='store_true') - parser.set_defaults(func=do_revision) - - -command_opt = cfg.SubCommandOpt('command', - title='Command', - help='Available commands', - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - - -def main(): - config = alembic_cfg.Config( - os.path.join(os.path.dirname(__file__), 'alembic.ini') - ) - config.set_main_option( - 'script_location', - 'qinling.db.sqlalchemy.migration:alembic_migrations' - ) - # attach the Qinling conf to the Alembic conf - config.qinling_config = CONF - - CONF(project='qinling') - CONF.command.func(config, CONF.command.name) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/qinling/db/sqlalchemy/model_base.py b/qinling/db/sqlalchemy/model_base.py deleted file mode 100644 index b988d15f..00000000 --- a/qinling/db/sqlalchemy/model_base.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from oslo_db.sqlalchemy import models as oslo_models -import sqlalchemy as sa -from sqlalchemy.ext import declarative - -from qinling import context -from qinling.utils import common - - -def id_column(): - return sa.Column( - sa.String(36), - primary_key=True, - default=common.generate_unicode_uuid - ) - - -def get_project_id(): - return context.get_ctx().projectid - - -class _QinlingModelBase(oslo_models.ModelBase, oslo_models.TimestampMixin): - """Base class for all Qinling SQLAlchemy DB Models.""" - - __table__ = None - - __hash__ = object.__hash__ - - def __init__(self, **kwargs): - for key, value in kwargs.items(): - setattr(self, key, value) - - def __eq__(self, other): - if type(self) is not type(other): - return False - - for col in self.__table__.columns: - # In case of single table inheritance a class attribute - # corresponding to a table column may not exist so we need - # to skip these attributes. - if (hasattr(self, col.name) and hasattr(other, col.name) and - getattr(self, col.name) != getattr(other, col.name)): - return False - - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def to_dict(self): - """sqlalchemy based automatic to_dict method.""" - d = {} - - for col in self.__table__.columns: - d[col.name] = getattr(self, col.name) - - common.datetime_to_str(d, 'created_at') - common.datetime_to_str(d, 'updated_at') - - return d - - def get_clone(self): - """Clones current object, loads all fields and returns the result.""" - m = self.__class__() - - for col in self.__table__.columns: - if hasattr(self, col.name): - setattr(m, col.name, getattr(self, col.name)) - - setattr(m, 'created_at', getattr(self, 'created_at').isoformat(' ')) - - updated_at = getattr(self, 'updated_at') - if updated_at: - setattr(m, 'updated_at', updated_at.isoformat(' ')) - return m - - def __repr__(self): - return '%s %s' % (type(self).__name__, self.to_dict().__repr__()) - - -QinlingModelBase = 
declarative.declarative_base(cls=_QinlingModelBase) - - -class QinlingSecureModelBase(QinlingModelBase): - """Base class for all secure models.""" - __abstract__ = True - - id = id_column() - project_id = sa.Column( - sa.String(80), - nullable=False, - default=get_project_id - ) diff --git a/qinling/db/sqlalchemy/models.py b/qinling/db/sqlalchemy/models.py deleted file mode 100644 index 3459c1cc..00000000 --- a/qinling/db/sqlalchemy/models.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import sqlalchemy as sa -from sqlalchemy.orm import relationship - -from qinling.db.sqlalchemy import model_base -from qinling.db.sqlalchemy import types as st -from qinling.utils import common - - -class Runtime(model_base.QinlingSecureModelBase): - __tablename__ = 'runtimes' - - name = sa.Column(sa.String(255)) - description = sa.Column(sa.String(255)) - image = sa.Column(sa.String(255), nullable=False) - status = sa.Column(sa.String(32), nullable=False) - is_public = sa.Column(sa.BOOLEAN, default=True) - trusted = sa.Column(sa.BOOLEAN, default=True) - - -class Function(model_base.QinlingSecureModelBase): - __tablename__ = 'functions' - - name = sa.Column(sa.String(255), nullable=True) - description = sa.Column(sa.String(255)) - runtime_id = sa.Column( - sa.String(36), sa.ForeignKey(Runtime.id), nullable=True - ) - cpu = sa.Column(sa.Integer, default=0) - memory_size = sa.Column(sa.Integer, default=0) - timeout = sa.Column(sa.Integer) - code = sa.Column(st.JsonLongDictType(), nullable=False) - entry = sa.Column(sa.String(80), nullable=True) - count = sa.Column(sa.Integer, default=0) - trust_id = sa.Column(sa.String(80)) - latest_version = sa.Column(sa.Integer, default=0) - - -class Execution(model_base.QinlingSecureModelBase): - __tablename__ = 'executions' - - function_alias = sa.Column(sa.String(255), nullable=True) - function_id = sa.Column(sa.String(36), nullable=True) - function_version = sa.Column(sa.Integer, default=0) - status = sa.Column(sa.String(32), nullable=False) - sync = sa.Column(sa.BOOLEAN, default=True) - input = sa.Column(st.JsonLongDictType()) - result = sa.Column(st.JsonLongDictType()) - description = sa.Column(sa.String(255)) - logs = sa.Column(sa.Text(), nullable=True) - - -class Job(model_base.QinlingSecureModelBase): - __tablename__ = 'jobs' - - name = sa.Column(sa.String(255), nullable=True) - pattern = sa.Column(sa.String(32), nullable=True) - status = sa.Column(sa.String(32), nullable=False) - first_execution_time = 
sa.Column(sa.DateTime, nullable=True) - next_execution_time = sa.Column(sa.DateTime, nullable=False) - count = sa.Column(sa.Integer) - function_id = sa.Column( - sa.String(36), - sa.ForeignKey(Function.id), - nullable=True - ) - function = relationship('Function', back_populates="jobs") - function_input = sa.Column(sa.String(255), nullable=True) - function_version = sa.Column(sa.Integer, default=0) - function_alias = sa.Column(sa.String(255), nullable=True) - - def to_dict(self): - d = super(Job, self).to_dict() - common.datetime_to_str(d, 'first_execution_time') - common.datetime_to_str(d, 'next_execution_time') - return d - - -class Webhook(model_base.QinlingSecureModelBase): - __tablename__ = 'webhooks' - - function_alias = sa.Column(sa.String(255), nullable=True) - function_id = sa.Column( - sa.String(36), - sa.ForeignKey(Function.id), - nullable=True - ) - function_version = sa.Column(sa.Integer, nullable=True) - description = sa.Column(sa.String(255)) - - -class FunctionVersion(model_base.QinlingSecureModelBase): - __tablename__ = 'function_versions' - - __table_args__ = ( - sa.UniqueConstraint('project_id', 'function_id', 'version_number'), - sa.Index( - '%s_project_id_function_id_version_number' % __tablename__, - 'project_id', - 'function_id', - 'version_number' - ) - ) - - function_id = sa.Column( - sa.String(36), - sa.ForeignKey(Function.id, ondelete='CASCADE') - ) - function = relationship('Function', back_populates="versions") - description = sa.Column(sa.String(255), nullable=True) - version_number = sa.Column(sa.Integer, default=0) - count = sa.Column(sa.Integer, default=0) - - -class FunctionAlias(model_base.QinlingSecureModelBase): - __tablename__ = 'function_aliases' - - __table_args__ = ( - sa.UniqueConstraint('project_id', 'function_id', 'function_version'), - sa.Index( - '%s_project_id_function_id_function_version' % __tablename__, - 'project_id', - 'function_id', - 'function_version' - ), - sa.UniqueConstraint('project_id', 'name'), - 
sa.Index( - '%s_project_id_name' % __tablename__, - 'project_id', - 'name' - ) - ) - - function_id = sa.Column( - sa.String(36), - sa.ForeignKey(Function.id) - ) - name = sa.Column(sa.String(255), nullable=False) - description = sa.Column(sa.String(255), nullable=True) - function_version = sa.Column(sa.Integer, default=0) - - -# Only get running jobs -Function.jobs = relationship( - "Job", - back_populates="function", - primaryjoin=( - "and_(Function.id==Job.function_id, " - "~Job.status.in_(['done', 'cancelled']))" - ) -) -Function.webhooks = relationship("Webhook", uselist=True, backref="function") -Function.versions = relationship( - "FunctionVersion", - order_by="FunctionVersion.version_number", - uselist=True, - lazy='select', - cascade="all, delete-orphan" -) -Function.aliases = relationship( - "FunctionAlias", - uselist=True, - backref="function" -) diff --git a/qinling/db/sqlalchemy/types.py b/qinling/db/sqlalchemy/types.py deleted file mode 100644 index bb76e0c1..00000000 --- a/qinling/db/sqlalchemy/types.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# This module implements SQLAlchemy-based types for dict and list -# expressed by json-strings -# - -from oslo_serialization import jsonutils -import sqlalchemy as sa -from sqlalchemy.dialects import mysql -from sqlalchemy.ext import mutable - - -class JsonEncoded(sa.TypeDecorator): - """Represents an immutable structure as a json-encoded string.""" - - impl = sa.Text - - def process_bind_param(self, value, dialect): - if value is not None: - value = jsonutils.dumps(value) - return value - - def process_result_value(self, value, dialect): - if value is not None: - value = jsonutils.loads(value) - return value - - -class MutableList(mutable.Mutable, list): - @classmethod - def coerce(cls, key, value): - """Convert plain lists to MutableList.""" - if not isinstance(value, MutableList): - if isinstance(value, list): - return MutableList(value) - - # this call will raise ValueError - return mutable.Mutable.coerce(key, value) - return value - - def __add__(self, value): - """Detect list add events and emit change events.""" - list.__add__(self, value) - self.changed() - - def append(self, value): - """Detect list add events and emit change events.""" - list.append(self, value) - self.changed() - - def __setitem__(self, key, value): - """Detect list set events and emit change events.""" - list.__setitem__(self, key, value) - self.changed() - - def __delitem__(self, i): - """Detect list del events and emit change events.""" - list.__delitem__(self, i) - self.changed() - - -def JsonDictType(): - """Returns an SQLAlchemy Column Type suitable to store a Json dict.""" - return mutable.MutableDict.as_mutable(JsonEncoded) - - -def JsonListType(): - """Returns an SQLAlchemy Column Type suitable to store a Json array.""" - return MutableList.as_mutable(JsonEncoded) - - -def LongText(): - # TODO(rakhmerov): Need to do for postgres. 
- return sa.Text().with_variant(mysql.LONGTEXT(), 'mysql') - - -class JsonEncodedLongText(JsonEncoded): - impl = LongText() - - -def JsonLongDictType(): - return mutable.MutableDict.as_mutable(JsonEncodedLongText) diff --git a/qinling/engine/__init__.py b/qinling/engine/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/engine/default_engine.py b/qinling/engine/default_engine.py deleted file mode 100644 index 7f0c3184..00000000 --- a/qinling/engine/default_engine.py +++ /dev/null @@ -1,274 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo_config import cfg -from oslo_log import log as logging -import requests -import tenacity - -from qinling.db import api as db_api -from qinling.engine import utils -from qinling import exceptions as exc -from qinling import status -from qinling.utils import constants -from qinling.utils import etcd_util - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -class DefaultEngine(object): - def __init__(self, orchestrator, qinling_endpoint): - self.orchestrator = orchestrator - self.qinling_endpoint = qinling_endpoint - self.session = requests.Session() - - def create_runtime(self, ctx, runtime_id): - LOG.info('Start to create runtime %s.', runtime_id) - - with db_api.transaction(): - runtime = db_api.get_runtime(runtime_id) - - try: - self.orchestrator.create_pool( - runtime_id, - runtime.image, - trusted=runtime.trusted - ) - runtime.status = status.AVAILABLE - LOG.info('Runtime %s created.', runtime_id) - except Exception as e: - LOG.exception( - 'Failed to create pool for runtime %s. 
Error: %s', - runtime_id, - str(e) - ) - runtime.status = status.ERROR - - def delete_runtime(self, ctx, runtime_id): - LOG.info('Start to delete runtime %s.', runtime_id) - - self.orchestrator.delete_pool(runtime_id) - db_api.delete_runtime(runtime_id) - - LOG.info('Deleted runtime %s.', runtime_id) - - def update_runtime(self, ctx, runtime_id, image=None, pre_image=None): - LOG.info( - 'Start to update runtime %s, image: %s, pre_image: %s', - runtime_id, image, pre_image - ) - - ret = self.orchestrator.update_pool(runtime_id, image=image) - - if ret: - values = {'status': status.AVAILABLE} - db_api.update_runtime(runtime_id, values) - - LOG.info('Updated runtime %s.', runtime_id) - else: - values = {'status': status.AVAILABLE, 'image': pre_image} - db_api.update_runtime(runtime_id, values) - - LOG.info('Rollbacked runtime %s.', runtime_id) - - def get_runtime_pool(self, ctx, runtime_id): - LOG.info("Getting pool information for runtime %s", runtime_id) - - return self.orchestrator.get_pool(runtime_id) - - @tenacity.retry( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(30), - reraise=True, - retry=tenacity.retry_if_exception_type(exc.EtcdLockException) - ) - def function_load_check(self, function_id, version, runtime_id): - """Check function load and scale the workers if needed. - - :return: None if no need to scale up otherwise return the service url - """ - with etcd_util.get_worker_lock(function_id, version) as lock: - if not lock.is_acquired(): - raise exc.EtcdLockException( - 'Etcd: failed to get worker lock for function %s' - '(version %s).' % (function_id, version) - ) - - workers = etcd_util.get_workers(function_id, version) - running_execs = db_api.get_executions( - function_id=function_id, - function_version=version, - status=status.RUNNING - ) - concurrency = (len(running_execs) or 1) / (len(workers) or 1) - if (len(workers) == 0 or - concurrency > CONF.engine.function_concurrency): - LOG.info( - 'Scale up function %s(version %s). 
Current concurrency: ' - '%s, execution number %s, worker number %s', - function_id, version, concurrency, len(running_execs), - len(workers) - ) - - # NOTE(kong): The increase step could be configurable - return self.scaleup_function(None, function_id, version, - runtime_id, 1) - - def create_execution(self, ctx, execution_id, function_id, - function_version, runtime_id, input=None): - LOG.info( - 'Creating execution. execution_id=%s, function_id=%s, ' - 'function_version=%s, runtime_id=%s, input=%s', - execution_id, function_id, function_version, runtime_id, input - ) - - function = db_api.get_function(function_id) - source = function.code['source'] - rlimit = { - 'cpu': function.cpu, - 'memory_size': function.memory_size - } - image = None - identifier = None - labels = None - svc_url = None - is_image_source = source == constants.IMAGE_FUNCTION - - # Auto scale workers if needed - if not is_image_source: - try: - svc_url = self.function_load_check(function_id, - function_version, - runtime_id) - except ( - exc.OrchestratorException, - exc.EtcdLockException - ) as e: - utils.handle_execution_exception(execution_id, str(e)) - return - - temp_url = etcd_util.get_service_url(function_id, function_version) - svc_url = svc_url or temp_url - if svc_url: - func_url = '%s/execute' % svc_url - LOG.debug( - 'Found service url for function: %s(version %s), execution: ' - '%s, url: %s', - function_id, function_version, execution_id, func_url - ) - - data = utils.get_request_data( - CONF, function_id, function_version, execution_id, - rlimit, input, function.entry, function.trust_id, - self.qinling_endpoint, function.timeout - ) - success, res = utils.url_request( - self.session, func_url, body=data - ) - - utils.finish_execution(execution_id, success, res, - is_image_source=is_image_source) - return - - if is_image_source: - image = function.code['image'] - identifier = ('%s-%s' % (execution_id, function_id))[:63] - else: - identifier = runtime_id - labels = {'runtime_id': 
runtime_id} - - try: - # For image function, it will be executed inside this method; - # For package type function it only sets up underlying resources - # and get a service url. If the service url is already created - # beforehand, nothing happens. - _, svc_url = self.orchestrator.prepare_execution( - function_id, - function_version, - rlimit=rlimit, - image=image, - identifier=identifier, - labels=labels, - input=input, - ) - except exc.OrchestratorException as e: - utils.handle_execution_exception(execution_id, str(e)) - return - - # For image type function, wait for its completion and retrieve the - # worker log; - # For package type function, invoke and get log - success, res = self.orchestrator.run_execution( - execution_id, - function_id, - function_version, - rlimit=rlimit if svc_url else None, - input=input, - identifier=identifier, - service_url=svc_url, - entry=function.entry, - trust_id=function.trust_id, - timeout=function.timeout - ) - - utils.finish_execution(execution_id, success, res, - is_image_source=is_image_source) - - def delete_function(self, ctx, function_id, function_version=0): - """Deletes underlying resources allocated for function.""" - LOG.info('Start to delete function %s(version %s).', function_id, - function_version) - - self.orchestrator.delete_function(function_id, function_version) - - LOG.info('Deleted function %s(version %s).', function_id, - function_version) - - def scaleup_function(self, ctx, function_id, function_version, runtime_id, - count=1): - worker_names, service_url = self.orchestrator.scaleup_function( - function_id, - function_version, - identifier=runtime_id, - count=count - ) - - for name in worker_names: - etcd_util.create_worker(function_id, name, - version=function_version) - - etcd_util.create_service_url(function_id, service_url, - version=function_version) - - LOG.info('Finished scaling up function %s(version %s).', function_id, - function_version) - - return service_url - - def scaledown_function(self, 
ctx, function_id, function_version=0, - count=1): - workers = etcd_util.get_workers(function_id, function_version) - worker_deleted_num = ( - count if len(workers) > count else len(workers) - 1 - ) - workers = workers[:worker_deleted_num] - - for worker in workers: - LOG.debug('Removing worker %s', worker) - self.orchestrator.delete_worker(worker) - etcd_util.delete_worker(function_id, worker, - version=function_version) - - LOG.info('Finished scaling down function %s(version %s).', function_id, - function_version) diff --git a/qinling/engine/service.py b/qinling/engine/service.py deleted file mode 100644 index 613d5183..00000000 --- a/qinling/engine/service.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import cotyledon -from oslo_config import cfg -from oslo_log import log as logging -import oslo_messaging as messaging -from oslo_messaging.rpc import dispatcher - -from qinling.db import api as db_api -from qinling.engine import default_engine as engine -from qinling.orchestrator import base as orchestra_base -from qinling import rpc -from qinling.services import periodics -from qinling.utils.openstack import keystone as keystone_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -class EngineService(cotyledon.Service): - def __init__(self, worker_id): - super(EngineService, self).__init__(worker_id) - self.server = None - - def run(self): - qinling_endpoint = keystone_utils.get_qinling_endpoint() - orchestrator = orchestra_base.load_orchestrator(CONF, qinling_endpoint) - db_api.setup_db() - - topic = CONF.engine.topic - server = CONF.engine.host - transport = messaging.get_rpc_transport(CONF) - target = messaging.Target(topic=topic, server=server, fanout=False) - endpoint = engine.DefaultEngine(orchestrator, qinling_endpoint) - access_policy = dispatcher.DefaultRPCAccessPolicy - self.server = messaging.get_rpc_server( - transport, - target, - [endpoint], - executor='threading', - access_policy=access_policy, - serializer=rpc.ContextSerializer( - messaging.serializer.JsonPayloadSerializer()) - ) - - LOG.info('Starting function mapping periodic task...') - periodics.start_function_mapping_handler(endpoint) - - LOG.info('Starting engine...') - self.server.start() - - def terminate(self): - periodics.stop() - - if self.server: - LOG.info('Stopping engine...') - self.server.stop() - self.server.wait() diff --git a/qinling/engine/utils.py b/qinling/engine/utils.py deleted file mode 100644 index bd425bfa..00000000 --- a/qinling/engine/utils.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import time - -from oslo_log import log as logging -import requests -import tenacity - -from qinling import context -from qinling.db import api as db_api -from qinling import status -from qinling.utils import constants - -LOG = logging.getLogger(__name__) - - -def url_request(request_session, url, body=None): - """Send request to a service url.""" - exception = None - - # Send ping request first to make sure the url works - try: - temp = url.split('/') - temp[-1] = 'ping' - ping_url = '/'.join(temp) - r = tenacity.Retrying( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(30), - reraise=True, - retry=tenacity.retry_if_exception_type(IOError) - ) - r.call(request_session.get, ping_url, timeout=(3, 3), verify=False) - except Exception as e: - LOG.exception( - "Failed to request url %s, error: %s", ping_url, str(e) - ) - return False, {'output': 'Function execution failed.'} - - for a in range(10): - res = None - try: - # Default execution max duration is 3min, could be configurable - res = request_session.post( - url, json=body, timeout=(3, 180), verify=False - ) - return True, res.json() - except requests.ConnectionError as e: - exception = e - time.sleep(1) - except Exception as e: - LOG.exception( - "Failed to request url %s, error: %s", url, str(e) - ) - if res: - LOG.error("Response status: %s, content: %s", - res.status_code, res.content) - - return False, {'output': 'Function execution timeout.'} - - LOG.exception("Could not connect to function service. 
Reason: %s", - exception) - - return False, {'output': 'Internal service error.'} - - -def get_request_data(conf, function_id, version, execution_id, rlimit, input, - entry, trust_id, qinling_endpoint, timeout): - """Prepare the request body should send to the worker.""" - ctx = context.get_ctx() - - if version == 0: - download_url = ( - '%s/%s/functions/%s?download=true' % - (qinling_endpoint.strip('/'), constants.CURRENT_VERSION, - function_id) - ) - else: - download_url = ( - '%s/%s/functions/%s/versions/%s?download=true' % - (qinling_endpoint.strip('/'), constants.CURRENT_VERSION, - function_id, version) - ) - - data = { - 'execution_id': execution_id, - 'cpu': rlimit['cpu'], - 'memory_size': rlimit['memory_size'], - 'input': input, - 'function_id': function_id, - 'function_version': version, - 'entry': entry, - 'download_url': download_url, - 'request_id': ctx.request_id, - 'timeout': timeout, - } - if conf.pecan.auth_enable: - data.update( - { - 'token': ctx.auth_token, - 'auth_url': conf.keystone_authtoken.www_authenticate_uri, - 'username': conf.keystone_authtoken.username, - 'password': conf.keystone_authtoken.password, - 'trust_id': trust_id - } - ) - - return data - - -def db_set_execution_status(execution_id, execution_status, logs, res): - db_api.update_execution( - execution_id, - { - 'status': execution_status, - 'logs': logs, - 'result': res - } - ) - - -def finish_execution(execution_id, success, res, is_image_source=False): - logs = res.pop('logs', '') - success = success and res.pop('success', True) - - LOG.debug( - 'Finished execution %s, success: %s', execution_id, success - ) - db_set_execution_status( - execution_id, status.SUCCESS if success else status.FAILED, - logs, res - ) - - -def handle_execution_exception(execution_id, exc_str): - # This method should be called from an exception handler - LOG.exception( - 'Error running execution %s: %s', execution_id, exc_str - ) - db_set_execution_status( - execution_id, status.ERROR, - '', - 
{'output': 'Function execution failed.'} - ) diff --git a/qinling/exceptions.py b/qinling/exceptions.py deleted file mode 100644 index a17dfeb1..00000000 --- a/qinling/exceptions.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class QinlingException(Exception): - """Qinling specific exception. - - Reserved for situations that are not critical for program continuation. - It is possible to recover from this type of problems automatically and - continue program execution. Such problems may be related with invalid user - input (such as invalid syntax) or temporary environmental problems. - - In case if an instance of a certain exception type bubbles up to API layer - then this type of exception it must be associated with an http code so it's - clear how to represent it for a client. - - To correctly use this class, inherit from it and define a 'message' and - 'http_code' properties. - """ - message = "An unknown exception occurred" - http_code = 500 - - def __init__(self, message=None): - if message is not None: - self.message = message - - super(QinlingException, self).__init__( - '%d: %s' % (self.http_code, self.message)) - - @property - def code(self): - """This is here for webob to read. 
- - https://github.com/Pylons/webob/blob/master/webob/exc.py - """ - return self.http_code - - def __str__(self): - return self.message - - -class InputException(QinlingException): - http_code = 400 - - -class UnauthorizedException(QinlingException): - http_code = 401 - message = "Unauthorized" - - -class NotAllowedException(QinlingException): - http_code = 403 - message = "Operation not allowed" - - -class ConflictException(QinlingException): - http_code = 409 - message = ("The request could not be completed due to a conflict with the " - "current state of the target resource") - - -class RuntimeNotAvailableException(QinlingException): - http_code = 409 - message = "Runtime not available" - - -class DBError(QinlingException): - http_code = 400 - - -class DBEntityNotFoundError(DBError): - http_code = 404 - message = "Object not found" - - -class RuntimeNotFoundException(QinlingException): - http_code = 404 - message = "Runtime not found" - - -class ApplicationContextNotFoundException(QinlingException): - http_code = 400 - message = "Application context not found" - - -class StorageNotFoundException(QinlingException): - http_code = 404 - message = "Storage file not found" - - -class StorageProviderException(QinlingException): - http_code = 500 - - -class OrchestratorException(QinlingException): - http_code = 500 - message = "Orchestrator error." - - -class TrustFailedException(QinlingException): - http_code = 500 - message = "Trust operation failed." - - -class SwiftException(QinlingException): - http_code = 500 - message = "Failed to communicate with Swift." 
- - -class EtcdLockException(QinlingException): - http_code = 409 - message = 'Etcd lock failed' - - -class TimeoutException(QinlingException): - http_code = 500 - message = 'Function execution timeout' diff --git a/qinling/orchestrator/__init__.py b/qinling/orchestrator/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/orchestrator/base.py b/qinling/orchestrator/base.py deleted file mode 100644 index ee5ec270..00000000 --- a/qinling/orchestrator/base.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import abc - -from stevedore import driver - -from qinling import exceptions as exc - -ORCHESTRATOR = None - - -class OrchestratorBase(object, metaclass=abc.ABCMeta): - """OrchestratorBase interface.""" - - @abc.abstractmethod - def create_pool(self, name, image, trusted=True, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def delete_pool(self, name, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def update_pool(self, name, image=None, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def get_pool(self, name, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def prepare_execution(self, function_id, function_version, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def run_execution(self, execution_id, function_id, function_version, - **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def delete_function(self, function_id, function_version, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def scaleup_function(self, function_id, function_version, **kwargs): - raise NotImplementedError - - @abc.abstractmethod - def delete_worker(self, worker_name, **kwargs): - raise NotImplementedError - - -def load_orchestrator(conf, qinling_endpoint): - global ORCHESTRATOR - - if not ORCHESTRATOR: - try: - mgr = driver.DriverManager('qinling.orchestrator', - conf.engine.orchestrator, - invoke_on_load=True, - invoke_args=[conf, qinling_endpoint]) - - ORCHESTRATOR = mgr.driver - except Exception as e: - raise exc.OrchestratorException( - 'Failed to load orchestrator: %s. 
Error: %s' % - (conf.engine.orchestrator, str(e)) - ) - - return ORCHESTRATOR diff --git a/qinling/orchestrator/kubernetes/__init__.py b/qinling/orchestrator/kubernetes/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/orchestrator/kubernetes/manager.py b/qinling/orchestrator/kubernetes/manager.py deleted file mode 100644 index baf3a780..00000000 --- a/qinling/orchestrator/kubernetes/manager.py +++ /dev/null @@ -1,592 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import copy -import json -import os -import time - -import jinja2 -from oslo_log import log as logging -import requests -import tenacity -import yaml - -from qinling.engine import utils -from qinling import exceptions as exc -from qinling.orchestrator import base -from qinling.orchestrator.kubernetes import utils as k8s_util -from qinling.utils import common - - -LOG = logging.getLogger(__name__) - -TEMPLATES_DIR = (os.path.dirname(os.path.realpath(__file__)) + '/templates/') - - -class KubernetesManager(base.OrchestratorBase): - def __init__(self, conf, qinling_endpoint): - self.conf = conf - self.qinling_endpoint = qinling_endpoint - - clients = k8s_util.get_k8s_clients(self.conf) - self.v1 = clients['v1'] - self.v1extension = clients['v1extension'] - # self.apps_v1 = clients['apps_v1'] - - # Create namespace if not exists - self._ensure_namespace() - - # Get templates. 
- template_loader = jinja2.FileSystemLoader( - searchpath=os.path.dirname(TEMPLATES_DIR) - ) - jinja_env = jinja2.Environment( - loader=template_loader, autoescape=True, trim_blocks=True, - lstrip_blocks=True - ) - self.deployment_template = jinja_env.get_template('deployment.j2') - self.service_template = jinja_env.get_template('service.j2') - self.pod_template = jinja_env.get_template('pod.j2') - - # Refer to - # http://docs.python-requests.org/en/master/user/advanced/#session-objects - self.session = requests.Session() - - def _ensure_namespace(self): - ret = self.v1.list_namespace() - cur_names = [i.metadata.name for i in ret.items] - - if self.conf.kubernetes.namespace not in cur_names: - LOG.info('Creating namespace: %s', self.conf.kubernetes.namespace) - - namespace_body = { - 'apiVersion': 'v1', - 'kind': 'Namespace', - 'metadata': { - 'name': self.conf.kubernetes.namespace, - 'labels': { - 'name': self.conf.kubernetes.namespace - } - }, - } - - self.v1.create_namespace(namespace_body) - - LOG.info('Namespace %s created.', self.conf.kubernetes.namespace) - - @tenacity.retry( - wait=tenacity.wait_fixed(2), - stop=tenacity.stop_after_delay(600), - reraise=True, - retry=tenacity.retry_if_exception_type(exc.OrchestratorException) - ) - def _wait_deployment_available(self, name): - ret = self.v1extension.read_namespaced_deployment( - name, - self.conf.kubernetes.namespace - ) - - if (not ret.status.replicas or - ret.status.replicas != ret.status.available_replicas): - raise exc.OrchestratorException('Deployment %s not ready.' 
% name) - - def get_pool(self, name): - total = 0 - available = 0 - - try: - ret = self.v1extension.read_namespaced_deployment( - name, - namespace=self.conf.kubernetes.namespace - ) - except Exception: - raise exc.RuntimeNotFoundException() - - if not ret.status.replicas: - return {"total": total, "available": available} - - total = ret.status.replicas - - labels = {'runtime_id': name} - selector = common.convert_dict_to_string(labels) - ret = self.v1.list_namespaced_pod( - self.conf.kubernetes.namespace, - label_selector='!function_id,%s' % selector - ) - available = len(ret.items) - - return {"total": total, "available": available} - - def create_pool(self, name, image, trusted=True): - deployment_body = self.deployment_template.render( - { - "name": name, - "labels": {'runtime_id': name}, - "replicas": self.conf.kubernetes.replicas, - "container_name": 'worker', - "image": image, - "sidecar_image": self.conf.engine.sidecar_image, - "trusted": str(trusted).lower() - } - ) - - LOG.info( - "Creating deployment for runtime %s: \n%s", name, deployment_body - ) - - self.v1extension.create_namespaced_deployment( - body=yaml.safe_load(deployment_body), - namespace=self.conf.kubernetes.namespace, - async_req=False - ) - - self._wait_deployment_available(name) - - LOG.info("Deployment for runtime %s created.", name) - - def delete_pool(self, name): - """Delete all resources belong to the deployment.""" - LOG.info("Deleting deployment %s", name) - - labels = {'runtime_id': name} - selector = common.convert_dict_to_string(labels) - - self.v1extension.delete_collection_namespaced_replica_set( - self.conf.kubernetes.namespace, - label_selector=selector - ) - LOG.info("ReplicaSets in deployment %s deleted.", name) - - ret = self.v1.list_namespaced_service( - self.conf.kubernetes.namespace, label_selector=selector - ) - names = [i.metadata.name for i in ret.items] - for svc_name in names: - self.v1.delete_namespaced_service( - svc_name, - self.conf.kubernetes.namespace - ) - 
LOG.info("Services in deployment %s deleted.", name) - - self.v1extension.delete_collection_namespaced_deployment( - self.conf.kubernetes.namespace, - label_selector=selector, - field_selector='metadata.name=%s' % name - ) - # Should delete pods after deleting deployment to avoid pods are - # recreated by k8s. - self.v1.delete_collection_namespaced_pod( - self.conf.kubernetes.namespace, - label_selector=selector - ) - LOG.info("Pods in deployment %s deleted.", name) - LOG.info("Deployment %s deleted.", name) - - @tenacity.retry( - wait=tenacity.wait_fixed(5), - stop=tenacity.stop_after_delay(600), - reraise=True, - retry=tenacity.retry_if_exception_type(exc.OrchestratorException) - ) - def _wait_for_upgrade(self, deploy_name): - ret = self.v1extension.read_namespaced_deployment( - deploy_name, - self.conf.kubernetes.namespace - ) - if ret.status.unavailable_replicas is not None: - raise exc.OrchestratorException("Deployment %s upgrade not " - "ready." % deploy_name) - - def update_pool(self, name, image=None): - """Deployment rolling-update. - - Return True if successful, otherwise return False after rolling back. - """ - LOG.info('Start to do rolling-update deployment %s', name) - - body = { - 'spec': { - 'template': { - 'spec': { - 'containers': [ - { - 'name': 'worker', - 'image': image - } - ] - } - } - } - } - self.v1extension.patch_namespaced_deployment( - name, self.conf.kubernetes.namespace, body - ) - - try: - time.sleep(10) - self._wait_for_upgrade(name) - except exc.OrchestratorException: - LOG.warn("Timeout when waiting for the deployment %s upgrade, " - "Start to roll back.", name) - - body = {"rollbackTo": {"revision": 0}} - try: - self.v1extension.create_namespaced_deployment_rollback( - name, self.conf.kubernetes.namespace, body - ) - except Exception: - # TODO(lxkong): remove the exception catch until kubernetes - # python lib has a new release. 
Refer to - # https://github.com/kubernetes-client/python/issues/491 - pass - - return False - - return True - - def _choose_available_pods(self, labels, count=1, function_id=None, - function_version=0): - # If there is already a pod for function, reuse it. - if function_id: - ret = self.v1.list_namespaced_pod( - self.conf.kubernetes.namespace, - label_selector='function_id=%s,function_version=%s' % - (function_id, function_version) - ) - if len(ret.items) >= count: - LOG.debug( - "Function %s(version %s) already associates to a pod with " - "at least %d worker(s). ", - function_id, function_version, count - ) - return ret.items[:count] - - selector = common.convert_dict_to_string(labels) - ret = self.v1.list_namespaced_pod( - self.conf.kubernetes.namespace, - label_selector='!function_id,%s' % selector - ) - - if len(ret.items) < count: - return [] - - return ret.items[-count:] - - def _prepare_pod(self, pod, deployment_name, function_id, version, - labels=None): - """Pod preparation. - - 1. Update pod labels. - 2. Expose service. - """ - pod_name = pod.metadata.name - labels = labels or {} - - LOG.info( - 'Prepare pod %s in deployment %s for function %s(version %s)', - pod_name, deployment_name, function_id, version - ) - - # Update pod label. - pod_labels = self._update_pod_label( - pod, - # pod label value should be string - {'function_id': function_id, 'function_version': str(version)} - ) - - # Create service for the chosen pod. - service_name = "service-%s-%s" % (function_id, version) - labels.update( - {'function_id': function_id, 'function_version': str(version)} - ) - - # TODO(kong): Make the service type configurable. 
- service_body = self.service_template.render( - { - "service_name": service_name, - "labels": labels, - "selector": pod_labels - } - ) - try: - ret = self.v1.create_namespaced_service( - self.conf.kubernetes.namespace, yaml.safe_load(service_body) - ) - LOG.debug( - 'Service created for pod %s, service name: %s', - pod_name, service_name - ) - except Exception as e: - # Service already exists - if e.status == 409: - LOG.debug( - 'Service already exists for pod %s, service name: %s', - pod_name, service_name - ) - time.sleep(1) - ret = self.v1.read_namespaced_service( - service_name, self.conf.kubernetes.namespace - ) - else: - raise - - # Get external ip address for an arbitrary node. - node_port = ret.spec.ports[0].node_port - nodes = self.v1.list_node() - addresses = nodes.items[0].status.addresses - node_ip = None - for addr in addresses: - if addr.type == 'ExternalIP': - node_ip = addr.address - - # FIXME: test purpose using minikube - if not node_ip: - for addr in addresses: - if addr.type == 'InternalIP': - node_ip = addr.address - - pod_service_url = 'http://%s:%s' % (node_ip, node_port) - - return pod_name, pod_service_url - - def _create_pod(self, image, rlimit, pod_name, labels, input): - """Create pod for image type function.""" - if not input: - input_list = [] - elif isinstance(input, dict) and input.get('__function_input'): - input_list = input.get('__function_input').split() - else: - input_list = list(json.loads(input)) - - pod_body = self.pod_template.render( - { - "pod_name": pod_name, - "labels": labels, - "pod_image": image, - "input": input_list, - "req_cpu": str(rlimit['cpu']), - "limit_cpu": str(rlimit['cpu']), - "req_memory": str(rlimit['memory_size']), - "limit_memory": str(rlimit['memory_size']) - } - ) - - LOG.info( - "Creating pod %s for image function:\n%s", pod_name, pod_body - ) - - try: - self.v1.create_namespaced_pod( - self.conf.kubernetes.namespace, - body=yaml.safe_load(pod_body), - ) - except Exception: - LOG.exception("Failed 
to create pod.") - raise exc.OrchestratorException('Execution preparation failed.') - - def _update_pod_label(self, pod, new_label): - name = pod.metadata.name - - pod_labels = copy.deepcopy(pod.metadata.labels) or {} - pod_labels.update(new_label) - body = { - 'metadata': { - 'labels': pod_labels - } - } - self.v1.patch_namespaced_pod( - name, self.conf.kubernetes.namespace, body - ) - - LOG.debug('Labels updated for pod %s', name) - - return pod_labels - - def prepare_execution(self, function_id, version, rlimit=None, image=None, - identifier=None, labels=None, input=None): - """Prepare service URL for function version. - - :param rlimit: optional argument passed to limit cpu/mem resources. - - For image function, create a single pod with rlimit and input, so the - function will be executed in the resource limited pod. - - For normal function, choose a pod from the pool and expose a service, - return the service URL. - - return a tuple includes pod name and the servise url. - """ - pods = None - - labels = labels or {'function_id': function_id} - - if image: - if not rlimit: - LOG.critical('Param rlimit is required for image function.') - raise exc.OrchestratorException( - 'Execution preparation failed.' 
- ) - - self._create_pod(image, rlimit, identifier, labels, input) - - return identifier, None - else: - pods = self._choose_available_pods(labels, function_id=function_id, - function_version=version) - - if not pods: - LOG.critical('No worker available.') - raise exc.OrchestratorException('Execution preparation failed.') - - try: - pod_name, url = self._prepare_pod( - pods[0], identifier, function_id, version, labels - ) - return pod_name, url - except Exception: - LOG.exception('Pod preparation failed.') - self.delete_function(function_id, version, labels) - raise exc.OrchestratorException('Execution preparation failed.') - - def run_execution(self, execution_id, function_id, version, rlimit=None, - input=None, identifier=None, service_url=None, - entry='main.main', trust_id=None, timeout=None): - """Run execution. - - Return a tuple including the result and the output. - """ - if service_url: - func_url = '%s/execute' % service_url - data = utils.get_request_data( - self.conf, function_id, version, execution_id, rlimit, input, - entry, trust_id, self.qinling_endpoint, timeout - ) - LOG.debug( - 'Invoke function %s(version %s), url: %s, data: %s', - function_id, version, func_url, data - ) - - return utils.url_request(self.session, func_url, body=data) - else: - # Wait for image type function execution to be finished - def _wait_complete(): - pod = self.v1.read_namespaced_pod( - identifier, - self.conf.kubernetes.namespace - ) - status = pod.status.phase - - if status == 'Succeeded': - return pod - - raise exc.TimeoutException() - - duration = 0 - try: - r = tenacity.Retrying( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_delay(timeout), - retry=tenacity.retry_if_exception_type( - exc.TimeoutException), - reraise=True - ) - pod = r.call(_wait_complete) - - statuses = pod.status.container_statuses - for s in statuses: - if hasattr(s.state, "terminated"): - end_time = s.state.terminated.finished_at - start_time = s.state.terminated.started_at - delta = 
end_time - start_time - duration = delta.seconds - break - except exc.TimeoutException: - LOG.exception( - "Timeout for function execution %s, pod %s", - execution_id, identifier - ) - - self.v1.delete_namespaced_pod( - identifier, - self.conf.kubernetes.namespace - ) - LOG.debug('Pod %s deleted.', identifier) - - return False, {'output': 'Function execution timeout.', - 'duration': timeout} - except Exception: - LOG.exception("Failed to wait for pod %s", identifier) - return False, {'output': 'Function execution failed.', - 'duration': duration} - - log = self.v1.read_namespaced_pod_log( - identifier, - self.conf.kubernetes.namespace, - ) - - return True, {'duration': duration, 'logs': log} - - def delete_function(self, function_id, version, labels=None): - """Delete related resources for function. - - - Delete service - - Delete pods - """ - pre_label = { - 'function_id': function_id, - 'function_version': str(version) - } - labels = labels or pre_label - selector = common.convert_dict_to_string(labels) - - ret = self.v1.list_namespaced_service( - self.conf.kubernetes.namespace, label_selector=selector - ) - names = [i.metadata.name for i in ret.items] - for svc_name in names: - self.v1.delete_namespaced_service( - svc_name, - self.conf.kubernetes.namespace - ) - - self.v1.delete_collection_namespaced_pod( - self.conf.kubernetes.namespace, - label_selector=selector - ) - - def scaleup_function(self, function_id, version, identifier=None, count=1): - pod_names = [] - labels = {'runtime_id': identifier} - pods = self._choose_available_pods(labels, count=count) - - if not pods: - raise exc.OrchestratorException('Not enough workers available.') - - for pod in pods: - pod_name, service_url = self._prepare_pod( - pod, identifier, function_id, version, labels - ) - pod_names.append(pod_name) - - LOG.info('Pods scaled up for function %s(version %s): %s', function_id, - version, pod_names) - - return pod_names, service_url - - def delete_worker(self, pod_name, **kwargs): 
- self.v1.delete_namespaced_pod( - pod_name, - self.conf.kubernetes.namespace, - ) diff --git a/qinling/orchestrator/kubernetes/templates/deployment.j2 b/qinling/orchestrator/kubernetes/templates/deployment.j2 deleted file mode 100644 index dcf882b0..00000000 --- a/qinling/orchestrator/kubernetes/templates/deployment.j2 +++ /dev/null @@ -1,58 +0,0 @@ -apiVersion: extensions/v1beta1 -kind: Deployment -metadata: - name: {{ name }} - labels: - {% for key, value in labels.items() %} - {{ key }}: {{ value }} - {% endfor %} -spec: - replicas: {{ replicas }} - selector: - matchLabels: - {% for key, value in labels.items() %} - {{ key }}: {{ value }} - {% endfor %} - template: - metadata: - labels: - {% for key, value in labels.items() %} - {{ key }}: {{ value }} - {% endfor %} - annotations: - io.kubernetes.cri-o.TrustedSandbox: "{{ trusted }}" - spec: - terminationGracePeriodSeconds: 5 - automountServiceAccountToken: false - volumes: - - name: package-folder - emptyDir: {} - - name: cgroup-folder - hostPath: - path: /sys/fs/cgroup - containers: - - name: {{ container_name }} - image: {{ image }} - imagePullPolicy: IfNotPresent - ports: - - containerPort: 9090 - volumeMounts: - - name: package-folder - mountPath: /var/qinling/packages - - name: cgroup-folder - mountPath: /qinling_cgroup - env: - - name: POD_UID - valueFrom: - fieldRef: - fieldPath: metadata.uid - - name: QOS_CLASS - value: "BestEffort" - - name: sidecar - image: {{ sidecar_image }} - imagePullPolicy: IfNotPresent - ports: - - containerPort: 9091 - volumeMounts: - - name: package-folder - mountPath: /var/qinling/packages diff --git a/qinling/orchestrator/kubernetes/templates/pod.j2 b/qinling/orchestrator/kubernetes/templates/pod.j2 deleted file mode 100644 index 125a6c92..00000000 --- a/qinling/orchestrator/kubernetes/templates/pod.j2 +++ /dev/null @@ -1,31 +0,0 @@ -apiVersion: v1 -kind: Pod -metadata: - name: {{ pod_name }} - labels: - {% for key, value in labels.items() %} - {{ key }}: {{ value }} - {% 
endfor %} - annotations: - io.kubernetes.cri-o.TrustedSandbox: "false" -spec: - terminationGracePeriodSeconds: 5 - automountServiceAccountToken: false - containers: - - name: {{ pod_name }} - image: {{ pod_image }} - imagePullPolicy: IfNotPresent - {% if input %} - args: - {% for item in input %} - - "{{ item | safe }}" - {% endfor %} - {% endif %} - restartPolicy: Never - resources: - limits: - cpu: {{ limit_cpu }}m - memory: {{ limit_memory }} - requests: - cpu: {{ req_cpu }}m - memory: {{ req_memory }} diff --git a/qinling/orchestrator/kubernetes/templates/service.j2 b/qinling/orchestrator/kubernetes/templates/service.j2 deleted file mode 100644 index e2452fec..00000000 --- a/qinling/orchestrator/kubernetes/templates/service.j2 +++ /dev/null @@ -1,18 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - name: {{ service_name }} - labels: - {% for key, value in labels.items() %} - {{ key }}: "{{ value }}" - {% endfor %} -spec: - type: NodePort - selector: - {% for key, value in selector.items() %} - {{ key}}: "{{ value }}" - {% endfor %} - ports: - - protocol: TCP - port: 9090 - targetPort: 9090 diff --git a/qinling/orchestrator/kubernetes/utils.py b/qinling/orchestrator/kubernetes/utils.py deleted file mode 100644 index c4e0e972..00000000 --- a/qinling/orchestrator/kubernetes/utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kubernetes.client import api_client -# from kubernetes.client.apis import apps_v1_api -from kubernetes.client.apis import core_v1_api -from kubernetes.client.apis import extensions_v1beta1_api -from kubernetes.client import configuration as k8s_config - - -def get_k8s_clients(conf): - config = k8s_config.Configuration() - config.host = conf.kubernetes.kube_host - if conf.kubernetes.use_api_certificate: - config.ssl_ca_cert = conf.kubernetes.ssl_ca_cert - config.cert_file = conf.kubernetes.cert_file - config.key_file = conf.kubernetes.key_file - else: - config.verify_ssl = False - client = api_client.ApiClient(configuration=config) - v1 = core_v1_api.CoreV1Api(client) - v1extension = extensions_v1beta1_api.ExtensionsV1beta1Api(client) - # apps_v1 = apps_v1_api.AppsV1Api(client) - - clients = { - 'v1': v1, - # 'apps_v1': apps_v1 - 'v1extension': v1extension - } - - return clients diff --git a/qinling/orchestrator/swarm/__init__.py b/qinling/orchestrator/swarm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/rpc.py b/qinling/rpc.py deleted file mode 100644 index 4716144f..00000000 --- a/qinling/rpc.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from oslo_config import cfg -import oslo_messaging as messaging -from oslo_messaging.rpc import client - -from qinling import context as ctx -from qinling import exceptions as exc - -_TRANSPORT = None -_ENGINE_CLIENT = None - - -def cleanup(): - """Intended to be used by tests to recreate all RPC related objects.""" - - global _TRANSPORT - global _ENGINE_CLIENT - - _TRANSPORT = None - _ENGINE_CLIENT = None - - -def get_transport(): - global _TRANSPORT - - if not _TRANSPORT: - _TRANSPORT = messaging.get_rpc_transport(cfg.CONF) - - return _TRANSPORT - - -def get_engine_client(): - global _ENGINE_CLIENT - - if not _ENGINE_CLIENT: - _ENGINE_CLIENT = EngineClient(get_transport()) - - return _ENGINE_CLIENT - - -def _wrap_exception_and_reraise(exception): - message = "%s: %s" % (exception.__class__.__name__, exception.args[0]) - - raise exc.QinlingException(message) - - -def wrap_messaging_exception(method): - """This decorator unwrap remote error in one of QinlingException. - - oslo.messaging has different behavior on raising exceptions - when fake or rabbit transports are used. In case of rabbit - transport it raises wrapped RemoteError which forwards directly - to API. Wrapped RemoteError contains one of QinlingException raised - remotely on Engine and for correct exception interpretation we - need to unwrap and raise given exception and manually send it to - API layer. 
- """ - def decorator(*args, **kwargs): - try: - return method(*args, **kwargs) - except exc.QinlingException: - raise - except (client.RemoteError, Exception) as e: - if hasattr(e, 'exc_type') and hasattr(exc, e.exc_type): - exc_cls = getattr(exc, e.exc_type) - raise exc_cls(e.value) - - _wrap_exception_and_reraise(e) - - return decorator - - -class ContextSerializer(messaging.Serializer): - def __init__(self, base): - self._base = base - - def serialize_entity(self, context, entity): - if not self._base: - return entity - return self._base.serialize_entity(context, entity) - - def deserialize_entity(self, context, entity): - if not self._base: - return entity - return self._base.deserialize_entity(context, entity) - - def serialize_context(self, context): - return context.convert_to_dict() - - def deserialize_context(self, context): - qinling_ctx = ctx.Context.from_dict(context) - ctx.set_ctx(qinling_ctx) - - return qinling_ctx - - -class EngineClient(object): - """RPC Engine client.""" - - def __init__(self, transport): - """Constructs an RPC client for engine. - - :param transport: Messaging transport. 
- """ - serializer = ContextSerializer( - messaging.serializer.JsonPayloadSerializer()) - - self.topic = cfg.CONF.engine.topic - - self._client = messaging.RPCClient( - transport, - messaging.Target(topic=self.topic), - serializer=serializer - ) - - @wrap_messaging_exception - def create_runtime(self, id): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'create_runtime', - runtime_id=id - ) - - @wrap_messaging_exception - def delete_runtime(self, id): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'delete_runtime', - runtime_id=id - ) - - @wrap_messaging_exception - def update_runtime(self, id, image=None, pre_image=None): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'update_runtime', - runtime_id=id, - image=image, - pre_image=pre_image - ) - - @wrap_messaging_exception - def get_runtime_pool(self, runtime_id): - return self._client.prepare(topic=self.topic, server=None).call( - ctx.get_ctx(), - 'get_runtime_pool', - runtime_id=runtime_id - ) - - @wrap_messaging_exception - def create_execution(self, execution_id, function_id, version, runtime_id, - input=None, is_sync=True): - method_client = self._client.prepare(topic=self.topic, server=None) - - if is_sync: - return method_client.call( - ctx.get_ctx(), - 'create_execution', - execution_id=execution_id, - function_id=function_id, - function_version=version, - runtime_id=runtime_id, - input=input - ) - else: - method_client.cast( - ctx.get_ctx(), - 'create_execution', - execution_id=execution_id, - function_id=function_id, - function_version=version, - runtime_id=runtime_id, - input=input - ) - - @wrap_messaging_exception - def delete_function(self, id, version=0): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'delete_function', - function_id=id, - function_version=version - ) - - @wrap_messaging_exception - def scaleup_function(self, id, runtime_id, 
version=0, count=1): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'scaleup_function', - function_id=id, - runtime_id=runtime_id, - function_version=version, - count=count - ) - - @wrap_messaging_exception - def scaledown_function(self, id, version=0, count=1): - return self._client.prepare(topic=self.topic, server=None).cast( - ctx.get_ctx(), - 'scaledown_function', - function_id=id, - function_version=version, - count=count - ) diff --git a/qinling/services/__init__.py b/qinling/services/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/services/periodics.py b/qinling/services/periodics.py deleted file mode 100644 index c3988037..00000000 --- a/qinling/services/periodics.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from datetime import datetime -from datetime import timedelta -import threading - -from futurist import periodics -from oslo_config import cfg -from oslo_log import log as logging -from oslo_utils import timeutils - -from qinling import context -from qinling.db import api as db_api -from qinling.db.sqlalchemy import models -from qinling import rpc -from qinling import status -from qinling.utils import constants -from qinling.utils import etcd_util -from qinling.utils import executions -from qinling.utils import jobs -from qinling.utils.openstack import keystone as keystone_utils - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF -_periodic_tasks = {} - - -@periodics.periodic(300) -def handle_function_service_expiration(ctx, engine): - """Clean up resources related to expired functions. - - If it's image function, we will rely on the orchestrator itself to do the - image clean up, e.g. image collection feature in kubernetes. - """ - context.set_ctx(ctx) - delta = timedelta(seconds=CONF.engine.function_service_expiration) - expiry_time = datetime.utcnow() - delta - - results = db_api.get_functions( - sort_keys=['updated_at'], - insecure=True, - updated_at={'lte': expiry_time} - ) - - for func_db in results: - if not etcd_util.get_service_url(func_db.id, 0): - continue - - LOG.info( - 'Deleting service mapping and workers for function ' - '%s(version 0)', - func_db.id - ) - - # Delete resources related to the function - engine.delete_function(ctx, func_db.id, 0) - # Delete etcd keys - etcd_util.delete_function(func_db.id, 0) - - versions = db_api.get_function_versions( - sort_keys=['updated_at'], - insecure=True, - updated_at={'lte': expiry_time}, - ) - - for v in versions: - if not etcd_util.get_service_url(v.function_id, v.version_number): - continue - - LOG.info( - 'Deleting service mapping and workers for function ' - '%s(version %s)', - v.function_id, v.version_number - ) - - # Delete resources related to the function - engine.delete_function(ctx, 
v.function_id, v.version_number) - # Delete etcd keys - etcd_util.delete_function(v.function_id, v.version_number) - - -@periodics.periodic(3) -def handle_job(engine_client): - """Execute job task with no db transactions.""" - jobs_db = db_api.get_next_jobs(timeutils.utcnow() + timedelta(seconds=3)) - - for job in jobs_db: - job_id = job.id - func_alias = job.function_alias - - if func_alias: - alias = db_api.get_function_alias(func_alias, insecure=True) - func_id = alias.function_id - func_version = alias.function_version - else: - func_id = job.function_id - func_version = job.function_version - - LOG.debug("Processing job: %s, function: %s(version %s)", job_id, - func_id, func_version) - - func_db = db_api.get_function(func_id, insecure=True) - trust_id = func_db.trust_id - - try: - # Setup context before schedule job. - ctx = keystone_utils.create_trust_context( - trust_id, job.project_id - ) - context.set_ctx(ctx) - - if (job.count is not None and job.count > 0): - job.count -= 1 - - # Job delete/update is done using UPDATE ... FROM ... WHERE - # non-locking clause. 
- if job.count == 0: - modified = db_api.conditional_update( - models.Job, - { - 'status': status.DONE, - 'count': 0 - }, - { - 'id': job_id, - 'status': status.RUNNING - }, - insecure=True, - ) - else: - next_time = jobs.get_next_execution_time( - job.pattern, - job.next_execution_time - ) - - modified = db_api.conditional_update( - models.Job, - { - 'next_execution_time': next_time, - 'count': job.count - }, - { - 'id': job_id, - 'next_execution_time': job.next_execution_time - }, - insecure=True, - ) - - if not modified: - LOG.warning( - 'Job %s has been already handled by another periodic ' - 'task.', job_id - ) - continue - - LOG.debug( - "Starting to execute function %s(version %s) by job %s", - func_id, func_version, job_id - ) - - params = { - 'function_id': func_id, - 'function_version': func_version, - 'input': job.function_input, - 'sync': False, - 'description': constants.EXECUTION_BY_JOB % job_id - } - executions.create_execution(engine_client, params) - except Exception: - LOG.exception("Failed to process job %s", job_id) - finally: - context.set_ctx(None) - - -def start_function_mapping_handler(engine): - """Start function mapping handler thread. - - Function mapping handler is supposed to be running with engine service. - """ - worker = periodics.PeriodicWorker([]) - worker.add( - handle_function_service_expiration, - ctx=context.Context(), - engine=engine - ) - _periodic_tasks[constants.PERIODIC_FUNC_MAPPING_HANDLER] = worker - - thread = threading.Thread(target=worker.start) - thread.setDaemon(True) - thread.start() - - LOG.info('Function mapping handler started.') - - -def start_job_handler(): - """Start job handler thread. - - Job handler is supposed to be running with api service. 
- """ - worker = periodics.PeriodicWorker([]) - engine_client = rpc.get_engine_client() - worker.add( - handle_job, - engine_client=engine_client - ) - _periodic_tasks[constants.PERIODIC_JOB_HANDLER] = worker - - thread = threading.Thread(target=worker.start) - thread.setDaemon(True) - thread.start() - - LOG.info('Job handler started.') - - -def stop(task=None): - if not task: - for name, worker in _periodic_tasks.items(): - LOG.info('Stopping periodic task: %s', name) - worker.stop() - del _periodic_tasks[name] - else: - worker = _periodic_tasks.get(task) - if worker: - LOG.info('Stopping periodic task: %s', task) - worker.stop() - del _periodic_tasks[task] diff --git a/qinling/status.py b/qinling/status.py deleted file mode 100644 index 0e5af521..00000000 --- a/qinling/status.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -CREATING = 'creating' -AVAILABLE = 'available' -UPGRADING = 'upgrading' -ERROR = 'error' -DELETING = 'deleting' -RUNNING = 'running' -DONE = 'done' -PAUSED = 'paused' -CANCELLED = 'cancelled' -SUCCESS = 'success' -FAILED = 'failed' diff --git a/qinling/storage/__init__.py b/qinling/storage/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/storage/base.py b/qinling/storage/base.py deleted file mode 100644 index 8769f3e3..00000000 --- a/qinling/storage/base.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import abc - -from stevedore import driver - -from qinling import exceptions as exc - -STORAGE_PROVIDER = None - - -class PackageStorage(object, metaclass=abc.ABCMeta): - """PackageStorage interface.""" - - @abc.abstractmethod - def store(self, project_id, function, data, **kwargs): - """Store the function package data. - - :param project_id: Project ID. - :param function: Function ID. - :param data: Package file content. - :param kwargs: A dict may including - - md5sum: The MD5 provided by the user. - :return: A tuple (if the package is updated, MD5 value of the package) - """ - raise NotImplementedError - - @abc.abstractmethod - def retrieve(self, project_id, function, md5sum, version=0): - """Get function package data. - - :param project_id: Project ID. - :param function: Function ID. - :param md5sum: The function MD5. - :param version: Optional. 
The function version number. - :return: File descriptor that needs to close outside. - """ - raise NotImplementedError - - @abc.abstractmethod - def delete(self, project_id, function, md5sum, version=0): - raise NotImplementedError - - @abc.abstractmethod - def changed_since(self, project_id, function, l_md5, version): - """Check if the function package has changed. - - Check if the function package has changed between lastest and the - specified version. - - :param project_id: Project ID. - :param function: Function ID. - :param l_md5: Latest function package md5sum. - :param version: The version number compared with. - :return: True if changed otherwise False. - """ - raise NotImplementedError - - @abc.abstractmethod - def copy(self, project_id, function, l_md5, old_version): - """Copy function package for a new version. - - :param project_id: Project ID. - :param function: Function ID. - :param l_md5: Latest function package md5sum. - :param old_version: The version number that should copy from. - :return: None - """ - raise NotImplementedError - - -def load_storage_provider(conf): - global STORAGE_PROVIDER - - if not STORAGE_PROVIDER: - try: - mgr = driver.DriverManager( - 'qinling.storage.provider', - conf.storage.provider, - invoke_on_load=True, - invoke_args=[conf] - ) - - STORAGE_PROVIDER = mgr.driver - except Exception as e: - raise exc.StorageProviderException( - 'Failed to load storage provider: %s. Error: %s' % - (conf.storage.provider, str(e)) - ) - - return STORAGE_PROVIDER diff --git a/qinling/storage/file_system.py b/qinling/storage/file_system.py deleted file mode 100644 index 93f5bdb3..00000000 --- a/qinling/storage/file_system.py +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import shutil -import zipfile - -from oslo_log import log as logging -from oslo_utils import fileutils - -from qinling import exceptions as exc -from qinling.storage import base -from qinling.utils import common - -LOG = logging.getLogger(__name__) -PACKAGE_NAME_TEMPLATE = "%s_%s.zip" -# Package path name including project ID -PACKAGE_PATH_TEMPLATE = "%s/%s_%s.zip" -# Package path name including version -PACKAGE_VERSION_TEMPLATE = "%s_%s_%s.zip" - - -class FileSystemStorage(base.PackageStorage): - """Interact with file system for function package storage.""" - - def __init__(self, conf): - self.base_path = conf.storage.file_system_dir - - def store(self, project_id, function, data, md5sum=None): - """Store the function package data to local file system. - - :param project_id: Project ID. - :param function: Function ID. - :param data: Package file content. - :param md5sum: The MD5 provided by the user. 
- :return: A tuple (if the package is updated, MD5 value of the package) - """ - LOG.debug( - 'Store package, function: %s, project: %s', function, project_id - ) - - project_path = os.path.join(self.base_path, project_id) - fileutils.ensure_tree(project_path) - - # Check md5 - md5_actual = common.md5(content=data) - if md5sum and md5_actual != md5sum: - raise exc.InputException("Package md5 mismatch.") - - func_zip = os.path.join( - project_path, - PACKAGE_NAME_TEMPLATE % (function, md5_actual) - ) - if os.path.exists(func_zip): - return False, md5_actual - - # Save package - new_func_zip = os.path.join(project_path, '%s.zip.new' % function) - with open(new_func_zip, 'wb') as fd: - fd.write(data) - - if not zipfile.is_zipfile(new_func_zip): - fileutils.delete_if_exists(new_func_zip) - raise exc.InputException("Package is not a valid ZIP package.") - - os.rename(new_func_zip, func_zip) - - return True, md5_actual - - def retrieve(self, project_id, function, md5sum, version=0): - """Get function package data. - - If version is not 0, return the package data of that specific function - version. - - :param project_id: Project ID. - :param function: Function ID. - :param md5sum: The function MD5. - :param version: Optional. The function version number. - :return: File descriptor that needs to close outside. - """ - LOG.debug( - 'Getting package data, function: %s, version: %s, md5sum: %s, ' - 'project: %s', - function, version, md5sum, project_id - ) - - if version != 0: - project_dir = os.path.join(self.base_path, project_id) - for filename in os.listdir(project_dir): - root, ext = os.path.splitext(filename) - if (root.startswith("%s_%d" % (function, version)) - and ext == '.zip'): - func_zip = os.path.join(project_dir, filename) - break - else: - raise exc.StorageNotFoundException( - 'Package of version %d function %s for project %s not ' - 'found.' 
% (version, function, project_id) - ) - else: - func_zip = os.path.join( - self.base_path, - PACKAGE_PATH_TEMPLATE % (project_id, function, md5sum) - ) - - if not os.path.exists(func_zip): - raise exc.StorageNotFoundException( - 'Package of function %s for project %s not found.' % - (function, project_id) - ) - - f = open(func_zip, 'rb') - LOG.debug('Found package data for function %s version %d', function, - version) - - return f - - def delete(self, project_id, function, md5sum, version=0): - LOG.debug( - 'Deleting package data, function: %s, version: %s, md5sum: %s, ' - 'project: %s', - function, version, md5sum, project_id - ) - - if version != 0: - project_dir = os.path.join(self.base_path, project_id) - for filename in os.listdir(project_dir): - root, ext = os.path.splitext(filename) - if (root.startswith("%s_%d" % (function, version)) - and ext == '.zip'): - func_zip = os.path.join(project_dir, filename) - break - else: - return - else: - func_zip = os.path.join( - self.base_path, - PACKAGE_PATH_TEMPLATE % (project_id, function, md5sum) - ) - - if os.path.exists(func_zip): - os.remove(func_zip) - - def changed_since(self, project_id, function, l_md5, version): - """Check if the function package has changed. - - Check if the function package has changed between lastest and the - specified version. - - :param project_id: Project ID. - :param function: Function ID. - :param l_md5: Latest function package md5sum. - :param version: The version number compared with. - :return: True if changed otherwise False. - """ - # If it's the first version creation, don't check. - if version == 0: - return True - - version_path = os.path.join( - self.base_path, project_id, - PACKAGE_VERSION_TEMPLATE % (function, version, l_md5) - ) - if os.path.exists(version_path): - return False - - return True - - def copy(self, project_id, function, l_md5, old_version): - """Copy function package for a new version. - - :param project_id: Project ID. - :param function: Function ID. 
- :param l_md5: Latest function package md5sum. - :param old_version: The version number that should copy from. - :return: None - """ - src_package = os.path.join(self.base_path, - project_id, - PACKAGE_NAME_TEMPLATE % (function, l_md5) - ) - dest_package = os.path.join(self.base_path, - project_id, - PACKAGE_VERSION_TEMPLATE % - (function, old_version + 1, l_md5)) - - try: - shutil.copyfile(src_package, dest_package) - except Exception: - msg = "Failed to create new function version." - LOG.exception(msg) - raise exc.StorageProviderException(msg) diff --git a/qinling/tests/__init__.py b/qinling/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/__init__.py b/qinling/tests/unit/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/api/__init__.py b/qinling/tests/unit/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/api/base.py b/qinling/tests/unit/api/base.py deleted file mode 100644 index d61f1ab4..00000000 --- a/qinling/tests/unit/api/base.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import shutil -import tempfile -from unittest import mock - -from oslo_config import cfg -import pecan -import pecan.testing -from webtest import app as webtest_app - -from qinling.tests.unit import base - -CONF = cfg.CONF - - -class APITest(base.DbTestCase): - def setUp(self): - super(APITest, self).setUp() - - # Config package directory before app starts. - package_dir = tempfile.mkdtemp(prefix='tmp_qinling') - self.override_config('file_system_dir', package_dir, 'storage') - self.addCleanup(shutil.rmtree, package_dir, True) - - # Disable authentication by default for API tests. - self.override_config('auth_enable', False, group='pecan') - - # Disable job handler. The following pecan app instantiation will - # invoke qinling.api.app:setup_app() - self.override_config('enable_job_handler', False, group='api') - - pecan_opts = CONF.pecan - self.app = pecan.testing.load_test_app({ - 'app': { - 'root': pecan_opts.root, - 'modules': pecan_opts.modules, - 'debug': pecan_opts.debug, - 'auth_enable': False, - } - }) - - self.addCleanup(pecan.set_config, {}, overwrite=True) - - self.patch_ctx = mock.patch('qinling.context.Context.from_environ') - self.mock_ctx = self.patch_ctx.start() - self.mock_ctx.return_value = self.ctx - self.addCleanup(self.patch_ctx.stop) - - def _assertNotFound(self, url): - try: - self.app.get(url, headers={'Accept': 'application/json'}) - except webtest_app.AppError as error: - self.assertIn('Bad response: 404 Not Found', str(error)) - return - - self.fail('Expected 404 Not found but got OK') - - def _assertUnauthorized(self, url): - try: - self.app.get(url, headers={'Accept': 'application/json'}) - except webtest_app.AppError as error: - self.assertIn('Bad response: 401 Unauthorized', str(error)) - return - - self.fail('Expected 401 Unauthorized but got OK') diff --git a/qinling/tests/unit/api/controllers/__init__.py b/qinling/tests/unit/api/controllers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/qinling/tests/unit/api/controllers/v1/__init__.py b/qinling/tests/unit/api/controllers/v1/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/api/controllers/v1/test_execution.py b/qinling/tests/unit/api/controllers/v1/test_execution.py deleted file mode 100644 index 0bfe6eb1..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_execution.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling import status -from qinling.tests.unit.api import base - - -class TestExecutionController(base.APITest): - def setUp(self): - super(TestExecutionController, self).setUp() - - db_func = self.create_function() - self.func_id = db_func.id - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_create_with_function(self, mock_create_execution): - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - - self.assertEqual(201, resp.status_int) - - resp = self.app.get('/v1/functions/%s' % self.func_id) - - self.assertEqual(1, resp.json.get('count')) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_create_with_version(self, mock_rpc): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - body = { - 'function_id': self.func_id, - 'function_version': 1 - } - - resp = self.app.post_json('/v1/executions', body) - self.assertEqual(201, resp.status_int) - - resp = self.app.get('/v1/functions/%s' % self.func_id) - self.assertEqual(0, resp.json.get('count')) - - resp = self.app.get('/v1/functions/%s/versions/1' % self.func_id) - self.assertEqual(1, resp.json.get('count')) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_create_with_alias(self, mock_rpc): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'name': name - } - db_api.create_function_alias(**body) - - execution_body = { - 'function_alias': name - } - resp = self.app.post_json('/v1/executions', execution_body) - self.assertEqual(201, resp.status_int) - self.assertEqual(name, resp.json.get('function_alias')) - - resp = self.app.get('/v1/functions/%s' % self.func_id) - self.assertEqual(0, resp.json.get('count')) - - resp 
= self.app.get('/v1/functions/%s/versions/1' % self.func_id) - self.assertEqual(1, resp.json.get('count')) - - def test_create_with_invalid_alias(self): - body = { - 'function_alias': 'fake_alias', - } - - resp = self.app.post_json('/v1/executions', body, expect_errors=True) - - self.assertEqual(404, resp.status_int) - - def test_create_without_required_params(self): - resp = self.app.post( - '/v1/executions', - params={}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_create_rpc_error(self, mock_create_execution): - mock_create_execution.side_effect = exc.QinlingException - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - - self.assertEqual(201, resp.status_int) - self.assertEqual(status.ERROR, resp.json.get('status')) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_get(self, mock_create_execution): - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - - self.assertEqual(201, resp.status_int) - - resp = self.app.get('/v1/executions/%s' % resp.json.get('id')) - - self.assertEqual(self.func_id, resp.json.get('function_id')) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_get_all(self, mock_create_execution): - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - exec_id = resp.json.get('id') - - self.assertEqual(201, resp.status_int) - - resp = self.app.get('/v1/executions') - - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item( - resp.json['executions'], id=exec_id - ) - self._assertDictContainsSubset(actual, body) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_get_all_filter(self, mock_create_execution): - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - exec_id = resp.json.get('id') - 
self.assertEqual(201, resp.status_int) - - # Test filtering by 'function_id' - resp = self.app.get('/v1/executions?function_id=%s' % self.func_id) - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item( - resp.json['executions'], id=exec_id - ) - self._assertDictContainsSubset(actual, body) - - # Test filtering by 'status' - resp = self.app.get( - '/v1/executions?function_id=%s&status=running' % self.func_id - ) - self.assertEqual(200, resp.status_int) - self._assert_single_item(resp.json['executions'], id=exec_id) - - @mock.patch('qinling.rpc.EngineClient.create_execution') - def test_delete(self, mock_create_execution): - body = { - 'function_id': self.func_id, - } - resp = self.app.post_json('/v1/executions', body) - exec_id = resp.json.get('id') - - resp = self.app.delete('/v1/executions/%s' % exec_id) - - self.assertEqual(204, resp.status_int) diff --git a/qinling/tests/unit/api/controllers/v1/test_function.py b/qinling/tests/unit/api/controllers/v1/test_function.py deleted file mode 100644 index 92f1545b..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_function.py +++ /dev/null @@ -1,641 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from datetime import datetime -import json -import tempfile -from unittest import mock -import uuid - -from oslo_config import cfg - -from qinling.db import api as db_api -from qinling import status -from qinling.tests.unit.api import base -from qinling.tests.unit import base as unit_base -from qinling.utils import constants - - -class TestFunctionController(base.APITest): - def setUp(self): - super(TestFunctionController, self).setUp() - - # Insert a runtime record in db for each test case. The data will be - # removed automatically in tear down. - db_runtime = self.create_runtime() - self.runtime_id = db_runtime.id - - @mock.patch('qinling.storage.file_system.FileSystemStorage.store') - def test_post(self, mock_store): - mock_store.return_value = (True, 'fake_md5') - - with tempfile.NamedTemporaryFile() as f: - body = { - 'name': self.rand_name('function', prefix=self.prefix), - 'code': json.dumps({"source": "package"}), - 'runtime_id': self.runtime_id, - } - resp = self.app.post( - '/v1/functions', - params=body, - upload_files=[('package', f.name, f.read())] - ) - - self.assertEqual(201, resp.status_int) - self.assertEqual(1, mock_store.call_count) - - body.update( - { - 'entry': 'main.main', - 'code': {"source": "package", "md5sum": "fake_md5"}, - 'timeout': cfg.CONF.resource_limits.default_timeout - } - ) - self._assertDictContainsSubset(resp.json, body) - - @mock.patch('qinling.storage.file_system.FileSystemStorage.store') - def test_post_timeout(self, mock_store): - mock_store.return_value = (True, 'fake_md5') - - with tempfile.NamedTemporaryFile() as f: - body = { - 'runtime_id': self.runtime_id, - 'code': json.dumps({"source": "package"}), - 'timeout': 3 - } - resp = self.app.post( - '/v1/functions', - params=body, - upload_files=[('package', f.name, f.read())] - ) - - self.assertEqual(201, resp.status_int) - self.assertEqual(3, resp.json['timeout']) - - def test_post_timeout_invalid(self): - with tempfile.NamedTemporaryFile() as f: - body = { - 
'runtime_id': self.runtime_id, - 'code': json.dumps({"source": "package"}), - 'timeout': cfg.CONF.resource_limits.max_timeout + 1 - } - resp = self.app.post( - '/v1/functions', - params=body, - upload_files=[('package', f.name, f.read())], - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'timeout resource limitation not within the allowable range', - resp.json['faultstring'] - ) - - @mock.patch("qinling.utils.openstack.keystone.create_trust") - @mock.patch('qinling.utils.openstack.keystone.get_swiftclient') - @mock.patch('qinling.context.AuthHook.before') - def test_post_from_swift(self, mock_auth, mock_client, mock_trust): - self.override_config('auth_enable', True, group='pecan') - - swift_conn = mock.Mock() - mock_client.return_value = swift_conn - swift_conn.head_object.return_value = { - 'accept-ranges': 'bytes', - 'content-length': str(constants.MAX_PACKAGE_SIZE - 1) - } - mock_trust.return_value.id = str(uuid.uuid4()) - - body = { - 'name': 'swift_function', - 'code': json.dumps( - { - "source": "swift", - "swift": {"container": "container", "object": "object"} - } - ), - 'runtime_id': self.runtime_id, - } - resp = self.app.post('/v1/functions', params=body) - - self.assertEqual(201, resp.status_int) - - body.update( - { - 'entry': 'main.main', - 'code': { - "source": "swift", - "swift": {"container": "container", "object": "object"} - } - } - ) - self._assertDictContainsSubset(resp.json, body) - - def test_post_swift_not_enough_params(self): - body = { - 'name': 'swift_function', - 'code': json.dumps( - { - "source": "swift", - "swift": {"container": "fake-container"} - } - ), - 'runtime_id': self.runtime_id, - } - resp = self.app.post( - '/v1/functions', - params=body, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - @mock.patch('qinling.utils.openstack.keystone.get_swiftclient') - @mock.patch('qinling.context.AuthHook.before') - def test_post_swift_size_exceed(self, mock_auth, mock_client): - 
self.override_config('auth_enable', True, group='pecan') - swift_conn = mock.Mock() - mock_client.return_value = swift_conn - swift_conn.head_object.return_value = { - 'accept-ranges': 'bytes', - 'content-length': str(constants.MAX_PACKAGE_SIZE + 1) - } - - body = { - 'name': 'swift_function', - 'code': json.dumps( - { - "source": "swift", - "swift": {"container": "container", "object": "object"} - } - ), - 'runtime_id': self.runtime_id, - } - resp = self.app.post( - '/v1/functions', - params=body, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - def test_get(self): - db_func = self.create_function(runtime_id=self.runtime_id) - expected = { - 'id': db_func.id, - "code": {"source": "package", "md5sum": "fake_md5"}, - "name": db_func.name, - 'entry': 'main.main', - "project_id": unit_base.DEFAULT_PROJECT_ID, - "cpu": cfg.CONF.resource_limits.default_cpu, - "memory_size": cfg.CONF.resource_limits.default_memory, - "timeout": cfg.CONF.resource_limits.default_timeout, - } - - resp = self.app.get('/v1/functions/%s' % db_func.id) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, expected) - - def test_get_all(self): - db_func = self.create_function(runtime_id=self.runtime_id) - expected = { - 'id': db_func.id, - "name": db_func.name, - 'entry': 'main.main', - "project_id": unit_base.DEFAULT_PROJECT_ID, - "cpu": cfg.CONF.resource_limits.default_cpu, - "memory_size": cfg.CONF.resource_limits.default_memory, - "timeout": cfg.CONF.resource_limits.default_timeout, - } - - resp = self.app.get('/v1/functions') - - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item( - resp.json['functions'], id=db_func.id - ) - self._assertDictContainsSubset(actual, expected) - - def test_put_name(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - resp = self.app.put_json( - '/v1/functions/%s' % db_func.id, {'name': 'new_name'} - ) - - self.assertEqual(200, resp.status_int) - 
self.assertEqual('new_name', resp.json['name']) - - def test_put_timeout(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - resp = self.app.put_json( - '/v1/functions/%s' % db_func.id, {'timeout': 10} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(10, resp.json['timeout']) - - def test_put_timeout_invalid(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - # Check for type of cpu values. - resp = self.app.put_json( - '/v1/functions/%s' % db_func.id, - {'timeout': cfg.CONF.resource_limits.max_timeout + 1}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'timeout resource limitation not within the allowable range', - resp.json['faultstring'] - ) - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.storage.file_system.FileSystemStorage.store') - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') - @mock.patch('qinling.rpc.EngineClient.delete_function') - def test_put_package(self, mock_delete_func, mock_delete, mock_store, - mock_etcd_del): - db_func = self.create_function(runtime_id=self.runtime_id) - mock_store.return_value = (True, "fake_md5_changed") - - with tempfile.NamedTemporaryFile() as f: - resp = self.app.put( - '/v1/functions/%s' % db_func.id, - params={}, - upload_files=[('package', f.name, f.read())] - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(1, mock_store.call_count) - self.assertEqual('fake_md5_changed', resp.json['code'].get('md5sum')) - - mock_delete_func.assert_called_once_with(db_func.id) - mock_etcd_del.assert_called_once_with(db_func.id) - mock_delete.assert_called_once_with(unit_base.DEFAULT_PROJECT_ID, - db_func.id, "fake_md5") - - @mock.patch('qinling.storage.file_system.FileSystemStorage.store') - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') 
- def test_put_package_md5_not_change(self, file_delete_mock, - etcd_delete_mock, function_delete_mock, - store_mock): - db_func = self.create_function(runtime_id=self.runtime_id) - store_mock.return_value = (False, "fake_md5") - - with tempfile.NamedTemporaryFile() as f: - resp = self.app.put( - '/v1/functions/%s' % db_func.id, - params={}, - upload_files=[('package', f.name, f.read())] - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual('fake_md5', resp.json['code'].get('md5sum')) - function_delete_mock.assert_called_once_with(db_func.id) - etcd_delete_mock.assert_called_once_with(db_func.id) - self.assertFalse(file_delete_mock.called) - - def test_put_package_same_md5_provided(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - with tempfile.NamedTemporaryFile() as f: - resp = self.app.put( - '/v1/functions/%s' % db_func.id, - params={ - "code": json.dumps( - {"md5sum": "fake_md5", "source": "package"} - ) - }, - upload_files=[('package', f.name, f.read())], - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.utils.openstack.swift.check_object') - @mock.patch('qinling.context.AuthHook.before') - def test_put_swift_function(self, mock_auth, mock_check, mock_etcd_delete, - mock_func_delete): - self.override_config('auth_enable', True, group='pecan') - mock_check.return_value = True - - db_func = self.create_function( - runtime_id=self.runtime_id, - code={ - "source": "swift", - "swift": {"container": "fake-container", "object": "fake-obj"} - } - ) - - body = { - 'code': json.dumps( - { - "source": "swift", - "swift": {"object": "new-obj"} - } - ), - } - resp = self.app.put_json('/v1/functions/%s' % db_func.id, body) - - self.assertEqual(200, resp.status_int) - swift_info = { - 'code': { - "source": "swift", - "swift": {"container": "fake-container", "object": "new-obj"} - } - } 
- self._assertDictContainsSubset(resp.json, swift_info) - - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.utils.openstack.swift.check_object') - @mock.patch('qinling.context.AuthHook.before') - def test_put_swift_function_without_source(self, mock_auth, mock_check, - mock_etcd_delete, - mock_func_delete): - self.override_config('auth_enable', True, group='pecan') - mock_check.return_value = True - - db_func = self.create_function( - runtime_id=self.runtime_id, - code={ - "source": "swift", - "swift": {"container": "fake-container", "object": "fake-obj"} - } - ) - - body = { - 'code': json.dumps( - { - "swift": {"object": "new-obj"} - } - ), - } - resp = self.app.put_json('/v1/functions/%s' % db_func.id, body) - - self.assertEqual(200, resp.status_int) - swift_info = { - 'code': { - "source": "swift", - "swift": {"container": "fake-container", "object": "new-obj"} - } - } - self._assertDictContainsSubset(resp.json, swift_info) - - def test_put_cpu_with_type_error(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - # Check for type of cpu values. - resp = self.app.put_json( - '/v1/functions/%s' % db_func.id, {'cpu': 'non-int'}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'Invalid cpu resource specified. An integer is required.', - resp.json['faultstring'] - ) - - def test_put_cpu_with_overrun_error(self): - db_func = self.create_function(runtime_id=self.runtime_id) - - # Check for cpu error with input out of range. 
- resp = self.app.put_json( - '/v1/functions/%s' % db_func.id, {'cpu': 0}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'cpu resource limitation not within the allowable range', - resp.json['faultstring'] - ) - - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - def test_put_cpu_and_memorysize(self, mock_delete_func, mock_etcd_del, - mock_storage_delete): - # Test for updating cpu/mem with good input values. - db_func = self.create_function(runtime_id=self.runtime_id) - - req_body = { - 'cpu': str(cfg.CONF.resource_limits.default_cpu + 1), - 'memory_size': str(cfg.CONF.resource_limits.default_memory + 1) - } - - resp = self.app.put_json('/v1/functions/%s' % db_func.id, req_body) - - self.assertEqual(200, resp.status_int) - self.assertEqual( - cfg.CONF.resource_limits.default_cpu + 1, - resp.json['cpu'] - ) - self.assertEqual( - cfg.CONF.resource_limits.default_memory + 1, - resp.json['memory_size'] - ) - mock_delete_func.assert_called_once_with(db_func.id) - mock_etcd_del.assert_called_once_with(db_func.id) - self.assertFalse(mock_storage_delete.called) - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') - def test_delete(self, mock_delete, mock_delete_func, mock_etcd_delete): - db_func = self.create_function(runtime_id=self.runtime_id) - resp = self.app.delete('/v1/functions/%s' % db_func.id) - - self.assertEqual(204, resp.status_int) - mock_delete.assert_called_once_with( - unit_base.DEFAULT_PROJECT_ID, db_func.id, "fake_md5" - ) - mock_delete_func.assert_called_once_with(db_func.id) - mock_etcd_delete.assert_called_once_with(db_func.id) - - def test_delete_with_running_job(self): - db_func = self.create_function(runtime_id=self.runtime_id) - 
self.create_job( - function_id=db_func.id, - status=status.AVAILABLE, - first_execution_time=datetime.utcnow(), - next_execution_time=datetime.utcnow(), - count=1 - ) - - resp = self.app.delete( - '/v1/functions/%s' % db_func.id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - def test_delete_with_webhook(self): - db_func = self.create_function(runtime_id=self.runtime_id) - self.create_webhook(function_id=db_func.id) - - resp = self.app.delete( - '/v1/functions/%s' % db_func.id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') - def test_delete_with_versions(self, mock_package_delete, - mock_engine_delete, mock_etcd_delete): - db_func = self.create_function(runtime_id=self.runtime_id) - func_id = db_func.id - # Create two versions for the function - db_api.increase_function_version(func_id, 0) - db_api.increase_function_version(func_id, 1) - - resp = self.app.delete('/v1/functions/%s' % func_id) - - self.assertEqual(204, resp.status_int) - - self.assertEqual(3, mock_package_delete.call_count) - self.assertEqual(3, mock_engine_delete.call_count) - self.assertEqual(3, mock_etcd_delete.call_count) - - mock_package_delete.assert_has_calls( - [ - mock.call(unit_base.DEFAULT_PROJECT_ID, func_id, None, - version=1), - mock.call(unit_base.DEFAULT_PROJECT_ID, func_id, None, - version=2), - mock.call(unit_base.DEFAULT_PROJECT_ID, func_id, "fake_md5") - ] - ) - - mock_engine_delete.assert_has_calls( - [ - mock.call(func_id, version=1), - mock.call(func_id, version=2), - mock.call(func_id) - ] - ) - - mock_etcd_delete.assert_has_calls( - [ - mock.call(func_id, version=1), - mock.call(func_id, version=2), - mock.call(func_id) - ] - ) - - def test_delete_with_version_associate_webhook(self): - db_func = self.create_function(runtime_id=self.runtime_id) - 
func_id = db_func.id - db_api.increase_function_version(func_id, 0) - self.create_webhook(func_id, function_version=1) - - resp = self.app.delete( - '/v1/functions/%s' % func_id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - def test_delete_with_alias(self): - db_func = self.create_function(runtime_id=self.runtime_id) - func_id = db_func.id - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': func_id, - 'name': name - } - db_api.create_function_alias(**body) - - resp = self.app.delete( - '/v1/functions/%s' % func_id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - @mock.patch('qinling.rpc.EngineClient.scaleup_function') - def test_scale_up(self, scaleup_function_mock): - db_func = self.create_function(runtime_id=self.runtime_id) - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/scale_up' % db_func.id, - params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaleup_function_mock.assert_called_once_with( - db_func.id, runtime_id=self.runtime_id, count=1) - - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.rpc.EngineClient.scaledown_function') - def test_scale_down(self, scaledown_function_mock, get_workers_mock): - db_func = self.create_function(runtime_id=self.runtime_id) - get_workers_mock.return_value = [mock.Mock(), mock.Mock()] - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/scale_down' % db_func.id, - params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaledown_function_mock.assert_called_once_with(db_func.id, count=1) - - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.rpc.EngineClient.scaledown_function') - def test_scale_down_no_need( - self, scaledown_function_mock, get_workers_mock - ): - db_func = self.create_function(runtime_id=self.runtime_id) - get_workers_mock.return_value 
= [mock.Mock()] - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/scale_down' % db_func.id, - params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaledown_function_mock.assert_not_called() - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - def test_detach( - self, engine_delete_function_mock, etcd_delete_function_mock - ): - db_func = self.create_function(runtime_id=self.runtime_id) - - resp = self.app.post( - '/v1/functions/%s/detach' % db_func.id - ) - - self.assertEqual(202, resp.status_int) - engine_delete_function_mock.assert_called_once_with(db_func.id) - etcd_delete_function_mock.assert_called_once_with(db_func.id) diff --git a/qinling/tests/unit/api/controllers/v1/test_function_alias.py b/qinling/tests/unit/api/controllers/v1/test_function_alias.py deleted file mode 100644 index 4cf4aa36..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_function_alias.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2018 OpenStack Foundation. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from qinling import context -from qinling.db import api as db_api -from qinling import exceptions as exc -from qinling.tests.unit.api import base -from qinling.tests.unit import base as unit_base - - -class TestFunctionAliasController(base.APITest): - def setUp(self): - super(TestFunctionAliasController, self).setUp() - - self.db_func = self.create_function() - self.func_id = self.db_func.id - - def test_post(self): - name = 'TestAlias' - body = {'function_id': self.func_id, - 'name': name, - 'description': 'new alias'} - - resp = self.app.post_json('/v1/aliases', body) - - self.assertEqual(201, resp.status_int) - self._assertDictContainsSubset(resp.json, body) - - context.set_ctx(self.ctx) - - func_alias_db = db_api.get_function_alias(name) - self.assertEqual(name, func_alias_db.name) - self.assertEqual(0, func_alias_db.function_version) - - def test_post_without_required_params(self): - body = {} - - resp = self.app.post_json('/v1/aliases', - body, - expect_errors=True) - - self.assertEqual(400, resp.status_int) - self.assertIn("Required param is missing.", resp.json['faultstring']) - - def test_get(self): - name = 'TestAlias' - function_version = 0 - body = {'function_id': self.func_id, - 'function_version': function_version, - 'name': name, - 'description': 'new alias'} - db_api.create_function_alias(**body) - - resp = self.app.get('/v1/aliases/%s' % name) - - context.set_ctx(self.ctx) - - self.assertEqual(200, resp.status_int) - self.assertEqual("new alias", resp.json.get('description')) - - def test_get_notfound(self): - resp = self.app.get('/v1/aliases/%s' % 'fake_name', - expect_errors=True) - - self.assertEqual(404, resp.status_int) - self.assertIn("FunctionAlias not found", resp.json['faultstring']) - - def test_get_all(self): - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'name': name - } - db_api.create_function_alias(**body) - - resp = self.app.get('/v1/aliases') - - self.assertEqual(200, 
resp.status_int) - - expected = { - "name": name, - 'function_id': self.func_id, - 'function_version': 0, - "project_id": unit_base.DEFAULT_PROJECT_ID, - } - actual = self._assert_single_item(resp.json['function_aliases'], - name=name) - self._assertDictContainsSubset(actual, expected) - - def test_delete(self): - name = self.rand_name(name="alias", prefix=self.prefix) - function_version = 0 - body = {'function_id': self.func_id, - 'function_version': function_version, - 'name': name, - 'description': 'new alias'} - - db_api.create_function_alias(**body) - - resp = self.app.delete('/v1/aliases/%s' % name) - - self.assertEqual(204, resp.status_int) - - context.set_ctx(self.ctx) - - self.assertRaises(exc.DBEntityNotFoundError, - db_api.get_function_alias, - name) - - def test_put(self): - name = self.rand_name(name="alias", prefix=self.prefix) - function_version = 0 - body = {'function_id': self.func_id, - 'function_version': function_version, - 'name': name, - 'description': 'new alias'} - - db_api.create_function_alias(**body) - - body['function_version'] = 1 - body['description'] = 'update alias' - - resp = self.app.put_json('/v1/aliases/%s' % name, body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, body) - - def test_put_without_optional_params(self): - name = self.rand_name(name="alias", prefix=self.prefix) - function_version = 1 - body = {'function_id': self.func_id, - 'function_version': function_version, - 'name': name, - 'description': 'new alias'} - - db_api.create_function_alias(**body) - - update_body = {} - - resp = self.app.put_json('/v1/aliases/%s' % name, update_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, body) diff --git a/qinling/tests/unit/api/controllers/v1/test_function_version.py b/qinling/tests/unit/api/controllers/v1/test_function_version.py deleted file mode 100644 index 2b803cf8..00000000 --- 
a/qinling/tests/unit/api/controllers/v1/test_function_version.py +++ /dev/null @@ -1,278 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from datetime import datetime -from datetime import timedelta -import json -from unittest import mock - - -from qinling import context -from qinling.db import api as db_api -from qinling import status -from qinling.tests.unit.api import base -from qinling.tests.unit import base as unit_base - - -class TestFunctionVersionController(base.APITest): - def setUp(self): - super(TestFunctionVersionController, self).setUp() - - self.db_func = self.create_function() - self.func_id = self.db_func.id - - @mock.patch('qinling.storage.file_system.FileSystemStorage.copy') - @mock.patch('qinling.storage.file_system.FileSystemStorage.changed_since') - @mock.patch('qinling.utils.etcd_util.get_function_version_lock') - def test_post(self, mock_etcd_lock, mock_changed, mock_copy): - lock = mock.Mock() - mock_etcd_lock.return_value.__enter__.return_value = lock - lock.is_acquired.return_value = True - mock_changed.return_value = True - - # Getting function and versions needs to happen in a db transaction - with db_api.transaction(): - func_db = db_api.get_function(self.func_id) - self.assertEqual(0, len(func_db.versions)) - - body = {'description': 'new version'} - resp = self.app.post_json('/v1/functions/%s/versions' % self.func_id, - body) - - self.assertEqual(201, resp.status_int) - 
self._assertDictContainsSubset(resp.json, body) - - mock_changed.assert_called_once_with(unit_base.DEFAULT_PROJECT_ID, - self.func_id, "fake_md5", 0) - mock_copy.assert_called_once_with(unit_base.DEFAULT_PROJECT_ID, - self.func_id, "fake_md5", 0) - - # We need to set context as it was removed after the API call - context.set_ctx(self.ctx) - - with db_api.transaction(): - func_db = db_api.get_function(self.func_id) - self.assertEqual(1, len(func_db.versions)) - - # Verify the latest function version by calling API - resp = self.app.get('/v1/functions/%s' % self.func_id) - - self.assertEqual(200, resp.status_int) - self.assertEqual(1, resp.json.get('latest_version')) - - @mock.patch('qinling.storage.file_system.FileSystemStorage.changed_since') - @mock.patch('qinling.utils.etcd_util.get_function_version_lock') - def test_post_not_change(self, mock_etcd_lock, mock_changed): - lock = mock.Mock() - mock_etcd_lock.return_value.__enter__.return_value = lock - lock.is_acquired.return_value = True - mock_changed.return_value = False - - body = {'description': 'new version'} - resp = self.app.post_json('/v1/functions/%s/versions' % self.func_id, - body, - expect_errors=True) - - self.assertEqual(403, resp.status_int) - - @mock.patch('qinling.utils.etcd_util.get_function_version_lock') - def test_post_max_versions(self, mock_etcd_lock): - lock = mock.Mock() - mock_etcd_lock.return_value.__enter__.return_value = lock - lock.is_acquired.return_value = True - - for i in range(10): - self.create_function_version(i, function_id=self.func_id) - - resp = self.app.post_json('/v1/functions/%s/versions' % self.func_id, - {}, - expect_errors=True) - - self.assertEqual(403, resp.status_int) - - @mock.patch('qinling.utils.etcd_util.get_function_version_lock') - def test_post_etcd_lock_failed(self, mock_etcd_lock): - lock = mock.Mock() - mock_etcd_lock.return_value.__enter__.return_value = lock - lock.is_acquired.return_value = False - - body = {'description': 'new version'} - resp = 
self.app.post_json('/v1/functions/%s/versions' % self.func_id, - body, expect_errors=True) - - self.assertEqual(500, resp.status_int) - self.assertEqual("Internal server error.", resp.json['faultstring']) - - def test_get_all(self): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - - resp = self.app.get('/v1/functions/%s/versions' % self.func_id) - - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item(resp.json['function_versions'], - version_number=1) - self.assertEqual("version 1", actual.get('description')) - - def test_get(self): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - - resp = self.app.get('/v1/functions/%s/versions/1' % self.func_id) - - self.assertEqual(200, resp.status_int) - self.assertEqual("version 1", resp.json.get('description')) - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - @mock.patch('qinling.storage.file_system.FileSystemStorage.delete') - def test_delete(self, mock_package_delete, mock_engine_delete, - mock_etcd_delete): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - - resp = self.app.delete('/v1/functions/%s/versions/1' % self.func_id) - - self.assertEqual(204, resp.status_int) - mock_engine_delete.assert_called_once_with(self.func_id, version=1) - mock_etcd_delete.assert_called_once_with(self.func_id, version=1) - mock_package_delete.assert_called_once_with( - unit_base.DEFAULT_PROJECT_ID, - self.func_id, None, version=1 - ) - - # We need to set context as it was removed after the API call - context.set_ctx(self.ctx) - - with db_api.transaction(): - func_db = db_api.get_function(self.func_id) - self.assertEqual(0, len(func_db.versions)) - self.assertEqual(0, func_db.latest_version) - - def test_delete_with_running_job(self): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - self.create_job( - self.func_id, - 
function_version=1, - status=status.RUNNING, - first_execution_time=datetime.utcnow(), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - ) - - resp = self.app.delete( - '/v1/functions/%s/versions/1' % self.func_id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - def test_delete_with_webhook(self): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - self.create_webhook(self.func_id, function_version=1) - - resp = self.app.delete( - '/v1/functions/%s/versions/1' % self.func_id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - def test_delete_with_alias(self): - db_api.increase_function_version(self.func_id, 0, - description="version 1") - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'name': name - } - db_api.create_function_alias(**body) - - resp = self.app.delete( - '/v1/functions/%s/versions/1' % self.func_id, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - - @mock.patch('qinling.rpc.EngineClient.scaleup_function') - def test_scale_up(self, scaleup_function_mock): - db_api.increase_function_version(self.func_id, 0) - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/versions/1/scale_up' % self.func_id, - params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaleup_function_mock.assert_called_once_with( - self.func_id, - runtime_id=self.db_func.runtime_id, - version=1, - count=1 - ) - - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.rpc.EngineClient.scaledown_function') - def test_scale_down(self, scaledown_function_mock, get_workers_mock): - db_api.increase_function_version(self.func_id, 0) - get_workers_mock.return_value = [mock.Mock(), mock.Mock()] - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/versions/1/scale_down' % self.func_id, - 
params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaledown_function_mock.assert_called_once_with(self.func_id, - version=1, count=1) - - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.rpc.EngineClient.scaledown_function') - def test_scale_down_no_need(self, scaledown_function_mock, - get_workers_mock): - db_api.increase_function_version(self.func_id, 0) - get_workers_mock.return_value = [mock.Mock()] - - body = {'count': 1} - resp = self.app.post( - '/v1/functions/%s/versions/1/scale_down' % self.func_id, - params=json.dumps(body), - content_type='application/json' - ) - - self.assertEqual(202, resp.status_int) - scaledown_function_mock.assert_not_called() - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.rpc.EngineClient.delete_function') - def test_detach(self, engine_delete_function_mock, - etcd_delete_function_mock): - db_api.increase_function_version(self.func_id, 0) - - resp = self.app.post( - '/v1/functions/%s/versions/1/detach' % self.func_id - ) - - self.assertEqual(202, resp.status_int) - engine_delete_function_mock.assert_called_once_with(self.func_id, - version=1) - etcd_delete_function_mock.assert_called_once_with(self.func_id, - version=1) diff --git a/qinling/tests/unit/api/controllers/v1/test_function_worker.py b/qinling/tests/unit/api/controllers/v1/test_function_worker.py deleted file mode 100644 index f89ee281..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_function_worker.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from unittest import mock - -from oslo_utils import uuidutils -from qinling.tests.unit.api import base - - -class TestFunctionWorkerController(base.APITest): - @mock.patch('qinling.utils.etcd_util.get_workers') - def test_get_all_workers(self, mock_get_workers): - function_id = uuidutils.generate_uuid() - mock_get_workers.return_value = ['test_worker0', 'test_worker1'] - - resp = self.app.get('/v1/functions/%s/workers' % function_id) - self.assertEqual(200, resp.status_int) - self._assert_multiple_items( - resp.json['workers'], 2, function_id=function_id - ) - self._assert_single_item( - resp.json['workers'], worker_name='test_worker0' - ) - self._assert_single_item( - resp.json['workers'], worker_name='test_worker1' - ) - - @mock.patch('qinling.utils.etcd_util.get_workers') - def test_get_all_version_workers(self, mock_get_workers): - function_id = uuidutils.generate_uuid() - mock_get_workers.return_value = ['test_worker0', 'test_worker1'] - - resp = self.app.get( - '/v1/functions/%s/workers?function_version=1' % function_id - ) - - self.assertEqual(200, resp.status_int) - mock_get_workers.assert_called_once_with(function_id, version=1) - self._assert_multiple_items( - resp.json['workers'], - 2, - function_id=function_id, - function_version=1 - ) - self._assert_single_item( - resp.json['workers'], worker_name='test_worker0' - ) - self._assert_single_item( - resp.json['workers'], worker_name='test_worker1' - ) - - def test_get_all_version_workers_not_int(self): - function_id = uuidutils.generate_uuid() - resp = self.app.get( - 
'/v1/functions/%s/workers?function_version=invalid' % function_id, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) diff --git a/qinling/tests/unit/api/controllers/v1/test_job.py b/qinling/tests/unit/api/controllers/v1/test_job.py deleted file mode 100644 index e3728a1d..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_job.py +++ /dev/null @@ -1,320 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from datetime import datetime -from datetime import timedelta - -from dateutil import parser - -from qinling import context as auth_context -from qinling.db import api as db_api -from qinling import status -from qinling.tests.unit.api import base - - -class TestJobController(base.APITest): - def setUp(self): - super(TestJobController, self).setUp() - - # Insert a function record in db for each test case. The data will be - # removed automatically in db clean up. 
- db_function = self.create_function() - self.function_id = db_function.id - - def test_create_with_function(self): - body = { - 'name': self.rand_name('job', prefix=self.prefix), - 'first_execution_time': str( - datetime.utcnow() + timedelta(hours=1)), - 'function_id': self.function_id - } - resp = self.app.post_json('/v1/jobs', body) - - self.assertEqual(201, resp.status_int) - - def test_create_with_version(self): - db_api.increase_function_version(self.function_id, 0) - - body = { - 'name': self.rand_name('job', prefix=self.prefix), - 'first_execution_time': str( - datetime.utcnow() + timedelta(hours=1)), - 'function_id': self.function_id, - 'function_version': 1, - } - resp = self.app.post_json('/v1/jobs', body) - - self.assertEqual(201, resp.status_int) - self.assertEqual(1, resp.json.get('function_version')) - - def test_create_with_alias(self): - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.function_id, - 'name': name - } - db_api.create_function_alias(**body) - - job_body = { - 'name': self.rand_name('job', prefix=self.prefix), - 'first_execution_time': str( - datetime.utcnow() + timedelta(hours=1)), - 'function_alias': name - } - - resp = self.app.post_json('/v1/jobs', job_body) - - self.assertEqual(201, resp.status_int) - self.assertEqual(name, resp.json.get('function_alias')) - self.assertIsNone(resp.json.get('function_id')) - - def test_create_with_invalid_alias(self): - body = { - 'function_alias': 'fake_alias', - 'first_execution_time': str( - datetime.utcnow() + timedelta(hours=1)), - } - - resp = self.app.post_json('/v1/jobs', body, expect_errors=True) - - self.assertEqual(404, resp.status_int) - - def test_create_without_required_params(self): - resp = self.app.post( - '/v1/jobs', - params={}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - def test_create_pattern(self): - body = { - 'name': self.rand_name('job', prefix=self.prefix), - 'function_id': self.function_id, - 'pattern': 
'0 21 * * *', - 'count': 10 - } - resp = self.app.post_json('/v1/jobs', body) - - self.assertEqual(201, resp.status_int) - - res = resp.json - self.assertEqual( - res["first_execution_time"], - res["next_execution_time"] - ) - - def test_create_both_pattern_and_first_execution_time(self): - body = { - 'name': self.rand_name('job', prefix=self.prefix), - 'function_id': self.function_id, - 'pattern': '0 21 * * *', - 'first_execution_time': str( - datetime.utcnow() + timedelta(hours=1)), - 'count': 10 - } - resp = self.app.post_json('/v1/jobs', body) - - self.assertEqual(201, resp.status_int) - - res = resp.json - self.assertGreaterEqual( - parser.parse(res["next_execution_time"], ignoretz=True), - parser.parse(res["first_execution_time"], ignoretz=True) - ) - - def test_delete(self): - job_id = self.create_job( - self.function_id, - first_execution_time=datetime.utcnow(), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - status=status.RUNNING, - count=1 - ).id - - resp = self.app.delete('/v1/jobs/%s' % job_id) - - self.assertEqual(204, resp.status_int) - - def test_update_one_shot_job(self): - job_id = self.create_job( - self.function_id, - first_execution_time=datetime.utcnow(), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - status=status.RUNNING, - count=1 - ).id - - req_body = { - 'name': 'new_name', - 'status': status.PAUSED - } - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, req_body) - - req_body = { - 'status': status.RUNNING - } - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, req_body) - - def test_update_one_shot_job_failed(self): - job_id = self.create_job( - self.function_id, - first_execution_time=datetime.utcnow(), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - status=status.RUNNING, - count=1 - ).id - url = 
'/v1/jobs/%s' % job_id - - # Try to change job type - resp = self.app.put_json( - url, - {'pattern': '*/1 * * * *'}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn('Can not change job type.', resp.json['faultstring']) - - # Try to resume job but the execution time is invalid - auth_context.set_ctx(self.ctx) - self.addCleanup(auth_context.set_ctx, None) - db_api.update_job( - job_id, - { - 'next_execution_time': datetime.utcnow() - timedelta(hours=1), - 'status': status.PAUSED - } - ) - resp = self.app.put_json( - url, - {'status': status.RUNNING}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'Execution time must be at least 1 minute in the future', - resp.json['faultstring'] - ) - - def test_update_recurring_job(self): - job_id = self.create_job( - self.function_id, - first_execution_time=datetime.utcnow() + timedelta(hours=1), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - pattern='0 */1 * * *', - status=status.RUNNING, - count=10 - ).id - - next_hour_and_half = datetime.utcnow() + timedelta(hours=1.5) - next_two_hours = datetime.utcnow() + timedelta(hours=2) - - req_body = { - 'next_execution_time': str( - next_hour_and_half.strftime('%Y-%m-%dT%H:%M:%SZ') - ), - 'pattern': '1 */1 * * *' - } - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, req_body) - - # Pause the job and resume with a valid next_execution_time - req_body = { - 'status': status.PAUSED - } - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, req_body) - - req_body = { - 'status': status.RUNNING, - 'next_execution_time': str( - next_two_hours.strftime('%Y-%m-%dT%H:%M:%SZ') - ), - } - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - 
self._assertDictContainsSubset(resp.json, req_body) - - # Pause the job and resume without specifying next_execution_time - auth_context.set_ctx(self.ctx) - self.addCleanup(auth_context.set_ctx, None) - db_api.update_job( - job_id, - { - 'next_execution_time': datetime.utcnow() - timedelta(hours=1), - 'status': status.PAUSED - } - ) - - req_body = {'status': status.RUNNING} - resp = self.app.put_json('/v1/jobs/%s' % job_id, req_body) - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, req_body) - - def test_update_recurring_job_failed(self): - job_id = self.create_job( - self.function_id, - first_execution_time=datetime.utcnow() + timedelta(hours=1), - next_execution_time=datetime.utcnow() + timedelta(hours=1), - pattern='0 */1 * * *', - status=status.RUNNING, - count=10 - ).id - url = '/v1/jobs/%s' % job_id - - # Try to change job type - resp = self.app.put_json( - url, - {'pattern': ''}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn('Can not change job type.', resp.json['faultstring']) - - # Pause the job and try to resume with an invalid next_execution_time - auth_context.set_ctx(self.ctx) - self.addCleanup(auth_context.set_ctx, None) - db_api.update_job(job_id, {'status': status.PAUSED}) - resp = self.app.put_json( - url, - { - 'status': status.RUNNING, - 'next_execution_time': str( - datetime.utcnow() - timedelta(hours=1) - ) - }, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - self.assertIn( - 'Execution time must be at least 1 minute in the future', - resp.json['faultstring'] - ) diff --git a/qinling/tests/unit/api/controllers/v1/test_runtime.py b/qinling/tests/unit/api/controllers/v1/test_runtime.py deleted file mode 100644 index ec257cd5..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_runtime.py +++ /dev/null @@ -1,178 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from unittest import mock - -from qinling.db import api as db_api -from qinling import status -from qinling.tests.unit.api import base -from qinling.tests.unit import base as test_base - - -class TestRuntimeController(base.APITest): - def setUp(self): - super(TestRuntimeController, self).setUp() - - # Insert a runtime record in db. The data will be removed in db clean - # up. - self.db_runtime = self.create_runtime() - self.runtime_id = self.db_runtime.id - - def test_get(self): - resp = self.app.get('/v1/runtimes/%s' % self.runtime_id) - - expected = { - 'id': self.runtime_id, - "image": self.db_runtime.image, - "name": self.db_runtime.name, - "project_id": test_base.DEFAULT_PROJECT_ID, - "status": status.AVAILABLE, - "is_public": True, - } - - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, expected) - - def test_get_all(self): - resp = self.app.get('/v1/runtimes') - - expected = { - 'id': self.runtime_id, - "image": self.db_runtime.image, - "name": self.db_runtime.name, - "project_id": test_base.DEFAULT_PROJECT_ID, - "status": status.AVAILABLE, - "is_public": True, - } - - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item( - resp.json['runtimes'], id=self.runtime_id - ) - self._assertDictContainsSubset(actual, expected) - - @mock.patch('qinling.rpc.EngineClient.create_runtime') - def test_post(self, mock_create_time): - body = { - 'name': self.rand_name('runtime', prefix=self.prefix), - 'image': self.rand_name('image', prefix=self.prefix), - } 
- resp = self.app.post_json('/v1/runtimes', body) - - self.assertEqual(201, resp.status_int) - - body.update({"trusted": True}) - self._assertDictContainsSubset(resp.json, body) - - mock_create_time.assert_called_once_with(resp.json['id']) - - @mock.patch('qinling.rpc.EngineClient.create_runtime') - def test_post_without_image(self, mock_create_time): - body = { - 'name': self.rand_name('runtime', prefix=self.prefix), - } - resp = self.app.post_json('/v1/runtimes', body, expect_errors=True) - - self.assertEqual(400, resp.status_int) - mock_create_time.assert_not_called() - - @mock.patch('qinling.rpc.EngineClient.delete_runtime') - def test_delete(self, mock_delete_runtime): - resp = self.app.delete('/v1/runtimes/%s' % self.runtime_id) - - self.assertEqual(204, resp.status_int) - mock_delete_runtime.assert_called_once_with(self.runtime_id) - - @mock.patch('qinling.rpc.EngineClient.delete_runtime') - def test_delete_runtime_with_function_associated(self, - mock_delete_runtime): - self.create_function(self.runtime_id) - resp = self.app.delete( - '/v1/runtimes/%s' % self.runtime_id, expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - mock_delete_runtime.assert_not_called() - - def test_put_name(self): - resp = self.app.put_json( - '/v1/runtimes/%s' % self.runtime_id, {'name': 'new_name'} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual('new_name', resp.json['name']) - - def test_put_image_runtime_not_available(self): - db_runtime = db_api.create_runtime( - { - 'name': self.rand_name('runtime', prefix=self.prefix), - 'image': self.rand_name('image', prefix=self.prefix), - 'project_id': test_base.DEFAULT_PROJECT_ID, - 'status': status.CREATING - } - ) - runtime_id = db_runtime.id - - resp = self.app.put_json( - '/v1/runtimes/%s' % runtime_id, {'image': 'new_image'}, - expect_errors=True - ) - - self.assertEqual(409, resp.status_int) - - @mock.patch('qinling.rpc.EngineClient.update_runtime') - def test_put_image(self, mock_update_runtime): 
- resp = self.app.put_json( - '/v1/runtimes/%s' % self.runtime_id, {'image': 'new_image'} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual('new_image', resp.json['image']) - mock_update_runtime.assert_called_once_with( - self.runtime_id, - image='new_image', - pre_image=self.db_runtime.image - ) - - @mock.patch('qinling.utils.etcd_util.get_service_url') - @mock.patch('qinling.rpc.EngineClient.update_runtime') - def test_put_image_not_allowed(self, mock_update_runtime, mock_etcd_url): - mock_etcd_url.return_value = True - function_id = self.create_function(self.runtime_id).id - - resp = self.app.put_json( - '/v1/runtimes/%s' % self.runtime_id, {'image': 'new_image'}, - expect_errors=True - ) - - self.assertEqual(403, resp.status_int) - mock_update_runtime.assert_not_called() - mock_etcd_url.assert_called_once_with(function_id) - - @mock.patch('qinling.rpc.EngineClient.get_runtime_pool') - def test_get_runtime_pool(self, mock_get_pool): - mock_get_pool.return_value = {"total": 3, "available": 2} - - resp = self.app.get('/v1/runtimes/%s/pool' % self.runtime_id) - - expected = { - "capacity": { - "available": 2, - "total": 3 - }, - "name": self.runtime_id - } - - self.assertEqual(200, resp.status_int) - self.assertEqual(expected, resp.json) diff --git a/qinling/tests/unit/api/controllers/v1/test_webhook.py b/qinling/tests/unit/api/controllers/v1/test_webhook.py deleted file mode 100644 index 22c171bb..00000000 --- a/qinling/tests/unit/api/controllers/v1/test_webhook.py +++ /dev/null @@ -1,267 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import json -from unittest import mock - -from qinling import context -from qinling.db import api as db_api -from qinling.tests.unit.api import base -from qinling.utils import constants - - -class TestWebhookController(base.APITest): - def setUp(self): - super(TestWebhookController, self).setUp() - db_func = self.create_function() - self.func_id = db_func.id - - def test_crud(self): - # Create - body = { - 'function_id': self.func_id, - 'description': 'webhook test' - } - resp = self.app.post_json('/v1/webhooks', body) - self.assertEqual(201, resp.status_int) - webhook_id = resp.json.get('id') - self.assertIn(self.qinling_endpoint, resp.json.get('webhook_url')) - - # Get - resp = self.app.get('/v1/webhooks/%s' % webhook_id) - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, body) - - # List - resp = self.app.get('/v1/webhooks') - self.assertEqual(200, resp.status_int) - actual = self._assert_single_item( - resp.json['webhooks'], id=webhook_id - ) - self._assertDictContainsSubset(actual, body) - - # Update - resp = self.app.put_json( - '/v1/webhooks/%s' % webhook_id, - {'description': 'webhook test update'} - ) - self.assertEqual(200, resp.status_int) - - expected = { - 'function_id': self.func_id, - 'description': 'webhook test update' - } - resp = self.app.get('/v1/webhooks/%s' % webhook_id) - self.assertEqual(200, resp.status_int) - self._assertDictContainsSubset(resp.json, expected) - - # Delete - resp = self.app.delete('/v1/webhooks/%s' % webhook_id) - self.assertEqual(204, resp.status_int) - resp = 
self.app.get('/v1/webhooks/%s' % webhook_id, expect_errors=True) - self.assertEqual(404, resp.status_int) - - def test_create_with_version(self): - db_api.increase_function_version(self.func_id, 0) - - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'description': 'webhook test' - } - resp = self.app.post_json('/v1/webhooks', body) - - self.assertEqual(201, resp.status_int) - self.assertEqual(1, resp.json.get("function_version")) - self.assertIsNone(resp.json.get("function_alias")) - - def test_create_with_alias(self): - db_api.increase_function_version(self.func_id, 0) - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'name': name - } - db_api.create_function_alias(**body) - - webhook_body = { - 'function_alias': name, - 'description': 'webhook test' - } - resp = self.app.post_json('/v1/webhooks', webhook_body) - - self.assertEqual(201, resp.status_int) - self.assertEqual(name, resp.json.get('function_alias')) - self.assertIsNone(resp.json.get("function_id")) - self.assertIsNone(resp.json.get("function_version")) - - def test_create_with_invalid_alias(self): - body = { - 'function_alias': 'fake_alias', - 'description': 'webhook test' - } - - resp = self.app.post_json('/v1/webhooks', body, expect_errors=True) - - self.assertEqual(404, resp.status_int) - - def test_create_without_required_params(self): - resp = self.app.post( - '/v1/webhooks', - params={}, - expect_errors=True - ) - - self.assertEqual(400, resp.status_int) - - def test_update_with_version(self): - db_api.increase_function_version(self.func_id, 0) - webhook = self.create_webhook(self.func_id) - - self.assertIsNone(webhook.function_version) - - resp = self.app.put_json( - '/v1/webhooks/%s' % webhook.id, - {'function_version': 1} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(1, resp.json.get("function_version")) - self.assertIsNone(resp.json.get("function_alias")) - - def 
test_update_only_description(self): - db_api.increase_function_version(self.func_id, 0) - webhook = self.create_webhook(self.func_id, function_version=1) - - self.assertEqual(1, webhook.function_version) - - resp = self.app.put_json( - '/v1/webhooks/%s' % webhook.id, - {'description': 'updated description'} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(1, resp.json.get("function_version")) - self.assertEqual('updated description', resp.json.get("description")) - - def test_update_function_alias_1(self): - # Create webhook using function alias - db_api.increase_function_version(self.func_id, 0) - name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'name': name - } - db_api.create_function_alias(**body) - webhook = self.create_webhook(function_alias=name) - - db_api.increase_function_version(self.func_id, 1) - new_name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 2, - 'name': new_name - } - db_api.create_function_alias(**body) - - # Update webhook with the new alias - resp = self.app.put_json( - '/v1/webhooks/%s' % webhook.id, - {'function_alias': new_name} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(new_name, resp.json.get("function_alias")) - self.assertIsNone(resp.json.get("function_id")) - self.assertIsNone(resp.json.get("function_version")) - - def test_update_function_alias_2(self): - # Create webhook using function id - db_api.increase_function_version(self.func_id, 0) - webhook = self.create_webhook(function_id=self.func_id, - function_version=1) - - db_api.increase_function_version(self.func_id, 1) - alias_name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 2, - 'name': alias_name - } - db_api.create_function_alias(**body) - - # Update webhook with function alias - resp = self.app.put_json( - '/v1/webhooks/%s' 
% webhook.id, - {'function_alias': alias_name} - ) - - self.assertEqual(200, resp.status_int) - self.assertEqual(alias_name, resp.json.get("function_alias")) - self.assertIsNone(resp.json.get("function_id")) - self.assertIsNone(resp.json.get("function_version")) - - @mock.patch("qinling.utils.openstack.keystone.create_trust_context") - @mock.patch("qinling.utils.executions.create_execution") - def test_invoke_with_function_id(self, mock_create_execution, - mock_create_context): - exec_mock = mock_create_execution.return_value - exec_mock.id = "fake_id" - webhook = self.create_webhook(function_id=self.func_id) - - resp = self.app.post_json('/v1/webhooks/%s/invoke' % webhook.id, {}) - context.set_ctx(self.ctx) - - self.assertEqual(202, resp.status_int) - - params = { - 'function_id': self.func_id, - 'function_version': None, - 'sync': False, - 'input': json.dumps({}), - 'description': constants.EXECUTION_BY_WEBHOOK % webhook.id - } - mock_create_execution.assert_called_once_with(mock.ANY, params) - - @mock.patch("qinling.utils.openstack.keystone.create_trust_context") - @mock.patch("qinling.utils.executions.create_execution") - def test_invoke_with_function_alias(self, mock_create_execution, - mock_create_context): - exec_mock = mock_create_execution.return_value - exec_mock.id = "fake_id" - - db_api.increase_function_version(self.func_id, 0) - alias_name = self.rand_name(name="alias", prefix=self.prefix) - body = { - 'function_id': self.func_id, - 'function_version': 1, - 'name': alias_name - } - db_api.create_function_alias(**body) - webhook = self.create_webhook(function_alias=alias_name) - - resp = self.app.post_json('/v1/webhooks/%s/invoke' % webhook.id, {}) - context.set_ctx(self.ctx) - - self.assertEqual(202, resp.status_int) - - params = { - 'function_id': self.func_id, - 'function_version': 1, - 'sync': False, - 'input': json.dumps({}), - 'description': constants.EXECUTION_BY_WEBHOOK % webhook.id - } - mock_create_execution.assert_called_once_with(mock.ANY, 
params) diff --git a/qinling/tests/unit/api/test_cors_middleware.py b/qinling/tests/unit/api/test_cors_middleware.py deleted file mode 100644 index c7a0626a..00000000 --- a/qinling/tests/unit/api/test_cors_middleware.py +++ /dev/null @@ -1,96 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Tests cors middleware.""" - -from oslo_config import cfg -from oslo_middleware import cors as cors_middleware -from qinling.tests.unit.api import base - - -class TestCORSMiddleware(base.APITest): - """Provide a basic smoke test to ensure CORS middleware is active. - - The tests below provide minimal confirmation that the CORS middleware - is active, and may be configured. For comprehensive tests, please consult - the test suite in oslo_middleware. - """ - - def setUp(self): - # Make sure the CORS options are registered - cfg.CONF.register_opts(cors_middleware.CORS_OPTS, 'cors') - - # Load up our valid domain values before the application is created. - self.override_config( - "allowed_origin", - "http://valid.example.com", - group='cors' - ) - - # Create the application. 
- super(TestCORSMiddleware, self).setUp() - - def test_valid_cors_options_request(self): - response = self.app.options( - '/', - headers={ - 'Origin': 'http://valid.example.com', - 'Access-Control-Request-Method': 'GET' - } - ) - - self.assertEqual(200, response.status_code) - self.assertIn('access-control-allow-origin', response.headers) - self.assertEqual( - 'http://valid.example.com', - response.headers['access-control-allow-origin'] - ) - - def test_invalid_cors_options_request(self): - response = self.app.options( - '/', - headers={ - 'Origin': 'http://invalid.example.com', - 'Access-Control-Request-Method': 'GET' - } - ) - - self.assertEqual(200, response.status_code) - self.assertNotIn('access-control-allow-origin', response.headers) - - def test_valid_cors_get_request(self): - response = self.app.get( - '/', - headers={ - 'Origin': 'http://valid.example.com' - } - ) - - self.assertEqual(200, response.status_code) - self.assertIn('access-control-allow-origin', response.headers) - self.assertEqual( - 'http://valid.example.com', - response.headers['access-control-allow-origin'] - ) - - def test_invalid_cors_get_request(self): - response = self.app.get( - '/', - headers={ - 'Origin': 'http://invalid.example.com' - } - ) - - self.assertEqual(200, response.status_code) - self.assertNotIn('access-control-allow-origin', response.headers) diff --git a/qinling/tests/unit/api/test_oslo_middleware.py b/qinling/tests/unit/api/test_oslo_middleware.py deleted file mode 100644 index 5dd0ff7a..00000000 --- a/qinling/tests/unit/api/test_oslo_middleware.py +++ /dev/null @@ -1,42 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Tests http_proxy_to_wsgi middleware.""" - -from oslo_config import cfg -from oslo_middleware import http_proxy_to_wsgi as http_proxy_to_wsgi_middleware -from qinling.tests.unit.api import base - - -class TestHTTPProxyToWSGIMiddleware(base.APITest): - """Test oslo_middleware HTTPProxyToWSGI. - - It checks that oslo_middleware middleware HTTPProxyToWSGI is executed - when enabled. - """ - - def setUp(self): - # Make sure the HTTPProxyToWSGI options are registered - cfg.CONF.register_opts(http_proxy_to_wsgi_middleware.OPTS, - 'oslo_middleware') - - # Enable proxy headers parsing in HTTPProxyToWSGI middleware. - self.override_config( - "enable_proxy_headers_parsing", - "True", - group='oslo_middleware' - ) - - # Create the application. - super(TestHTTPProxyToWSGIMiddleware, self).setUp() diff --git a/qinling/tests/unit/base.py b/qinling/tests/unit/base.py deleted file mode 100644 index 94feab47..00000000 --- a/qinling/tests/unit/base.py +++ /dev/null @@ -1,259 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2010-2011 OpenStack Foundation -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -import random - -from oslo_config import cfg -from oslotest import base - -from qinling import config -from qinling import context as auth_context -from qinling.db import api as db_api -from qinling import status - -DEFAULT_PROJECT_ID = 'default' -OPT_PROJECT_ID = '55-66-77-88' - - -def get_context(default=True, admin=False): - if default: - return auth_context.Context.from_dict({ - 'user_name': 'test-default-user', - 'user': '1-2-3-4', - 'tenant': DEFAULT_PROJECT_ID, - 'project_name': 'test-default-project', - 'is_admin': admin - }) - else: - return auth_context.Context.from_dict({ - 'user_name': 'test-opt-user', - 'user': '5-6-7-8', - 'tenant': OPT_PROJECT_ID, - 'project_name': 'test-opt-project', - 'is_admin': admin - }) - - -class BaseTest(base.BaseTestCase): - def override_config(self, name, override, group=None): - """Cleanly override CONF variables.""" - cfg.CONF.set_override(name, override, group) - self.addCleanup(cfg.CONF.clear_override, name, group) - - def _assertDictContainsSubset(self, parent, child): - """Checks whether child dict is a superset of parent. 
- - assertDictContainsSubset() in standard Python 2.7 has been deprecated - since Python 3.2 - - Refer to https://goo.gl/iABb5c - """ - self.assertEqual(parent, dict(parent, **child)) - - def _assert_single_item(self, items, **props): - return self._assert_multiple_items(items, 1, **props)[0] - - def _assert_multiple_items(self, items, count, **props): - def _matches(item, **props): - for prop_name, prop_val in props.items(): - v = (item[prop_name] if isinstance(item, dict) - else getattr(item, prop_name)) - if v != prop_val: - return False - return True - - filtered_items = list( - [item for item in items if _matches(item, **props)] - ) - found = len(filtered_items) - - if found != count: - self.fail("Wrong number of items found [props=%s, " - "expected=%s, found=%s]" % (props, count, found)) - - return filtered_items - - def rand_name(self, name='', prefix=None): - """Generate a random name that inclues a random number. - - :param str name: The name that you want to include - :param str prefix: The prefix that you want to include - :return: a random name. The format is - '--'. - (e.g. 'prefixfoo-namebar-154876201') - :rtype: string - """ - randbits = str(random.randint(1, 0x7fffffff)) - rand_name = randbits - if name: - rand_name = name + '-' + rand_name - if prefix: - rand_name = prefix + '-' + rand_name - return rand_name - - -class DbTestCase(BaseTest): - is_heavy_init_called = False - - def setUp(self): - super(DbTestCase, self).setUp() - - self.prefix = self.__class__.__name__ - - self._heavy_init() - - self.ctx = get_context() - auth_context.set_ctx(self.ctx) - - self.addCleanup(auth_context.set_ctx, None) - self.addCleanup(self._clean_db) - - @classmethod - def heavy_init(cls): - """Runs a long initialization. - - This method runs long initialization once by class - and can be extended by child classes. 
- """ - cfg.CONF.set_default('connection', 'sqlite://', group='database') - cfg.CONF.set_default('max_overflow', -1, group='database') - cfg.CONF.set_default('max_pool_size', 1000, group='database') - - qinling_opts = [ - (config.API_GROUP, config.api_opts), - (config.PECAN_GROUP, config.pecan_opts), - (config.ENGINE_GROUP, config.engine_opts), - (config.STORAGE_GROUP, config.storage_opts), - (config.KUBERNETES_GROUP, config.kubernetes_opts), - (config.ETCD_GROUP, config.etcd_opts), - (config.RLIMITS_GROUP, config.rlimits_opts), - (None, [config.launch_opt]), - (None, config.default_opts) - ] - for group, options in qinling_opts: - cfg.CONF.register_opts(list(options), group) - cls.qinling_endpoint = 'http://127.0.0.1:7070/' - cfg.CONF.set_default('qinling_endpoint', cls.qinling_endpoint) - - db_api.setup_db() - - @classmethod - def _heavy_init(cls): - """Method that runs heavy_init(). - - Make this method private to prevent extending this one. - It runs heavy_init() only once. - - Note: setUpClass() can be used, but it magically is not invoked - from child class in another module. - """ - if not cls.is_heavy_init_called: - cls.heavy_init() - cls.is_heavy_init_called = True - - def _clean_db(self): - db_api.delete_all() - - def create_runtime(self): - runtime = db_api.create_runtime( - { - 'name': self.rand_name('runtime', prefix=self.prefix), - 'image': self.rand_name('image', prefix=self.prefix), - # 'auth_enable' is disabled by default, we create runtime for - # default tenant. 
- 'project_id': DEFAULT_PROJECT_ID, - 'status': status.AVAILABLE, - 'trusted': True - } - ) - - return runtime - - def create_function(self, runtime_id=None, code=None, timeout=None): - if not runtime_id: - runtime_id = self.create_runtime().id - - function = db_api.create_function( - { - 'name': self.rand_name('function', prefix=self.prefix), - 'runtime_id': runtime_id, - 'code': code or {"source": "package", "md5sum": "fake_md5"}, - 'entry': 'main.main', - # 'auth_enable' is disabled by default, we create runtime for - # default tenant. - 'project_id': DEFAULT_PROJECT_ID, - 'cpu': cfg.CONF.resource_limits.default_cpu, - 'memory_size': cfg.CONF.resource_limits.default_memory, - 'timeout': timeout or cfg.CONF.resource_limits.default_timeout - } - ) - - return function - - def create_job(self, function_id=None, function_alias=None, **kwargs): - if not function_id and not function_alias: - function_id = self.create_function().id - - job_params = { - 'name': self.rand_name('job', prefix=self.prefix), - 'function_alias': function_alias, - 'function_id': function_id, - # 'auth_enable' is disabled by default - 'project_id': DEFAULT_PROJECT_ID, - } - job_params.update(kwargs) - job = db_api.create_job(job_params) - - return job - - def create_webhook(self, function_id=None, function_alias=None, **kwargs): - if not function_id and not function_alias: - function_id = self.create_function().id - - webhook_params = { - 'function_alias': function_alias, - 'function_id': function_id, - # 'auth_enable' is disabled by default - 'project_id': DEFAULT_PROJECT_ID, - } - webhook_params.update(kwargs) - webhook = db_api.create_webhook(webhook_params) - - return webhook - - def create_execution(self, function_id=None, function_alias=None, - **kwargs): - if not function_id and not function_alias: - function_id = self.create_function().id - - execution_params = { - 'function_alias': function_alias, - 'function_id': function_id, - 'project_id': DEFAULT_PROJECT_ID, - 'status': 
status.RUNNING, - } - execution_params.update(kwargs) - execution = db_api.create_execution(execution_params) - - return execution - - def create_function_version(self, old_version, function_id=None, **kwargs): - if not function_id: - function_id = self.create_function().id - - db_api.increase_function_version(function_id, old_version, **kwargs) - db_api.update_function(function_id, - {"latest_version": old_version + 1}) diff --git a/qinling/tests/unit/cmd/__init__.py b/qinling/tests/unit/cmd/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/cmd/test_status.py b/qinling/tests/unit/cmd/test_status.py deleted file mode 100644 index 6bae39df..00000000 --- a/qinling/tests/unit/cmd/test_status.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (c) 2018 NEC, Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_upgradecheck.upgradecheck import Code - -from qinling.cmd import status -from qinling.tests.unit import base - - -class TestUpgradeChecks(base.BaseTest): - - def setUp(self): - super(TestUpgradeChecks, self).setUp() - self.cmd = status.Checks() - - def test__sample_check(self): - check_result = self.cmd._sample_check() - self.assertEqual( - Code.SUCCESS, check_result.code) diff --git a/qinling/tests/unit/engine/__init__.py b/qinling/tests/unit/engine/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/engine/test_default_engine.py b/qinling/tests/unit/engine/test_default_engine.py deleted file mode 100644 index 37004b18..00000000 --- a/qinling/tests/unit/engine/test_default_engine.py +++ /dev/null @@ -1,600 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -from oslo_config import cfg - -from qinling.db import api as db_api -from qinling.engine import default_engine -from qinling import exceptions as exc -from qinling import status -from qinling.tests.unit import base -from qinling.utils import common -from qinling.utils import constants - - -class TestDefaultEngine(base.DbTestCase): - def setUp(self): - super(TestDefaultEngine, self).setUp() - self.orchestrator = mock.Mock() - self.qinling_endpoint = 'http://127.0.0.1:7070' - self.default_engine = default_engine.DefaultEngine( - self.orchestrator, self.qinling_endpoint - ) - self.rlimit = { - 'cpu': cfg.CONF.resource_limits.default_cpu, - 'memory_size': cfg.CONF.resource_limits.default_memory - } - - def _create_running_executions(self, function_id, num): - for _ in range(num): - self.create_execution(function_id=function_id) - - def test_create_runtime(self): - runtime = self.create_runtime() - runtime_id = runtime.id - # Set status to verify it is changed during creation. - db_api.update_runtime(runtime_id, {'status': status.CREATING}) - - self.default_engine.create_runtime(mock.Mock(), runtime_id) - - self.orchestrator.create_pool.assert_called_once_with( - runtime_id, runtime.image, trusted=True) - - runtime = db_api.get_runtime(runtime_id) - self.assertEqual(status.AVAILABLE, runtime.status) - - def test_create_runtime_failed(self): - runtime = self.create_runtime() - runtime_id = runtime.id - # Set status to verify it is changed during creation. 
- db_api.update_runtime(runtime_id, {'status': status.CREATING}) - self.orchestrator.create_pool.side_effect = RuntimeError - - self.default_engine.create_runtime(mock.Mock(), runtime_id) - - self.orchestrator.create_pool.assert_called_once_with( - runtime_id, runtime.image, trusted=True) - runtime = db_api.get_runtime(runtime_id) - self.assertEqual(status.ERROR, runtime.status) - - def test_delete_runtime(self): - runtime = self.create_runtime() - runtime_id = runtime.id - - self.default_engine.delete_runtime(mock.Mock(), runtime_id) - - self.orchestrator.delete_pool.assert_called_once_with( - runtime_id) - self.assertRaisesRegex( - exc.DBEntityNotFoundError, - "^Runtime not found \[id=%s\]$" % runtime_id, - db_api.get_runtime, runtime_id) - - def test_update_runtime(self): - runtime = self.create_runtime() - runtime_id = runtime.id - # Set status to verify it is changed during update. - db_api.update_runtime(runtime_id, {'status': status.UPGRADING}) - image = self.rand_name('new_image', prefix=self.prefix) - pre_image = self.rand_name('pre_image', prefix=self.prefix) - self.orchestrator.update_pool.return_value = True - - self.default_engine.update_runtime( - mock.Mock(), runtime_id, image, pre_image) - - self.orchestrator.update_pool.assert_called_once_with( - runtime_id, image=image) - runtime = db_api.get_runtime(runtime_id) - self.assertEqual(runtime.status, status.AVAILABLE) - - def test_update_runtime_rollbacked(self): - runtime = self.create_runtime() - runtime_id = runtime.id - # Set status to verify it is changed during update. 
- db_api.update_runtime(runtime_id, {'status': status.UPGRADING}) - image = self.rand_name('new_image', prefix=self.prefix) - pre_image = self.rand_name('pre_image', prefix=self.prefix) - self.orchestrator.update_pool.return_value = False - - self.default_engine.update_runtime( - mock.Mock(), runtime_id, image, pre_image) - - self.orchestrator.update_pool.assert_called_once_with( - runtime_id, image=image) - runtime = db_api.get_runtime(runtime_id) - self.assertEqual(runtime.image, pre_image) - self.assertEqual(runtime.status, status.AVAILABLE) - - @mock.patch('qinling.engine.default_engine.DefaultEngine.scaleup_function') - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.utils.etcd_util.get_worker_lock') - def test_function_load_check_no_worker(self, mock_getlock, mock_getworkers, - mock_scaleup): - function_id = common.generate_unicode_uuid() - runtime_id = common.generate_unicode_uuid() - lock = mock.Mock() - lock.is_acquired.return_value = True - mock_getlock.return_value.__enter__.return_value = lock - mock_getworkers.return_value = [] - - self.default_engine.function_load_check(function_id, 0, runtime_id) - - mock_getworkers.assert_called_once_with(function_id, 0) - mock_scaleup.assert_called_once_with(None, function_id, 0, runtime_id, - 1) - - @mock.patch('qinling.engine.default_engine.DefaultEngine.scaleup_function') - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.utils.etcd_util.get_worker_lock') - def test_function_load_check_scaleup(self, mock_getlock, mock_getworkers, - mock_scaleup): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - lock = mock.Mock() - lock.is_acquired.return_value = True - mock_getlock.return_value.__enter__.return_value = lock - - # The default concurrency is 3, we use 4 running executions against - # 1 worker so that there will be a scaling up. 
- mock_getworkers.return_value = ['worker1'] - self._create_running_executions(function_id, 4) - - self.default_engine.function_load_check(function_id, 0, runtime_id) - - mock_getworkers.assert_called_once_with(function_id, 0) - mock_scaleup.assert_called_once_with(None, function_id, 0, runtime_id, - 1) - - @mock.patch('qinling.engine.default_engine.DefaultEngine.scaleup_function') - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.utils.etcd_util.get_worker_lock') - def test_function_load_check_not_scaleup(self, mock_getlock, - mock_getworkers, mock_scaleup): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - lock = mock.Mock() - lock.is_acquired.return_value = True - mock_getlock.return_value.__enter__.return_value = lock - - # The default concurrency is 3, we use 3 running executions against - # 1 worker so that there won't be a scaling up. - mock_getworkers.return_value = ['worker1'] - self._create_running_executions(function_id, 3) - - self.default_engine.function_load_check(function_id, 0, runtime_id) - - mock_getworkers.assert_called_once_with(function_id, 0) - mock_scaleup.assert_not_called() - - @mock.patch('qinling.utils.etcd_util.get_workers') - @mock.patch('qinling.utils.etcd_util.get_worker_lock') - def test_function_load_check_lock_wait(self, mock_getlock, - mock_getworkers): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - lock = mock.Mock() - mock_getlock.return_value.__enter__.return_value = lock - # Lock is acquired upon the third try. 
- lock.is_acquired.side_effect = [False, False, True] - mock_getworkers.return_value = ['worker1'] - self._create_running_executions(function_id, 3) - - self.default_engine.function_load_check(function_id, 0, runtime_id) - - self.assertEqual(3, lock.is_acquired.call_count) - mock_getworkers.assert_called_once_with(function_id, 0) - - @mock.patch('qinling.utils.etcd_util.get_worker_lock') - def test_function_load_check_failed_to_get_worker_lock(self, mock_getlock): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - function_version = 0 - lock = mock.Mock() - # Lock is never acquired. - lock.is_acquired.return_value = False - mock_getlock.return_value.__enter__.return_value = lock - - self.assertRaisesRegex( - exc.EtcdLockException, - "^Etcd: failed to get worker lock for function %s" - "\(version %s\)\.$" % (function_id, function_version), - self.default_engine.function_load_check, - function_id, function_version, runtime_id - ) - - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_create_execution_image_type_function(self, mock_svc_url): - """Create 2 executions for an image type function.""" - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - db_api.update_function( - function_id, - { - 'code': { - 'source': constants.IMAGE_FUNCTION, - 'image': self.rand_name('image', prefix=self.prefix) - } - } - ) - function = db_api.get_function(function_id) - execution_1 = self.create_execution(function_id=function_id) - execution_1_id = execution_1.id - execution_2 = self.create_execution(function_id=function_id) - execution_2_id = execution_2.id - mock_svc_url.return_value = None - self.orchestrator.prepare_execution.return_value = ( - mock.Mock(), None) - self.orchestrator.run_execution.side_effect = [ - (True, {'duration': 5, 'logs': 'fake log'}), - (False, {'duration': 0, 'output': 'Function execution failed.'}) - ] - - # Create two executions, with different 
results - self.default_engine.create_execution( - mock.Mock(), execution_1_id, function_id, 0, runtime_id - ) - self.default_engine.create_execution( - mock.Mock(), execution_2_id, function_id, 0, runtime_id, - input='input' - ) - - get_service_url_calls = [ - mock.call(function_id, 0), mock.call(function_id, 0) - ] - mock_svc_url.assert_has_calls(get_service_url_calls) - - prepare_calls = [ - mock.call(function_id, - 0, - rlimit=self.rlimit, - image=function.code['image'], - identifier=mock.ANY, - labels=None, - input=None), - mock.call(function_id, - 0, - rlimit=self.rlimit, - image=function.code['image'], - identifier=mock.ANY, - labels=None, - input='input') - ] - self.orchestrator.prepare_execution.assert_has_calls(prepare_calls) - - run_calls = [ - mock.call(execution_1_id, - function_id, - 0, - rlimit=None, - input=None, - identifier=mock.ANY, - service_url=None, - entry=function.entry, - trust_id=function.trust_id, - timeout=function.timeout), - mock.call(execution_2_id, - function_id, - 0, - rlimit=None, - input='input', - identifier=mock.ANY, - service_url=None, - entry=function.entry, - trust_id=function.trust_id, - timeout=function.timeout) - ] - self.orchestrator.run_execution.assert_has_calls(run_calls) - - execution_1 = db_api.get_execution(execution_1_id) - execution_2 = db_api.get_execution(execution_2_id) - - self.assertEqual(status.SUCCESS, execution_1.status) - self.assertEqual('fake log', execution_1.logs) - self.assertEqual({"duration": 5}, execution_1.result) - self.assertEqual(status.FAILED, execution_2.status) - self.assertEqual('', execution_2.logs) - self.assertEqual( - {'duration': 0, 'output': 'Function execution failed.'}, - execution_2.result - ) - - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_create_execution_prepare_execution_exception( - self, - etcd_util_get_service_url_mock - ): - """test_create_execution_prepare_execution_exception - - Create execution for image type function, prepare_execution method - 
raises exception. - """ - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - db_api.update_function( - function_id, - { - 'code': { - 'source': constants.IMAGE_FUNCTION, - 'image': self.rand_name('image', prefix=self.prefix) - } - } - ) - function = db_api.get_function(function_id) - execution = self.create_execution(function_id=function_id) - execution_id = execution.id - prepare_execution = self.orchestrator.prepare_execution - prepare_execution.side_effect = exc.OrchestratorException( - 'Exception in prepare_execution' - ) - etcd_util_get_service_url_mock.return_value = None - - self.default_engine.create_execution( - mock.Mock(), execution_id, function_id, 0, runtime_id) - - execution = db_api.get_execution(execution_id) - - self.assertEqual(status.ERROR, execution.status) - self.assertEqual('', execution.logs) - self.assertEqual({'output': 'Function execution failed.'}, - execution.result) - - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_create_execution_package_type_function( - self, - etcd_util_get_service_url_mock - ): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - execution = self.create_execution(function_id=function_id) - execution_id = execution.id - self.default_engine.function_load_check = mock.Mock(return_value='') - etcd_util_get_service_url_mock.return_value = None - self.orchestrator.prepare_execution.return_value = ( - mock.Mock(), 'svc_url') - self.orchestrator.run_execution.return_value = ( - True, - {'success': True, 'logs': 'execution log', - 'output': 'success output'}) - - self.default_engine.create_execution( - mock.Mock(), execution_id, function_id, 0, runtime_id) - - self.default_engine.function_load_check.assert_called_once_with( - function_id, 0, runtime_id) - etcd_util_get_service_url_mock.assert_called_once_with(function_id, 0) - self.orchestrator.prepare_execution.assert_called_once_with( - function_id, 0, 
rlimit=self.rlimit, image=None, - identifier=runtime_id, labels={'runtime_id': runtime_id}, - input=None) - self.orchestrator.run_execution.assert_called_once_with( - execution_id, function_id, 0, rlimit=self.rlimit, input=None, - identifier=runtime_id, service_url='svc_url', entry=function.entry, - trust_id=function.trust_id, timeout=function.timeout) - - execution = db_api.get_execution(execution_id) - - self.assertEqual(execution.status, status.SUCCESS) - self.assertEqual(execution.logs, 'execution log') - self.assertEqual(execution.result, {'output': 'success output'}) - - def test_create_execution_loadcheck_exception(self): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - execution = self.create_execution(function_id=function_id) - execution_id = execution.id - self.default_engine.function_load_check = mock.Mock( - side_effect=exc.OrchestratorException( - 'Exception in scaleup_function' - ) - ) - - self.default_engine.create_execution( - mock.Mock(), execution_id, function_id, 0, runtime_id) - - execution = db_api.get_execution(execution_id) - - self.assertEqual(status.ERROR, execution.status) - self.assertEqual('', execution.logs) - self.assertEqual({'output': 'Function execution failed.'}, - execution.result) - - @mock.patch('qinling.engine.utils.get_request_data') - @mock.patch('qinling.engine.utils.url_request') - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_create_execution_found_service_url( - self, - etcd_util_get_service_url_mock, - engine_utils_url_request_mock, - engine_utils_get_request_data_mock - ): - function = self.create_function() - function_id = function.id - runtime_id = function.runtime_id - execution = self.create_execution(function_id=function_id) - execution_id = execution.id - self.default_engine.function_load_check = mock.Mock(return_value='') - etcd_util_get_service_url_mock.return_value = 'svc_url' - engine_utils_get_request_data_mock.return_value = 'data' - 
engine_utils_url_request_mock.return_value = ( - False, - {'success': False, 'logs': 'execution log', - 'output': 'failed output'}) - - self.default_engine.create_execution( - mock.Mock(), execution_id, function_id, 0, runtime_id, - input='input') - - self.default_engine.function_load_check.assert_called_once_with( - function_id, 0, runtime_id) - etcd_util_get_service_url_mock.assert_called_once_with(function_id, 0) - engine_utils_get_request_data_mock.assert_called_once_with( - mock.ANY, function_id, 0, execution_id, self.rlimit, - 'input', function.entry, function.trust_id, - self.qinling_endpoint, function.timeout) - engine_utils_url_request_mock.assert_called_once_with( - self.default_engine.session, 'svc_url/execute', body='data') - - execution = db_api.get_execution(execution_id) - - self.assertEqual(execution.status, status.FAILED) - self.assertEqual(execution.logs, 'execution log') - self.assertEqual(execution.result, - {'success': False, 'output': 'failed output'}) - - def test_delete_function(self): - function_id = common.generate_unicode_uuid() - - self.default_engine.delete_function(mock.Mock(), function_id) - - self.orchestrator.delete_function.assert_called_once_with( - function_id, 0 - ) - - @mock.patch('qinling.utils.etcd_util.create_service_url') - @mock.patch('qinling.utils.etcd_util.create_worker') - def test_scaleup_function( - self, - etcd_util_create_worker_mock, - etcd_util_create_service_url_mock - ): - function_id = common.generate_unicode_uuid() - runtime_id = common.generate_unicode_uuid() - self.orchestrator.scaleup_function.return_value = (['worker'], 'url') - - self.default_engine.scaleup_function( - mock.Mock(), function_id, 0, runtime_id) - - self.orchestrator.scaleup_function.assert_called_once_with( - function_id, 0, identifier=runtime_id, count=1) - etcd_util_create_worker_mock.assert_called_once_with( - function_id, 'worker', version=0) - etcd_util_create_service_url_mock.assert_called_once_with( - function_id, 'url', version=0) 
- - @mock.patch('qinling.utils.etcd_util.create_service_url') - @mock.patch('qinling.utils.etcd_util.create_worker') - def test_scaleup_function_multiple_workers( - self, - etcd_util_create_worker_mock, - etcd_util_create_service_url_mock - ): - function_id = common.generate_unicode_uuid() - runtime_id = common.generate_unicode_uuid() - self.orchestrator.scaleup_function.return_value = ( - ['worker0', 'worker1'], 'url') - - self.default_engine.scaleup_function( - mock.Mock(), function_id, 0, runtime_id, count=2 - ) - - self.orchestrator.scaleup_function.assert_called_once_with( - function_id, 0, identifier=runtime_id, count=2 - ) - # Two new workers are created. - expected = [mock.call(function_id, 'worker0', version=0), - mock.call(function_id, 'worker1', version=0)] - etcd_util_create_worker_mock.assert_has_calls(expected) - etcd_util_create_service_url_mock.assert_called_once_with( - function_id, 'url', version=0 - ) - - @mock.patch('qinling.utils.etcd_util.delete_worker') - @mock.patch('qinling.utils.etcd_util.get_workers') - def test_scaledown_function( - self, etcd_util_get_workers_mock, etcd_util_delete_workers_mock - ): - function_id = common.generate_unicode_uuid() - etcd_util_get_workers_mock.return_value = [ - 'worker_%d' % i for i in range(4) - ] - - self.default_engine.scaledown_function(mock.Mock(), function_id) - - etcd_util_get_workers_mock.assert_called_once_with( - function_id, 0) - self.orchestrator.delete_worker.assert_called_once_with('worker_0') - etcd_util_delete_workers_mock.assert_called_once_with( - function_id, 'worker_0', version=0 - ) - - @mock.patch('qinling.utils.etcd_util.delete_worker') - @mock.patch('qinling.utils.etcd_util.get_workers') - def test_scaledown_function_multiple_workers( - self, etcd_util_get_workers_mock, etcd_util_delete_workers_mock - ): - function_id = common.generate_unicode_uuid() - etcd_util_get_workers_mock.return_value = [ - 'worker_%d' % i for i in range(4) - ] - - 
self.default_engine.scaledown_function(mock.Mock(), function_id, - count=2) - - etcd_util_get_workers_mock.assert_called_once_with(function_id, 0) - # First two workers will be deleted. - expected = [mock.call('worker_0'), mock.call('worker_1')] - self.orchestrator.delete_worker.assert_has_calls(expected) - self.assertEqual(2, self.orchestrator.delete_worker.call_count) - expected = [ - mock.call(function_id, 'worker_0', version=0), - mock.call(function_id, 'worker_1', version=0) - ] - etcd_util_delete_workers_mock.assert_has_calls(expected) - self.assertEqual(2, etcd_util_delete_workers_mock.call_count) - - @mock.patch('qinling.utils.etcd_util.delete_worker') - @mock.patch('qinling.utils.etcd_util.get_workers') - def test_scaledown_function_leaving_one_worker( - self, etcd_util_get_workers_mock, etcd_util_delete_workers_mock - ): - function_id = common.generate_unicode_uuid() - etcd_util_get_workers_mock.return_value = [ - 'worker_%d' % i for i in range(4) - ] - - self.default_engine.scaledown_function( - mock.Mock(), function_id, count=5) # count > len(workers) - - etcd_util_get_workers_mock.assert_called_once_with(function_id, 0) - # Only the first three workers will be deleted - expected = [ - mock.call('worker_0'), mock.call('worker_1'), mock.call('worker_2') - ] - self.orchestrator.delete_worker.assert_has_calls(expected) - self.assertEqual(3, self.orchestrator.delete_worker.call_count) - expected = [ - mock.call(function_id, 'worker_0', version=0), - mock.call(function_id, 'worker_1', version=0), - mock.call(function_id, 'worker_2', version=0) - ] - etcd_util_delete_workers_mock.assert_has_calls(expected) - self.assertEqual(3, etcd_util_delete_workers_mock.call_count) - - def test_get_runtime_pool(self): - runtime = self.create_runtime() - runtime_id = runtime.id - - self.default_engine.get_runtime_pool(mock.Mock(), runtime_id) - - self.orchestrator.get_pool.assert_called_once_with(runtime_id) diff --git a/qinling/tests/unit/orchestrator/__init__.py 
b/qinling/tests/unit/orchestrator/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/orchestrator/kubernetes/__init__.py b/qinling/tests/unit/orchestrator/kubernetes/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/orchestrator/kubernetes/test_manager.py b/qinling/tests/unit/orchestrator/kubernetes/test_manager.py deleted file mode 100644 index 8ac88b82..00000000 --- a/qinling/tests/unit/orchestrator/kubernetes/test_manager.py +++ /dev/null @@ -1,925 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import datetime -import testtools -from unittest import mock -import yaml - -from oslo_config import cfg - -from qinling import config -from qinling import exceptions as exc -from qinling.orchestrator.kubernetes import manager as k8s_manager -from qinling.tests.unit import base -from qinling.utils import common - -CONF = cfg.CONF -SERVICE_PORT = 9090 -SERVICE_ADDRESS_EXTERNAL = '1.2.3.4' -SERVICE_ADDRESS_INTERNAL = '127.0.0.1' - - -class TestKubernetesManager(base.DbTestCase): - def setUp(self): - super(TestKubernetesManager, self).setUp() - - self.conf = CONF - self.qinling_endpoint = 'http://127.0.0.1:7070' - self.rlimit = { - 'cpu': cfg.CONF.resource_limits.default_cpu, - 'memory_size': cfg.CONF.resource_limits.default_memory - } - self.k8s_v1_api = mock.Mock() - self.k8s_v1_ext = mock.Mock() - clients = {'v1': self.k8s_v1_api, - 'v1extension': self.k8s_v1_ext} - mock.patch( - 'qinling.orchestrator.kubernetes.utils.get_k8s_clients', - return_value=clients - ).start() - self.fake_namespace = self.rand_name('namespace', prefix=self.prefix) - self.override_config('namespace', self.fake_namespace, - config.KUBERNETES_GROUP) - - self.override_config('auth_enable', False, group='pecan') - - namespace = mock.Mock() - namespace.metadata.name = self.fake_namespace - namespaces = mock.Mock() - namespaces.items = [namespace] - self.k8s_v1_api.list_namespace.return_value = namespaces - - self.manager = k8s_manager.KubernetesManager(self.conf, - self.qinling_endpoint) - - def _create_service(self): - port = mock.Mock() - port.node_port = SERVICE_PORT - service = mock.Mock() - service.spec.ports = [port] - return service - - def _create_nodes_with_external_ip(self): - addr1 = mock.Mock() - addr1.type = 'UNKNOWN TYPE' - addr2 = mock.Mock() - addr2.type = 'ExternalIP' - addr2.address = SERVICE_ADDRESS_EXTERNAL - item = mock.Mock() - item.status.addresses = [addr1, addr2] - nodes = mock.Mock() - nodes.items = [item] - return nodes - - def _create_nodes_with_internal_ip(self): 
- addr1 = mock.Mock() - addr1.type = 'InternalIP' - addr1.address = SERVICE_ADDRESS_INTERNAL - addr2 = mock.Mock() - addr2.type = 'UNKNOWN TYPE' - item = mock.Mock() - item.status.addresses = [addr1, addr2] - nodes = mock.Mock() - nodes.items = [item] - return nodes - - def test__ensure_namespace(self): - # self.manager is not used in this test. - namespaces = mock.Mock() - namespaces.items = [] - self.k8s_v1_api.list_namespace.return_value = namespaces - - k8s_manager.KubernetesManager(self.conf, self.qinling_endpoint) - - namespace_body = { - 'apiVersion': 'v1', - 'kind': 'Namespace', - 'metadata': { - 'name': self.fake_namespace, - 'labels': { - 'name': self.fake_namespace - } - }, - } - # setUp also calls list_namespace. - self.assertEqual(2, self.k8s_v1_api.list_namespace.call_count) - self.k8s_v1_api.create_namespace.assert_called_once_with( - namespace_body) - - def test__ensure_namespace_not_create_namespace(self): - # self.manager is not used in this test. - item = mock.Mock() - item.metadata.name = self.fake_namespace - namespaces = mock.Mock() - namespaces.items = [item] - self.k8s_v1_api.list_namespace.return_value = namespaces - - k8s_manager.KubernetesManager(self.conf, self.qinling_endpoint) - - # setUp also calls list_namespace. 
- self.assertEqual(2, self.k8s_v1_api.list_namespace.call_count) - self.k8s_v1_api.create_namespace.assert_not_called() - - def test_create_pool(self): - ret = mock.Mock() - ret.status.replicas = 5 - ret.status.available_replicas = 5 - self.k8s_v1_ext.read_namespaced_deployment.return_value = ret - fake_replicas = 5 - self.override_config('replicas', fake_replicas, - config.KUBERNETES_GROUP) - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - fake_image = self.rand_name('image', prefix=self.prefix) - - self.manager.create_pool(fake_deployment_name, fake_image) - - deployment_body = self.manager.deployment_template.render( - { - 'name': fake_deployment_name, - 'labels': {'runtime_id': fake_deployment_name}, - 'replicas': fake_replicas, - 'container_name': 'worker', - 'image': fake_image, - 'sidecar_image': CONF.engine.sidecar_image, - 'trusted': 'true' - } - ) - self.k8s_v1_ext.create_namespaced_deployment.assert_called_once_with( - body=yaml.safe_load(deployment_body), - namespace=self.fake_namespace, - async_req=False) - self.k8s_v1_ext.read_namespaced_deployment.assert_called_once_with( - fake_deployment_name, self.fake_namespace) - - def test_create_pool_wait_deployment_available(self): - ret1 = mock.Mock() - ret1.status.replicas = 0 - ret2 = mock.Mock() - ret2.status.replicas = 3 - ret2.status.available_replicas = 1 - ret3 = mock.Mock() - ret3.status.replicas = 3 - ret3.status.available_replicas = 3 - self.k8s_v1_ext.read_namespaced_deployment.side_effect = [ - ret1, ret2, ret3 - ] - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - fake_image = self.rand_name('image', prefix=self.prefix) - - self.manager.create_pool(fake_deployment_name, fake_image) - - self.assertEqual( - 3, self.k8s_v1_ext.read_namespaced_deployment.call_count) - - @testtools.skip("Default timeout is too long.") - def test_create_pool_wait_deployment_timeout(self): - ret = mock.Mock() - ret.status.replicas = 0 - 
self.k8s_v1_ext.read_namespaced_deployment.return_value = ret - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - fake_image = self.rand_name('image', prefix=self.prefix) - - self.assertRaisesRegex( - exc.OrchestratorException, - "^Deployment %s not ready\.$" % fake_deployment_name, - self.manager.create_pool, - fake_deployment_name, fake_image) - self.assertLess( - 200, # Default timeout is 600s with wait interval set to 2s. - self.k8s_v1_ext.read_namespaced_deployment.call_count) - - def test_delete_pool(self): - # Deleting namespaced service is also tested in this. - svc1 = mock.Mock() - svc1_name = self.rand_name('service', prefix=self.prefix) - svc1.metadata.name = svc1_name - svc2 = mock.Mock() - svc2_name = self.rand_name('service', prefix=self.prefix) - svc2.metadata.name = svc2_name - services = mock.Mock() - services.items = [svc1, svc2] - self.k8s_v1_api.list_namespaced_service.return_value = services - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - - self.manager.delete_pool(fake_deployment_name) - - del_rep = self.k8s_v1_ext.delete_collection_namespaced_replica_set - del_rep.assert_called_once_with( - self.fake_namespace, - label_selector='runtime_id=%s' % fake_deployment_name) - delete_service_calls = [ - mock.call(svc1_name, self.fake_namespace), - mock.call(svc2_name, self.fake_namespace), - ] - self.k8s_v1_api.delete_namespaced_service.assert_has_calls( - delete_service_calls) - self.assertEqual( - 2, self.k8s_v1_api.delete_namespaced_service.call_count) - del_dep = self.k8s_v1_ext.delete_collection_namespaced_deployment - del_dep.assert_called_once_with( - self.fake_namespace, - label_selector='runtime_id=%s' % fake_deployment_name, - field_selector='metadata.name=%s' % fake_deployment_name) - del_pod = self.k8s_v1_api.delete_collection_namespaced_pod - del_pod.assert_called_once_with( - self.fake_namespace, - label_selector='runtime_id=%s' % fake_deployment_name) - - def test_update_pool(self): - 
fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - image = self.rand_name('image', prefix=self.prefix) - body = { - 'spec': { - 'template': { - 'spec': { - 'containers': [ - { - 'name': 'worker', - 'image': image - } - ] - } - } - } - } - ret = mock.Mock() - ret.status.unavailable_replicas = None - self.k8s_v1_ext.read_namespaced_deployment.return_value = ret - - update_result = self.manager.update_pool(fake_deployment_name, - image=image) - - self.assertTrue(update_result) - self.k8s_v1_ext.patch_namespaced_deployment.assert_called_once_with( - fake_deployment_name, self.fake_namespace, body) - read_status = self.k8s_v1_ext.read_namespaced_deployment - read_status.assert_called_once_with(fake_deployment_name, - self.fake_namespace) - - def test_update_pool_retry(self): - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - image = self.rand_name('image', prefix=self.prefix) - ret1 = mock.Mock() - ret1.status.unavailable_replicas = 1 - ret2 = mock.Mock() - ret2.status.unavailable_replicas = None - self.k8s_v1_ext.read_namespaced_deployment.side_effect = [ret1, ret2] - - update_result = self.manager.update_pool(fake_deployment_name, - image=image) - - self.assertTrue(update_result) - self.k8s_v1_ext.patch_namespaced_deployment.assert_called_once_with( - fake_deployment_name, self.fake_namespace, mock.ANY) - read_status = self.k8s_v1_ext.read_namespaced_deployment - self.assertEqual(2, read_status.call_count) - - def test_get_pool(self): - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - - ret = mock.Mock() - ret.status.replicas = 3 - self.k8s_v1_ext.read_namespaced_deployment.return_value = ret - - list_pod_ret = mock.Mock() - list_pod_ret.items = [mock.Mock()] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - - pool_info = self.manager.get_pool(fake_deployment_name) - - expected = {"total": 3, "available": 1} - self.assertEqual(expected, pool_info) - - def 
test_get_pool_not_ready(self): - fake_deployment_name = self.rand_name('deployment', prefix=self.prefix) - - ret = mock.Mock() - ret.status.replicas = None - self.k8s_v1_ext.read_namespaced_deployment.return_value = ret - - pool_info = self.manager.get_pool(fake_deployment_name) - - expected = {"total": 0, "available": 0} - self.assertEqual(expected, pool_info) - - def test_prepare_execution_no_image(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = {'pod1_key1': 'pod1_value1'} - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - self.k8s_v1_api.create_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_external_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.prepare_execution( - function_id, 0, rlimit=None, image=None, identifier=runtime_id, - labels={'runtime_id': runtime_id}) - - self.assertEqual(pod.metadata.name, pod_names) - self.assertEqual( - 'http://%s:%s' % (SERVICE_ADDRESS_EXTERNAL, SERVICE_PORT), - service_url) - - # in _choose_available_pods - self.k8s_v1_api.list_namespaced_pod.assert_called_once_with( - self.fake_namespace, - label_selector='function_id=%s,function_version=0' % (function_id) - ) - - # in _prepare_pod -> _update_pod_label - pod_labels = { - 'pod1_key1': 'pod1_value1', - 'function_id': function_id, - 'function_version': '0' - } - body = {'metadata': {'labels': pod_labels}} - self.k8s_v1_api.patch_namespaced_pod.assert_called_once_with( - pod.metadata.name, self.fake_namespace, body) - - # in _prepare_pod - service_body = self.manager.service_template.render( - { - 'service_name': 'service-%s-0' % function_id, - 'labels': {'function_id': function_id, - 'function_version': '0', - 'runtime_id': runtime_id}, - 'selector': 
pod_labels - } - ) - self.k8s_v1_api.create_namespaced_service.assert_called_once_with( - self.fake_namespace, yaml.safe_load(service_body)) - - def test_prepare_execution_with_image(self): - function_id = common.generate_unicode_uuid() - image = self.rand_name('image', prefix=self.prefix) - identifier = ('%s-%s' % - (common.generate_unicode_uuid(dashed=False), function_id) - )[:63] - - pod_name, url = self.manager.prepare_execution( - function_id, 0, rlimit=self.rlimit, image=image, - identifier=identifier) - - self.assertEqual(identifier, pod_name) - self.assertIsNone(url) - - # in _create_pod - pod_body = self.manager.pod_template.render( - { - 'pod_name': identifier, - 'labels': {'function_id': function_id}, - 'pod_image': image, - 'input': [], - 'req_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'req_memory': str(cfg.CONF.resource_limits.default_memory), - 'limit_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'limit_memory': str(cfg.CONF.resource_limits.default_memory) - } - ) - self.k8s_v1_api.create_namespaced_pod.assert_called_once_with( - self.fake_namespace, body=yaml.safe_load(pod_body)) - - def test_prepare_execution_with_image_function_input(self): - function_id = common.generate_unicode_uuid() - image = self.rand_name('image', prefix=self.prefix) - identifier = ('%s-%s' % ( - common.generate_unicode_uuid(dashed=False), - function_id) - )[:63] - fake_input = {'__function_input': 'input_item1 input_item2'} - - pod_name, url = self.manager.prepare_execution( - function_id, 0, rlimit=self.rlimit, image=image, - identifier=identifier, input=fake_input) - - # in _create_pod - pod_body = self.manager.pod_template.render( - { - 'pod_name': identifier, - 'labels': {'function_id': function_id}, - 'pod_image': image, - 'input': ['input_item1', 'input_item2'], - 'req_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'req_memory': str(cfg.CONF.resource_limits.default_memory), - 'limit_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'limit_memory': 
str(cfg.CONF.resource_limits.default_memory) - } - ) - self.k8s_v1_api.create_namespaced_pod.assert_called_once_with( - self.fake_namespace, body=yaml.safe_load(pod_body)) - - def test_prepare_execution_with_image_json_input(self): - function_id = common.generate_unicode_uuid() - image = self.rand_name('image', prefix=self.prefix) - identifier = ('%s-%s' % ( - common.generate_unicode_uuid(dashed=False), - function_id) - )[:63] - fake_input = '["input_item3", "input_item4"]' - - pod_name, url = self.manager.prepare_execution( - function_id, 0, rlimit=self.rlimit, image=image, - identifier=identifier, input=fake_input) - - # in _create_pod - pod_body = self.manager.pod_template.render( - { - 'pod_name': identifier, - 'labels': {'function_id': function_id}, - 'pod_image': image, - 'input': ['input_item3', 'input_item4'], - 'req_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'req_memory': str(cfg.CONF.resource_limits.default_memory), - 'limit_cpu': str(cfg.CONF.resource_limits.default_cpu), - 'limit_memory': str(cfg.CONF.resource_limits.default_memory) - } - ) - self.k8s_v1_api.create_namespaced_pod.assert_called_once_with( - self.fake_namespace, body=yaml.safe_load(pod_body)) - - def test_prepare_execution_with_image_pod_failed(self): - function_id = common.generate_unicode_uuid() - image = self.rand_name('image', prefix=self.prefix) - identifier = ( - '%s-%s' % (common.generate_unicode_uuid(dashed=True), function_id) - )[:63] - self.k8s_v1_api.create_namespaced_pod.side_effect = RuntimeError - - self.assertRaises( - exc.OrchestratorException, - self.manager.prepare_execution, - function_id, - 0, - rlimit=self.rlimit, - image=image, - identifier=identifier, - ) - - def test_prepare_execution_not_image_no_worker_available(self): - ret_pods = mock.Mock() - ret_pods.items = [] - self.k8s_v1_api.list_namespaced_pod.return_value = ret_pods - function_id = common.generate_unicode_uuid() - runtime_id = common.generate_unicode_uuid() - labels = {'runtime_id': runtime_id} 
- - self.assertRaisesRegex( - exc.OrchestratorException, - "^Execution preparation failed\.$", - self.manager.prepare_execution, - function_id, 0, rlimit=None, image=None, - identifier=runtime_id, labels=labels) - - # in _choose_available_pods - list_calls = [ - mock.call( - self.fake_namespace, - label_selector=('function_id=%s,function_version=0' % - function_id) - ), - mock.call( - self.fake_namespace, - label_selector='!function_id,runtime_id=%s' % runtime_id - ) - ] - self.k8s_v1_api.list_namespaced_pod.assert_has_calls(list_calls) - self.assertEqual(2, self.k8s_v1_api.list_namespaced_pod.call_count) - - def test_prepare_execution_service_already_exists(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = {'pod1_key1': 'pod1_value1'} - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - exception = RuntimeError() - exception.status = 409 - self.k8s_v1_api.create_namespaced_service.side_effect = exception - self.k8s_v1_api.read_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_external_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.prepare_execution( - function_id, 0, rlimit=None, image=None, identifier=runtime_id, - labels={'runtime_id': runtime_id}) - - # in _prepare_pod - self.k8s_v1_api.read_namespaced_service.assert_called_once_with( - 'service-%s-0' % function_id, self.fake_namespace) - - def test_prepare_execution_create_service_failed(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = None - ret_pods = mock.Mock() - ret_pods.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = ret_pods - exception = RuntimeError() - exception.status = 500 - 
self.k8s_v1_api.create_namespaced_service.side_effect = exception - function_id = common.generate_unicode_uuid() - runtime_id = common.generate_unicode_uuid() - - with mock.patch.object( - self.manager, 'delete_function' - ) as delete_function_mock: - self.assertRaisesRegex( - exc.OrchestratorException, - '^Execution preparation failed\.$', - self.manager.prepare_execution, - function_id, 0, rlimit=None, image=None, identifier=runtime_id, - labels={'runtime_id': runtime_id}) - - delete_function_mock.assert_called_once_with( - function_id, - 0, - { - 'runtime_id': runtime_id, - 'function_id': function_id, - 'function_version': '0' - } - ) - - def test_prepare_execution_service_internal_ip(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = {'pod1_key1': 'pod1_value1'} - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - self.k8s_v1_api.create_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_internal_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.prepare_execution( - function_id, 0, rlimit=None, image=None, identifier=runtime_id, - labels={'runtime_id': runtime_id}) - - self.assertEqual(pod.metadata.name, pod_names) - self.assertEqual( - 'http://%s:%s' % (SERVICE_ADDRESS_INTERNAL, SERVICE_PORT), - service_url) - - def test_run_execution_image_type_function(self): - pod = mock.Mock() - status = mock.Mock() - status.state.terminated.finished_at = datetime.datetime(2018, 9, 4, 10, - 1, 50) - status.state.terminated.started_at = datetime.datetime(2018, 9, 4, 10, - 1, 40) - pod.status.phase = 'Succeeded' - pod.status.container_statuses = [status] - self.k8s_v1_api.read_namespaced_pod.return_value = pod - fake_log = 'fake log' - 
self.k8s_v1_api.read_namespaced_pod_log.return_value = fake_log - execution_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - identifier = 'fake_identifier' - - result, output = self.manager.run_execution(execution_id, function_id, - 0, identifier=identifier) - - self.k8s_v1_api.read_namespaced_pod.assert_called_once_with( - identifier, self.fake_namespace) - self.k8s_v1_api.read_namespaced_pod_log.assert_called_once_with( - identifier, self.fake_namespace) - self.assertTrue(result) - - expected_output = {'duration': 10, 'logs': fake_log} - self.assertEqual(expected_output, output) - - def test_run_execution_image_type_function_retry(self): - pod1 = mock.Mock() - pod1.status.phase = '' - pod2 = mock.Mock() - status = mock.Mock() - status.state.terminated.finished_at = datetime.datetime(2018, 9, 4, 10, - 1, 50) - status.state.terminated.started_at = datetime.datetime(2018, 9, 4, 10, - 1, 40) - pod2.status.phase = 'Succeeded' - pod2.status.container_statuses = [status] - self.k8s_v1_api.read_namespaced_pod.side_effect = [pod1, pod2] - fake_log = 'fake log' - self.k8s_v1_api.read_namespaced_pod_log.return_value = fake_log - execution_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - result, output = self.manager.run_execution(execution_id, function_id, - 0, timeout=5) - - self.assertEqual(2, self.k8s_v1_api.read_namespaced_pod.call_count) - self.k8s_v1_api.read_namespaced_pod_log.assert_called_once_with( - None, self.fake_namespace) - self.assertTrue(result) - - expected_output = {'duration': 10, 'logs': fake_log} - self.assertEqual(expected_output, output) - - def test_run_execution_image_type_function_timeout(self): - execution_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - pod1 = mock.Mock() - pod1.status.phase = '' - self.k8s_v1_api.read_namespaced_pod.return_value = pod1 - - result, output = self.manager.run_execution( - execution_id, function_id, 0, - 
identifier='fake_identifier', - timeout=1 - ) - - self.assertFalse(result) - - expected_output = { - 'output': 'Function execution timeout.', - 'duration': 1 - } - self.assertEqual(expected_output, output) - - def test_run_execution_image_type_function_read_pod_exception(self): - self.k8s_v1_api.read_namespaced_pod.side_effect = RuntimeError - execution_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - result, output = self.manager.run_execution(execution_id, function_id, - 0, timeout=5) - - self.k8s_v1_api.read_namespaced_pod.assert_called_once_with( - None, self.fake_namespace) - self.k8s_v1_api.read_namespaced_pod_log.assert_not_called() - self.assertFalse(result) - - expected_output = { - 'output': 'Function execution failed.', - 'duration': 0 - } - self.assertEqual(expected_output, output) - - @mock.patch('qinling.engine.utils.url_request') - def test_run_execution_version_0(self, mock_request): - mock_request.return_value = (True, 'fake output') - execution_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - timeout = 3 - - result, output = self.manager.run_execution( - execution_id, function_id, 0, rlimit=self.rlimit, - service_url='FAKE_URL', timeout=timeout - ) - - download_url = ('http://127.0.0.1:7070/v1/functions/%s?download=true' - % function_id) - data = { - 'execution_id': execution_id, - 'cpu': self.rlimit['cpu'], - 'memory_size': self.rlimit['memory_size'], - 'input': None, - 'function_id': function_id, - 'function_version': 0, - 'entry': 'main.main', - 'download_url': download_url, - 'request_id': self.ctx.request_id, - 'timeout': timeout, - } - - mock_request.assert_called_once_with( - self.manager.session, 'FAKE_URL/execute', body=data - ) - - def test_delete_function(self): - # Deleting namespaced service is also tested in this. 
- svc1 = mock.Mock() - svc1_name = self.rand_name('service', prefix=self.prefix) - svc1.metadata.name = svc1_name - svc2 = mock.Mock() - svc2_name = self.rand_name('service', prefix=self.prefix) - svc2.metadata.name = svc2_name - services = mock.Mock() - services.items = [svc1, svc2] - self.k8s_v1_api.list_namespaced_service.return_value = services - function_id = common.generate_unicode_uuid() - - self.manager.delete_function(function_id, 0) - - args, kwargs = self.k8s_v1_api.list_namespaced_service.call_args - self.assertIn(self.fake_namespace, args) - self.assertIn( - "function_id=%s" % function_id, - kwargs.get("label_selector") - ) - self.assertIn( - "function_version=0", - kwargs.get("label_selector") - ) - - delete_service_calls = [ - mock.call(svc1_name, self.fake_namespace), - mock.call(svc2_name, self.fake_namespace) - ] - self.k8s_v1_api.delete_namespaced_service.assert_has_calls( - delete_service_calls) - self.assertEqual( - 2, self.k8s_v1_api.delete_namespaced_service.call_count - ) - - args, kwargs = self.k8s_v1_api.delete_collection_namespaced_pod. 
\ - call_args - self.assertIn(self.fake_namespace, args) - self.assertIn( - "function_id=%s" % function_id, - kwargs.get("label_selector") - ) - self.assertIn( - "function_version=0", - kwargs.get("label_selector") - ) - - def test_delete_function_with_labels(self): - services = mock.Mock() - services.items = [] - labels = {'key1': 'value1', 'key2': 'value2'} - selector = common.convert_dict_to_string(labels) - self.k8s_v1_api.list_namespaced_service.return_value = services - function_id = common.generate_unicode_uuid() - - self.manager.delete_function(function_id, 0, labels=labels) - - self.k8s_v1_api.list_namespaced_service.assert_called_once_with( - self.fake_namespace, label_selector=selector) - self.k8s_v1_api.delete_namespaced_service.assert_not_called() - delete_pod = self.k8s_v1_api.delete_collection_namespaced_pod - delete_pod.assert_called_once_with( - self.fake_namespace, label_selector=selector) - - def test_scaleup_function(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = {'pod1_key1': 'pod1_value1'} - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - self.k8s_v1_api.create_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_external_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.scaleup_function( - function_id, 0, identifier=runtime_id - ) - - self.assertEqual([pod.metadata.name], pod_names) - self.assertEqual( - 'http://%s:%s' % (SERVICE_ADDRESS_EXTERNAL, SERVICE_PORT), - service_url) - - # in _choose_available_pods - self.k8s_v1_api.list_namespaced_pod.assert_called_once_with( - self.fake_namespace, - label_selector='!function_id,runtime_id=%s' % runtime_id) - - # in _prepare_pod -> _update_pod_label - pod_labels = { - 'pod1_key1': 
'pod1_value1', - 'function_id': function_id, - 'function_version': '0' - } - body = {'metadata': {'labels': pod_labels}} - self.k8s_v1_api.patch_namespaced_pod.assert_called_once_with( - pod.metadata.name, self.fake_namespace, body) - - # in _prepare_pod - service_body = self.manager.service_template.render( - { - 'service_name': 'service-%s-0' % function_id, - 'labels': {'function_id': function_id, - 'function_version': 0, - 'runtime_id': runtime_id}, - 'selector': pod_labels - } - ) - self.k8s_v1_api.create_namespaced_service.assert_called_once_with( - self.fake_namespace, yaml.safe_load(service_body)) - - def test_scaleup_function_not_enough_workers(self): - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - ret_pods = mock.Mock() - ret_pods.items = [mock.Mock()] - self.k8s_v1_api.list_namespaced_pod.return_value = ret_pods - - self.assertRaisesRegex( - exc.OrchestratorException, - "^Not enough workers available\.$", - self.manager.scaleup_function, - function_id, 0, identifier=runtime_id, count=2) - - def test_scaleup_function_service_already_exists(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = {'pod1_key1': 'pod1_value1'} - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - exception = RuntimeError() - exception.status = 409 - self.k8s_v1_api.create_namespaced_service.side_effect = exception - self.k8s_v1_api.read_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_external_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.scaleup_function( - function_id, 0, identifier=runtime_id) - - # in _prepare_pod - self.k8s_v1_api.read_namespaced_service.assert_called_once_with( - 'service-%s-0' % function_id, 
self.fake_namespace) - - def test_scaleup_function_service_create_failed(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = None - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - exception = RuntimeError() - exception.status = 500 - self.k8s_v1_api.create_namespaced_service.side_effect = exception - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - self.assertRaises( - RuntimeError, - self.manager.scaleup_function, - function_id, 0, identifier=runtime_id) - - def test_scaleup_function_service_internal_ip(self): - pod = mock.Mock() - pod.metadata.name = self.rand_name('pod', prefix=self.prefix) - pod.metadata.labels = None - list_pod_ret = mock.Mock() - list_pod_ret.items = [pod] - self.k8s_v1_api.list_namespaced_pod.return_value = list_pod_ret - self.k8s_v1_api.create_namespaced_service.return_value = ( - self._create_service() - ) - self.k8s_v1_api.list_node.return_value = ( - self._create_nodes_with_internal_ip() - ) - runtime_id = common.generate_unicode_uuid() - function_id = common.generate_unicode_uuid() - - pod_names, service_url = self.manager.scaleup_function( - function_id, 0, identifier=runtime_id) - - self.assertEqual([pod.metadata.name], pod_names) - self.assertEqual( - 'http://%s:%s' % (SERVICE_ADDRESS_INTERNAL, SERVICE_PORT), - service_url) - - def test_delete_worker(self): - pod_name = self.rand_name('pod', prefix=self.prefix) - - self.manager.delete_worker(pod_name) - - self.k8s_v1_api.delete_namespaced_pod.assert_called_once_with( - pod_name, self.fake_namespace - ) diff --git a/qinling/tests/unit/services/__init__.py b/qinling/tests/unit/services/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/services/test_periodics.py b/qinling/tests/unit/services/test_periodics.py deleted file mode 100644 index 4c02ebb1..00000000 --- 
a/qinling/tests/unit/services/test_periodics.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from datetime import datetime -from datetime import timedelta -import time -from unittest import mock - -from oslo_config import cfg - -from qinling import context -from qinling.db import api as db_api -from qinling.services import periodics -from qinling import status -from qinling.tests.unit import base - -CONF = cfg.CONF - - -class TestPeriodics(base.DbTestCase): - def setUp(self): - super(TestPeriodics, self).setUp() - self.override_config('auth_enable', False, group='pecan') - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_handle_function_service_no_function_version(self, mock_etcd_url, - mock_etcd_delete): - db_func = self.create_function() - function_id = db_func.id - # Update function to simulate function execution - db_api.update_function(function_id, {'count': 1}) - time.sleep(1.5) - - mock_etcd_url.return_value = 'http://localhost:37718' - self.override_config('function_service_expiration', 1, 'engine') - mock_engine = mock.Mock() - - periodics.handle_function_service_expiration(self.ctx, mock_engine) - - mock_engine.delete_function.assert_called_once_with( - self.ctx, function_id, 0 - ) - mock_etcd_delete.assert_called_once_with(function_id, 0) - - @mock.patch('qinling.utils.etcd_util.delete_function') - 
@mock.patch('qinling.utils.etcd_util.get_service_url') - def test_handle_function_service_with_function_versions(self, mock_srv_url, - mock_etcd_delete): - db_func = self.create_function() - function_id = db_func.id - self.create_function_version(0, function_id, description="new_version") - db_api.update_function_version(function_id, 1, count=1) - time.sleep(1.5) - - self.override_config('function_service_expiration', 1, 'engine') - - # NOTE(huntxu): although we didn't create any execution using version 0 - # of the function, it is updated as a new version is created. So the - # call to get_service_url with version 0 should return None as there is - # not any worker for function version 0. - def mock_srv_url_side_effect(function_id, function_version): - return 'http://localhost:37718' if function_version != 0 else None - - mock_srv_url.side_effect = mock_srv_url_side_effect - mock_engine = mock.Mock() - - periodics.handle_function_service_expiration(self.ctx, mock_engine) - - mock_engine.delete_function.assert_called_once_with( - self.ctx, function_id, 1 - ) - mock_etcd_delete.assert_called_once_with(function_id, 1) - - @mock.patch('qinling.utils.etcd_util.delete_function') - @mock.patch('qinling.utils.etcd_util.get_service_url') - def test_handle_function_service_with_versioned_function_version_0( - self, mock_srv_url, mock_etcd_delete - ): - # This case tests that if a function has multiple versions, service - # which serves executions of function version 0 is correctly handled - # when expired. 
- db_func = self.create_function() - function_id = db_func.id - self.create_function_version(0, function_id, description="new_version") - # Simulate an execution using version 0 - db_api.update_function(function_id, {'count': 1}) - time.sleep(1.5) - - self.override_config('function_service_expiration', 1, 'engine') - mock_srv_url.return_value = 'http://localhost:37718' - mock_engine = mock.Mock() - - periodics.handle_function_service_expiration(self.ctx, mock_engine) - - mock_engine.delete_function.assert_called_once_with( - self.ctx, function_id, 0 - ) - mock_etcd_delete.assert_called_once_with(function_id, 0) - - @mock.patch('qinling.utils.jobs.get_next_execution_time') - def test_job_handler(self, mock_get_next): - db_func = self.create_function() - function_id = db_func.id - - self.assertEqual(0, db_func.count) - - now = datetime.utcnow() - db_job = self.create_job( - function_id=function_id, - status=status.RUNNING, - next_execution_time=now, - count=2 - ) - job_id = db_job.id - - e_client = mock.Mock() - mock_get_next.return_value = now + timedelta(seconds=1) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - db_job = db_api.get_job(job_id) - self.assertEqual(1, db_job.count) - db_func = db_api.get_function(function_id) - self.assertEqual(1, db_func.count) - db_execs = db_api.get_executions(function_id=function_id) - self.assertEqual(1, len(db_execs)) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - db_job = db_api.get_job(job_id) - self.assertEqual(0, db_job.count) - self.assertEqual(status.DONE, db_job.status) - db_func = db_api.get_function(function_id) - self.assertEqual(2, db_func.count) - db_execs = db_api.get_executions(function_id=function_id) - self.assertEqual(2, len(db_execs)) - - @mock.patch('qinling.utils.jobs.get_next_execution_time') - def test_job_handler_with_version(self, mock_next_time): - db_func = self.create_function() - function_id = db_func.id - new_version = 
db_api.increase_function_version(function_id, 0) - - self.assertEqual(0, new_version.count) - - now = datetime.utcnow() - db_job = self.create_job( - function_id, - function_version=1, - status=status.RUNNING, - next_execution_time=now, - count=2 - ) - job_id = db_job.id - - e_client = mock.Mock() - # It doesn't matter what's the returned value, but need to be in - # datetime type. - mock_next_time.return_value = now + timedelta(seconds=1) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - db_job = db_api.get_job(job_id) - self.assertEqual(1, db_job.count) - db_func = db_api.get_function(function_id) - self.assertEqual(0, db_func.count) - db_version = db_api.get_function_version(function_id, 1) - self.assertEqual(1, db_version.count) - db_execs = db_api.get_executions(function_id=function_id, - function_version=1) - self.assertEqual(1, len(db_execs)) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - db_job = db_api.get_job(job_id) - self.assertEqual(0, db_job.count) - self.assertEqual(status.DONE, db_job.status) - db_func = db_api.get_function(function_id) - self.assertEqual(0, db_func.count) - db_version = db_api.get_function_version(function_id, 1) - self.assertEqual(2, db_version.count) - db_execs = db_api.get_executions(function_id=function_id, - function_version=1) - self.assertEqual(2, len(db_execs)) - - @mock.patch('qinling.utils.jobs.get_next_execution_time') - def test_job_handler_with_alias(self, mock_next_time): - e_client = mock.Mock() - now = datetime.utcnow() - # It doesn't matter what's the returned value, but need to be in - # datetime type. - mock_next_time.return_value = now + timedelta(seconds=1) - - # Create a alias for a function. 
- alias_name = self.rand_name(name="alias", prefix=self.prefix) - db_func = self.create_function() - function_id = db_func.id - db_api.create_function_alias(name=alias_name, function_id=function_id) - - self.create_job( - function_alias=alias_name, - status=status.RUNNING, - next_execution_time=now, - ) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - # Create function version 1 and update the alias. - db_api.increase_function_version(function_id, 0) - db_api.update_function_alias(alias_name, function_version=1) - - periodics.handle_job(e_client) - context.set_ctx(self.ctx) - - db_func = db_api.get_function(function_id) - self.assertEqual(1, db_func.count) - db_version = db_api.get_function_version(function_id, 1) - self.assertEqual(1, db_version.count) - db_execs = db_api.get_executions(function_id=function_id) - self.assertEqual(2, len(db_execs)) diff --git a/qinling/tests/unit/storage/__init__.py b/qinling/tests/unit/storage/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/storage/test_file_system.py b/qinling/tests/unit/storage/test_file_system.py deleted file mode 100644 index 5a0092a4..00000000 --- a/qinling/tests/unit/storage/test_file_system.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -from unittest import mock - -from oslo_config import cfg - -from qinling import config -from qinling import exceptions as exc -from qinling.storage import file_system -from qinling.tests.unit import base -from qinling.utils import common - -CONF = cfg.CONF -FAKE_STORAGE_PATH = 'TMP_DIR' - - -class TestFileSystemStorage(base.BaseTest): - def setUp(self): - super(TestFileSystemStorage, self).setUp() - CONF.register_opts(config.storage_opts, config.STORAGE_GROUP) - self.override_config('file_system_dir', FAKE_STORAGE_PATH, 'storage') - self.project_id = base.DEFAULT_PROJECT_ID - self.storage = file_system.FileSystemStorage(CONF) - - @mock.patch('oslo_utils.fileutils.ensure_tree') - @mock.patch('os.rename') - @mock.patch('qinling.storage.file_system.open') - @mock.patch('zipfile.is_zipfile') - def test_store(self, is_zipfile_mock, open_mock, rename_mock, - ensure_tree_mock): - is_zipfile_mock.return_value = True - fake_fd = mock.Mock() - open_mock.return_value.__enter__.return_value = fake_fd - function = self.rand_name('function', prefix='TestFileSystemStorage') - # For python3, data should be encoded into bytes before hashing. 
- function_data = "Some data".encode('utf8') - md5 = common.md5(content=function_data) - - package_updated, ret_md5 = self.storage.store( - self.project_id, function, function_data - ) - - self.assertTrue(package_updated) - self.assertEqual(md5, ret_md5) - - temp_package_path = os.path.join(FAKE_STORAGE_PATH, self.project_id, - '%s.zip.new' % function) - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - md5) - ) - ensure_tree_mock.assert_called_once_with( - os.path.join(FAKE_STORAGE_PATH, self.project_id) - ) - fake_fd.write.assert_called_once_with(function_data) - is_zipfile_mock.assert_called_once_with(temp_package_path) - rename_mock.assert_called_once_with(temp_package_path, package_path) - - @mock.patch('oslo_utils.fileutils.ensure_tree') - @mock.patch('os.path.exists') - def test_store_zip_exists(self, exists_mock, ensure_tree_mock): - function = self.rand_name('function', prefix='TestFileSystemStorage') - function_data = "Some data".encode('utf8') - md5 = common.md5(content=function_data) - exists_mock.return_value = True - - package_updated, ret_md5 = self.storage.store( - self.project_id, function, function_data - ) - - self.assertFalse(package_updated) - self.assertEqual(md5, ret_md5) - - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - md5) - ) - - exists_mock.assert_called_once_with(package_path) - - @mock.patch('oslo_utils.fileutils.ensure_tree') - def test_store_md5_mismatch(self, ensure_tree_mock): - function = self.rand_name('function', prefix='TestFileSystemStorage') - # For python3, data should be encoded into bytes before hashing. 
- function_data = "Some data".encode('utf8') - not_a_md5sum = "Not a md5sum" - - self.assertRaisesRegex( - exc.InputException, - "^Package md5 mismatch\.$", - self.storage.store, - self.project_id, function, function_data, md5sum=not_a_md5sum) - - ensure_tree_mock.assert_called_once_with( - os.path.join(FAKE_STORAGE_PATH, self.project_id)) - - @mock.patch('oslo_utils.fileutils.delete_if_exists') - @mock.patch('oslo_utils.fileutils.ensure_tree') - @mock.patch('qinling.storage.file_system.open') - @mock.patch('zipfile.is_zipfile') - def test_store_invalid_zip_package( - self, is_zipfile_mock, open_mock, - ensure_tree_mock, delete_if_exists_mock - ): - is_zipfile_mock.return_value = False - fake_fd = mock.Mock() - open_mock.return_value.__enter__.return_value = fake_fd - function = self.rand_name('function', prefix='TestFileSystemStorage') - # For python3, data should be encoded into bytes before hashing. - function_data = "Some data".encode('utf8') - - self.assertRaisesRegex( - exc.InputException, - "^Package is not a valid ZIP package\.$", - self.storage.store, - self.project_id, function, function_data) - - ensure_tree_mock.assert_called_once_with( - os.path.join(FAKE_STORAGE_PATH, self.project_id)) - fake_fd.write.assert_called_once_with(function_data) - delete_if_exists_mock.assert_called_once_with( - os.path.join(FAKE_STORAGE_PATH, self.project_id, - '%s.zip.new' % function)) - - @mock.patch('os.path.exists') - @mock.patch('qinling.storage.file_system.open') - def test_retrieve(self, open_mock, exists_mock): - exists_mock.return_value = True - fake_fd = mock.Mock() - open_mock.return_value = fake_fd - function = self.rand_name('function', prefix='TestFileSystemStorage') - - ret = self.storage.retrieve(self.project_id, function, "fake_md5") - - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - "fake_md5") - ) - exists_mock.assert_called_once_with(package_path) - 
open_mock.assert_called_once_with(package_path, 'rb') - self.assertEqual(fake_fd, ret) - - @mock.patch('os.path.exists') - def test_retrieve_package_not_found(self, exists_mock): - exists_mock.return_value = False - function = self.rand_name('function', prefix='TestFileSystemStorage') - - self.assertRaisesRegex( - exc.StorageNotFoundException, - "^Package of function %s for project %s not found\.$" % ( - function, self.project_id), - self.storage.retrieve, - self.project_id, - function, - "fake_md5" - ) - - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - "fake_md5") - ) - exists_mock.assert_called_once_with(package_path) - - @mock.patch('qinling.storage.file_system.open') - @mock.patch('os.path.exists') - @mock.patch('os.listdir') - def test_retrieve_version(self, mock_list, mock_exist, mock_open): - function = "fake_function_id" - version = 1 - md5 = "md5" - mock_list.return_value = ["%s_%s_%s.zip" % (function, version, md5)] - mock_exist.return_value = True - - self.storage.retrieve(self.project_id, function, None, - version=version) - - version_zip = os.path.join(FAKE_STORAGE_PATH, self.project_id, - "%s_%s_%s.zip" % (function, version, md5)) - - mock_exist.assert_called_once_with(version_zip) - - @mock.patch('os.listdir') - def test_retrieve_version_not_found(self, mock_list): - function = "fake_function_id" - version = 1 - mock_list.return_value = [""] - - self.assertRaises( - exc.StorageNotFoundException, - self.storage.retrieve, - function, - self.project_id, - None, - version=version - ) - - @mock.patch('os.path.exists') - @mock.patch('os.remove') - def test_delete(self, remove_mock, exists_mock): - exists_mock.return_value = True - function = self.rand_name('function', prefix='TestFileSystemStorage') - - self.storage.delete(self.project_id, function, "fake_md5") - - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - 
"fake_md5") - ) - exists_mock.assert_called_once_with(package_path) - remove_mock.assert_called_once_with(package_path) - - @mock.patch('os.path.exists') - @mock.patch('os.remove') - @mock.patch('os.listdir') - def test_delete_with_version(self, mock_list, remove_mock, exists_mock): - exists_mock.return_value = True - function = self.rand_name('function', prefix='TestFileSystemStorage') - version = 1 - mock_list.return_value = ["%s_%s_md5.zip" % (function, version)] - - self.storage.delete(self.project_id, function, "fake_md5", version=1) - - package_path = os.path.join( - FAKE_STORAGE_PATH, - self.project_id, - file_system.PACKAGE_VERSION_TEMPLATE % (function, version, "md5") - ) - exists_mock.assert_called_once_with(package_path) - remove_mock.assert_called_once_with(package_path) - - @mock.patch('os.path.exists') - @mock.patch('os.remove') - def test_delete_package_not_exists(self, remove_mock, exists_mock): - exists_mock.return_value = False - function = self.rand_name('function', prefix='TestFileSystemStorage') - - self.storage.delete(self.project_id, function, "fake_md5") - - package_path = os.path.join( - FAKE_STORAGE_PATH, - file_system.PACKAGE_PATH_TEMPLATE % (self.project_id, function, - "fake_md5") - ) - exists_mock.assert_called_once_with(package_path) - remove_mock.assert_not_called() - - def test_changed_since_first_version(self): - ret = self.storage.changed_since(self.project_id, "fake_function", - "fake_md5", 0) - - self.assertTrue(ret) - - @mock.patch('os.path.exists') - def test_changed_since_exists(self, mock_exists): - mock_exists.return_value = True - - ret = self.storage.changed_since(self.project_id, "fake_function", - "fake_md5", 1) - - self.assertFalse(ret) - - expect_path = os.path.join(FAKE_STORAGE_PATH, self.project_id, - "fake_function_1_fake_md5.zip") - - mock_exists.assert_called_once_with(expect_path) - - @mock.patch('os.path.exists') - def test_changed_since_not_exists(self, mock_exists): - mock_exists.return_value = False - - ret 
= self.storage.changed_since(self.project_id, "fake_function", - "fake_md5", 1) - - self.assertTrue(ret) - - expect_path = os.path.join(FAKE_STORAGE_PATH, self.project_id, - "fake_function_1_fake_md5.zip") - - mock_exists.assert_called_once_with(expect_path) - - @mock.patch("shutil.copyfile") - def test_copy(self, mock_copy): - self.storage.copy(self.project_id, "fake_function", "fake_md5", 0) - - expect_src = os.path.join(FAKE_STORAGE_PATH, self.project_id, - "fake_function_fake_md5.zip") - expect_dest = os.path.join(FAKE_STORAGE_PATH, self.project_id, - "fake_function_1_fake_md5.zip") - - mock_copy.assert_called_once_with(expect_src, expect_dest) diff --git a/qinling/tests/unit/utils/__init__.py b/qinling/tests/unit/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/utils/openstack/__init__.py b/qinling/tests/unit/utils/openstack/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/tests/unit/utils/openstack/test_swift.py b/qinling/tests/unit/utils/openstack/test_swift.py deleted file mode 100644 index ed3a9692..00000000 --- a/qinling/tests/unit/utils/openstack/test_swift.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -from swiftclient.exceptions import ClientException - -from qinling import exceptions as exc -from qinling.tests.unit import base -from qinling.utils import constants -from qinling.utils.openstack import swift - - -class TestSwift(base.BaseTest): - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def test_check_object(self, mock_sclient): - length = constants.MAX_PACKAGE_SIZE - 1 - mock_sclient.return_value.head_object.return_value = { - "content-length": length - } - - ret = swift.check_object("fake_container", "fake_object") - - self.assertTrue(ret) - - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def test_check_object_client_exception(self, mock_sclient): - mock_sclient.return_value.head_object.side_effect = ClientException - - ret = swift.check_object("fake_container", "fake_object") - - self.assertFalse(ret) - - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def test_check_object_other_exception(self, mock_sclient): - mock_sclient.return_value.head_object.side_effect = Exception - - ret = swift.check_object("fake_container", "fake_object") - - self.assertFalse(ret) - - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def test_check_object_invalid_length(self, mock_sclient): - length = constants.MAX_PACKAGE_SIZE + 1 - mock_sclient.return_value.head_object.return_value = { - "content-length": length - } - - ret = swift.check_object("fake_container", "fake_object") - - self.assertFalse(ret) - - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def test_download_object(self, mock_sclient): - mock_get = mock.MagicMock() - mock_get.return_value = (mock.ANY, mock.ANY) - mock_sclient.return_value.get_object = mock_get - swift.download_object("fake_container", "fake_object") - - mock_get.assert_called_once_with( - "fake_container", "fake_object", - resp_chunk_size=65536 - ) - - @mock.patch("qinling.utils.openstack.keystone.get_swiftclient") - def 
test_download_object_exception(self, mock_sclient): - mock_sclient.return_value.get_object.side_effect = Exception - - self.assertRaises( - exc.SwiftException, - swift.download_object, - "fake_container", - "fake_object" - ) diff --git a/qinling/utils/__init__.py b/qinling/utils/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/utils/common.py b/qinling/utils/common.py deleted file mode 100644 index f850852a..00000000 --- a/qinling/utils/common.py +++ /dev/null @@ -1,160 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import functools -import hashlib -import pdb -import sys -import warnings - -from oslo_utils import uuidutils - -from qinling import exceptions as exc -from qinling import version - - -def print_server_info(service): - QINLING_TITLE = r""" - /^L_ ,."\ - /~\ __ /~ \ ./ \ - / _\ _/ \ /T~\|~\_\ / \_ /~| _^ - / \ /W \ / V^\/X /~ T . \/ \ ,v-./ - ,'`-. /~ ^ H , . \/ ; . \ `. \-' / - M ~ | . ; / , _ : . ~\_,-' - / ~ . \ / : ' \ ,/` - I o. ^ oP '98b - _ 9.` `\9b. - 8oO888. oO888P d888b9bo. .8o 888o. 8bo. o 988o. - 88888888888888888888888888bo.98888888bo. 98888bo. 
.d888P - 88888888888888888888888888888888888888888888888888888888888 - _ __ _ - ___ _ (_) ___ / / (_) ___ ___ _ - / _ `/ / / / _ \ / / / / / _ \ / _ `/ - \_, / /_/ /_//_//_/ /_/ /_//_/ \_, / - /_/ /___/ - - Function as a Service in OpenStack, version: %s - """ % version.version_string() - - print(QINLING_TITLE) - print('Launching server components %s...' % service) - - -def get_properly_ordered_parameters(): - """Orders launch parameters in the right order. - - In oslo it's important the order of the launch parameters. - if --config-file came after the command line parameters the command - line parameters are ignored. - So to make user command line parameters are never ignored this method - moves --config-file to be always first. - """ - args = sys.argv[1:] - - for arg in sys.argv[1:]: - if arg == '--config-file' or arg.startswith('--config-file='): - if "=" in arg: - conf_file_value = arg.split("=", 1)[1] - else: - conf_file_value = args[args.index(arg) + 1] - args.remove(conf_file_value) - args.remove(arg) - args.insert(0, "--config-file") - args.insert(1, conf_file_value) - - return args - - -def convert_dict_to_string(d): - temp_list = ['%s=%s' % (k, v) for k, v in d.items()] - - return ','.join(temp_list) - - -def datetime_to_str(dct, attr_name): - """Convert datetime object in dict to string.""" - if (dct.get(attr_name) is not None and - not isinstance(dct.get(attr_name), str)): - dct[attr_name] = dct[attr_name].strftime('%Y-%m-%dT%H:%M:%SZ') - - -def generate_unicode_uuid(dashed=True): - return uuidutils.generate_uuid(dashed=dashed) - - -def validate_int_in_range(name, value, min_allowed, max_allowed): - unit_mapping = { - "cpu": "millicpu", - "memory": "bytes", - "timeout": "seconds" - } - - try: - value_int = int(value) - except ValueError: - raise exc.InputException( - 'Invalid %s resource specified. An integer is required.' 
% name - ) - - if (value_int < min_allowed or value_int > max_allowed): - raise exc.InputException( - '%s resource limitation not within the allowable range: ' - '%s ~ %s(%s).' % - (name, min_allowed, max_allowed, unit_mapping[name]) - ) - - -def disable_ssl_warnings(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", - message="A true SSLContext object is not available" - ) - warnings.filterwarnings( - "ignore", - message="Unverified HTTPS request is being made" - ) - return func(*args, **kwargs) - - return wrapper - - -class ForkedPdb(pdb.Pdb): - """A Pdb subclass that may be used from a forked multiprocessing child. - - Usage: - from qinling.utils import common - common.ForkedPdb().set_trace() - """ - - def interaction(self, *args, **kwargs): - _stdin = sys.stdin - try: - sys.stdin = open('/dev/stdin', 'r') - pdb.Pdb.interaction(self, *args, **kwargs) - finally: - sys.stdin = _stdin - - -def md5(file=None, content=None): - hash_md5 = hashlib.md5() - - if file: - with open(file, "rb") as f: - for chunk in iter(lambda: f.read(4096), b""): - hash_md5.update(chunk) - elif content: - hash_md5.update(content) - - return hash_md5.hexdigest() diff --git a/qinling/utils/constants.py b/qinling/utils/constants.py deleted file mode 100644 index e5655bfb..00000000 --- a/qinling/utils/constants.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -CURRENT_VERSION = 'v1' - -EXECUTION_BY_JOB = 'Created by Job %s' -EXECUTION_BY_WEBHOOK = 'Created by Webhook %s' - -PERIODIC_JOB_HANDLER = 'job_handler' -PERIODIC_FUNC_MAPPING_HANDLER = 'function_mapping_handler' - -PACKAGE_FUNCTION = 'package' -SWIFT_FUNCTION = 'swift' -IMAGE_FUNCTION = 'image' - -MAX_PACKAGE_SIZE = 51 * 1024 * 1024 - -MAX_VERSION_NUMBER = 10 diff --git a/qinling/utils/etcd_util.py b/qinling/utils/etcd_util.py deleted file mode 100644 index 833c4773..00000000 --- a/qinling/utils/etcd_util.py +++ /dev/null @@ -1,95 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import etcd3gw -from oslo_config import cfg -from oslo_utils import encodeutils - -CONF = cfg.CONF -CLIENT = None - - -def get_client(conf=None): - global CLIENT - conf = conf or CONF - - if not CLIENT: - if conf.etcd.protocol == "https": - CLIENT = etcd3gw.client(host=conf.etcd.host, - port=conf.etcd.port, - protocol=conf.etcd.protocol, - ca_cert=conf.etcd.ca_cert, - cert_cert=conf.etcd.cert_file, - cert_key=conf.etcd.cert_key) - else: - CLIENT = etcd3gw.client(host=conf.etcd.host, - port=conf.etcd.port, - protocol=conf.etcd.protocol) - - return CLIENT - - -def get_worker_lock(function_id, version=0): - client = get_client() - lock_id = "function_worker_%s_%s" % (function_id, version) - return client.lock(id=lock_id) - - -def get_function_version_lock(function_id): - client = get_client() - lock_id = "function_version_%s" % function_id - return client.lock(id=lock_id) - - -def create_worker(function_id, worker, version=0): - """Create the worker info in etcd. - - The worker parameter is assumed to be unique. - """ - # NOTE(huntxu): for the kubernetes orchestrator, which is the only - # available orchestrator at the moment, the value of the worker param - # is the name of the pod so it is unique. 
- client = get_client() - client.create( - '%s_%s/worker_%s' % (function_id, version, worker), - worker - ) - - -def delete_worker(function_id, worker, version=0): - client = get_client() - client.delete('%s_%s/worker_%s' % (function_id, version, worker)) - - -def get_workers(function_id, version=0): - client = get_client() - values = client.get_prefix("%s_%s/worker" % (function_id, version)) - workers = [encodeutils.safe_decode(w[0]) for w in values] - return workers - - -def delete_function(function_id, version=0): - client = get_client() - client.delete_prefix("%s_%s" % (function_id, version)) - - -def create_service_url(function_id, url, version=0): - client = get_client() - client.create('%s_%s/service_url' % (function_id, version), url) - - -def get_service_url(function_id, version=0): - client = get_client() - values = client.get('%s_%s/service_url' % (function_id, version)) - return None if not values else encodeutils.safe_decode(values[0]) diff --git a/qinling/utils/executions.py b/qinling/utils/executions.py deleted file mode 100644 index 67ea1bd7..00000000 --- a/qinling/utils/executions.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from oslo_log import log as logging -from oslo_serialization import jsonutils - -from qinling.db import api as db_api -from qinling.db.sqlalchemy import models -from qinling import exceptions as exc -from qinling import status -from qinling.utils import constants - -LOG = logging.getLogger(__name__) - - -def _update_function_db(function_id, pre_count): - # Function update is done using UPDATE ... FROM ... WHERE - # non-locking clause. - while True: - modified = db_api.conditional_update( - models.Function, - { - 'count': pre_count + 1, - }, - { - 'id': function_id, - 'count': pre_count - }, - insecure=True, - ) - if not modified: - LOG.warning("Retrying to update function count.") - pre_count += 1 - continue - else: - break - - -def _update_function_version_db(version_id, pre_count): - # Update is done using UPDATE ... FROM ... WHERE non-locking clause. - while True: - modified = db_api.conditional_update( - models.FunctionVersion, - { - 'count': pre_count + 1, - }, - { - 'id': version_id, - 'count': pre_count - }, - insecure=True, - ) - if not modified: - LOG.warning("Retrying to update function version count.") - pre_count += 1 - continue - else: - break - - -def create_execution(engine_client, params): - function_alias = params.get('function_alias') - function_id = params.get('function_id') - version = params.get('function_version', 0) - is_sync = params.get('sync', True) - input = params.get('input') - - if function_alias: - alias_db = db_api.get_function_alias(function_alias) - function_id = alias_db.function_id - version = alias_db.function_version - params.update({'function_id': function_id, - 'function_version': version}) - - func_db = db_api.get_function(function_id) - runtime_id = func_db.runtime_id - - # Image type function does not need runtime - if runtime_id: - runtime_db = db_api.get_runtime(runtime_id) - if runtime_db and runtime_db.status != status.AVAILABLE: - raise exc.RuntimeNotAvailableException( - 'Runtime %s is not available.' 
% func_db.runtime_id - ) - - if version > 0: - if func_db.code['source'] != constants.PACKAGE_FUNCTION: - raise exc.InputException( - "Can not specify version for %s type function." % - constants.PACKAGE_FUNCTION - ) - - # update version count - version_db = db_api.get_function_version(function_id, version) - pre_version_count = version_db.count - _update_function_version_db(version_db.id, pre_version_count) - else: - pre_count = func_db.count - _update_function_db(function_id, pre_count) - - # input in params should be a string. - if input: - try: - function_input = jsonutils.loads(input) - # If input is e.g. '6', result of jsonutils.loads is 6 which can - # not be stored in db. - if type(function_input) == int: - raise ValueError - params['input'] = function_input - except ValueError: - params['input'] = {'__function_input': input} - - params.update({'status': status.RUNNING}) - db_model = db_api.create_execution(params) - - try: - engine_client.create_execution( - db_model.id, function_id, version, runtime_id, - input=params.get('input'), is_sync=is_sync - ) - except exc.QinlingException: - # Catch RPC errors for executions: - # - for RemoteError in an RPC call, the execution status would be - # handled in the engine side; - # - for other exceptions in an RPC call or cast, the execution status - # would remain RUNNING so we should update it. - db_model = db_api.get_execution(db_model.id) - if db_model.status == status.RUNNING: - db_model = db_api.update_execution(db_model.id, - {'status': status.ERROR}) - return db_model - - if is_sync: - # The execution should already be updated by engine service for sync - # execution. 
- db_model = db_api.get_execution(db_model.id) - - return db_model diff --git a/qinling/utils/jobs.py b/qinling/utils/jobs.py deleted file mode 100644 index 02332044..00000000 --- a/qinling/utils/jobs.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import datetime - -import croniter -from dateutil import parser -from oslo_utils import timeutils - -from qinling import exceptions as exc - - -def validate_next_time(next_execution_time): - next_time = next_execution_time - if isinstance(next_execution_time, str): - try: - # We need naive datetime object. - next_time = parser.parse(next_execution_time, ignoretz=True) - except ValueError as e: - raise exc.InputException(str(e)) - - valid_min_time = timeutils.utcnow() + datetime.timedelta(0, 60) - if valid_min_time > next_time: - raise exc.InputException( - 'Execution time must be at least 1 minute in the future.' - ) - - return next_time - - -def validate_pattern(pattern): - try: - croniter.croniter(pattern) - except (ValueError, KeyError): - raise exc.InputException( - 'The specified pattern is not valid: {}'.format(pattern) - ) - - -def validate_job(params): - first_time = params.get('first_execution_time') - pattern = params.get('pattern') - count = params.get('count') - start_time = timeutils.utcnow() - - if not (first_time or pattern): - raise exc.InputException( - 'pattern or first_execution_time must be specified.' 
- ) - - if first_time: - first_time = validate_next_time(first_time) - if not pattern and count and count > 1: - raise exc.InputException( - 'pattern must be provided if count is greater than 1.' - ) - - next_time = first_time - if not (pattern or count): - count = 1 - if pattern: - validate_pattern(pattern) - - if first_time: - start_time = first_time - datetime.timedelta(minutes=1) - - next_time = croniter.croniter(pattern, start_time).get_next( - datetime.datetime - ) - first_time = next_time - - return first_time, next_time, count - - -def get_next_execution_time(pattern, start_time): - return croniter.croniter(pattern, start_time).get_next( - datetime.datetime - ) diff --git a/qinling/utils/openstack/__init__.py b/qinling/utils/openstack/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling/utils/openstack/keystone.py b/qinling/utils/openstack/keystone.py deleted file mode 100644 index 8c9f3d1b..00000000 --- a/qinling/utils/openstack/keystone.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from keystoneauth1.identity import v3 -from keystoneauth1 import session -from keystoneclient.v3 import client as ks_client -from oslo_config import cfg -from oslo_log import log as logging -import swiftclient - -from qinling import context -from qinling.utils import common - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -def _get_user_keystone_session(): - ctx = context.get_ctx() - - auth = v3.Token( - auth_url=CONF.keystone_authtoken.www_authenticate_uri, - token=ctx.auth_token, - project_domain_name=ctx.project_domain_name, - project_name=ctx.project_name - ) - - return session.Session(auth=auth, verify=False) - - -@common.disable_ssl_warnings -def get_swiftclient(): - session = _get_user_keystone_session() - - conn = swiftclient.Connection(session=session) - - return conn - - -@common.disable_ssl_warnings -def get_user_client(): - ctx = context.get_ctx() - auth_url = CONF.keystone_authtoken.www_authenticate_uri - client = ks_client.Client( - user_id=ctx.user, - token=ctx.auth_token, - tenant_id=ctx.projectid, - auth_url=auth_url - ) - client.management_url = auth_url - - return client - - -@common.disable_ssl_warnings -def get_service_client(): - client = ks_client.Client( - username=CONF.keystone_authtoken.username, - password=CONF.keystone_authtoken.password, - project_name=CONF.keystone_authtoken.project_name, - auth_url=CONF.keystone_authtoken.www_authenticate_uri, - user_domain_name=CONF.keystone_authtoken.user_domain_name, - project_domain_name=CONF.keystone_authtoken.project_domain_name - ) - return client - - -@common.disable_ssl_warnings -def get_trust_client(trust_id): - """Get project keystone client using admin credential.""" - client = ks_client.Client( - username=CONF.keystone_authtoken.username, - password=CONF.keystone_authtoken.password, - auth_url=CONF.keystone_authtoken.www_authenticate_uri, - trust_id=trust_id - ) - - return client - - -@common.disable_ssl_warnings -def create_trust(): - ctx = context.get_ctx() - user_client = 
get_user_client() - trustee_id = get_service_client().user_id - - return user_client.trusts.create( - trustor_user=ctx.user, - trustee_user=trustee_id, - impersonation=True, - role_names=ctx.roles, - project=ctx.tenant - ) - - -@common.disable_ssl_warnings -def delete_trust(trust_id): - """Delete trust from keystone. - - The trust can only be deleted by original user(trustor) - """ - if not trust_id: - return - - try: - client = get_user_client() - client.trusts.delete(trust_id) - LOG.debug('Trust %s deleted.', trust_id) - except Exception: - LOG.exception("Failed to delete trust [id=%s]", trust_id) - - -def create_trust_context(trust_id, project_id): - """Creates Qinling context on behalf of the project.""" - if CONF.pecan.auth_enable: - client = get_trust_client(trust_id) - - return context.Context( - user=client.user_id, - tenant=project_id, - auth_token=client.auth_token, - is_trust_scoped=True, - trust_id=trust_id, - ) - - return context.Context( - user=None, - tenant=context.DEFAULT_PROJECT_ID, - auth_token=None, - is_admin=True - ) - - -def get_qinling_endpoint(): - '''Get Qinling service endpoint.''' - if CONF.qinling_endpoint: - return CONF.qinling_endpoint - - region = CONF.keystone_authtoken.region_name - auth = v3.Password( - auth_url=CONF.keystone_authtoken.www_authenticate_uri, - username=CONF.keystone_authtoken.username, - password=CONF.keystone_authtoken.password, - project_name=CONF.keystone_authtoken.project_name, - user_domain_name=CONF.keystone_authtoken.user_domain_name, - project_domain_name=CONF.keystone_authtoken.project_domain_name, - ) - sess = session.Session(auth=auth, verify=False) - endpoint = sess.get_endpoint(service_type='function-engine', - interface='public', - region_name=region) - - return endpoint diff --git a/qinling/utils/openstack/swift.py b/qinling/utils/openstack/swift.py deleted file mode 100644 index 95e8ad10..00000000 --- a/qinling/utils/openstack/swift.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2017 Catalyst IT 
Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_log import log as logging -from swiftclient.exceptions import ClientException - -from qinling import exceptions as exc -from qinling.utils import common -from qinling.utils import constants -from qinling.utils.openstack import keystone - -LOG = logging.getLogger(__name__) - - -@common.disable_ssl_warnings -def check_object(container, object): - """Check object in Swift. - - 1. If the object exists. - 2. Object size. - - :param container: Container name. - :param object: Object name. - :return: True if object exists, otherwise return False. - """ - swift_conn = keystone.get_swiftclient() - - try: - header = swift_conn.head_object(container, object) - except ClientException: - LOG.error( - 'The object %s in container %s was not found', object, container - ) - return False - except Exception: - LOG.exception("Error when communicating with Swift.") - return False - - if int(header['content-length']) > constants.MAX_PACKAGE_SIZE: - LOG.error('Object size is greater than %s', constants.MAX_PACKAGE_SIZE) - return False - - return True - - -@common.disable_ssl_warnings -def download_object(container, object): - swift_conn = keystone.get_swiftclient() - - try: - # Specify 'resp_chunk_size' here to return a file reader. 
- _, obj_reader = swift_conn.get_object( - container, object, resp_chunk_size=65536 - ) - except Exception: - LOG.exception("Error when downloading object from Swift.") - raise exc.SwiftException() - - return obj_reader diff --git a/qinling/utils/rest_utils.py b/qinling/utils/rest_utils.py deleted file mode 100644 index 18eb212e..00000000 --- a/qinling/utils/rest_utils.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -import json - -from oslo_log import log as logging -import pecan -import webob -from wsme import exc as wsme_exc - -from qinling import context -from qinling import exceptions as exc - -LOG = logging.getLogger(__name__) - -FILTER_TYPES = ('in', 'nin', 'eq', 'neq', 'gt', 'gte', 'lt', 'lte', 'has') -LIST_VALUE_FILTER_TYPES = {'in', 'nin'} - - -def wrap_wsme_controller_exception(func): - """Decorator for controllers method. - - This decorator wraps controllers method to manage wsme exceptions: - In case of expected error it aborts the request with specific status code. 
- """ - - @functools.wraps(func) - def wrapped(*args, **kwargs): - try: - return func(*args, **kwargs) - except exc.QinlingException as e: - pecan.response.translatable_error = e - - LOG.error('Error during API call: %s', str(e)) - raise wsme_exc.ClientSideError( - msg=str(e), - status_code=e.http_code - ) - - return wrapped - - -def wrap_pecan_controller_exception(func): - """Decorator for controllers method. - - This decorator wraps controllers method to manage pecan exceptions: - In case of expected error it aborts the request with specific status code. - """ - - @functools.wraps(func) - def wrapped(*args, **kwargs): - try: - return func(*args, **kwargs) - except exc.QinlingException as e: - LOG.error('Error during API call: %s', str(e)) - return webob.Response( - status=e.http_code, - content_type='application/json', - body=json.dumps(dict(faultstring=str(e))), - charset='UTF-8' - ) - - return wrapped - - -def get_filters(**params): - """Create filters from REST request parameters. - - :param req_params: REST request parameters. - :return: filters dictionary. - """ - filters = {} - - for column, data in params.items(): - if data is not None: - if isinstance(data, str): - f_type, value = _extract_filter_type_and_value(data) - create_or_update_filter(column, value, f_type, filters) - else: - create_or_update_filter(column, data, _filter=filters) - - return filters - - -def create_or_update_filter(column, value, filter_type='eq', _filter=None): - """Create or Update filter. - - :param column: Column name by which user want to filter. - :param value: Column value. - :param filter_type: filter type. Filter type can be - 'eq', 'neq', 'gt', 'gte', 'lte', 'in', - 'lt', 'nin'. Default is 'eq'. - :param _filter: Optional. If provided same filter dictionary will - be updated. - :return: filter dictionary. 
- - """ - if _filter is None: - _filter = {} - - _filter[column] = {filter_type: value} - - return _filter - - -def _extract_filter_type_and_value(data): - """Extract filter type and its value from the data. - - :param data: REST parameter value from which filter type and - value can be get. It should be in format of - 'filter_type:value'. - :return: filter type and value. - """ - for filter_type in FILTER_TYPES: - prefix = filter_type + ':' - prefix_len = len(prefix) - if data.startswith(prefix): - value = str(data[prefix_len:]) - if filter_type in LIST_VALUE_FILTER_TYPES: - value = list(value.split(',')) - return filter_type, value - - # Not matching any filter types, defaults to 'eq'. - return 'eq', data - - -def get_project_params(project_id, all_projects): - ctx = context.get_ctx() - - if project_id and not ctx.is_admin: - project_id = context.ctx().projectid - if project_id and ctx.is_admin: - all_projects = True - - return project_id, all_projects diff --git a/qinling/utils/thread_local.py b/qinling/utils/thread_local.py deleted file mode 100644 index 6eda3c86..00000000 --- a/qinling/utils/thread_local.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import threading - -from eventlet import corolocal - -_th_loc_storage = threading.local() - - -def _get_greenlet_local_storage(): - greenlet_id = corolocal.get_ident() - - greenlet_locals = getattr(_th_loc_storage, "greenlet_locals", None) - - if not greenlet_locals: - greenlet_locals = {} - _th_loc_storage.greenlet_locals = greenlet_locals - - if greenlet_id in greenlet_locals: - return greenlet_locals[greenlet_id] - else: - return None - - -def has_thread_local(var_name): - gl_storage = _get_greenlet_local_storage() - return gl_storage and var_name in gl_storage - - -def get_thread_local(var_name): - if not has_thread_local(var_name): - return None - - return _get_greenlet_local_storage()[var_name] - - -def set_thread_local(var_name, val): - if val is None and has_thread_local(var_name): - gl_storage = _get_greenlet_local_storage() - - # Delete variable from greenlet local storage. - if gl_storage: - del gl_storage[var_name] - - # Delete the entire greenlet local storage from thread local storage. - if gl_storage and len(gl_storage) == 0: - del _th_loc_storage.greenlet_locals[corolocal.get_ident()] - - if val is not None: - gl_storage = _get_greenlet_local_storage() - if not gl_storage: - gl_storage = _th_loc_storage.greenlet_locals[ - corolocal.get_ident()] = {} - - gl_storage[var_name] = val diff --git a/qinling/version.py b/qinling/version.py deleted file mode 100644 index b206e88e..00000000 --- a/qinling/version.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from pbr import version - -version_info = version.VersionInfo('qinling') -version_string = version_info.version_string diff --git a/qinling_tempest_plugin/README.rst b/qinling_tempest_plugin/README.rst deleted file mode 100644 index 49c63410..00000000 --- a/qinling_tempest_plugin/README.rst +++ /dev/null @@ -1,6 +0,0 @@ -=============================================== -Tempest Integration of Qinling -=============================================== - -This directory contains Tempest tests to cover the Qinling project. - diff --git a/qinling_tempest_plugin/__init__.py b/qinling_tempest_plugin/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling_tempest_plugin/config.py b/qinling_tempest_plugin/config.py deleted file mode 100644 index 50f33b15..00000000 --- a/qinling_tempest_plugin/config.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2015 -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -service_option = cfg.BoolOpt( - 'qinling', - default=True, - help="Whether or not Qinling is expected to be available" -) - - -qinling_group = cfg.OptGroup(name="qinling", title="Qinling Service Options") - -QinlingGroup = [ - cfg.StrOpt("region", - default="", - help="The region name to use. If empty, the value " - "of identity.region is used instead. 
If no such region " - "is found in the service catalog, the first found one is " - "used."), - cfg.StrOpt("catalog_type", - default="function-engine", - help="Catalog type of the Qinling service."), - cfg.StrOpt('endpoint_type', - default='publicURL', - choices=['public', 'admin', 'internal', - 'publicURL', 'adminURL', 'internalURL'], - help="The endpoint type to use for the qinling service."), - cfg.StrOpt("python_runtime_image", - default="openstackqinling/python3-runtime:0.0.2", - help="The Python runtime being used in the tests."), - cfg.StrOpt("nodejs_runtime_image", - default="openstackqinling/nodejs-runtime:0.0.1", - help="The NodeJS runtime being used in the tests."), - cfg.BoolOpt("allow_external_connection", - default=False, - help="If the tests which need external network connection " - "should be running."), -] diff --git a/qinling_tempest_plugin/functions/nodejs/test_nodejs_async.js b/qinling_tempest_plugin/functions/nodejs/test_nodejs_async.js deleted file mode 100644 index f3ced5ed..00000000 --- a/qinling_tempest_plugin/functions/nodejs/test_nodejs_async.js +++ /dev/null @@ -1,13 +0,0 @@ -var rp = require('request-promise'); - -exports.main = async function (context, input) { - var options = { - uri: 'https://httpbin.org/post', - method: 'POST', - body: { - name: input.name - }, - json: true, - }; - return rp(options) -} diff --git a/qinling_tempest_plugin/functions/nodejs/test_nodejs_basic.js b/qinling_tempest_plugin/functions/nodejs/test_nodejs_basic.js deleted file mode 100644 index be6234b4..00000000 --- a/qinling_tempest_plugin/functions/nodejs/test_nodejs_basic.js +++ /dev/null @@ -1,3 +0,0 @@ -exports.main = function (context, input) { - return "Hello, NodeJS" -} diff --git a/qinling_tempest_plugin/functions/python/test_python_basic.py b/qinling_tempest_plugin/functions/python/test_python_basic.py deleted file mode 100644 index cea1410b..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_basic.py +++ /dev/null @@ -1,17 +0,0 @@ 
-# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def main(name='World', **kwargs): - print('Hello, %s' % name) diff --git a/qinling_tempest_plugin/functions/python/test_python_cpu_limit.py b/qinling_tempest_plugin/functions/python/test_python_cpu_limit.py deleted file mode 100644 index 1537bccf..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_cpu_limit.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -# Codes are from https://www.craig-wood.com/nick/articles/pi-machin/ - -def arctan_euler(x, one): - x_squared = x * x - x_squared_plus_1 = x_squared + 1 - term = (x * one) // x_squared_plus_1 - total = term - two_n = 2 - while True: - divisor = (two_n + 1) * x_squared_plus_1 - term *= two_n - term = term // divisor - if term == 0: - break - total += term - two_n += 2 - return total - - -def pi_machin(one): - return 4 * (4 * arctan_euler(5, one) - arctan_euler(239, one)) - - -def main(digit=50000, *args, **kwargs): - return str(pi_machin(10**digit))[:15] diff --git a/qinling_tempest_plugin/functions/python/test_python_file_limit.py b/qinling_tempest_plugin/functions/python/test_python_file_limit.py deleted file mode 100644 index 1f5ae4d0..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_file_limit.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import resource - - -def main(number=1024, **kwargs): - for name, desc in [ - ('RLIMIT_NOFILE', 'number of open files'), - ]: - limit_num = getattr(resource, name) - soft, hard = resource.getrlimit(limit_num) - print('Maximum %-25s (%-15s) : %20s %20s' % (desc, name, soft, hard)) - - files = [] - - try: - for i in range(0, number): - files.append(_create_file(i)) - finally: - for f in files: - f.close() - - -def _create_file(index): - f = open('file_%s' % index, 'w') - return f diff --git a/qinling_tempest_plugin/functions/python/test_python_http_get.py b/qinling_tempest_plugin/functions/python/test_python_http_get.py deleted file mode 100644 index 95e8ac52..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_http_get.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import hashlib -import requests - - -def main(url='https://docs.openstack.org/qinling/latest/', timeout=10, - *args, **kwargs): - # This function simply returns a sha256 hash of a webpage. - # We use this to verify function pods have access the outside world. 
- response = requests.get(url, timeout=timeout) - return hashlib.sha256(response.text.encode('utf-8')).hexdigest() diff --git a/qinling_tempest_plugin/functions/python/test_python_memory_limit.py b/qinling_tempest_plugin/functions/python/test_python_memory_limit.py deleted file mode 100644 index 4d7f28d6..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_memory_limit.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2018 AWCloud Software Co., Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -S = 20 * 1024 * 1024 # 20M - - -def main(*args, **kwargs): - L = [] - for i in 'abcd': - L.append(i * S) - return len(L) diff --git a/qinling_tempest_plugin/functions/python/test_python_positional_args.py b/qinling_tempest_plugin/functions/python/test_python_positional_args.py deleted file mode 100644 index b4b75fc5..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_positional_args.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - - -def main(name, **kwargs): - return 'Hello, %s' % name diff --git a/qinling_tempest_plugin/functions/python/test_python_process_limit.py b/qinling_tempest_plugin/functions/python/test_python_process_limit.py deleted file mode 100644 index e04419b5..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_process_limit.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from multiprocessing import Process -import resource -import time - - -def main(number=10, **kwargs): - soft, hard = resource.getrlimit(resource.RLIMIT_NPROC) - print('(soft, hard): %20s %20s' % (soft, hard)) - - # We set a small number inside the function to avoid being affected by - # outside. 
- resource.setrlimit(resource.RLIMIT_NPROC, (number, hard)) - - processes = [] - for i in range(0, number+1): - p = Process( - target=_sleep, - args=(i,) - ) - p.start() - processes.append(p) - for p in processes: - p.join() - - -def _sleep(index): - time.sleep(10) diff --git a/qinling_tempest_plugin/functions/python/test_python_sleep.py b/qinling_tempest_plugin/functions/python/test_python_sleep.py deleted file mode 100644 index 27f4efc4..00000000 --- a/qinling_tempest_plugin/functions/python/test_python_sleep.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import time - - -def main(seconds=4, **kwargs): - time.sleep(seconds) diff --git a/qinling_tempest_plugin/plugin.py b/qinling_tempest_plugin/plugin.py deleted file mode 100644 index 7c5ae9f6..00000000 --- a/qinling_tempest_plugin/plugin.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2015 -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - - -import os - -from tempest import config -from tempest.test_discover import plugins - -from qinling_tempest_plugin import config as qinling_config - - -class QinlingTempestPlugin(plugins.TempestPlugin): - def load_tests(self): - base_path = os.path.split(os.path.dirname( - os.path.abspath(__file__)) - )[0] - test_dir = "qinling_tempest_plugin/tests" - full_test_dir = os.path.join(base_path, test_dir) - return full_test_dir, base_path - - def register_opts(self, conf): - conf.register_opt( - qinling_config.service_option, group='service_available' - ) - - conf.register_group(qinling_config.qinling_group) - conf.register_opts(qinling_config.QinlingGroup, group='qinling') - - def get_opt_lists(self): - return [ - ('service_available', [qinling_config.service_option]), - (qinling_config.qinling_group.name, qinling_config.QinlingGroup) - ] - - def get_service_clients(self): - qinling_config = config.service_client_config('qinling') - params = { - 'name': 'qinling', - 'service_version': 'qinling', - 'module_path': 'qinling_tempest_plugin.services.qinling_client', - 'client_names': ['QinlingClient'], - } - params.update(qinling_config) - return [params] diff --git a/qinling_tempest_plugin/post_test_hook.sh b/qinling_tempest_plugin/post_test_hook.sh deleted file mode 100755 index 37c2718f..00000000 --- a/qinling_tempest_plugin/post_test_hook.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -# This script is executed inside post_test_hook function in devstack gate. - -set -ex - -sudo chmod -R a+rw /opt/stack/ -(cd $BASE/new/tempest/; sudo virtualenv .venv) -source $BASE/new/tempest/.venv/bin/activate - -(cd $BASE/new/tempest/; sudo pip install -r requirements.txt -r test-requirements.txt) -sudo pip install nose -sudo pip install numpy - -sudo cp $BASE/new/tempest/etc/logging.conf.sample $BASE/new/tempest/etc/logging.conf - -(cd $BASE/new/qinling/; sudo pip install -r requirements.txt -r test-requirements.txt) -(cd $BASE/new/qinling/; sudo python setup.py install) - -export TOX_TESTENV_PASSENV=ZUUL_PROJECT -(cd $BASE/new/tempest/; sudo -E testr init) -(cd $BASE/new/tempest/; sudo -E tox -eall-plugin -- qinling --serial) diff --git a/qinling_tempest_plugin/pre_test_hook.sh b/qinling_tempest_plugin/pre_test_hook.sh deleted file mode 100755 index 382f0cbd..00000000 --- a/qinling_tempest_plugin/pre_test_hook.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This script is executed inside pre_test_hook function in devstack gate. 
- -echo "Pass" diff --git a/qinling_tempest_plugin/services/__init__.py b/qinling_tempest_plugin/services/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling_tempest_plugin/services/base.py b/qinling_tempest_plugin/services/base.py deleted file mode 100644 index 11de6335..00000000 --- a/qinling_tempest_plugin/services/base.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import urllib - -from tempest.lib.common import rest_client - -urlparse = urllib.parse - - -class QinlingClientBase(rest_client.RestClient): - def __init__(self, auth_provider, **kwargs): - super(QinlingClientBase, self).__init__(auth_provider, **kwargs) - - def get_list_objs(self, obj, params=None): - url = '/v1/%s' % obj - query_string = ("?%s" % urlparse.urlencode(list(params.items())) - if params else "") - url += query_string - - resp, body = self.get(url) - return resp, json.loads(body) - - def delete_obj(self, obj, id): - return self.delete('/v1/{obj}/{id}'.format(obj=obj, id=id)) - - def get_obj(self, obj, id): - resp, body = self.get('/v1/{obj}/{id}'.format(obj=obj, id=id)) - - return resp, json.loads(body) - - def post_json(self, obj, req_body, extra_headers={}): - headers = {"Content-Type": "application/json"} - headers = dict(headers, **extra_headers) - url_path = '/v1/%s' % obj - - resp, body = self.post(url_path, json.dumps(req_body), headers=headers) - - return resp, 
json.loads(body) diff --git a/qinling_tempest_plugin/services/qinling_client.py b/qinling_tempest_plugin/services/qinling_client.py deleted file mode 100644 index a0576aca..00000000 --- a/qinling_tempest_plugin/services/qinling_client.py +++ /dev/null @@ -1,307 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from datetime import datetime -from datetime import timedelta -import json - -from oslo_log import log as logging -import requests -from tempest.lib import exceptions - -from qinling_tempest_plugin.services import base as client_base - -LOG = logging.getLogger(__name__) - - -class QinlingClient(client_base.QinlingClientBase): - """Tempest REST client for Qinling.""" - - def delete_resource(self, res, id, ignore_notfound=False): - try: - resp, _ = self.delete_obj(res, id) - return resp - except exceptions.NotFound: - if ignore_notfound: - pass - else: - raise - - def get_resource(self, res, id): - resp, body = self.get_obj(res, id) - - return resp, body - - def get_resources(self, res, params=None): - resp, body = self.get_list_objs(res, params=params) - - return resp, body - - def create_runtime(self, image, name=None, is_public=True): - req_body = {"image": image, "is_public": is_public} - - if name: - req_body.update({'name': name}) - - resp, body = self.post_json('runtimes', req_body) - - return resp, body - - def create_function(self, code, runtime_id, name='', package_data=None, - entry=None, timeout=None): - 
"""Create function. - - Tempest rest client doesn't support multipart upload, so use requests - lib instead. As a result, we can not use self.assertRaises function for - negative tests. - """ - headers = {'X-Auth-Token': self.auth_provider.get_token()} - req_body = { - 'name': name, - 'runtime_id': runtime_id, - 'code': json.dumps(code), - 'timeout': timeout - } - if entry: - req_body['entry'] = entry - - req_kwargs = { - 'headers': headers, - 'data': req_body - } - if package_data: - req_kwargs.update({'files': {'package': package_data}}) - - url_path = '%s/v1/functions' % (self.base_url) - resp = requests.post(url_path, **req_kwargs) - - LOG.info('Request: %s POST %s', resp.status_code, url_path) - - return resp, json.loads(resp.text) - - def update_function(self, function_id, package_data=None, code=None, - entry=None, **kwargs): - headers = {'X-Auth-Token': self.auth_provider.get_token()} - - req_body = {} - if code: - req_body['code'] = json.dumps(code) - if entry: - req_body['entry'] = entry - req_body.update(kwargs) - - req_kwargs = { - 'headers': headers, - 'data': req_body - } - if package_data: - req_kwargs.update({'files': {'package': package_data}}) - - url_path = '%s/v1/functions/%s' % (self.base_url, function_id) - resp = requests.put(url_path, **req_kwargs) - - LOG.info('Request: %s PUT %s', resp.status_code, url_path) - - return resp, json.loads(resp.text) - - def get_function(self, function_id): - resp, body = self.get( - '/v1/functions/{id}'.format(id=function_id), - ) - - return resp, json.loads(body) - - def download_function(self, function_id): - return self.get('/v1/functions/%s?download=true' % function_id, - headers={}) - - def detach_function(self, function_id, version=0): - if version == 0: - url = '/v1/functions/%s/detach' % function_id - else: - url = '/v1/functions/%s/versions/%s/detach' % \ - (function_id, version) - - return self.post(url, None, headers={}) - - def create_execution(self, function_id=None, alias_name=None, input=None, 
- sync=True, version=0): - """Create execution. - - alias_name takes precedence over function_id. - """ - if alias_name: - req_body = { - 'function_alias': alias_name, - 'sync': sync, - 'input': input - } - elif function_id: - req_body = { - 'function_id': function_id, - 'function_version': version, - 'sync': sync, - 'input': input - } - else: - raise Exception("Either alias_name or function_id must be " - "provided.") - - resp, body = self.post_json('executions', req_body) - - return resp, body - - def get_execution_log(self, execution_id): - resp, body = self.get('/v1/executions/%s/log' % execution_id, - headers={'Accept': 'text/plain'}) - return resp, str(body) - - def get_function_workers(self, function_id, version=0): - q_params = None - if version > 0: - q_params = "/?function_version=%s" % version - - url = 'functions/%s/workers' % function_id - if q_params: - url += q_params - - return self.get_resources(url) - - def create_webhook(self, function_id=None, function_alias=None, - version=0): - """Create webhook. - - function_alias takes precedence over function_id. - """ - if function_alias: - req_body = {'function_alias': function_alias} - elif function_id: - req_body = { - 'function_id': function_id, - 'function_version': version - } - else: - raise Exception("Either function_alias or function_id must be " - "provided.") - resp, body = self.post_json('webhooks', req_body) - return resp, body - - def create_job(self, function_id=None, function_alias=None, version=0, - first_execution_time=None): - """Create job. - - function_alias takes precedence over function_id. 
- """ - if function_alias: - req_body = {'function_alias': function_alias} - elif function_id: - req_body = { - 'function_id': function_id, - 'function_version': version - } - else: - raise Exception("Either function_alias or function_id must be " - "provided.") - - if not first_execution_time: - first_execution_time = str( - datetime.utcnow() + timedelta(hours=1) - ) - req_body.update({'first_execution_time': first_execution_time}) - - resp, body = self.post_json('jobs', req_body) - return resp, body - - def create_function_version(self, function_id, description=None): - req_body = {} - if description is not None: - req_body['description'] = description - - resp, body = self.post_json( - 'functions/%s/versions' % function_id, - req_body - ) - - return resp, body - - def delete_function_version(self, function_id, version, - ignore_notfound=False): - try: - resp, _ = self.delete( - '/v1/functions/{id}/versions/{version}'.format( - id=function_id, version=version) - ) - return resp - except exceptions.NotFound: - if ignore_notfound: - pass - else: - raise - - def get_function_version(self, function_id, version): - resp, body = self.get( - '/v1/functions/%s/versions/%s' % (function_id, version), - ) - - return resp, json.loads(body) - - def get_function_versions(self, function_id): - resp, body = self.get( - '/v1/functions/%s/versions' % (function_id), - ) - - return resp, json.loads(body) - - def create_function_alias(self, name, function_id, - function_version=0, description=None): - req_body = { - 'function_id': function_id, - 'function_version': function_version, - 'name': name - } - if description is not None: - req_body['description'] = description - - resp, body = self.post_json('/aliases', req_body) - - return resp, body - - def delete_function_alias(self, alias_name, ignore_notfound=False): - try: - resp, _ = self.delete('/v1/aliases/%s' % alias_name) - return resp - except exceptions.NotFound: - if ignore_notfound: - pass - else: - raise - - def 
get_function_alias(self, alias_name): - resp, body = self.get('/v1/aliases/%s' % alias_name) - - return resp, json.loads(body) - - def update_function_alias(self, alias_name, function_id=None, - function_version=None, description=None): - req_body = {} - if function_id is not None: - req_body['function_id'] = function_id - if function_version is not None: - req_body['function_version'] = function_version - if description is not None: - req_body['description'] = description - - resp, body = self.put_json('/v1/aliases/%s' % alias_name, req_body) - - return resp, body diff --git a/qinling_tempest_plugin/tests/__init__.py b/qinling_tempest_plugin/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling_tempest_plugin/tests/api/__init__.py b/qinling_tempest_plugin/tests/api/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling_tempest_plugin/tests/api/test_executions.py b/qinling_tempest_plugin/tests/api/test_executions.py deleted file mode 100644 index 21edec6f..00000000 --- a/qinling_tempest_plugin/tests/api/test_executions.py +++ /dev/null @@ -1,572 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from concurrent import futures -import hashlib -import json -import requests - -import futurist -from oslo_serialization import jsonutils -from tempest import config -from tempest.lib import decorators -from tempest.lib import exceptions -import testtools - -from qinling_tempest_plugin.tests import base - -CONF = config.CONF -INVOKE_ERROR = "Function execution failed because of too much resource " \ - "consumption" - - -class ExecutionsTest(base.BaseQinlingTest): - name_prefix = 'ExecutionsTest' - - def setUp(self): - super(ExecutionsTest, self).setUp() - self.wait_runtime_available(self.runtime_id) - - @decorators.idempotent_id('2a93fab0-2dae-4748-b0d4-f06b735ff451') - def test_crud_execution(self): - function_id = self.create_function() - resp, body = self.client.create_execution(function_id, - input='{"name": "Qinling"}') - self.assertEqual(201, resp.status) - execution_id_1 = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id_1, ignore_notfound=True) - self.assertEqual('success', body['status']) - - # Create another execution without input - resp, body = self.client.create_execution(function_id) - self.assertEqual(201, resp.status) - execution_id_2 = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id_2, ignore_notfound=True) - self.assertEqual('success', body['status']) - - # Get executions - resp, body = self.client.get_resources('executions') - self.assertEqual(200, resp.status) - expected = {execution_id_1, execution_id_2} - actual = set([execution['id'] for execution in body['executions']]) - self.assertTrue(expected.issubset(actual)) - - # Delete executions - resp = self.client.delete_resource('executions', execution_id_1) - self.assertEqual(204, resp.status) - resp = self.client.delete_resource('executions', execution_id_2) - self.assertEqual(204, resp.status) - - # @decorators.idempotent_id('6a388918-86eb-4e10-88e2-0032a7df38e9') - # def 
test_create_execution_worker_lock_failed(self): - # """test_create_execution_worker_lock_failed - # - # When creating an execution, the qinling-engine will check the load - # and try to scaleup the function if needed. A lock is required when - # doing this check. - # - # In this test we acquire the lock manually, so that qinling will fail - # to acquire the lock. - # """ - # function_id = self.create_function() - # - # from qinling_tempest_plugin.tests import utils - # etcd3_client = utils.get_etcd_client() - # lock_id = "function_worker_%s_%s" % (function_id, 0) - # with etcd3_client.lock(id=lock_id): - # resp, body = self.client.create_execution( - # function_id, input='{"name": "Qinling"}' - # ) - # - # self.assertEqual(201, resp.status) - # self.assertEqual('error', body['status']) - # result = jsonutils.loads(body['result']) - # self.assertEqual('Function execution failed.', result['output']) - - @decorators.idempotent_id('2199d1e6-de7d-4345-8745-a8184d6022b1') - def test_get_all_admin(self): - """Admin user can get executions of other projects""" - function_id = self.create_function() - resp, body = self.client.create_execution( - function_id, input='{"name": "Qinling"}' - ) - self.assertEqual(201, resp.status) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - resp, body = self.admin_client.get_resources( - 'executions?all_projects=true' - ) - self.assertEqual(200, resp.status) - self.assertIn( - execution_id, - [execution['id'] for execution in body['executions']] - ) - - @decorators.idempotent_id('009fba47-957e-4de5-82e8-a032386d3ac0') - def test_get_all_not_allowed(self): - # Get other projects functions by normal user - context = self.assertRaises( - exceptions.Forbidden, - self.client.get_resources, - 'executions?all_projects=true' - ) - self.assertIn( - 'Operation not allowed', - context.resp_body.get('faultstring') - ) - - 
@decorators.idempotent_id('794cdfb2-0a27-4e56-86e8-be18eee9400f') - def test_create_with_function_version(self): - function_id = self.create_function() - execution_id = self.create_execution(function_id) - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) - - version_1 = self.create_function_version(function_id) - execution_id = self.create_execution(function_id, version=version_1) - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) - - self.update_function_package(function_id, - "python/test_python_sleep.py") - version_2 = self.create_function_version(function_id) - execution_id = self.create_execution(function_id, version=version_2) - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertNotIn('Hello, World', body) - - @decorators.idempotent_id('dbf4bd84-bde3-4d1d-8dec-93aaf18b4b5f') - def test_create_with_function_alias(self): - function_id = self.create_function() - - alias_name = self.create_function_alias(function_id) - execution_id = self.create_execution(alias_name=alias_name) - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) - - version_1 = self.create_function_version(function_id) - alias_name_1 = self.create_function_alias(function_id, version_1) - execution_id = self.create_execution(alias_name=alias_name_1) - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) - - self.update_function_package(function_id, - "python/test_python_sleep.py") - version_2 = self.create_function_version(function_id) - alias_name_2 = self.create_function_alias(function_id, version_2) - execution_id = self.create_execution(alias_name=alias_name_2) - resp, body = 
self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertNotIn('Hello, World', body) - - @decorators.idempotent_id('8096cc52-64d2-4660-a657-9ac0bdd743ae') - def test_execution_async(self): - function_id = self.create_function() - resp, body = self.client.create_execution(function_id, sync=False) - self.assertEqual(201, resp.status) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - self.assertEqual('running', body['status']) - self.wait_execution_success(execution_id) - - @decorators.idempotent_id('6cb47b1d-a8c6-48f2-a92f-c4f613c33d1c') - def test_execution_log(self): - function_id = self.create_function() - resp, body = self.client.create_execution( - function_id, input='{"name": "OpenStack"}' - ) - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - - execution_id = body['id'] - - # Get execution log - resp, body = self.client.get_execution_log(execution_id) - - self.assertEqual(200, resp.status) - self.assertIn('Hello, OpenStack', body) - - @decorators.idempotent_id('f22097dc-37db-484d-83d3-3a97e72ec576') - def test_execution_concurrency_no_scale(self): - package = self.create_package(name='python/test_python_sleep.py') - function_id = self.create_function(package_path=package) - - def _create_execution(): - resp, body = self.client.create_execution(function_id) - return resp, body - - futs = [] - with futurist.ThreadPoolExecutor(max_workers=10) as executor: - for _ in range(3): - fut = executor.submit(_create_execution) - futs.append(fut) - for f in futures.as_completed(futs): - # Wait until we get the response - resp, body = f.result() - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - 
- resp, body = self.admin_client.get_function_workers(function_id) - - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['workers'])) - - @decorators.idempotent_id('a5ed173a-19b7-4c92-ac78-c8862ad1d1d2') - def test_execution_concurrency_scale_up(self): - package = self.create_package(name='python/test_python_sleep.py') - function_id = self.create_function(package_path=package) - - def _create_execution(): - resp, body = self.client.create_execution(function_id) - return resp, body - - futs = [] - with futurist.ThreadPoolExecutor(max_workers=10) as executor: - for _ in range(6): - fut = executor.submit(_create_execution) - futs.append(fut) - for f in futures.as_completed(futs): - # Wait until we get the response - resp, body = f.result() - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - - resp, body = self.admin_client.get_function_workers(function_id) - self.assertEqual(200, resp.status) - self.assertEqual(2, len(body['workers'])) - - @decorators.idempotent_id('d0598868-e45d-11e7-9125-00224d6b7bc1') - def test_image_function_execution(self): - function_id = self.create_function( - image="openstackqinling/alpine-test") - resp, body = self.client.create_execution(function_id, - input='Qinling') - - self.assertEqual(201, resp.status) - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - self.assertEqual('success', body['status']) - self.assertIn('duration', jsonutils.loads(body['result'])) - - resp, body = self.client.get_execution_log(execution_id) - self.assertEqual(200, resp.status) - self.assertIn('Qinling', body) - - @decorators.idempotent_id('ab962144-d5b1-11e8-978f-026f8338c1e5') - def test_image_function_execution_timeout(self): - function_id = self.create_function(image="lingxiankong/sleep") - resp, body = 
self.client.create_execution(function_id, - input='6') - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('failed', body['status']) - - result = jsonutils.loads(body['result']) - - self.assertGreaterEqual(result['duration'], 5) - self.assertIn( - 'Function execution timeout', result['output'] - ) - - # Update function timeout - resp, _ = self.client.update_function( - function_id, - timeout=15 - ) - self.assertEqual(200, resp.status_code) - - resp, body = self.client.create_execution(function_id, - input='6') - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - - result = jsonutils.loads(body['result']) - self.assertGreaterEqual(result['duration'], 6) - - @decorators.idempotent_id('ccfe67ce-e467-11e7-916c-00224d6b7bc1') - def test_python_execution_positional_args(self): - package = self.create_package( - name='python/test_python_positional_args.py' - ) - function_id = self.create_function(package_path=package) - - resp, body = self.client.create_execution(function_id, - input='Qinling') - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - - result = jsonutils.loads(body['result']) - self.assertIn('Qinling', result['output']) - - @decorators.idempotent_id('a948382a-84af-4f0e-ad08-4297345e302c') - def test_python_execution_file_limit(self): - package = self.create_package(name='python/test_python_file_limit.py') - function_id = self.create_function(package_path=package) - - resp, body = self.client.create_execution(function_id) - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('failed', body['status']) - 
- result = jsonutils.loads(body['result']) - self.assertIn( - 'Too many open files', result['output'] - ) - - @decorators.idempotent_id('bf6f8f35-fa88-469b-8878-7aa85a8ce5ab') - def test_python_execution_process_number(self): - package = self.create_package( - name='python/test_python_process_limit.py' - ) - function_id = self.create_function(package_path=package) - - resp, body = self.client.create_execution(function_id) - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('failed', body['status']) - - result = jsonutils.loads(body['result']) - self.assertIn( - 'too much resource consumption', result['output'] - ) - - @decorators.idempotent_id('2b5f0787-b82d-4fc4-af76-cf86d389a76b') - def test_python_execution_memory_limit(self): - """In this case, the following steps are taken: - - 1. Create a function that requires ~80M memory to run. - 2. Create an execution using the function. - 3. Verify that the execution is killed by the OOM-killer - because the function memory limit is only 32M(default). - 4. Increase the function memory limit to 96M. - 5. Create another execution. - 6. Check the execution finished normally. - """ - - # Create function - package = self.create_package( - name='python/test_python_memory_limit.py' - ) - function_id = self.create_function(package_path=package) - - # Invoke function - resp, body = self.client.create_execution(function_id) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - # Check the process is killed - self.assertEqual(201, resp.status) - result = json.loads(body['result']) - output = result.get('output') - self.assertEqual(INVOKE_ERROR, output) - - # Increase the memory limit to 100663296(96M). 
- resp, body = self.client.update_function( - function_id, memory_size=100663296) - self.assertEqual(200, resp.status_code) - - # Invoke the function again - resp, body = self.client.create_execution(function_id) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - # Check the process exited normally - self.assertEqual(201, resp.status) - result = json.loads(body['result']) - output = result.get('output') - # The function returns the length of a list containing 4 long strings. - self.assertEqual(4, output) - - @decorators.idempotent_id('ed714f98-29fe-4e8d-b6ee-9730f92bddea') - def test_python_execution_cpu_limit(self): - """In this case, the following steps are taken: - - 1. Create a function that takes some time to finish (calculating the - first 50000 digits of PI) - 2. Create an execution using the function. - 3. Store the duration of the first execution. - 4. Increase the function cpu limit from 100(default) to 200 millicpu. - 5. Create another execution. - 6. Check whether the duration of the first execution is approximately - the double of the duration of the second one as its cpu resource is - half of the second run. - """ - - # Create function - package = self.create_package( - name='python/test_python_cpu_limit.py' - ) - function_id = self.create_function(package_path=package, timeout=180) - - # Invoke function - resp, body = self.client.create_execution(function_id) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - # Record the duration, check whether the result is correct. - self.assertEqual(201, resp.status) - result = json.loads(body['result']) - output = result.get('output') - # Only the first 15 digits are returned. 
- self.assertEqual('314159265358979', output) - first_duration = result.get('duration', 0) - - # Increase the cpu limit - resp, body = self.client.update_function(function_id, cpu=200) - self.assertEqual(200, resp.status_code) - - # Invoke the function again - resp, body = self.client.create_execution(function_id) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - # Record the second duration, check whether the result is correct. - self.assertEqual(201, resp.status) - result = json.loads(body['result']) - output = result.get('output') - # Only the first 15 digits are returned. - self.assertEqual('314159265358979', output) - second_duration = result.get('duration', 0) - - # Check whether the duration of the first execution is approximately - # the double (1.8x ~ 2.2x) of the duration of the second one. - # NOTE(huntxu): on my testbed, the result is quite near 2x. However - # it may vary in different environments, so we give a wider range - # here. - self.assertNotEqual(0, first_duration) - self.assertNotEqual(0, second_duration) - upper = second_duration * 2.5 - lower = second_duration * 1.8 - self.assertGreaterEqual(upper, first_duration) - self.assertLessEqual(lower, first_duration) - - @decorators.idempotent_id('07edf2ff-7544-4f30-b006-fd5302a2a9cc') - @testtools.skipUnless(CONF.qinling.allow_external_connection, - "External network connection is not allowed") - def test_python_execution_public_connection(self): - """Test connections from k8s pod to the outside. - - Create a function that reads a webpage on the Internet, to - verify that pods in Kubernetes can connect to the outside. 
- """ - - # Create function - package = self.create_package(name='python/test_python_http_get.py') - function_id = self.create_function(package_path=package) - - url = 'https://docs.openstack.org/qinling/latest' - - # Gets the page's sha256 outside Qinling - response = requests.get(url, timeout=10) - page_sha256 = hashlib.sha256(response.text.encode('utf-8')).hexdigest() - - # Create an execution to get the page's sha256 with Qinling - resp, body = self.client.create_execution( - function_id, input='{"url": "%s"}' % url - ) - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - self.assertEqual(201, resp.status) - self.assertEqual('success', body['status']) - result = json.loads(body['result']) - self.assertEqual(page_sha256, result['output']) - - @decorators.idempotent_id('b05e3bac-b23f-11e8-9679-00224d6b7bc1') - def test_python_execution_timeout(self): - package = self.create_package( - name='python/test_python_sleep.py' - ) - function_id = self.create_function(package_path=package) - - resp, body = self.client.create_execution( - function_id, - input='{"seconds": 7}' - ) - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('failed', body['status']) - - result = jsonutils.loads(body['result']) - - self.assertGreaterEqual(result['duration'], 5) - self.assertIn( - 'Function execution timeout', result['output'] - ) - - # Update function timeout - resp, _ = self.client.update_function( - function_id, - timeout=10 - ) - self.assertEqual(200, resp.status_code) - - resp, body = self.client.create_execution( - function_id, - input='{"seconds": 7}' - ) - - self.assertEqual(201, resp.status) - self.addCleanup(self.client.delete_resource, 'executions', - body['id'], ignore_notfound=True) - self.assertEqual('success', body['status']) - - result = jsonutils.loads(body['result']) - 
self.assertGreaterEqual(result['duration'], 7) diff --git a/qinling_tempest_plugin/tests/api/test_executions_nodejs.py b/qinling_tempest_plugin/tests/api/test_executions_nodejs.py deleted file mode 100644 index be4181dd..00000000 --- a/qinling_tempest_plugin/tests/api/test_executions_nodejs.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2018 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from tempest import config -from tempest.lib import decorators - -from qinling_tempest_plugin.tests import base - -CONF = config.CONF - - -class NodeJSExecutionsTest(base.BaseQinlingTest): - name_prefix = 'NodeJSExecutionsTest' - image = CONF.qinling.nodejs_runtime_image - - def setUp(self): - super(NodeJSExecutionsTest, self).setUp() - self.wait_runtime_available(self.runtime_id) - - @decorators.idempotent_id('e3046fa4-2289-11e8-b720-00224d6b7bc1') - def test_basic_nodejs_execution(self): - package = self.create_package(name='nodejs/test_nodejs_basic.js') - function_id = self.create_function(package_path=package) - resp, body = self.client.create_execution(function_id, - input='{"name": "Qinling"}') - self.assertEqual(201, resp.status) - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - self.assertEqual('success', body['status']) diff --git a/qinling_tempest_plugin/tests/api/test_function_versions.py b/qinling_tempest_plugin/tests/api/test_function_versions.py deleted file mode 100644 index 
3c8dd9e3..00000000 --- a/qinling_tempest_plugin/tests/api/test_function_versions.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2018 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from tempest.lib import decorators -from tempest.lib import exceptions -import tenacity - -from qinling_tempest_plugin.tests import base - - -class FunctionVersionsTest(base.BaseQinlingTest): - name_prefix = 'FunctionVersionsTest' - - def setUp(self): - super(FunctionVersionsTest, self).setUp() - - # Wait until runtime is available - self.wait_runtime_available(self.runtime_id) - - @decorators.idempotent_id('ce630c59-a79d-4b2d-89af-c7c5c8f8bd3f') - def test_create(self): - function_id = self.create_function() - new_version = self.create_function_version(function_id) - - self.assertEqual(1, new_version) - - resp, body = self.client.get_resources( - 'functions/%s/versions' % function_id) - - self.assertEqual(200, resp.status) - self.assertIn( - new_version, - [v['version_number'] for v in body['function_versions']] - ) - - @decorators.idempotent_id('9da2d24c-2ce4-4e6f-9e44-74ef1b9ec3cc') - def test_create_function_no_change(self): - function_id = self.create_function() - self.create_function_version(function_id) - - self.assertRaises( - exceptions.Forbidden, - self.client.create_function_version, - function_id - ) - - @decorators.idempotent_id('6864d134-fbb9-4738-9721-b541c4362789') - def test_create_function_change(self): - function_id = self.create_function() - version_1 
= self.create_function_version(function_id) - self.update_function_package(function_id, - "python/test_python_sleep.py") - version_2 = self.create_function_version(function_id) - - self.assertGreater(version_2, version_1) - - resp, body = self.client.get_resources( - 'functions/%s/versions' % function_id) - self.assertEqual(200, resp.status) - - numbers = [v['version_number'] for v in body['function_versions']] - self.assertIn(version_1, numbers) - self.assertIn(version_2, numbers) - - @decorators.idempotent_id('3f735ed4-64b0-4ec3-8bf2-507e38dcea19') - def test_create_admin_not_allowed(self): - """test_create_admin_not_allowed - - Even admin user can not create function version for normal user's - function. - """ - function_id = self.create_function() - - self.assertRaises( - exceptions.NotFound, - self.admin_client.create_function_version, - function_id - ) - - # @decorators.idempotent_id('78dc5552-fcb8-4b27-86f7-5f3d96143934') - # def test_create_version_lock_failed(self): - # """test_create_version_lock_failed - # - # Creating a function requires a lock. If qinling failed to acquire the - # lock then an error would be returned after some retries. - # - # In this test we acquire the lock manually, so that qinling will fail - # to acquire the lock. 
- # """ - # function_id = self.create_function() - # - # from qinling_tempest_plugin.tests import utils - # etcd3_client = utils.get_etcd_client() - # lock_id = "function_version_%s" % function_id - # with etcd3_client.lock(id=lock_id): - # self.assertRaises( - # exceptions.ServerFault, - # self.client.create_function_version, - # function_id - # ) - - @decorators.idempotent_id('43c06f41-d116-43a7-a61c-115f7591b22e') - def test_get_by_admin(self): - """Admin user can get normal user's function version.""" - function_id = self.create_function() - version = self.create_function_version(function_id) - - resp, body = self.admin_client.get_function_version(function_id, - version) - - self.assertEqual(200, resp.status) - self.assertEqual(version, body.get("version_number")) - - @decorators.idempotent_id('e6b865d8-ffa8-4cfc-8afb-820c64f9b2af') - def test_get_all_by_admin(self): - """Admin user can list normal user's function version.""" - function_id = self.create_function() - version = self.create_function_version(function_id) - - resp, body = self.admin_client.get_function_versions(function_id) - - self.assertEqual(200, resp.status) - self.assertIn( - version, - [v['version_number'] for v in body['function_versions']] - ) - - @decorators.idempotent_id('0e70ef18-687c-4ce4-ae29-aee2f88b4b9c') - def test_delete(self): - function_id = self.create_function() - version = self.create_function_version(function_id) - - resp = self.client.delete_function_version(function_id, version) - - self.assertEqual(204, resp.status) - - resp, body = self.client.get_function_versions(function_id) - - self.assertEqual(200, resp.status) - self.assertNotIn( - version, - [v['version_number'] for v in body['function_versions']] - ) - - @decorators.idempotent_id('c6717e2e-e80a-43d9-a25b-84f4b7453c76') - def test_delete_by_admin(self): - """test_delete_by_admin - - Admin user can not delete normal user's function version. 
- """ - function_id = self.create_function() - version = self.create_function_version(function_id) - - self.assertRaises( - exceptions.NotFound, - self.admin_client.delete_function_version, - function_id, - version - ) - - @decorators.idempotent_id('7898f89f-a490-42a3-8cf7-63cbd9543a06') - def test_detach(self): - """Admin only operation.""" - function_id = self.create_function() - version = self.create_function_version(function_id) - - # Create execution to allocate worker - resp, _ = self.client.create_execution( - function_id, input='{"name": "Qinling"}', version=version - ) - self.assertEqual(201, resp.status) - - resp, body = self.admin_client.get_function_workers(function_id, - version=version) - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['workers'])) - - # Detach function version from workers - resp, _ = self.admin_client.detach_function(function_id, - version=version) - self.assertEqual(202, resp.status) - - def _assert_workers(): - resp, body = self.admin_client.get_function_workers( - function_id, - version=version - ) - self.assertEqual(200, resp.status) - self.assertEqual(0, len(body['workers'])) - - r = tenacity.Retrying( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(5), - retry=tenacity.retry_if_exception_type(AssertionError) - ) - r.call(_assert_workers) diff --git a/qinling_tempest_plugin/tests/api/test_functions.py b/qinling_tempest_plugin/tests/api/test_functions.py deleted file mode 100644 index 71d4906e..00000000 --- a/qinling_tempest_plugin/tests/api/test_functions.py +++ /dev/null @@ -1,163 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os - -from tempest.lib import decorators -from tempest.lib import exceptions -import tenacity - -from qinling_tempest_plugin.tests import base -from qinling_tempest_plugin.tests import utils - - -class FunctionsTest(base.BaseQinlingTest): - name_prefix = 'FunctionsTest' - - def setUp(self): - super(FunctionsTest, self).setUp() - - # Wait until runtime is available - self.wait_runtime_available(self.runtime_id) - self.python_zip_file = self.create_package() - - @decorators.idempotent_id('9c36ac64-9a44-4c44-9e44-241dcc6b0933') - def test_crud_function(self): - # Create function - md5sum = utils.md5(self.python_zip_file) - function_id = self.create_function(self.python_zip_file, md5sum=md5sum) - - # Get functions - resp, body = self.client.get_resources('functions') - self.assertEqual(200, resp.status) - self.assertIn( - function_id, - [function['id'] for function in body['functions']] - ) - - # Download function package - resp, data = self.client.download_function(function_id) - self.assertEqual(200, resp.status) - self.assertEqual('application/zip', resp['content-type']) - self.assertEqual(os.path.getsize(self.python_zip_file), len(data)) - - # Delete function - resp = self.client.delete_resource('functions', function_id) - self.assertEqual(204, resp.status) - - @decorators.idempotent_id('1fec41cd-b753-4cad-90c5-c89d7e710317') - def test_create_function_md5mismatch(self): - fake_md5 = "e807f1fcf82d132f9bb018ca6738a19f" - - with open(self.python_zip_file, 'rb') as package_data: - resp, body = self.client.create_function( - {"source": "package", 
"md5sum": fake_md5}, - self.runtime_id, - name='test_create_function_md5mismatch', - package_data=package_data - ) - - self.assertEqual(400, resp.status_code) - - @decorators.idempotent_id('f8dde7fc-fbcc-495c-9b39-70666b7d3f64') - def test_get_by_admin(self): - """test_get_by_admin - - Admin user can get the function by directly specifying the function id. - """ - function_id = self.create_function(self.python_zip_file) - - resp, body = self.admin_client.get_function(function_id) - - self.assertEqual(200, resp.status) - self.assertEqual(function_id, body['id']) - - @decorators.idempotent_id('051f3106-df01-4fcd-a0a3-c81c99653163') - def test_get_all_admin(self): - """test_get_all_admin - - Admin user needs to specify filters to get all the functions. - """ - function_id = self.create_function(self.python_zip_file) - - resp, body = self.admin_client.get_resources('functions') - - self.assertEqual(200, resp.status) - self.assertNotIn( - function_id, - [function['id'] for function in body['functions']] - ) - - resp, body = self.admin_client.get_resources( - 'functions?all_projects=true' - ) - - self.assertEqual(200, resp.status) - self.assertIn( - function_id, - [function['id'] for function in body['functions']] - ) - - @decorators.idempotent_id('cd396bda-2174-4335-9f7f-2457aab61a4a') - def test_get_all_not_allowed(self): - # Get other projects functions by normal user - context = self.assertRaises( - exceptions.Forbidden, - self.client.get_resources, - 'functions?all_projects=true' - ) - self.assertIn( - 'Operation not allowed', - context.resp_body.get('faultstring') - ) - - @decorators.idempotent_id('5cb44ee4-6c0c-4ede-9e6c-e1b9109eaa2c') - def test_delete_not_allowed(self): - """Even admin user can not delete other project's function.""" - function_id = self.create_function(self.python_zip_file) - - self.assertRaises( - exceptions.Forbidden, - self.admin_client.delete_resource, - 'functions', - function_id - ) - - 
@decorators.idempotent_id('45df227e-3399-4412-a8d3-d40c1290bc1c') - def test_detach(self): - """Admin only operation.""" - function_id = self.create_function(self.python_zip_file) - resp, _ = self.client.create_execution( - function_id, input='{"name": "Qinling"}' - ) - self.assertEqual(201, resp.status) - - resp, body = self.admin_client.get_function_workers(function_id) - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['workers'])) - - # Detach function - resp, _ = self.admin_client.detach_function(function_id) - self.assertEqual(202, resp.status) - - def _assert_workers(): - resp, body = self.admin_client.get_function_workers(function_id) - self.assertEqual(200, resp.status) - self.assertEqual(0, len(body['workers'])) - - r = tenacity.Retrying( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_attempt(5), - retry=tenacity.retry_if_exception_type(AssertionError) - ) - r.call(_assert_workers) diff --git a/qinling_tempest_plugin/tests/api/test_jobs.py b/qinling_tempest_plugin/tests/api/test_jobs.py deleted file mode 100644 index 83bdd117..00000000 --- a/qinling_tempest_plugin/tests/api/test_jobs.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2018 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from datetime import datetime -from datetime import timedelta - -from tempest.lib import decorators - -from qinling_tempest_plugin.tests import base - - -class JobsTest(base.BaseQinlingTest): - name_prefix = 'JobsTest' - - def setUp(self): - super(JobsTest, self).setUp() - - self.wait_runtime_available(self.runtime_id) - self.function_id = self.create_function() - - @decorators.idempotent_id('68e4d562-f762-11e7-875d-00224d6b7bc1') - def test_get_all_admin(self): - """Admin user can get jobs of other projects""" - job_id = self.create_job(self.function_id) - - resp, body = self.admin_client.get_resources( - 'jobs?all_projects=true' - ) - self.assertEqual(200, resp.status) - self.assertIn( - job_id, - [item['id'] for item in body['jobs']] - ) - - @decorators.idempotent_id('82a694a7-d3b5-4b6c-86e5-5fac6eae0f2a') - def test_create_with_function_version(self): - version = self.create_function_version(self.function_id) - # first_execution_time is at least 1 min ahead of current time. - first_execution_time = str(datetime.utcnow() + timedelta(seconds=90)) - job_id = self.create_job(self.function_id, version=version, - first_execution_time=first_execution_time) - - # Wait for job to be finished - self.wait_job_done(job_id) - - resp, body = self.client.get_resources( - 'executions', - {'description': 'has:%s' % job_id} - ) - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['executions'])) - - exec_id = body['executions'][0]['id'] - self.wait_execution_success(exec_id) - - resp, body = self.client.get_execution_log(exec_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) - - @decorators.idempotent_id('2ff6b90b-0432-44ec-8698-eed1c7fb7f04') - def test_create_with_function_alias(self): - version = self.create_function_version(self.function_id) - function_alias = self.create_function_alias(self.function_id, version) - # first_execution_time is at least 1 min ahead of current time. 
- first_execution_time = str(datetime.utcnow() + timedelta(seconds=90)) - job_id = self.create_job(function_alias=function_alias, - first_execution_time=first_execution_time) - - # Wait for job to be finished - self.wait_job_done(job_id) - - resp, body = self.client.get_resources( - 'executions', - {'description': 'has:%s' % job_id} - ) - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['executions'])) - - exec_id = body['executions'][0]['id'] - self.wait_execution_success(exec_id) - - resp, body = self.client.get_execution_log(exec_id) - self.assertEqual(200, resp.status) - self.assertIn('Hello, World', body) diff --git a/qinling_tempest_plugin/tests/api/test_runtimes.py b/qinling_tempest_plugin/tests/api/test_runtimes.py deleted file mode 100644 index 3edb29af..00000000 --- a/qinling_tempest_plugin/tests/api/test_runtimes.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-from tempest.lib.common.utils import data_utils -from tempest.lib import decorators - -from qinling_tempest_plugin.tests import base - - -class RuntimesTest(base.BaseQinlingTest): - name_prefix = 'RuntimesTest' - create_runtime = False - - @decorators.idempotent_id('fdc2f07f-dd1d-4981-86d3-5bc7908d9a9b') - def test_crud_runtime(self): - name = data_utils.rand_name('runtime', prefix=self.name_prefix) - resp, body = self.admin_client.create_runtime(self.image, name) - - self.assertEqual(201, resp.status) - self.assertEqual(name, body['name']) - - runtime_id = body['id'] - self.addCleanup(self.admin_client.delete_resource, 'runtimes', - runtime_id, ignore_notfound=True) - - # Get runtimes - resp, body = self.client.get_resources('runtimes') - - self.assertEqual(200, resp.status) - self.assertIn( - runtime_id, - [runtime['id'] for runtime in body['runtimes']] - ) - - # Wait for runtime to be available - # We don't have to check k8s resource, if runtime's status has changed - # to available, then kubernetes deployment is assumed to be ok. - self.wait_runtime_available(runtime_id) - - # Delete runtime - resp = self.admin_client.delete_resource('runtimes', runtime_id) - - self.assertEqual(204, resp.status) - - @decorators.idempotent_id('c1db56bd-c3a8-4ca6-9482-c362fd492db0') - def test_create_private_runtime(self): - """Private runtime test. - - Admin user creates a private runtime which can not be used by other - projects. 
- """ - name = data_utils.rand_name('runtime', prefix=self.name_prefix) - resp, body = self.admin_client.create_runtime( - self.image, name, is_public=False - ) - - self.assertEqual(201, resp.status) - self.assertEqual(name, body['name']) - self.assertFalse(body['is_public']) - - runtime_id = body['id'] - self.addCleanup(self.admin_client.delete_resource, 'runtimes', - runtime_id, ignore_notfound=True) - - # Get runtimes - resp, body = self.client.get_resources('runtimes') - - self.assertEqual(200, resp.status) - self.assertNotIn( - runtime_id, - [runtime['id'] for runtime in body['runtimes']] - ) diff --git a/qinling_tempest_plugin/tests/api/test_webhooks.py b/qinling_tempest_plugin/tests/api/test_webhooks.py deleted file mode 100644 index 25bdd050..00000000 --- a/qinling_tempest_plugin/tests/api/test_webhooks.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2018 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import requests -from tempest.lib import decorators - -from qinling_tempest_plugin.tests import base - - -class WebhooksTest(base.BaseQinlingTest): - name_prefix = 'WebhooksTest' - - def setUp(self): - super(WebhooksTest, self).setUp() - self.wait_runtime_available(self.runtime_id) - self.function_id = self.create_function() - - @decorators.idempotent_id('37DCD022-32D6-48D1-B90C-31D605DBE53B') - def test_webhook_invoke(self): - webhook_id, url = self.create_webhook(self.function_id) - resp = requests.post(url, data={'name': 'qinling'}, verify=False) - self.assertEqual(202, resp.status_code) - resp_exec_id = resp.json().get('execution_id') - self.addCleanup(self.client.delete_resource, 'executions', - resp_exec_id, ignore_notfound=True) - - resp, body = self.client.get_resources( - 'executions', - {'description': 'has:%s' % webhook_id} - ) - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['executions'])) - exec_id = body['executions'][0]['id'] - self.assertEqual(resp_exec_id, exec_id) - self.wait_execution_success(exec_id) - - resp, body = self.client.get_execution_log(exec_id) - self.assertEqual(200, resp.status) - self.assertIn('qinling', body) - - @decorators.idempotent_id('68605edb-1e36-4953-907d-aa6e2352bb85') - def test_webhook_with_function_version(self): - version = self.create_function_version(self.function_id) - webhook_id, url = self.create_webhook(self.function_id, - version=version) - resp = requests.post(url, data={'name': 'version_test'}, verify=False) - - self.assertEqual(202, resp.status_code) - - resp_exec_id = resp.json().get('execution_id') - self.addCleanup(self.client.delete_resource, 'executions', - resp_exec_id, ignore_notfound=True) - - resp, body = self.client.get_resources( - 'executions', - {'description': 'has:%s' % webhook_id} - ) - - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['executions'])) - exec_id = body['executions'][0]['id'] - self.assertEqual(resp_exec_id, exec_id) - 
self.wait_execution_success(exec_id) - - resp, body = self.client.get_execution_log(exec_id) - self.assertEqual(200, resp.status) - self.assertIn('version_test', body) - - @decorators.idempotent_id('a5b5eed3-82ee-4ab1-b9ca-9898e4da6b5a') - def test_webhook_with_function_alias(self): - version = self.create_function_version(self.function_id) - function_alias = self.create_function_alias(self.function_id, version) - webhook_id, url = self.create_webhook(function_alias=function_alias) - resp = requests.post(url, data={'name': 'alias_test'}, verify=False) - - self.assertEqual(202, resp.status_code) - - resp_exec_id = resp.json().get('execution_id') - self.addCleanup(self.client.delete_resource, 'executions', - resp_exec_id, ignore_notfound=True) - - resp, body = self.client.get_resources( - 'executions', - {'description': 'has:%s' % webhook_id} - ) - - self.assertEqual(200, resp.status) - self.assertEqual(1, len(body['executions'])) - exec_id = body['executions'][0]['id'] - self.assertEqual(resp_exec_id, exec_id) - self.wait_execution_success(exec_id) - - resp, body = self.client.get_execution_log(exec_id) - self.assertEqual(200, resp.status) - self.assertIn('alias_test', body) - - @decorators.idempotent_id('8e6e4f76-f748-11e7-8ec3-00224d6b7bc1') - def test_get_all_admin(self): - """Admin user can get webhooks of other projects""" - webhook_id, _ = self.create_webhook(self.function_id) - - resp, body = self.admin_client.get_resources( - 'webhooks?all_projects=true' - ) - self.assertEqual(200, resp.status) - self.assertIn( - webhook_id, - [item['id'] for item in body['webhooks']] - ) diff --git a/qinling_tempest_plugin/tests/base.py b/qinling_tempest_plugin/tests/base.py deleted file mode 100644 index 9d566c28..00000000 --- a/qinling_tempest_plugin/tests/base.py +++ /dev/null @@ -1,267 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import os -import pkg_resources -import tempfile -import zipfile - -from tempest import config -from tempest.lib.common.utils import data_utils -from tempest import test -import tenacity - -CONF = config.CONF - - -class BaseQinlingTest(test.BaseTestCase): - credentials = ('admin', 'primary', 'alt') - create_runtime = True - image = CONF.qinling.python_runtime_image - - @classmethod - def skip_checks(cls): - super(BaseQinlingTest, cls).skip_checks() - - if not CONF.service_available.qinling: - raise cls.skipException("Qinling service is not available.") - - @classmethod - def setup_clients(cls): - super(BaseQinlingTest, cls).setup_clients() - - cls.client = cls.os_primary.qinling.QinlingClient() - cls.alt_client = cls.os_alt.qinling.QinlingClient() - cls.admin_client = cls.os_admin.qinling.QinlingClient() - - @classmethod - def resource_setup(cls): - super(BaseQinlingTest, cls).resource_setup() - - if cls.create_runtime: - cls.runtime_id = None - name = data_utils.rand_name('runtime', prefix=cls.name_prefix) - _, body = cls.admin_client.create_runtime(cls.image, name) - cls.runtime_id = body['id'] - - @classmethod - def resource_cleanup(cls): - if cls.create_runtime and cls.runtime_id: - cls.admin_client.delete_resource( - 'runtimes', cls.runtime_id, - ignore_notfound=True - ) - - super(BaseQinlingTest, cls).resource_cleanup() - - @tenacity.retry( - wait=tenacity.wait_fixed(3), - stop=tenacity.stop_after_attempt(20), - retry=tenacity.retry_if_exception_type(AssertionError) - ) - def wait_runtime_available(self, id): - resp, body = 
self.client.get_resource('runtimes', id) - - self.assertEqual(200, resp.status) - self.assertEqual('available', body['status']) - - @tenacity.retry( - wait=tenacity.wait_fixed(3), - stop=tenacity.stop_after_attempt(10), - retry=tenacity.retry_if_exception_type(AssertionError) - ) - def wait_execution_success(self, id): - resp, body = self.client.get_resource('executions', id) - - self.assertEqual(200, resp.status) - self.assertEqual('success', body['status']) - - @tenacity.retry( - wait=tenacity.wait_fixed(10), - stop=tenacity.stop_after_attempt(12), - retry=tenacity.retry_if_exception_type(AssertionError) - ) - def wait_job_done(self, id): - resp, body = self.client.get_resource('jobs', id) - - self.assertEqual(200, resp.status) - self.assertEqual('done', body['status']) - - def create_package(self, name="python/test_python_basic.py"): - file_path = pkg_resources.resource_filename( - 'qinling_tempest_plugin', - "functions/%s" % name - ) - base_name, extension = os.path.splitext(file_path) - module_name = os.path.basename(base_name) - temp_dir = tempfile.mkdtemp() - zip_file = os.path.join(temp_dir, '%s.zip' % module_name) - - if not os.path.isfile(zip_file): - zf = zipfile.ZipFile(zip_file, mode='w') - try: - zf.write(file_path, '%s%s' % (module_name, extension)) - finally: - zf.close() - - self.addCleanup(os.rmdir, temp_dir) - self.addCleanup(os.remove, zip_file) - return zip_file - - def create_function(self, package_path=None, image=None, - md5sum=None, timeout=None): - function_name = data_utils.rand_name( - 'function', - prefix=self.name_prefix - ) - - if not image: - if not package_path: - package_path = self.create_package() - - code = {"source": "package"} - if md5sum: - code.update({"md5sum": md5sum}) - base_name, _ = os.path.splitext(package_path) - module_name = os.path.basename(base_name) - with open(package_path, 'rb') as package_data: - resp, body = self.client.create_function( - code, - self.runtime_id, - name=function_name, - 
package_data=package_data, - entry='%s.main' % module_name, - timeout=timeout - ) - else: - resp, body = self.client.create_function( - {"source": "image", "image": image}, - None, - name=function_name, - ) - - self.assertEqual(201, resp.status_code) - function_id = body['id'] - self.addCleanup(self.client.delete_resource, 'functions', - function_id, ignore_notfound=True) - - return function_id - - def update_function_package(self, function_id, function_path): - package_path = self.create_package(name=function_path) - base_name, _ = os.path.splitext(package_path) - module_name = os.path.basename(base_name) - - with open(package_path, 'rb') as package_data: - resp, _ = self.client.update_function( - function_id, - package_data=package_data, - entry='%s.main' % module_name - ) - - self.assertEqual(200, resp.status_code) - - def create_webhook(self, function_id=None, function_alias=None, - version=0): - if function_alias: - resp, body = self.client.create_webhook( - function_alias=function_alias - ) - else: - if not function_id: - function_id = self.create_function() - resp, body = self.client.create_webhook( - function_id, - version=version - ) - self.assertEqual(201, resp.status) - - webhook_id = body['id'] - self.addCleanup(self.client.delete_resource, 'webhooks', - webhook_id, ignore_notfound=True) - - return webhook_id, body['webhook_url'] - - def create_job(self, function_id=None, function_alias=None, version=0, - first_execution_time=None): - if function_alias: - resp, body = self.client.create_job( - function_alias=function_alias, - first_execution_time=first_execution_time - ) - else: - if not function_id: - function_id = self.create_function() - resp, body = self.client.create_job( - function_id, - version=version, - first_execution_time=first_execution_time - ) - self.assertEqual(201, resp.status) - job_id = body['id'] - - self.addCleanup(self.client.delete_resource, 'jobs', - job_id, ignore_notfound=True) - - return job_id - - def 
create_function_version(self, function_id=None): - if not function_id: - function_id = self.create_function() - - resp, body = self.client.create_function_version(function_id) - self.assertEqual(201, resp.status) - - version = body['version_number'] - - self.addCleanup(self.client.delete_function_version, function_id, - version, ignore_notfound=True) - - return version - - def create_execution(self, function_id=None, alias_name=None, version=0, - input=None): - if alias_name: - resp, body = self.client.create_execution(alias_name=alias_name, - input=input) - else: - resp, body = self.client.create_execution(function_id, - version=version, - input=input) - - self.assertEqual(201, resp.status) - - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - self.assertEqual('success', body['status']) - - return execution_id - - def create_function_alias(self, function_id=None, function_version=0): - name = data_utils.rand_name(name="alias", prefix=self.name_prefix) - if not function_id: - function_id = self.create_function() - - resp, body = self.client.create_function_alias(name, - function_id, - function_version) - - self.assertEqual(201, resp.status) - - alias_name = body['name'] - self.addCleanup(self.client.delete_function_alias, alias_name, - ignore_notfound=True) - - return alias_name diff --git a/qinling_tempest_plugin/tests/scenario/__init__.py b/qinling_tempest_plugin/tests/scenario/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/qinling_tempest_plugin/tests/scenario/test_basic_ops.py b/qinling_tempest_plugin/tests/scenario/test_basic_ops.py deleted file mode 100644 index 5de8bd51..00000000 --- a/qinling_tempest_plugin/tests/scenario/test_basic_ops.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2017 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from tempest.lib.common.utils import data_utils -from tempest.lib import decorators - -from qinling_tempest_plugin.tests import base - - -class BasicOpsTest(base.BaseQinlingTest): - name_prefix = 'BasicOpsTest' - create_runtime = False - - @decorators.idempotent_id('205fd749-2468-4d9f-9c05-45558d6d8f9e') - def test_basic_ops(self): - """Basic qinling operations test case, including following steps: - - 1. Admin user creates a runtime. - 2. Normal user creates function. - 3. Normal user creates execution(invoke function). - 4. Check result and execution log. - """ - name = data_utils.rand_name('runtime', prefix=self.name_prefix) - resp, body = self.admin_client.create_runtime(self.image, name) - self.assertEqual(201, resp.status) - self.assertEqual(name, body['name']) - - # Wait for runtime to be available - self.runtime_id = body['id'] - self.wait_runtime_available(self.runtime_id) - self.addCleanup(self.admin_client.delete_resource, 'runtimes', - self.runtime_id, ignore_notfound=True) - - # Create function - function_id = self.create_function() - - # Invoke function - resp, body = self.client.create_execution( - function_id, input='{"name": "Qinling"}' - ) - # self.assertEqual(201, resp.status) - # self.assertEqual('success', body['status']) - execution_id = body['id'] - self.addCleanup(self.client.delete_resource, 'executions', - execution_id, ignore_notfound=True) - - # Get execution log - resp, body = self.client.get_execution_log(execution_id) - - self.assertEqual(200, resp.status) - self.assertIn('Hello, Qinling', body) diff --git 
a/qinling_tempest_plugin/tests/utils.py b/qinling_tempest_plugin/tests/utils.py deleted file mode 100644 index 1f63ed88..00000000 --- a/qinling_tempest_plugin/tests/utils.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import hashlib - -from oslo_config import cfg - -from qinling import config as qinling_config -from qinling.utils import etcd_util - -QINLING_CONF = None - - -def md5(file=None, content=None): - hash_md5 = hashlib.md5() - - if file: - with open(file, "rb") as f: - for chunk in iter(lambda: f.read(4096), b""): - hash_md5.update(chunk) - elif content: - hash_md5.update(content) - - return hash_md5.hexdigest() - - -def get_etcd_client(): - """Use qinling's default CONF to connect to etcd.""" - global QINLING_CONF - - if not QINLING_CONF: - QINLING_CONF = cfg.ConfigOpts() - QINLING_CONF(args=[], project='qinling') - QINLING_CONF.register_opts(qinling_config.etcd_opts, - qinling_config.ETCD_GROUP) - - return etcd_util.get_client(conf=QINLING_CONF) diff --git a/releasenotes/notes/.placeholder b/releasenotes/notes/.placeholder deleted file mode 100644 index e69de29b..00000000 diff --git a/releasenotes/notes/add-apache-uwsgi-examples-13f735ec82c37a64.yaml b/releasenotes/notes/add-apache-uwsgi-examples-13f735ec82c37a64.yaml deleted file mode 100644 index 4c985ffa..00000000 --- a/releasenotes/notes/add-apache-uwsgi-examples-13f735ec82c37a64.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- 
-features: - - | - This `change `__ allowed Qinling - API to be used with Apache ``mod_wsgi`` and uWSGI. - - Using Apache/uWSGI is the best approach for a production environment, Apache - virtualhost and uWSGI examples are available into ``etc/{apache2|uwsgi}`` directory. - - See `Qinling documentation - `__ - for details. diff --git a/releasenotes/notes/add-api-ref-documentation-5ec1325223ecafab.yaml b/releasenotes/notes/add-api-ref-documentation-5ec1325223ecafab.yaml deleted file mode 100644 index 3fcd8844..00000000 --- a/releasenotes/notes/add-api-ref-documentation-5ec1325223ecafab.yaml +++ /dev/null @@ -1,10 +0,0 @@ ---- -features: - - | - Add API reference documentation. - - The documentation covers ``functions``, ``executions``, ``versions``, - ``aliases``, ``webhooks`` and ``jobs`` endpoints. - - See `Qinling api-ref documentation - `__ for details. diff --git a/releasenotes/notes/add-cors-support-ce060ee8513a1acf.yaml b/releasenotes/notes/add-cors-support-ce060ee8513a1acf.yaml deleted file mode 100644 index 8a6af145..00000000 --- a/releasenotes/notes/add-cors-support-ce060ee8513a1acf.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -features: - - | - Add ``CORS``, ``HTTPProxyToWSGI`` and ``WSGI`` support based on - ``oslo_middleware`` library in front of the Qinling API. - - The purpose of this middleware is to set up the request URL correctly - in the case there is a proxy (for instance, a loadbalancer such as - HAProxy) in front of the Qinling API. - - The ``HTTPProxyToWSGI`` option is ``off`` by default and needs to be - enabled via a configuration value. diff --git a/releasenotes/notes/add-docker-tools-cd9aa6fff8936d81.yaml b/releasenotes/notes/add-docker-tools-cd9aa6fff8936d81.yaml deleted file mode 100644 index 63a0139a..00000000 --- a/releasenotes/notes/add-docker-tools-cd9aa6fff8936d81.yaml +++ /dev/null @@ -1,13 +0,0 @@ ---- -features: - - | - Docker support has been added to help people to test Qinling - easily and quickly. 
Authentication has been disabled by default - which means Keystone is not required. - - Dockerfile has been created to build Docker image from the master branch. - Docker compose files have been created to deploy RabbitMQ, MySQL and - Qinling API/Engine containers. - - Files have been added under ``tools/docker/`` directory same as for the - README with all the required instructions. diff --git a/releasenotes/notes/add-nodejs10-runtime-b473129963436a68.yaml b/releasenotes/notes/add-nodejs10-runtime-b473129963436a68.yaml deleted file mode 100644 index ef5b07ef..00000000 --- a/releasenotes/notes/add-nodejs10-runtime-b473129963436a68.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - | - Add NodeJS 10.16 runtime, NodeJS 10.16 is the latest LTS version which - provide many new features and performances improvement. diff --git a/releasenotes/notes/add-support-function-alias-for-execution-and-webhook-cf786fc4c9efd0af.yaml b/releasenotes/notes/add-support-function-alias-for-execution-and-webhook-cf786fc4c9efd0af.yaml deleted file mode 100644 index 50512340..00000000 --- a/releasenotes/notes/add-support-function-alias-for-execution-and-webhook-cf786fc4c9efd0af.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -fixes: - - When creating execution and webhook with function alias, they should always pick up the - updated function and its version corresponding to the alias. diff --git a/releasenotes/notes/drop-py-2-7-86062f8380134200.yaml b/releasenotes/notes/drop-py-2-7-86062f8380134200.yaml deleted file mode 100644 index 8ef1e399..00000000 --- a/releasenotes/notes/drop-py-2-7-86062f8380134200.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -upgrade: - - | - Python 2.7 support has been dropped. Last release of qinling - to support python 2.7 is OpenStack Train. The minimum version of Python now - supported by qinling is Python 3.6. 
diff --git a/releasenotes/notes/enable-mutable-configuration-a0527660fa58532c.yaml b/releasenotes/notes/enable-mutable-configuration-a0527660fa58532c.yaml deleted file mode 100644 index e4d19cee..00000000 --- a/releasenotes/notes/enable-mutable-configuration-a0527660fa58532c.yaml +++ /dev/null @@ -1,6 +0,0 @@ ---- -features: - - | - Operators can now update the running configuration of the Qinling control - plane processes by sending the parent process a "HUP" signal. - Note: The configuration option must support mutation. diff --git a/releasenotes/notes/function-alias-in-job-d3f969306b7a4749.yaml b/releasenotes/notes/function-alias-in-job-d3f969306b7a4749.yaml deleted file mode 100644 index ab1768a0..00000000 --- a/releasenotes/notes/function-alias-in-job-d3f969306b7a4749.yaml +++ /dev/null @@ -1,3 +0,0 @@ -fixes: - - When creating job with function alias, the job could always pick up the - updated function and its version corresponding to the alias. diff --git a/releasenotes/notes/function-aliasing-318abd48128b4084.yaml b/releasenotes/notes/function-aliasing-318abd48128b4084.yaml deleted file mode 100644 index 2b34bbb5..00000000 --- a/releasenotes/notes/function-aliasing-318abd48128b4084.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - Support alias for functions. Users can create function alias - pointing to the specific function version. Function alias can be used - for creating execution, job and webhook. diff --git a/releasenotes/notes/function-timeout-905dc4b064b73fd3.yaml b/releasenotes/notes/function-timeout-905dc4b064b73fd3.yaml deleted file mode 100644 index 6533190c..00000000 --- a/releasenotes/notes/function-timeout-905dc4b064b73fd3.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - Users can now specify a timeout value for their function to prevent the function from running - indefinitely. When the specified timeout is reached, Qinling will terminate the function - execution. 
diff --git a/releasenotes/notes/function-versioning-6e23fc021c2a7c7e.yaml b/releasenotes/notes/function-versioning-6e23fc021c2a7c7e.yaml deleted file mode 100644 index d5893578..00000000 --- a/releasenotes/notes/function-versioning-6e23fc021c2a7c7e.yaml +++ /dev/null @@ -1,10 +0,0 @@ ---- -prelude: > - Support versioning for functions. The function developer can work with - different variations of a single function in the development workflow, - such as development, beta, and production. -features: - - Support versioning for functions. End users can create one or more versions - for a single function, the code package of a function version is immutable. - Each function version has a version number that can be used for creating - execution, job and webhook. diff --git a/releasenotes/notes/get-runtime-pool-information-440351400ad7f0b8.yaml b/releasenotes/notes/get-runtime-pool-information-440351400ad7f0b8.yaml deleted file mode 100644 index 8abb162c..00000000 --- a/releasenotes/notes/get-runtime-pool-information-440351400ad7f0b8.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - Add an administrative operation for getting the pool information for the - runtime, so that the admin user can check the capacity of the runtime and - scale up or scale down the pool accordingly. diff --git a/releasenotes/notes/isolate-k8s-pods-617fec5dc5fbd2d8.yaml b/releasenotes/notes/isolate-k8s-pods-617fec5dc5fbd2d8.yaml deleted file mode 100644 index 67131bc1..00000000 --- a/releasenotes/notes/isolate-k8s-pods-617fec5dc5fbd2d8.yaml +++ /dev/null @@ -1,12 +0,0 @@ ---- -security: - - | - When using Kubernetes as the orchestrator, Qinling will create Kubernetes - pods to run executions of functions. In Kubernetes, pods are non-isolated - unless the NetworkPolicy is configured and enforced. In Qinling, we create - NetworkPolicy to disable the communication between pods and the traffic - from outside the cluster. 
-upgrade: - - Re-apply the Kubernetes manifest file to grant NetworkPolicy resource - operation permission to ``qinling`` user in Kubernetes, - ``curl -sSL https://raw.githubusercontent.com/openstack/qinling/master/example/kubernetes/k8s_qinling_role.yaml | kubectl apply -f -`` diff --git a/releasenotes/notes/python3-runtime-fca413608da48437.yaml b/releasenotes/notes/python3-runtime-fca413608da48437.yaml deleted file mode 100644 index a4ffb2e2..00000000 --- a/releasenotes/notes/python3-runtime-fca413608da48437.yaml +++ /dev/null @@ -1,4 +0,0 @@ ---- -features: - - A reference runtime implementation for Python 3 is added and used as the - default runtime in Devstack. diff --git a/releasenotes/notes/qinling-k8s-apiserver-certs-1651e26de5ca001c.yaml b/releasenotes/notes/qinling-k8s-apiserver-certs-1651e26de5ca001c.yaml deleted file mode 100644 index e3e878b2..00000000 --- a/releasenotes/notes/qinling-k8s-apiserver-certs-1651e26de5ca001c.yaml +++ /dev/null @@ -1,21 +0,0 @@ ---- -prelude: > - Qinling now can and by default connect to Kubernetes API server with TLS - certificates. -features: - - | - Qinling can connect to Kubernetes API server with TLS certificates, which - ensures that the connection between Qinling and Kubernetes API server is - secure, and the access to the Kubernetes API from Qinling is authenticated - and authroized. For more information, please refer to - `Kubernetes authenticating with X509 client certs `__ - and `using RBAC authorization in Kubernetes `__. -upgrade: - - | - Qinling now by default will connect to Kubernetes API server using TLS - certificates. For testing environments, users can set the - ``use_api_certificate`` option to ``False`` under the ``kubernetes`` - section in the Qinling configuration file to continue using insecure - connection between Qinling and Kubernetes API server. For production - environments, it is recommended to generate client certs for Qinling - to access the Kubernetes API. 
diff --git a/releasenotes/notes/qinling-kolla-integration-305813d80110f209.yaml b/releasenotes/notes/qinling-kolla-integration-305813d80110f209.yaml deleted file mode 100644 index 32df72e6..00000000 --- a/releasenotes/notes/qinling-kolla-integration-305813d80110f209.yaml +++ /dev/null @@ -1,15 +0,0 @@ ---- -features: - - | - Qinling is now fully integrated within Kolla (Docker) and Kolla Ansible. - - Qinling features supported by Kolla: - - * Configure an external Kubernetes cluster - * Use ``etcd`` from Kolla - * Qinling API Apache WSGI deployment - * ``CORS`` configuration - - See `Kolla documentation - `_ for - details. diff --git a/releasenotes/notes/resources-customized-5f5382d40e375dc1.yaml b/releasenotes/notes/resources-customized-5f5382d40e375dc1.yaml deleted file mode 100644 index b8d10fd4..00000000 --- a/releasenotes/notes/resources-customized-5f5382d40e375dc1.yaml +++ /dev/null @@ -1,10 +0,0 @@ ---- -prelude: > - Support customizing resource limitation for functions, both cpu and memory - are supported. -features: - - | - Support customizing memory and cpu resource limitation for functions. The - user can specify ``memory_size`` and ``cpu`` when creating functions, - otherwise the default values are used. Refer to Qinling documentation for - more details. diff --git a/releasenotes/notes/secure-connection-etcd-15c6a6c4ea49c33e.yaml b/releasenotes/notes/secure-connection-etcd-15c6a6c4ea49c33e.yaml deleted file mode 100644 index 83887345..00000000 --- a/releasenotes/notes/secure-connection-etcd-15c6a6c4ea49c33e.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -features: - - Qinling can now use TLS certificates to securely connect to the etcd - service. It is enabled by default in Qinling Devstack installation. The - configuration is defined in ``etcd`` section in Qinling config file. 
diff --git a/releasenotes/notes/workload-type-support-d613cdb7bb90b2a2.yaml b/releasenotes/notes/workload-type-support-d613cdb7bb90b2a2.yaml deleted file mode 100644 index 3ebb8815..00000000 --- a/releasenotes/notes/workload-type-support-d613cdb7bb90b2a2.yaml +++ /dev/null @@ -1,9 +0,0 @@ ---- -features: - - Support to specify ``trusted`` for runtime creation. In Kubernetes - orchestrator implementation, it's using - ``io.kubernetes.cri-o.TrustedSandbox`` annotation in the pod specification - to choose the underlying container runtime. This feature is useful to - leverage the security container technology such as Kata containers or - gVisor. It also gets rid of the security concerns for running image type - function. diff --git a/releasenotes/source/_static/.placeholder b/releasenotes/source/_static/.placeholder deleted file mode 100644 index e69de29b..00000000 diff --git a/releasenotes/source/_templates/.placeholder b/releasenotes/source/_templates/.placeholder deleted file mode 100644 index e69de29b..00000000 diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py deleted file mode 100644 index 0f18c06c..00000000 --- a/releasenotes/source/conf.py +++ /dev/null @@ -1,258 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Qinling Release Notes documentation build configuration file, created by -# sphinx-quickstart on Tue Nov 3 17:40:50 2015. 
-# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'reno.sphinxext', - 'openstackdocstheme', -] - -# openstackdocstheme options -openstackdocs_repo_name = 'openstack/qinling' -openstackdocs_use_storyboard = True - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -copyright = u'2016, OpenStack Foundation' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'native' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'openstackdocs' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'QinlingReleaseNotesdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'QinlingReleaseNotes.tex', - u'Qinling Release Notes Documentation', - u'Qinling Developers', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'qinlingreleasenotes', u'Qinling Release Notes Documentation', - [u'Qinling Developers'], 1) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'QinlingReleaseNotes', u'Qinling Release Notes Documentation', - u'Qinling Developers', 'QinlingReleaseNotes', - 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - -# -- Options for Internationalization output ------------------------------ -locale_dirs = ['locale/'] diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst deleted file mode 100644 index 9a54d787..00000000 --- a/releasenotes/source/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -============================================ - qinling Release Notes -============================================ - -.. toctree:: - :maxdepth: 1 - - unreleased - victoria - ussuri - train - stein - rocky - queens diff --git a/releasenotes/source/queens.rst b/releasenotes/source/queens.rst deleted file mode 100644 index 44255051..00000000 --- a/releasenotes/source/queens.rst +++ /dev/null @@ -1,6 +0,0 @@ -=========================== -Queens Series Release Notes -=========================== - -.. release-notes:: - :branch: origin/stable/queens diff --git a/releasenotes/source/rocky.rst b/releasenotes/source/rocky.rst deleted file mode 100644 index 40dd517b..00000000 --- a/releasenotes/source/rocky.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Rocky Series Release Notes -=================================== - -.. release-notes:: - :branch: stable/rocky diff --git a/releasenotes/source/stein.rst b/releasenotes/source/stein.rst deleted file mode 100644 index efaceb66..00000000 --- a/releasenotes/source/stein.rst +++ /dev/null @@ -1,6 +0,0 @@ -=================================== - Stein Series Release Notes -=================================== - -.. release-notes:: - :branch: stable/stein diff --git a/releasenotes/source/train.rst b/releasenotes/source/train.rst deleted file mode 100644 index 58390039..00000000 --- a/releasenotes/source/train.rst +++ /dev/null @@ -1,6 +0,0 @@ -========================== -Train Series Release Notes -========================== - -.. 
release-notes:: - :branch: stable/train diff --git a/releasenotes/source/unreleased.rst b/releasenotes/source/unreleased.rst deleted file mode 100644 index cd22aabc..00000000 --- a/releasenotes/source/unreleased.rst +++ /dev/null @@ -1,5 +0,0 @@ -============================== - Current Series Release Notes -============================== - -.. release-notes:: diff --git a/releasenotes/source/ussuri.rst b/releasenotes/source/ussuri.rst deleted file mode 100644 index e21e50e0..00000000 --- a/releasenotes/source/ussuri.rst +++ /dev/null @@ -1,6 +0,0 @@ -=========================== -Ussuri Series Release Notes -=========================== - -.. release-notes:: - :branch: stable/ussuri diff --git a/releasenotes/source/victoria.rst b/releasenotes/source/victoria.rst deleted file mode 100644 index 4efc7b6f..00000000 --- a/releasenotes/source/victoria.rst +++ /dev/null @@ -1,6 +0,0 @@ -============================= -Victoria Series Release Notes -============================= - -.. release-notes:: - :branch: stable/victoria diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 77316665..00000000 --- a/requirements.txt +++ /dev/null @@ -1,34 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. 
- -pbr!=2.1.0,>=2.0.0 # Apache-2.0 -Babel!=2.4.0,>=2.3.4 # BSD -keystoneauth1>=3.4.0 # Apache-2.0 -keystonemiddleware>=4.17.0 # Apache-2.0 -oslo.concurrency>=3.26.0 # Apache-2.0 -oslo.config>=5.2.0 # Apache-2.0 -oslo.db>=4.27.0 # Apache-2.0 -oslo.messaging>=5.29.0 # Apache-2.0 -oslo.middleware>=3.35.0 # Apache-2.0 -oslo.policy>=1.30.0 # Apache-2.0 -oslo.upgradecheck>=0.1.0 # Apache-2.0 -oslo.utils>=3.33.0 # Apache-2.0 -oslo.log>=3.36.0 # Apache-2.0 -oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0 -oslo.service!=1.28.1,>=1.24.0 # Apache-2.0 -pecan!=1.0.2,!=1.0.3,!=1.0.4,!=1.2,>=1.0.0 # BSD -setuptools!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2,!=36.2.0,>=21.0.0 # PSF/ZPL -SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT -sqlalchemy-migrate>=0.11.0 # Apache-2.0 -stevedore>=1.20.0 # Apache-2.0 -WSME>=0.8.0 # MIT -kubernetes>=6.0.0 # Apache-2.0 -PyYAML>=3.12 # MIT -python-swiftclient>=3.2.0 # Apache-2.0 -croniter>=0.3.4 # MIT License -python-dateutil>=2.5.3 # BSD -tenacity>=4.4.0 # Apache-2.0 -PyMySQL>=0.7.6 # MIT License -etcd3gw>=0.2.3 # Apache-2.0 -cotyledon>=1.3.0 # Apache-2.0 diff --git a/runtimes/nodejs10/Dockerfile b/runtimes/nodejs10/Dockerfile deleted file mode 100644 index c2865dbc..00000000 --- a/runtimes/nodejs10/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM node:10-alpine -MAINTAINER gaetan.trellu@incloudus.com - -RUN mkdir -p /usr/src/app -WORKDIR /usr/src/app - -COPY package.json /usr/src/app/ -COPY package-lock.json /usr/src/app/ -RUN npm install && npm cache clean --force -COPY server.js /usr/src/app/server.js - -EXPOSE 9090 - -CMD [ "npm", "start" ] diff --git a/runtimes/nodejs10/package-lock.json b/runtimes/nodejs10/package-lock.json deleted file mode 100644 index 07eaca0d..00000000 --- a/runtimes/nodejs10/package-lock.json +++ /dev/null @@ -1,797 +0,0 @@ -{ - "name": "qinling-nodejs10-runtime", - "version": "1.0.0", - "lockfileVersion": 1, - "requires": true, - "dependencies": { - 
"accepts": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", - "requires": { - "mime-types": "2.1.18", - "negotiator": "0.6.1" - } - }, - "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", - "requires": { - "co": "4.6.0", - "fast-deep-equal": "1.1.0", - "fast-json-stable-stringify": "2.0.0", - "json-schema-traverse": "0.3.1" - } - }, - "array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" - }, - "asn1": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", - "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" - }, - "aws4": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", - "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" - }, - "basic-auth": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.0.tgz", - "integrity": "sha1-AV2z81PgLlY3d1X5YnQuiYHnu7o=", - "requires": { - "safe-buffer": "5.1.1" - } - }, - "bcrypt-pbkdf": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", - "optional": true, - "requires": { - "tweetnacl": "0.14.5" - 
} - }, - "bluebird": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz", - "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==" - }, - "body-parser": { - "version": "1.18.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", - "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", - "requires": { - "bytes": "3.0.0", - "content-type": "1.0.4", - "debug": "2.6.9", - "depd": "1.1.2", - "http-errors": "1.6.2", - "iconv-lite": "0.4.19", - "on-finished": "2.3.0", - "qs": "6.5.1", - "raw-body": "2.3.2", - "type-is": "1.6.16" - } - }, - "boom": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", - "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", - "requires": { - "hoek": "4.2.1" - } - }, - "bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" - }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" - }, - "combined-stream": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", - "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", - "requires": { - "delayed-stream": "1.0.0" - } - }, - "content-disposition": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" - }, - "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": 
"sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" - }, - "cookie": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "cryptiles": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", - "integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", - "requires": { - "boom": "5.2.0" - }, - "dependencies": { - "boom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", - "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", - "requires": { - "hoek": "4.2.1" - } - } - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "requires": { - "assert-plus": "1.0.0" - } - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - }, - "depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" - }, - "destroy": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" - }, - "ecc-jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", - "optional": true, - "requires": { - "jsbn": "0.1.1" - } - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" - }, - "escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" - }, - "etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" - }, - "express": { - "version": "4.16.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.16.2.tgz", - "integrity": "sha1-41xt/i1kt9ygpc1PIXgb4ymeB2w=", - "requires": { - "accepts": "1.3.5", - "array-flatten": "1.1.1", - "body-parser": "1.18.2", - "content-disposition": "0.5.2", - "content-type": "1.0.4", - "cookie": "0.3.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "1.1.2", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "etag": "1.8.1", - "finalhandler": "1.1.0", - "fresh": "0.5.2", - "merge-descriptors": "1.0.1", - "methods": "1.1.2", - "on-finished": "2.3.0", - "parseurl": "1.3.2", - "path-to-regexp": "0.1.7", - "proxy-addr": "2.0.3", - "qs": "6.5.1", - "range-parser": "1.2.0", - "safe-buffer": "5.1.1", - "send": "0.16.1", - "serve-static": "1.13.1", - "setprototypeof": "1.1.0", - "statuses": "1.3.1", - "type-is": "1.6.16", - "utils-merge": "1.0.1", - "vary": "1.1.2" - }, - "dependencies": { - "setprototypeof": { - "version": "1.1.0", 
- "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" - }, - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "extend": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", - "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" - }, - "fast-deep-equal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=" - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" - }, - "finalhandler": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.0.tgz", - "integrity": "sha1-zgtoVbRYU+eRsvzGgARtiCU91/U=", - "requires": { - "debug": "2.6.9", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "on-finished": "2.3.0", - "parseurl": "1.3.2", - "statuses": "1.3.1", - "unpipe": "1.0.0" - }, - "dependencies": { - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" - }, - "form-data": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", - "integrity": 
"sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", - "requires": { - "asynckit": "0.4.0", - "combined-stream": "1.0.6", - "mime-types": "2.1.18" - } - }, - "forwarded": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "requires": { - "assert-plus": "1.0.0" - } - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" - }, - "har-validator": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", - "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", - "requires": { - "ajv": "5.5.2", - "har-schema": "2.0.0" - } - }, - "hawk": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", - "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", - "requires": { - "boom": "4.3.1", - "cryptiles": "3.1.2", - "hoek": "4.2.1", - "sntp": "2.1.0" - } - }, - "hoek": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz", - "integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==" - }, - "http-errors": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", - "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", - "requires": { - "depd": "1.1.1", - "inherits": "2.0.3", - "setprototypeof": "1.0.3", - "statuses": "1.4.0" - }, - "dependencies": { - "depd": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", - "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" - } - } - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "requires": { - "assert-plus": "1.0.0", - "jsprim": "1.4.1", - "sshpk": "1.13.1" - } - }, - "iconv-lite": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" - }, - "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" - }, - "ipaddr.js": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.6.0.tgz", - "integrity": "sha1-4/o1e3c9phnybpXwSdBVxyeW+Gs=" - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "optional": true - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" - }, - "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": 
"https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - } - }, - "lodash": { - "version": "4.17.5", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz", - "integrity": "sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw==" - }, - "media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" - }, - "merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" - }, - "mime": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" - }, - "mime-db": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", - "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" - }, - "mime-types": { - "version": "2.1.18", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", - "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", - "requires": { - "mime-db": "1.33.0" - } - }, - "morgan": { - "version": "1.9.0", - "resolved": 
"https://registry.npmjs.org/morgan/-/morgan-1.9.0.tgz", - "integrity": "sha1-0B+mxlhZt2/PMbPLU6OCGjEdgFE=", - "requires": { - "basic-auth": "2.0.0", - "debug": "2.6.9", - "depd": "1.1.2", - "on-finished": "2.3.0", - "on-headers": "1.0.1" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "negotiator": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", - "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" - }, - "oauth-sign": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", - "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=" - }, - "on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", - "requires": { - "ee-first": "1.1.1" - } - }, - "on-headers": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz", - "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c=" - }, - "parseurl": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" - }, - "path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" - }, - "process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" - }, - "proxy-addr": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.3.tgz", - "integrity": 
"sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ==", - "requires": { - "forwarded": "0.1.2", - "ipaddr.js": "1.6.0" - } - }, - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" - }, - "qs": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" - }, - "range-parser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" - }, - "raw-body": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", - "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", - "requires": { - "bytes": "3.0.0", - "http-errors": "1.6.2", - "iconv-lite": "0.4.19", - "unpipe": "1.0.0" - } - }, - "request": { - "version": "2.83.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", - "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==", - "requires": { - "aws-sign2": "0.7.0", - "aws4": "1.6.0", - "caseless": "0.12.0", - "combined-stream": "1.0.6", - "extend": "3.0.1", - "forever-agent": "0.6.1", - "form-data": "2.3.2", - "har-validator": "5.0.3", - "hawk": "6.0.2", - "http-signature": "1.2.0", - "is-typedarray": "1.0.0", - "isstream": "0.1.2", - "json-stringify-safe": "5.0.1", - "mime-types": "2.1.18", - "oauth-sign": "0.8.2", - "performance-now": "2.1.0", - "qs": "6.5.1", - "safe-buffer": "5.1.1", - "stringstream": "0.0.5", - "tough-cookie": "2.3.4", - "tunnel-agent": "0.6.0", - "uuid": "3.2.1" - } - }, - "request-promise": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/request-promise/-/request-promise-4.2.2.tgz", - "integrity": 
"sha1-0epG1lSm7k+O5qT+oQGMIpEZBLQ=", - "requires": { - "bluebird": "3.5.1", - "request-promise-core": "1.1.1", - "stealthy-require": "1.1.1", - "tough-cookie": "2.3.4" - } - }, - "request-promise-core": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.1.tgz", - "integrity": "sha1-Pu4AssWqgyOc+wTFcA2jb4HNCLY=", - "requires": { - "lodash": "4.17.5" - } - }, - "safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" - }, - "send": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/send/-/send-0.16.1.tgz", - "integrity": "sha512-ElCLJdJIKPk6ux/Hocwhk7NFHpI3pVm/IZOYWqUmoxcgeyM+MpxHHKhb8QmlJDX1pU6WrgaHBkVNm73Sv7uc2A==", - "requires": { - "debug": "2.6.9", - "depd": "1.1.2", - "destroy": "1.0.4", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "etag": "1.8.1", - "fresh": "0.5.2", - "http-errors": "1.6.2", - "mime": "1.4.1", - "ms": "2.0.0", - "on-finished": "2.3.0", - "range-parser": "1.2.0", - "statuses": "1.3.1" - }, - "dependencies": { - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "serve-static": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.1.tgz", - "integrity": "sha512-hSMUZrsPa/I09VYFJwa627JJkNs0NrfL1Uzuup+GqHfToR2KcsXFymXSV90hoyw3M+msjFuQly+YzIH/q0MGlQ==", - "requires": { - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "parseurl": "1.3.2", - "send": "0.16.1" - } - }, - "setprototypeof": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", - "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" - }, - "sntp": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", - "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", - "requires": { - "hoek": "4.2.1" - } - }, - "sshpk": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", - "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", - "requires": { - "asn1": "0.2.3", - "assert-plus": "1.0.0", - "bcrypt-pbkdf": "1.0.1", - "dashdash": "1.14.1", - "ecc-jsbn": "0.1.1", - "getpass": "0.1.7", - "jsbn": "0.1.1", - "tweetnacl": "0.14.5" - } - }, - "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" - }, - "stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" - }, - "stringstream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", - "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=" - }, - "tough-cookie": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", - "integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==", - "requires": { - "punycode": "1.4.1" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "requires": { - "safe-buffer": "5.1.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "optional": true - }, - "type-is": { - "version": "1.6.16", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - 
"integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", - "requires": { - "media-typer": "0.3.0", - "mime-types": "2.1.18" - } - }, - "unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" - }, - "utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" - }, - "uuid": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz", - "integrity": "sha512-jZnMwlb9Iku/O3smGWvZhauCf6cvvpKi4BKRiliS3cxnI+Gz9j5MEpTz2UFuXiKPJocb7gnsLHwiS05ige5BEA==" - }, - "vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "requires": { - "assert-plus": "1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "1.3.0" - } - } - } -} diff --git a/runtimes/nodejs10/package.json b/runtimes/nodejs10/package.json deleted file mode 100644 index 0a10e25a..00000000 --- a/runtimes/nodejs10/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "qinling-nodejs10-runtime", - "version": "1.0.0", - "description": "NodeJS 10 LTS runtime container for Qinling", - "main": "server.js", - "engine": { - "node": ">=10.16.0" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/openstack/qinling.git" - }, - "author": "Lingxian Kong", - "homepage": "https://docs.openstack.org/qinling/latest/", - "license": "Apache-2.0", - "dependencies": { - "body-parser": "^1.18.2", - "express": "^4.16.2", - "morgan": "^1.9.0", - "process": "^0.11.10", - "request": "^2.83.0", - "request-promise": "^4.2.2" - } -} diff --git a/runtimes/nodejs10/server.js 
b/runtimes/nodejs10/server.js deleted file mode 100644 index 44e3464e..00000000 --- a/runtimes/nodejs10/server.js +++ /dev/null @@ -1,105 +0,0 @@ -const process = require('process') -const express = require('express') -const bodyParser = require('body-parser') -const morgan = require('morgan') -const rp = require("request-promise") - -const app = express() -app.use(morgan('combined')) -app.use(bodyParser.urlencoded({ extended: false })) -app.use(bodyParser.json()) - -function execute(req, res) { - var executionID = req.body.execution_id - var functionID = req.body.function_id - var entry = req.body.entry - var downloadURL = req.body.download_url - var token = req.body.token - var input = req.body.input - var moduleName - var handlerName - - if (entry) { - [moduleName, handlerName] = entry.split('.') - } else { - moduleName = 'main' - handlerName = 'main' - } - - var modulePath = '/var/qinling/packages/' + functionID + '/' + moduleName - var userModule - var userHandler - var context = {} - - var requestData = { - 'download_url': downloadURL, - 'function_id': functionID, - 'unzip': true - } - if (token) { - requestData['token'] = token - } - - // download function package and unzip - async function download(reqBody) { - let options = { - uri: 'http://localhost:9091/download', - method: 'POST', - headers: { - "content-type": "application/json", - }, - body: reqBody, - json: true, - } - await rp(options) - console.log("download done!") - } - - // get user's defined function object - function getHandler() { - userModule = require(modulePath) - userHandler = userModule[handlerName] - if (userHandler === undefined) { - throw "error" - } - console.log("getHandler done!") - } - - // run user's function - function run() { - return Promise.resolve(userHandler(context, input)) - } - - function succeed(result) { - let elapsed = process.hrtime(startTime) - let body = { - "output": result, - "duration": elapsed[0], - "success": true, - "logs": "" - } - - 
res.status(200).send(body) - } - - function fail(error) { - let elapsed = process.hrtime(startTime) - let body = { - "output": "Function invocation error", - "duration": elapsed[0], - "success": false, - "logs": "" - } - - res.status(500).send(body) - } - - var startTime = process.hrtime() - download(requestData).then(getHandler).then(run).then(succeed).catch(fail) -} - -app.post('/execute', execute) -app.get('/ping', function ping(req, res) { - res.status(200).send("pong") -}) -app.listen(9090) diff --git a/runtimes/nodejs8/Dockerfile b/runtimes/nodejs8/Dockerfile deleted file mode 100644 index 248d0a9e..00000000 --- a/runtimes/nodejs8/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM node:8-alpine -MAINTAINER anlin.kong@gmail.com - -RUN mkdir -p /usr/src/app -WORKDIR /usr/src/app - -COPY package.json /usr/src/app/ -COPY package-lock.json /usr/src/app/ -RUN npm install && npm cache clean --force -COPY server.js /usr/src/app/server.js - -EXPOSE 9090 - -CMD [ "npm", "start" ] diff --git a/runtimes/nodejs8/package-lock.json b/runtimes/nodejs8/package-lock.json deleted file mode 100644 index 809a2763..00000000 --- a/runtimes/nodejs8/package-lock.json +++ /dev/null @@ -1,797 +0,0 @@ -{ - "name": "qinling-nodejs-runtime", - "version": "1.0.0", - "lockfileVersion": 1, - "requires": true, - "dependencies": { - "accepts": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", - "requires": { - "mime-types": "2.1.18", - "negotiator": "0.6.1" - } - }, - "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", - "requires": { - "co": "4.6.0", - "fast-deep-equal": "1.1.0", - "fast-json-stable-stringify": "2.0.0", - "json-schema-traverse": "0.3.1" - } - }, - "array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": 
"sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" - }, - "asn1": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz", - "integrity": "sha1-2sh4dxPJlmhJ/IGAd36+nB3fO4Y=" - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" - }, - "aws4": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", - "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" - }, - "basic-auth": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/basic-auth/-/basic-auth-2.0.0.tgz", - "integrity": "sha1-AV2z81PgLlY3d1X5YnQuiYHnu7o=", - "requires": { - "safe-buffer": "5.1.1" - } - }, - "bcrypt-pbkdf": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz", - "integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=", - "optional": true, - "requires": { - "tweetnacl": "0.14.5" - } - }, - "bluebird": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz", - "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==" - }, - "body-parser": { - "version": "1.18.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz", - "integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=", - "requires": { - "bytes": "3.0.0", - "content-type": "1.0.4", - "debug": "2.6.9", - "depd": "1.1.2", - "http-errors": "1.6.2", - "iconv-lite": "0.4.19", - "on-finished": "2.3.0", - "qs": "6.5.1", - "raw-body": "2.3.2", - "type-is": "1.6.16" - } - }, - "boom": 
{ - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz", - "integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=", - "requires": { - "hoek": "4.2.1" - } - }, - "bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" - }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" - }, - "combined-stream": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz", - "integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=", - "requires": { - "delayed-stream": "1.0.0" - } - }, - "content-disposition": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" - }, - "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" - }, - "cookie": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" - }, - "cryptiles": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz", - 
"integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=", - "requires": { - "boom": "5.2.0" - }, - "dependencies": { - "boom": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz", - "integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==", - "requires": { - "hoek": "4.2.1" - } - } - } - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "requires": { - "assert-plus": "1.0.0" - } - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" - }, - "depd": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" - }, - "destroy": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" - }, - "ecc-jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz", - "integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=", - "optional": true, - "requires": { - "jsbn": "0.1.1" - } - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" - }, - "escape-html": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" - }, - "etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" - }, - "express": { - "version": "4.16.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.16.2.tgz", - "integrity": "sha1-41xt/i1kt9ygpc1PIXgb4ymeB2w=", - "requires": { - "accepts": "1.3.5", - "array-flatten": "1.1.1", - "body-parser": "1.18.2", - "content-disposition": "0.5.2", - "content-type": "1.0.4", - "cookie": "0.3.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "1.1.2", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "etag": "1.8.1", - "finalhandler": "1.1.0", - "fresh": "0.5.2", - "merge-descriptors": "1.0.1", - "methods": "1.1.2", - "on-finished": "2.3.0", - "parseurl": "1.3.2", - "path-to-regexp": "0.1.7", - "proxy-addr": "2.0.3", - "qs": "6.5.1", - "range-parser": "1.2.0", - "safe-buffer": "5.1.1", - "send": "0.16.1", - "serve-static": "1.13.1", - "setprototypeof": "1.1.0", - "statuses": "1.3.1", - "type-is": "1.6.16", - "utils-merge": "1.0.1", - "vary": "1.1.2" - }, - "dependencies": { - "setprototypeof": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" - }, - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "extend": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.1.tgz", - "integrity": "sha1-p1Xqe8Gt/MWjHOfnYtuq3F5jZEQ=" - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" - }, - 
"fast-deep-equal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=" - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" - }, - "finalhandler": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.0.tgz", - "integrity": "sha1-zgtoVbRYU+eRsvzGgARtiCU91/U=", - "requires": { - "debug": "2.6.9", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "on-finished": "2.3.0", - "parseurl": "1.3.2", - "statuses": "1.3.1", - "unpipe": "1.0.0" - }, - "dependencies": { - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" - }, - "form-data": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz", - "integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=", - "requires": { - "asynckit": "0.4.0", - "combined-stream": "1.0.6", - "mime-types": "2.1.18" - } - }, - "forwarded": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "requires": { - "assert-plus": "1.0.0" - } - }, - "har-schema": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" - }, - "har-validator": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz", - "integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=", - "requires": { - "ajv": "5.5.2", - "har-schema": "2.0.0" - } - }, - "hawk": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz", - "integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==", - "requires": { - "boom": "4.3.1", - "cryptiles": "3.1.2", - "hoek": "4.2.1", - "sntp": "2.1.0" - } - }, - "hoek": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/hoek/-/hoek-4.2.1.tgz", - "integrity": "sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==" - }, - "http-errors": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz", - "integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=", - "requires": { - "depd": "1.1.1", - "inherits": "2.0.3", - "setprototypeof": "1.0.3", - "statuses": "1.4.0" - }, - "dependencies": { - "depd": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz", - "integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k=" - } - } - }, - "http-signature": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "requires": { - "assert-plus": "1.0.0", - "jsprim": "1.4.1", - "sshpk": "1.13.1" - } - }, - "iconv-lite": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz", - "integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ==" - }, - "inherits": { - "version": "2.0.3", - "resolved": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" - }, - "ipaddr.js": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.6.0.tgz", - "integrity": "sha1-4/o1e3c9phnybpXwSdBVxyeW+Gs=" - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "optional": true - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" - }, - "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - } - }, - "lodash": { - "version": "4.17.5", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz", - "integrity": "sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw==" - }, - "media-typer": { - "version": "0.3.0", - "resolved": 
"https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" - }, - "merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" - }, - "mime": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" - }, - "mime-db": { - "version": "1.33.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz", - "integrity": "sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==" - }, - "mime-types": { - "version": "2.1.18", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz", - "integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==", - "requires": { - "mime-db": "1.33.0" - } - }, - "morgan": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/morgan/-/morgan-1.9.0.tgz", - "integrity": "sha1-0B+mxlhZt2/PMbPLU6OCGjEdgFE=", - "requires": { - "basic-auth": "2.0.0", - "debug": "2.6.9", - "depd": "1.1.2", - "on-finished": "2.3.0", - "on-headers": "1.0.1" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" - }, - "negotiator": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", - "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" - }, - "oauth-sign": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", - "integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=" 
- }, - "on-finished": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", - "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", - "requires": { - "ee-first": "1.1.1" - } - }, - "on-headers": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz", - "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c=" - }, - "parseurl": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" - }, - "path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" - }, - "process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" - }, - "proxy-addr": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.3.tgz", - "integrity": "sha512-jQTChiCJteusULxjBp8+jftSQE5Obdl3k4cnmLA6WXtK6XFuWRnvVL7aCiBqaLPM8c4ph0S4tKna8XvmIwEnXQ==", - "requires": { - "forwarded": "0.1.2", - "ipaddr.js": "1.6.0" - } - }, - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" - }, - "qs": { - "version": "6.5.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz", - "integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A==" - }, - "range-parser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" - }, - "raw-body": { - 
"version": "2.3.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz", - "integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=", - "requires": { - "bytes": "3.0.0", - "http-errors": "1.6.2", - "iconv-lite": "0.4.19", - "unpipe": "1.0.0" - } - }, - "request": { - "version": "2.83.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz", - "integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==", - "requires": { - "aws-sign2": "0.7.0", - "aws4": "1.6.0", - "caseless": "0.12.0", - "combined-stream": "1.0.6", - "extend": "3.0.1", - "forever-agent": "0.6.1", - "form-data": "2.3.2", - "har-validator": "5.0.3", - "hawk": "6.0.2", - "http-signature": "1.2.0", - "is-typedarray": "1.0.0", - "isstream": "0.1.2", - "json-stringify-safe": "5.0.1", - "mime-types": "2.1.18", - "oauth-sign": "0.8.2", - "performance-now": "2.1.0", - "qs": "6.5.1", - "safe-buffer": "5.1.1", - "stringstream": "0.0.5", - "tough-cookie": "2.3.4", - "tunnel-agent": "0.6.0", - "uuid": "3.2.1" - } - }, - "request-promise": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/request-promise/-/request-promise-4.2.2.tgz", - "integrity": "sha1-0epG1lSm7k+O5qT+oQGMIpEZBLQ=", - "requires": { - "bluebird": "3.5.1", - "request-promise-core": "1.1.1", - "stealthy-require": "1.1.1", - "tough-cookie": "2.3.4" - } - }, - "request-promise-core": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.1.tgz", - "integrity": "sha1-Pu4AssWqgyOc+wTFcA2jb4HNCLY=", - "requires": { - "lodash": "4.17.5" - } - }, - "safe-buffer": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz", - "integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==" - }, - "send": { - "version": "0.16.1", - "resolved": "https://registry.npmjs.org/send/-/send-0.16.1.tgz", - "integrity": 
"sha512-ElCLJdJIKPk6ux/Hocwhk7NFHpI3pVm/IZOYWqUmoxcgeyM+MpxHHKhb8QmlJDX1pU6WrgaHBkVNm73Sv7uc2A==", - "requires": { - "debug": "2.6.9", - "depd": "1.1.2", - "destroy": "1.0.4", - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "etag": "1.8.1", - "fresh": "0.5.2", - "http-errors": "1.6.2", - "mime": "1.4.1", - "ms": "2.0.0", - "on-finished": "2.3.0", - "range-parser": "1.2.0", - "statuses": "1.3.1" - }, - "dependencies": { - "statuses": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.3.1.tgz", - "integrity": "sha1-+vUbnrdKrvOzrPStX2Gr8ky3uT4=" - } - } - }, - "serve-static": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.1.tgz", - "integrity": "sha512-hSMUZrsPa/I09VYFJwa627JJkNs0NrfL1Uzuup+GqHfToR2KcsXFymXSV90hoyw3M+msjFuQly+YzIH/q0MGlQ==", - "requires": { - "encodeurl": "1.0.2", - "escape-html": "1.0.3", - "parseurl": "1.3.2", - "send": "0.16.1" - } - }, - "setprototypeof": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz", - "integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ=" - }, - "sntp": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz", - "integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==", - "requires": { - "hoek": "4.2.1" - } - }, - "sshpk": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz", - "integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=", - "requires": { - "asn1": "0.2.3", - "assert-plus": "1.0.0", - "bcrypt-pbkdf": "1.0.1", - "dashdash": "1.14.1", - "ecc-jsbn": "0.1.1", - "getpass": "0.1.7", - "jsbn": "0.1.1", - "tweetnacl": "0.14.5" - } - }, - "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" - }, 
- "stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" - }, - "stringstream": { - "version": "0.0.5", - "resolved": "https://registry.npmjs.org/stringstream/-/stringstream-0.0.5.tgz", - "integrity": "sha1-TkhM1N5aC7vuGORjB3EKioFiGHg=" - }, - "tough-cookie": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz", - "integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==", - "requires": { - "punycode": "1.4.1" - } - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "requires": { - "safe-buffer": "5.1.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "optional": true - }, - "type-is": { - "version": "1.6.16", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - "integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", - "requires": { - "media-typer": "0.3.0", - "mime-types": "2.1.18" - } - }, - "unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" - }, - "utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" - }, - "uuid": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.2.1.tgz", - "integrity": "sha512-jZnMwlb9Iku/O3smGWvZhauCf6cvvpKi4BKRiliS3cxnI+Gz9j5MEpTz2UFuXiKPJocb7gnsLHwiS05ige5BEA==" - }, - "vary": { - "version": "1.1.2", - "resolved": 
"https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "requires": { - "assert-plus": "1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "1.3.0" - } - } - } -} \ No newline at end of file diff --git a/runtimes/nodejs8/package.json b/runtimes/nodejs8/package.json deleted file mode 100644 index 5eaf7a15..00000000 --- a/runtimes/nodejs8/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "qinling-nodejs-runtime", - "version": "1.0.0", - "description": "NodeJS runtime container for Qinling", - "main": "server.js", - "engine": { - "node": ">=8.10.0" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/openstack/qinling.git" - }, - "author": "Lingxian Kong", - "homepage": "https://docs.openstack.org/qinling/latest/", - "license": "Apache-2.0", - "dependencies": { - "body-parser": "^1.18.2", - "express": "^4.16.2", - "morgan": "^1.9.0", - "process": "^0.11.10", - "request": "^2.83.0", - "request-promise": "^4.2.2" - } -} \ No newline at end of file diff --git a/runtimes/nodejs8/server.js b/runtimes/nodejs8/server.js deleted file mode 100644 index 44e3464e..00000000 --- a/runtimes/nodejs8/server.js +++ /dev/null @@ -1,105 +0,0 @@ -const process = require('process') -const express = require('express') -const bodyParser = require('body-parser') -const morgan = require('morgan') -const rp = require("request-promise") - -const app = express() -app.use(morgan('combined')) -app.use(bodyParser.urlencoded({ extended: false })) -app.use(bodyParser.json()) - -function execute(req, res) { - var executionID = req.body.execution_id - var functionID = req.body.function_id - var entry = req.body.entry - var downloadURL = req.body.download_url - var token = req.body.token - var input = req.body.input - var moduleName - var handlerName - - if (entry) { 
- [moduleName, handlerName] = entry.split('.') - } else { - moduleName = 'main' - handlerName = 'main' - } - - var modulePath = '/var/qinling/packages/' + functionID + '/' + moduleName - var userModule - var userHandler - var context = {} - - var requestData = { - 'download_url': downloadURL, - 'function_id': functionID, - 'unzip': true - } - if (token) { - requestData['token'] = token - } - - // download function package and unzip - async function download(reqBody) { - let options = { - uri: 'http://localhost:9091/download', - method: 'POST', - headers: { - "content-type": "application/json", - }, - body: reqBody, - json: true, - } - await rp(options) - console.log("download done!") - } - - // get user's defined function object - function getHandler() { - userModule = require(modulePath) - userHandler = userModule[handlerName] - if (userHandler === undefined) { - throw "error" - } - console.log("getHandler done!") - } - - // run user's function - function run() { - return Promise.resolve(userHandler(context, input)) - } - - function succeed(result) { - let elapsed = process.hrtime(startTime) - let body = { - "output": result, - "duration": elapsed[0], - "success": true, - "logs": "" - } - - res.status(200).send(body) - } - - function fail(error) { - let elapsed = process.hrtime(startTime) - let body = { - "output": "Function invocation error", - "duration": elapsed[0], - "success": false, - "logs": "" - } - - res.status(500).send(body) - } - - var startTime = process.hrtime() - download(requestData).then(getHandler).then(run).then(succeed).catch(fail) -} - -app.post('/execute', execute) -app.get('/ping', function ping(req, res) { - res.status(200).send("pong") -}) -app.listen(9090) diff --git a/runtimes/python2/Dockerfile b/runtimes/python2/Dockerfile deleted file mode 100644 index 7620169a..00000000 --- a/runtimes/python2/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM phusion/baseimage:0.9.22 -MAINTAINER anlin.kong@gmail.com - -# We need to use non-root user to 
execute functions and root user to set resource limits. -USER root -RUN useradd -Ms /bin/bash qinling - -RUN apt-get update && \ - apt-get -y install python-dev python-setuptools libffi-dev libxslt1-dev libxml2-dev libyaml-dev libssl-dev python-pip && \ - pip install -U pip setuptools uwsgi - -COPY . /app -WORKDIR /app -RUN pip install --no-cache-dir -r requirements.txt && \ - chmod 0750 custom-entrypoint.sh && \ - mkdir /qinling_cgroup && \ - mkdir -p /var/lock/qinling && \ - mkdir -p /var/qinling/packages && \ - chown -R qinling:qinling /app /var/qinling/packages - -CMD ["/bin/bash", "custom-entrypoint.sh"] diff --git a/runtimes/python2/cglimit.py b/runtimes/python2/cglimit.py deleted file mode 100644 index 0b63ac85..00000000 --- a/runtimes/python2/cglimit.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import sys - -from flask import Flask -from flask import make_response -from flask import request -from oslo_concurrency import lockutils - -app = Flask(__name__) -ch = logging.StreamHandler(sys.stdout) -ch.setLevel(logging.DEBUG) -ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) -del app.logger.handlers[:] -app.logger.addHandler(ch) - -# Deployer can specify cfs_period_us default value here. 
-PERIOD = 100000 - - -def log(message, level="info"): - global app - log_func = getattr(app.logger, level) - log_func(message) - - -@lockutils.synchronized('set_limitation', external=True, - lock_path='/var/lock/qinling') -def _cgroup_limit(cpu, memory_size, pid): - """Modify 'cgroup' files to set resource limits. - - Each pod(worker) will have cgroup folders on the host cgroup filesystem, - like '/sys/fs/cgroup//kubepods//pod/', - to limit memory and cpu resources that can be used in pod. - - For more information about cgroup, please see [1], about sharing PID - namespaces in kubernetes, please see also [2]. - - Return None if successful otherwise a Flask.Response object. - - [1]https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/resource_management_guide/sec-creating_cgroups - [2]https://github.com/kubernetes/kubernetes/pull/51634 - """ - hostname = os.getenv('HOSTNAME') - pod_id = os.getenv('POD_UID') - qos_class = None - if os.getenv('QOS_CLASS') == 'BestEffort': - qos_class = 'besteffort' - elif os.getenv('QOS_CLASS') == 'Burstable': - qos_class = 'burstable' - elif os.getenv('QOS_CLASS') == 'Guaranteed': - qos_class = '' - - if not pod_id or qos_class is None: - return make_response("Failed to get current worker information", 500) - - memory_base_path = os.path.join('/qinling_cgroup', 'memory', 'kubepods', - qos_class, 'pod%s' % pod_id) - cpu_base_path = os.path.join('/qinling_cgroup', 'cpu', 'kubepods', - qos_class, 'pod%s' % pod_id) - memory_path = os.path.join(memory_base_path, hostname) - cpu_path = os.path.join(cpu_base_path, hostname) - - if os.path.isdir(memory_base_path): - if not os.path.isdir(memory_path): - os.makedirs(memory_path) - - if os.path.isdir(cpu_base_path): - if not os.path.isdir(cpu_path): - os.makedirs(cpu_path) - - try: - # set cpu and memory resource limits - with open('%s/memory.limit_in_bytes' % memory_path, 'w') as f: - f.write('%d' % int(memory_size)) - with open('%s/cpu.cfs_period_us' % cpu_path, 'w') 
as f: - f.write('%d' % PERIOD) - with open('%s/cpu.cfs_quota_us' % cpu_path, 'w') as f: - f.write('%d' % ((int(cpu)*PERIOD/1000))) - - # add pid to 'tasks' files - with open('%s/tasks' % memory_path, 'w') as f: - f.write('%d' % pid) - with open('%s/tasks' % cpu_path, 'w') as f: - f.write('%d' % pid) - except Exception as e: - return make_response("Failed to modify cgroup files: %s" - % str(e), 500) - - -@app.route('/cglimit', methods=['POST']) -def cglimit(): - """Set resource limitations for execution. - - Only root user has jurisdiction to modify all cgroup files. - - :param cpu: cpu resource that execution can use in total. - :param memory_size: RAM resource that execution can use in total. - - Currently swap ought to be disabled in kubernetes. - """ - params = request.get_json() - cpu = params['cpu'] - memory_size = params['memory_size'] - pid = params['pid'] - log("Set resource limits request received, params: %s" % params) - - resp = _cgroup_limit(cpu, memory_size, pid) - - return resp if resp else 'pidlimited' diff --git a/runtimes/python2/custom-entrypoint.sh b/runtimes/python2/custom-entrypoint.sh deleted file mode 100644 index f0404fb7..00000000 --- a/runtimes/python2/custom-entrypoint.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -# This is expected to run as root. 
- -uwsgi --http :9090 --uid qinling --wsgi-file server.py --callable app --master --processes 5 --threads 1 & - -uwsgi --http 127.0.0.1:9092 --uid root --wsgi-file cglimit.py --callable app --master --processes 1 --threads 1 diff --git a/runtimes/python2/requirements.txt b/runtimes/python2/requirements.txt deleted file mode 100644 index d28c2050..00000000 --- a/runtimes/python2/requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -Flask>=0.10,!=0.11,<1.0 # BSD -python-openstackclient>=3.3.0,!=3.10.0 # Apache-2.0 -python-neutronclient>=6.3.0 # Apache-2.0 -python-swiftclient>=3.2.0 # Apache-2.0 -python-ceilometerclient>=2.5.0 # Apache-2.0 -python-zaqarclient>=1.0.0 # Apache-2.0 -python-octaviaclient>=1.0.0 # Apache-2.0 -python-mistralclient>=3.1.0 # Apache-2.0 -keystoneauth1>=2.21.0 # Apache-2.0 -openstacksdk>=0.9.19 -oslo.concurrency>=3.25.0 # Apache-2.0 -psutil>=5.4.7 # BSD diff --git a/runtimes/python2/server.py b/runtimes/python2/server.py deleted file mode 100644 index f129497a..00000000 --- a/runtimes/python2/server.py +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import importlib -import json -from multiprocessing import Manager -from multiprocessing import Process -import os -import resource -import sys -import time -import traceback - -from flask import Flask -from flask import request -from flask import Response -from keystoneauth1.identity import generic -from keystoneauth1 import session -import psutil -import requests - -app = Flask(__name__) - -DOWNLOAD_ERROR = "Failed to download function package from %s, error: %s" -INVOKE_ERROR = "Function execution failed because of too much resource " \ - "consumption" -TIMEOUT_ERROR = "Function execution timeout." - - -def _print_trace(): - exc_type, exc_value, exc_traceback = sys.exc_info() - lines = traceback.format_exception(exc_type, exc_value, exc_traceback) - print(''.join(line for line in lines)) - - -def _set_ulimit(): - """Limit resources usage for the current process and/or its children. - - Refer to https://docs.python.org/2.7/library/resource.html - """ - customized_limits = { - resource.RLIMIT_NOFILE: 1024, - resource.RLIMIT_NPROC: 128, - # TODO(lxkong): 50M by default, need to be configurable in future. - resource.RLIMIT_FSIZE: 524288000 - } - for t, soft in customized_limits.items(): - _, hard = resource.getrlimit(t) - resource.setrlimit(t, (soft, hard)) - - -def _get_responce(output, duration, logs, success, code): - return Response( - response=json.dumps( - { - 'output': output, - 'duration': duration, - 'logs': logs, - 'success': success - } - ), - status=code, - mimetype='application/json' - ) - - -def _killtree(pid, including_parent=True): - parent = psutil.Process(pid) - for child in parent.children(recursive=True): - print("kill child %s" % child) - child.kill() - - if including_parent: - print("kill parent %s" % parent) - parent.kill() - - -def _invoke_function(execution_id, zip_file_dir, module_name, method, arg, - input, return_dict, rlimit): - """Thie function is supposed to be running in a child process. 
- - HOSTNAME will be used to create cgroup directory related to worker. - - Current execution pid will be added to cgroup tasks file, and then all - its child processes will be automatically added to this 'cgroup'. - - Once executions exceed the cgroup limit, they will be killed by OOMKill - and this subprocess will exit with number(-9). - """ - # Set resource limit for current sub-process - _set_ulimit() - - # Set cpu and memory limits to cgroup by calling cglimit service - pid = os.getpid() - root_resp = requests.post( - 'http://localhost:9092/cglimit', - json={ - 'cpu': rlimit['cpu'], - 'memory_size': rlimit['memory_size'], - 'pid': pid - } - ) - - sys.stdout = open("%s.out" % execution_id, "w", 0) - - if not root_resp.ok: - print('WARN: Resource limiting failed, run in unlimit mode.') - - print('Start execution: %s' % execution_id) - - sys.path.insert(0, zip_file_dir) - try: - module = importlib.import_module(module_name) - func = getattr(module, method) - return_dict['result'] = func(arg, **input) if arg else func(**input) - return_dict['success'] = True - except Exception as e: - _print_trace() - - if isinstance(e, OSError) and 'Resource' in str(e): - sys.exit(1) - - return_dict['result'] = str(e) - return_dict['success'] = False - finally: - print('Finished execution: %s' % execution_id) - - -@app.route('/execute', methods=['POST']) -def execute(): - """Invoke function. - - Several things need to handle in this function: - - Save the function log - - Capture the function internal exception - - Deal with process execution error (The process may be killed for some - reason, e.g. unlimited memory allocation) - - Deal with os error for process (e.g. 
Resource temporarily unavailable) - """ - params = request.get_json() or {} - input = params.get('input') or {} - execution_id = params['execution_id'] - download_url = params.get('download_url') - function_id = params.get('function_id') - entry = params.get('entry') - request_id = params.get('request_id') - trust_id = params.get('trust_id') - auth_url = params.get('auth_url') - username = params.get('username') - password = params.get('password') - timeout = params.get('timeout') - zip_file_dir = '/var/qinling/packages/%s' % function_id - rlimit = { - 'cpu': params['cpu'], - 'memory_size': params['memory_size'] - } - - function_module, function_method = 'main', 'main' - if entry: - function_module, function_method = tuple(entry.rsplit('.', 1)) - - print( - 'Request received, request_id: %s, execution_id: %s, input: %s, ' - 'auth_url: %s' % - (request_id, execution_id, input, auth_url) - ) - - #################################################################### - # - # Download function package by calling sidecar service. We don't check the - # zip file existence here to avoid using partial file during downloading. 
- # - #################################################################### - resp = requests.post( - 'http://localhost:9091/download', - json={ - 'download_url': download_url, - 'function_id': function_id, - 'token': params.get('token') - } - ) - if not resp.ok: - return _get_responce(resp.content, 0, '', False, 500) - - #################################################################### - # - # Provide an openstack session to user's function - # - #################################################################### - os_session = None - if auth_url: - auth = generic.Password( - username=username, - password=password, - auth_url=auth_url, - trust_id=trust_id, - user_domain_name='Default' - ) - os_session = session.Session(auth=auth, verify=False) - input.update({'context': {'os_session': os_session}}) - - #################################################################### - # - # Create a new process to run user's function - # - #################################################################### - manager = Manager() - return_dict = manager.dict() - return_dict['success'] = False - start = time.time() - - # Run the function in a separate process to avoid messing up the log. If - # the timeout is reached, kill all the subprocesses. - p = Process( - target=_invoke_function, - args=(execution_id, zip_file_dir, function_module, function_method, - input.pop('__function_input', None), input, return_dict, rlimit) - ) - - timed_out = False - p.start() - p.join(timeout) - if p.is_alive(): - _killtree(p.pid) - timed_out = True - - #################################################################### - # - # Get execution result(log, duration, etc.) - # - #################################################################### - duration = round(time.time() - start, 3) - - # Process was killed unexpectedly or finished with error. 
- if p.exitcode != 0: - output = TIMEOUT_ERROR if timed_out else INVOKE_ERROR - success = False - else: - output = return_dict.get('result') - success = return_dict['success'] - - # Execution log - with open('%s.out' % execution_id) as f: - logs = f.read() - os.remove('%s.out' % execution_id) - - return _get_responce(output, duration, logs, success, 200) - - -@app.route('/ping') -def ping(): - return 'pong' diff --git a/runtimes/python3/Dockerfile b/runtimes/python3/Dockerfile deleted file mode 100644 index 35670e79..00000000 --- a/runtimes/python3/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM phusion/baseimage:0.9.22 -MAINTAINER anlin.kong@gmail.com - -# We need to use non-root user to execute functions and root user to set resource limits. -USER root -RUN useradd -Ms /bin/bash qinling - -RUN apt-get update && \ - apt-get -y install python3-dev python3-setuptools libffi-dev libxslt1-dev libxml2-dev libyaml-dev libssl-dev python3-pip && \ - pip3 install -U pip setuptools uwsgi - -COPY . /app -WORKDIR /app -RUN pip install --no-cache-dir -r requirements.txt && \ - chmod 0750 custom-entrypoint.sh && \ - mkdir /qinling_cgroup && \ - mkdir -p /var/lock/qinling && \ - mkdir -p /var/qinling/packages && \ - chown -R qinling:qinling /app /var/qinling/packages - -CMD ["/bin/bash", "custom-entrypoint.sh"] diff --git a/runtimes/python3/cglimit.py b/runtimes/python3/cglimit.py deleted file mode 100644 index 0b63ac85..00000000 --- a/runtimes/python3/cglimit.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import sys - -from flask import Flask -from flask import make_response -from flask import request -from oslo_concurrency import lockutils - -app = Flask(__name__) -ch = logging.StreamHandler(sys.stdout) -ch.setLevel(logging.DEBUG) -ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) -del app.logger.handlers[:] -app.logger.addHandler(ch) - -# Deployer can specify cfs_period_us default value here. -PERIOD = 100000 - - -def log(message, level="info"): - global app - log_func = getattr(app.logger, level) - log_func(message) - - -@lockutils.synchronized('set_limitation', external=True, - lock_path='/var/lock/qinling') -def _cgroup_limit(cpu, memory_size, pid): - """Modify 'cgroup' files to set resource limits. - - Each pod(worker) will have cgroup folders on the host cgroup filesystem, - like '/sys/fs/cgroup//kubepods//pod/', - to limit memory and cpu resources that can be used in pod. - - For more information about cgroup, please see [1], about sharing PID - namespaces in kubernetes, please see also [2]. - - Return None if successful otherwise a Flask.Response object. 
- - [1]https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/resource_management_guide/sec-creating_cgroups - [2]https://github.com/kubernetes/kubernetes/pull/51634 - """ - hostname = os.getenv('HOSTNAME') - pod_id = os.getenv('POD_UID') - qos_class = None - if os.getenv('QOS_CLASS') == 'BestEffort': - qos_class = 'besteffort' - elif os.getenv('QOS_CLASS') == 'Burstable': - qos_class = 'burstable' - elif os.getenv('QOS_CLASS') == 'Guaranteed': - qos_class = '' - - if not pod_id or qos_class is None: - return make_response("Failed to get current worker information", 500) - - memory_base_path = os.path.join('/qinling_cgroup', 'memory', 'kubepods', - qos_class, 'pod%s' % pod_id) - cpu_base_path = os.path.join('/qinling_cgroup', 'cpu', 'kubepods', - qos_class, 'pod%s' % pod_id) - memory_path = os.path.join(memory_base_path, hostname) - cpu_path = os.path.join(cpu_base_path, hostname) - - if os.path.isdir(memory_base_path): - if not os.path.isdir(memory_path): - os.makedirs(memory_path) - - if os.path.isdir(cpu_base_path): - if not os.path.isdir(cpu_path): - os.makedirs(cpu_path) - - try: - # set cpu and memory resource limits - with open('%s/memory.limit_in_bytes' % memory_path, 'w') as f: - f.write('%d' % int(memory_size)) - with open('%s/cpu.cfs_period_us' % cpu_path, 'w') as f: - f.write('%d' % PERIOD) - with open('%s/cpu.cfs_quota_us' % cpu_path, 'w') as f: - f.write('%d' % ((int(cpu)*PERIOD/1000))) - - # add pid to 'tasks' files - with open('%s/tasks' % memory_path, 'w') as f: - f.write('%d' % pid) - with open('%s/tasks' % cpu_path, 'w') as f: - f.write('%d' % pid) - except Exception as e: - return make_response("Failed to modify cgroup files: %s" - % str(e), 500) - - -@app.route('/cglimit', methods=['POST']) -def cglimit(): - """Set resource limitations for execution. - - Only root user has jurisdiction to modify all cgroup files. - - :param cpu: cpu resource that execution can use in total. 
- :param memory_size: RAM resource that execution can use in total. - - Currently swap ought to be disabled in kubernetes. - """ - params = request.get_json() - cpu = params['cpu'] - memory_size = params['memory_size'] - pid = params['pid'] - log("Set resource limits request received, params: %s" % params) - - resp = _cgroup_limit(cpu, memory_size, pid) - - return resp if resp else 'pidlimited' diff --git a/runtimes/python3/custom-entrypoint.sh b/runtimes/python3/custom-entrypoint.sh deleted file mode 100644 index f0404fb7..00000000 --- a/runtimes/python3/custom-entrypoint.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env bash -# This is expected to run as root. - -uwsgi --http :9090 --uid qinling --wsgi-file server.py --callable app --master --processes 5 --threads 1 & - -uwsgi --http 127.0.0.1:9092 --uid root --wsgi-file cglimit.py --callable app --master --processes 1 --threads 1 diff --git a/runtimes/python3/requirements.txt b/runtimes/python3/requirements.txt deleted file mode 100644 index d28c2050..00000000 --- a/runtimes/python3/requirements.txt +++ /dev/null @@ -1,12 +0,0 @@ -Flask>=0.10,!=0.11,<1.0 # BSD -python-openstackclient>=3.3.0,!=3.10.0 # Apache-2.0 -python-neutronclient>=6.3.0 # Apache-2.0 -python-swiftclient>=3.2.0 # Apache-2.0 -python-ceilometerclient>=2.5.0 # Apache-2.0 -python-zaqarclient>=1.0.0 # Apache-2.0 -python-octaviaclient>=1.0.0 # Apache-2.0 -python-mistralclient>=3.1.0 # Apache-2.0 -keystoneauth1>=2.21.0 # Apache-2.0 -openstacksdk>=0.9.19 -oslo.concurrency>=3.25.0 # Apache-2.0 -psutil>=5.4.7 # BSD diff --git a/runtimes/python3/server.py b/runtimes/python3/server.py deleted file mode 100644 index 28ba3455..00000000 --- a/runtimes/python3/server.py +++ /dev/null @@ -1,262 +0,0 @@ -# Copyright 2017 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import importlib -import json -from multiprocessing import Manager -from multiprocessing import Process -import os -import resource -import sys -import time -import traceback - -from flask import Flask -from flask import request -from flask import Response -from keystoneauth1.identity import generic -from keystoneauth1 import session -import psutil -import requests - -app = Flask(__name__) - -DOWNLOAD_ERROR = "Failed to download function package from %s, error: %s" -INVOKE_ERROR = "Function execution failed because of too much resource " \ - "consumption" -TIMEOUT_ERROR = "Function execution timeout." - - -def _print_trace(): - exc_type, exc_value, exc_traceback = sys.exc_info() - lines = traceback.format_exception(exc_type, exc_value, exc_traceback) - print((''.join(line for line in lines))) - - -def _set_ulimit(): - """Limit resources usage for the current process and/or its children. - - Refer to https://docs.python.org/2.7/library/resource.html - """ - customized_limits = { - resource.RLIMIT_NOFILE: 1024, - resource.RLIMIT_NPROC: 128, - # TODO(lxkong): 50M by default, need to be configurable in future. 
- resource.RLIMIT_FSIZE: 524288000 - } - for t, soft in list(customized_limits.items()): - _, hard = resource.getrlimit(t) - resource.setrlimit(t, (soft, hard)) - - -def _get_responce(output, duration, logs, success, code): - return Response( - response=json.dumps( - { - 'output': output, - 'duration': duration, - 'logs': logs, - 'success': success - } - ), - status=code, - mimetype='application/json' - ) - - -def _killtree(pid, including_parent=True): - parent = psutil.Process(pid) - for child in parent.children(recursive=True): - print("kill child %s" % child) - child.kill() - - if including_parent: - print("kill parent %s" % parent) - parent.kill() - - -def _invoke_function(execution_id, zip_file_dir, module_name, method, arg, - input, return_dict, rlimit): - """Thie function is supposed to be running in a child process. - - HOSTNAME will be used to create cgroup directory related to worker. - - Current execution pid will be added to cgroup tasks file, and then all - its child processes will be automatically added to this 'cgroup'. - - Once executions exceed the cgroup limit, they will be killed by OOMKill - and this subprocess will exit with number(-9). 
- """ - # Set resource limit for current sub-process - _set_ulimit() - - # Set cpu and memory limits to cgroup by calling cglimit service - pid = os.getpid() - root_resp = requests.post( - 'http://localhost:9092/cglimit', - json={ - 'cpu': rlimit['cpu'], - 'memory_size': rlimit['memory_size'], - 'pid': pid - } - ) - - sys.stdout = open("%s.out" % execution_id, "w") - - if not root_resp.ok: - print('WARN: Resource limiting failed, run in unlimit mode.') - - print(('Start execution: %s' % execution_id)) - - sys.path.insert(0, zip_file_dir) - try: - module = importlib.import_module(module_name) - func = getattr(module, method) - return_dict['result'] = func(arg, **input) if arg else func(**input) - return_dict['success'] = True - except Exception as e: - _print_trace() - - if isinstance(e, OSError) and 'Resource' in str(e): - sys.exit(1) - - return_dict['result'] = str(e) - return_dict['success'] = False - finally: - print(('Finished execution: %s' % execution_id)) - - -@app.route('/execute', methods=['POST']) -def execute(): - """Invoke function. - - Several things need to handle in this function: - - Save the function log - - Capture the function internal exception - - Deal with process execution error (The process may be killed for some - reason, e.g. unlimited memory allocation) - - Deal with os error for process (e.g. 
Resource temporarily unavailable) - """ - params = request.get_json() or {} - input = params.get('input') or {} - execution_id = params['execution_id'] - download_url = params.get('download_url') - function_id = params.get('function_id') - entry = params.get('entry') - request_id = params.get('request_id') - trust_id = params.get('trust_id') - auth_url = params.get('auth_url') - username = params.get('username') - password = params.get('password') - timeout = params.get('timeout') - zip_file_dir = '/var/qinling/packages/%s' % function_id - rlimit = { - 'cpu': params['cpu'], - 'memory_size': params['memory_size'] - } - - function_module, function_method = 'main', 'main' - if entry: - function_module, function_method = tuple(entry.rsplit('.', 1)) - - print(( - 'Request received, request_id: %s, execution_id: %s, input: %s, ' - 'auth_url: %s' % - (request_id, execution_id, input, auth_url) - )) - - #################################################################### - # - # Download function package by calling sidecar service. We don't check the - # zip file existence here to avoid using partial file during downloading. 
- # - #################################################################### - resp = requests.post( - 'http://localhost:9091/download', - json={ - 'download_url': download_url, - 'function_id': function_id, - 'token': params.get('token') - } - ) - if not resp.ok: - return _get_responce(resp.content, 0, '', False, 500) - - #################################################################### - # - # Provide an openstack session to user's function - # - #################################################################### - os_session = None - if auth_url: - auth = generic.Password( - username=username, - password=password, - auth_url=auth_url, - trust_id=trust_id, - user_domain_name='Default' - ) - os_session = session.Session(auth=auth, verify=False) - input.update({'context': {'os_session': os_session}}) - - #################################################################### - # - # Create a new process to run user's function - # - #################################################################### - manager = Manager() - return_dict = manager.dict() - return_dict['success'] = False - start = time.time() - - # Run the function in a separate process to avoid messing up the log - p = Process( - target=_invoke_function, - args=(execution_id, zip_file_dir, function_module, function_method, - input.pop('__function_input', None), input, return_dict, rlimit) - ) - - timed_out = False - p.start() - p.join(timeout) - if p.is_alive(): - _killtree(p.pid) - timed_out = True - - #################################################################### - # - # Get execution output(log, duration, etc.) - # - #################################################################### - duration = round(time.time() - start, 3) - - # Process was killed unexpectedly or finished with error. 
- if p.exitcode != 0: - output = TIMEOUT_ERROR if timed_out else INVOKE_ERROR - success = False - else: - output = return_dict.get('result') - success = return_dict['success'] - - # Execution log - with open('%s.out' % execution_id) as f: - logs = f.read() - os.remove('%s.out' % execution_id) - - return _get_responce(output, duration, logs, success, 200) - - -@app.route('/ping') -def ping(): - return 'pong' diff --git a/runtimes/sidecar/Dockerfile b/runtimes/sidecar/Dockerfile deleted file mode 100644 index 8d2b11c1..00000000 --- a/runtimes/sidecar/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -FROM alpine:3.7 -MAINTAINER lingxian.kong@gmail.com - -# We need to use qinling user to keep consistent with server. -USER root -RUN adduser -HDs /bin/sh qinling - -RUN apk update && \ - apk add --no-cache linux-headers build-base python2 python2-dev py2-pip uwsgi-python uwsgi-http && \ - pip install --upgrade pip && \ - rm -r /root/.cache - -COPY . /sidecar -WORKDIR /sidecar -RUN pip install --no-cache-dir -r requirements.txt && \ - mkdir -p /var/lock/qinling && \ - mkdir -p /var/qinling/packages && \ - chown -R qinling:qinling /sidecar /var/lock/qinling /var/qinling/packages - -EXPOSE 9091 - -# uwsgi --plugin http,python --http :9091 --uid qinling --wsgi-file sidecar.py --callable app --master --processes 1 --threads 1 -CMD ["/usr/sbin/uwsgi", "--plugin", "http,python", "--http", "127.0.0.1:9091", "--uid", "qinling", "--wsgi-file", "sidecar.py", "--callable", "app", "--master", "--processes", "1", "--threads", "1"] \ No newline at end of file diff --git a/runtimes/sidecar/requirements.txt b/runtimes/sidecar/requirements.txt deleted file mode 100644 index b318ae73..00000000 --- a/runtimes/sidecar/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -Flask>=0.10,!=0.11,<1.0 # BSD -oslo.concurrency>=3.25.0 # Apache-2.0 -requests>=2.18.4 \ No newline at end of file diff --git a/runtimes/sidecar/sidecar.py b/runtimes/sidecar/sidecar.py deleted file mode 100644 index 3b66961b..00000000 
--- a/runtimes/sidecar/sidecar.py +++ /dev/null @@ -1,104 +0,0 @@ -# Copyright 2018 Catalyst IT Limited -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import sys -import zipfile - -from flask import Flask -from flask import make_response -from flask import request -from oslo_concurrency import lockutils -import requests - -app = Flask(__name__) -ch = logging.StreamHandler(sys.stdout) -ch.setLevel(logging.DEBUG) -ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')) -del app.logger.handlers[:] -app.logger.addHandler(ch) - -DOWNLOAD_ERROR = "Failed to download function package from %s, error: %s" - - -def log(message, level="info"): - global app - log_func = getattr(app.logger, level) - log_func(message) - - -@lockutils.synchronized('download_function', external=True, - lock_path='/var/lock/qinling') -def _download_package(url, zip_file, token=None, unzip=None): - """Download package and unzip as needed. - - Return None if successful otherwise a Flask.Response object. 
- """ - if os.path.isfile(zip_file): - return None - - log("Start downloading function") - - headers = {} - if token: - headers = {'X-Auth-Token': token} - - try: - r = requests.get(url, headers=headers, stream=True, timeout=30, - verify=False) - if r.status_code != 200: - return make_response(DOWNLOAD_ERROR % (url, r.content), 500) - - with open(zip_file, 'wb') as fd: - for chunk in r.iter_content(chunk_size=65535): - fd.write(chunk) - - log("Downloaded function package to %s" % zip_file) - - if unzip: - dest = zip_file.split('.')[0] - with open(zip_file, 'rb') as f: - zf = zipfile.ZipFile(f) - zf.extractall(dest) - log("Unzipped") - except Exception as e: - return make_response(DOWNLOAD_ERROR % (url, str(e)), 500) - - -@app.route('/download', methods=['POST']) -def download(): - """Download function package to a shared folder. - - The parameters 'download_url' and 'function_id' need to be specified - explicitly. It's guaranteed on the server side. - - :param download_url: The URL for function package download. It's a Qinling - function resource URL with 'download' enabled. - :param function_id: Function ID. - :param token: Optional. The token used for download. - :param unzip: Optional. If unzip is needed after download. 
- """ - params = request.get_json() - zip_file = '/var/qinling/packages/%s.zip' % params['function_id'] - log("Function package download request received, params: %s" % params) - - resp = _download_package( - params['download_url'], - zip_file, - token=params.get('token'), - unzip=params.get('unzip', True) - ) - - return resp if resp else 'downloaded' diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index dceef893..00000000 --- a/setup.cfg +++ /dev/null @@ -1,52 +0,0 @@ -[metadata] -name = qinling -summary = Function as a Service -description-file = - README.rst -author = OpenStack Qinling Team -author-email = openstack-discuss@lists.openstack.org -home-page = https://docs.openstack.org/qinling/latest/ -python-requires = >=3.6 -classifier = - Environment :: OpenStack - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: Implementation :: CPython - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - -[files] -packages = - qinling - qinling_tempest_plugin - -[entry_points] -console_scripts = - qinling-api = qinling.cmd.api:main - qinling-engine = qinling.cmd.engine:main - qinling-db-manage = qinling.db.sqlalchemy.migration.cli:main - qinling-status = qinling.cmd.status:main - -wsgi_scripts = - qinling-wsgi-api = qinling.api.app:init_wsgi - -qinling.storage.provider: - local = qinling.storage.file_system:FileSystemStorage - -qinling.orchestrator = - kubernetes = qinling.orchestrator.kubernetes.manager:KubernetesManager - -oslo.config.opts = - qinling.config = qinling.config:list_opts - -oslo.config.opts.defaults = - qinling.config = qinling.config:set_cors_middleware_defaults - -tempest.test_plugins = - qinling_test 
= qinling_tempest_plugin.plugin:QinlingTempestPlugin diff --git a/setup.py b/setup.py deleted file mode 100644 index cd35c3c3..00000000 --- a/setup.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import setuptools - -setuptools.setup( - setup_requires=['pbr>=2.0.0'], - pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 41fde733..00000000 --- a/test-requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ -# The order of packages is significant, because pip processes them in the order -# of appearance. Changing the order has an impact on the overall integration -# process, which may cause wedges in the gate later. - -hacking<0.13,>=0.12.0 # Apache-2.0 -# remove this pyflakes from here once you bump the -# hacking to 3.2.0 or above. hacking 3.2.0 takes -# care of pyflakes version compatibilty. 
-pyflakes>=2.1.1 - -coverage!=4.4,>=4.0 # Apache-2.0 -oslotest>=3.2.0 # Apache-2.0 -testrepository>=0.0.18 # Apache-2.0/BSD -testscenarios>=0.4 # Apache-2.0/BSD -testtools>=2.2.0 # MIT -tempest>=17.1.0 # Apache-2.0 -futurist>=1.2.0 # Apache-2.0 -kubernetes>=6.0.0 # Apache-2.0 -python-dateutil>=2.5.3 # BSD diff --git a/tools/clear_resources.sh b/tools/clear_resources.sh deleted file mode 100755 index 44614a6b..00000000 --- a/tools/clear_resources.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env -set -e - -# export QINLING_URL=http://127.0.0.1:7070 - -function delete_resources(){ - # Delete webhooks - ids=$(openstack webhook list -f yaml -c Id | awk '{print $3}') - for id in $ids - do - openstack webhook delete $id - done - - # Delete jobs - ids=$(openstack job list -f yaml -c Id | awk '{print $3}') - for id in $ids - do - openstack job delete $id - done - - # Delete executions - ids=$(openstack function execution list -f yaml -c Id | awk '{print $3}') - for id in $ids - do - openstack function execution delete --execution $id - done - - # Delete functions - ids=$(openstack function list -f yaml -c Id | awk '{print $3}') - for id in $ids - do - openstack function delete $id - done - - if [ "$1" = "admin" ] - then - # Delete runtimes by admin user - ids=$(openstack runtime list -f yaml -c Id | awk '{print $3}') - for id in $ids - do - openstack runtime delete $id - done - fi -} - -unset `env | grep OS_ | awk -F "=" '{print $1}' | xargs` -source ~/devstack/openrc demo demo -delete_resources - -if [ "$1" = "admin" ] -then - unset `env | grep OS_ | awk -F "=" '{print $1}' | xargs` - source ~/devstack/openrc admin admin - delete_resources admin -fi diff --git a/tools/config/config-generator.qinling.conf b/tools/config/config-generator.qinling.conf deleted file mode 100644 index 9f9ac7da..00000000 --- a/tools/config/config-generator.qinling.conf +++ /dev/null @@ -1,9 +0,0 @@ -[DEFAULT] -namespace = qinling.config -namespace = keystonemiddleware.auth_token -namespace = 
oslo.messaging -namespace = oslo.middleware.cors -namespace = oslo.middleware.http_proxy_to_wsgi -namespace = oslo.log -namespace = oslo.policy -namespace = oslo.db diff --git a/tools/cover.sh b/tools/cover.sh deleted file mode 100755 index 2a1ce1da..00000000 --- a/tools/cover.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash -# Copyright 2018 Catalyst IT Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -ALLOWED_EXTRA_MISSING=5 -TESTR_ARGS="$*" - -show_diff () { - head -1 $1 - diff -U 0 $1 $2 | sed 1,2d -} - -# Stash uncommitted changes, checkout master and save coverage report -uncommitted=$(git status --porcelain | grep -v "^??") -[[ -n $uncommitted ]] && git stash > /dev/null -git checkout HEAD^ - -baseline_report=$(mktemp -t qinling_coverageXXXXXXX) -find . -type f -name "*.py[c|o]" -delete && stestr run "$TESTR_ARGS" && coverage combine && coverage html -d cover -coverage report --ignore-errors > $baseline_report -baseline_missing=$(awk 'END { print $3 }' $baseline_report) - -# Checkout back and unstash uncommitted changes (if any) -git checkout - -[[ -n $uncommitted ]] && git stash pop > /dev/null - -# Generate and save coverage report -current_report=$(mktemp -t qinling_coverageXXXXXXX) -find . 
-type f -name "*.py[c|o]" -delete && stestr run "$TESTR_ARGS" && coverage combine && coverage html -d cover -coverage report --ignore-errors > $current_report -current_missing=$(awk 'END { print $3 }' $current_report) - -# Show coverage details -allowed_missing=$((baseline_missing+ALLOWED_EXTRA_MISSING)) - -echo "Allowed to introduce missing lines : ${ALLOWED_EXTRA_MISSING}" -echo "Missing lines in master : ${baseline_missing}" -echo "Missing lines in proposed change : ${current_missing}" - -if [ $allowed_missing -gt $current_missing ]; then - if [ $baseline_missing -lt $current_missing ]; then - show_diff $baseline_report $current_report - echo "I believe you can cover all your code with 100% coverage!" - else - echo "Thank you! You are awesome! Keep writing unit tests! :)" - fi - exit_code=0 -else - show_diff $baseline_report $current_report - echo "Please write more unit tests, we should keep our test coverage :( " - exit_code=1 -fi - -rm -f $baseline_report $current_report -exit $exit_code diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile deleted file mode 100644 index ea8e72ec..00000000 --- a/tools/docker/Dockerfile +++ /dev/null @@ -1,58 +0,0 @@ -FROM krallin/ubuntu-tini:16.04 - -LABEL name="Qinling" \ - description="Function Engine for OpenStack" \ - maintainers="GaĆ«tan Trellu " - -RUN apt-get -qq update && \ - apt-get install -y \ - libffi-dev \ - libpq-dev \ - libssl-dev \ - libxml2-dev \ - libxslt1-dev \ - libyaml-dev \ - libmysqlclient-dev \ - python \ - python-dev \ - crudini \ - curl \ - git \ - gcc \ - libuv1 \ - libuv1-dev && \ - curl -f -o /tmp/get-pip.py https://bootstrap.pypa.io/3.2/get-pip.py && \ - python /tmp/get-pip.py && rm /tmp/get-pip.py && \ - pip install --upgrade pip - -RUN pip install pymysql psycopg2 py_mini_racer - -ENV QINLING_DIR="/opt/stack/qinling" \ - TMP_CONSTRAINTS="/tmp/upper-constraints.txt" \ - CONFIG_FILE="/etc/qinling/qinling.conf" \ - INI_SET="crudini --set /etc/qinling/qinling.conf" \ - 
MESSAGE_BROKER_URL="rabbit://guest:guest@rabbitmq:5672/" \ - DATABASE_URL="sqlite:///qinling.db" \ - UPGRADE_DB="false" \ - DEBIAN_FRONTEND="noninteractive" \ - QINLING_SERVER="all" \ - LOG_DEBUG="false" \ - AUTH_ENABLE="false" - -# We install dependencies separatly for a caching purpose -COPY requirements.txt "${QINLING_DIR}/" -RUN curl -o "${TMP_CONSTRAINTS}" \ - http://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt && \ - sed -i "/^qinling.*/d" "${TMP_CONSTRAINTS}" && \ - pip install -r "${QINLING_DIR}/requirements.txt" - -COPY . ${QINLING_DIR} - -RUN pip install -e "${QINLING_DIR}" && \ - mkdir /etc/qinling && \ - rm -rf /var/lib/apt/lists/* && \ - find ${QINLING_DIR} -name "*.sh" -exec chmod +x {} \; - -WORKDIR "${QINLING_DIR}" -EXPOSE 7070 -CMD "${QINLING_DIR}/tools/docker/start.sh" diff --git a/tools/docker/README.rst b/tools/docker/README.rst deleted file mode 100644 index bec4a615..00000000 --- a/tools/docker/README.rst +++ /dev/null @@ -1,95 +0,0 @@ -Using Qinling with Docker -========================= - -Docker containers provide an easy way to quickly deploy independent or -networked Qinling instances in seconds. This guide describes the process -to launch an all-in-one Qinling container. - - -Docker Installation -------------------- - -The following links contain instructions to install latest Docker software: - -* `Docker Engine `_ -* `Docker Compose `_ - - -Build the Qinling Image Manually --------------------------------- - -Execute the following command from the repository top-level directory:: - - docker build -t qinling -f tools/docker/Dockerfile . 
- -The Qinling Docker image has one build parameter: - - -Running Qinling using Docker Compose ------------------------------------- - -To launch Qinling in the single node configuration:: - - docker-compose -f tools/docker/docker-compose/infrastructure.yaml \ - -f tools/docker/docker-compose/qinling-single-node.yaml \ - -p qinling up -d - -To launch Qinling in the multi node configuration:: - - docker-compose -f tools/docker/docker-compose/infrastructure.yaml \ - -f tools/docker/docker-compose/qinling-multi-node.yaml \ - -p qinling up -d - -The `--build` option can be used when it is necessary to rebuild the image, -for example: - - docker-compose -f tools/docker/docker-compose/infrastructure.yaml \ - -f tools/docker/docker-compose/qinling-single-node.yaml \ - -p qinling up -d --build - -Running the Qinling client from the Docker Compose container ------------------------------------------------------------- - -To run the qinling client against the server in the container using the client -present in the container: - - docker run -it qinling_qinling1 qinling runtime list - -Configuring Qinling -------------------- - -The Docker image contains the minimal set of Qinling configuration parameters -by default: - -+--------------------+------------------+--------------------------------------+ -|Name |Default value | Description | -+====================+==================+======================================+ -|`MESSAGE_BROKER_URL`|rabbit://guest:gu\|The message broker URL | -| |est@rabbitmq:5672 | | -+--------------------+------------------+----------------------+---------------+ -|`DATABASE_URL` |sqlite:///qinling\|The database URL | -| |.db | | -+--------------------+------------------+----------------------+---------------+ -|`UPGRADE_DB` |false |If the `UPGRADE_DB` equals `true`, | -| | |a database upgrade will be launched | -| | |before Qinling main process | -+--------------------+------------------+----------------------+---------------+ -|`QINLING_SERVER` 
|all |Specifies which qinling server to | -| | |start by the launch script. | -+--------------------+------------------+----------------------+---------------+ -|`LOG_DEBUG` |false |If set to true, the logging level will| -| | |be set to DEBUG instead of the default| -| | |INFO level. | -+--------------------+------------------+----------------------+---------------+ - -The `/etc/qinling/qinling.conf` configuration file can be mounted to the Qinling -Docker container by uncommenting and editing the `volumes` sections in the Qinling -docker-compose files. - - -Using Qinling Client --------------------- - -The Qinling API will be accessible from the host machine on the default -port 7070. Install `python-qinlingclient` on the host machine to -execute qinling commands. diff --git a/tools/docker/docker-compose/infrastructure.yaml b/tools/docker/docker-compose/infrastructure.yaml deleted file mode 100644 index a4b2678e..00000000 --- a/tools/docker/docker-compose/infrastructure.yaml +++ /dev/null @@ -1,39 +0,0 @@ -version: '3' -services: - - rabbitmq: - image: rabbitmq:3.7.2-management-alpine - restart: always - ports: - - "15672:15672" - networks: - - message-broker - hostname: rabbitmq - environment: - - RABBITMQ_VM_MEMORY_HIGH_WATERMARK=0.81 - - RABBITMQ_DEFAULT_USER=qinling - - RABBITMQ_DEFAULT_PASS=qinling - - RABBITMQ_DEFAULT_VHOST=qinling - - mysql: - image: mysql:8.0.3 - restart: always - ports: - - "3306:3306" - volumes: - - mysql:/var/lib/mysql - networks: - - database - environment: - - MYSQL_ROOT_PASSWORD=qinling - - MYSQL_DATABASE=qinling - - MYSQL_USER=qinling - - MYSQL_PASSWORD=qinling - -volumes: - rabbitmq: - mysql: - -networks: - database: - message-broker: diff --git a/tools/docker/docker-compose/qinling-multi-node.yaml b/tools/docker/docker-compose/qinling-multi-node.yaml deleted file mode 100644 index b1c0a5f6..00000000 --- a/tools/docker/docker-compose/qinling-multi-node.yaml +++ /dev/null @@ -1,34 +0,0 @@ -version: '3' -services: - qinling-api: - 
build: - context: ../../.. - dockerfile: tools/docker/Dockerfile - restart: always - ports: - - "7070:7070" - networks: - database: - message-broker: - env_file: - - qinling.env -# volumes: -# - "/path/to/qinling.conf:/etc/qinling/qinling.conf" - environment: - - QINLING_SERVER=api - - UPGRADE_DB=true - - qinling-engine: - build: - context: ../../.. - dockerfile: tools/docker/Dockerfile - restart: always - networks: - - database - - message-broker - env_file: - - qinling.env -# volumes: -# - "/path/to/qinling.conf:/etc/qinling/qinling.conf" - environment: - - QINLING_SERVER=engine diff --git a/tools/docker/docker-compose/qinling-single-node.yaml b/tools/docker/docker-compose/qinling-single-node.yaml deleted file mode 100644 index 4d929e74..00000000 --- a/tools/docker/docker-compose/qinling-single-node.yaml +++ /dev/null @@ -1,18 +0,0 @@ -version: '3' -services: - qinling: - build: - context: ../../.. - dockerfile: "tools/docker/Dockerfile" - restart: always - ports: - - "7070:7070" - networks: - - database - - message-broker - env_file: - - qinling.env -# volumes: -# - "/path/to/qinling.conf:/etc/qinling/qinling.conf" - environment: - - UPGRADE_DB=true diff --git a/tools/docker/docker-compose/qinling.env b/tools/docker/docker-compose/qinling.env deleted file mode 100644 index 90118656..00000000 --- a/tools/docker/docker-compose/qinling.env +++ /dev/null @@ -1,3 +0,0 @@ -MESSAGE_BROKER_URL=rabbit://qinling:qinling@rabbitmq:5672/qinling -DATABASE_URL=mysql+pymysql://qinling:qinling@mysql:3306/qinling -AUTH_ENABLE=false diff --git a/tools/docker/start.sh b/tools/docker/start.sh deleted file mode 100755 index 3fae42bd..00000000 --- a/tools/docker/start.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -set -e - -# If a Qinling config doesn't exist we should create it and fill in with -# parameters -if [ ! 
-f ${CONFIG_FILE} ]; then - oslo-config-generator \ - --config-file "${QINLING_DIR}/tools/config/config-generator.qinling.conf" \ - --output-file "${CONFIG_FILE}" - - ${INI_SET} DEFAULT debug "${LOG_DEBUG}" - ${INI_SET} DEFAULT auth_type ${AUTH_TYPE} - ${INI_SET} DEFAULT transport_url "${MESSAGE_BROKER_URL}" - ${INI_SET} oslo_policy policy_file "${QINLING_DIR}/etc/qinling/policy.json" - ${INI_SET} pecan auth_enable ${AUTH_ENABLE} - ${INI_SET} database connection "${DATABASE_URL}" -fi - -if [ ${DATABASE_URL} == "sqlite:///qinling.db" -a ! -f ./qinling.db ] -then - qinling-db-manage --config-file "${CONFIG_FILE}" upgrade head -fi - -if "${UPGRADE_DB}"; -then - qinling-db-manage --config-file "${CONFIG_FILE}" upgrade head -fi - -qinling-api --config-file "${CONFIG_FILE}" -qinling-engine --config-file "${CONFIG_FILE}" diff --git a/tools/gate/kubeadm-dind-cluster/README.md b/tools/gate/kubeadm-dind-cluster/README.md deleted file mode 100644 index cc55827c..00000000 --- a/tools/gate/kubeadm-dind-cluster/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# kubeadm-dind-cluster script for Qinling gate - -This script is just for backlog purpose, it is not used for current devstack -gate job. The known problem is Qinling service can not talk to the service -created in k8s. diff --git a/tools/gate/kubeadm-dind-cluster/setup_gate.sh b/tools/gate/kubeadm-dind-cluster/setup_gate.sh deleted file mode 100755 index 81e75a9d..00000000 --- a/tools/gate/kubeadm-dind-cluster/setup_gate.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -set -ex - -function net_default_iface { - sudo ip -4 route list 0/0 | awk '{ print $5; exit }' -} - -function net_default_host_addr { - sudo ip addr | awk "/inet / && /$(net_default_iface)/{print \$2; exit }" -} - -function net_default_host_ip { - echo $(net_default_host_addr) | awk -F '/' '{ print $1; exit }' -} - -function net_hosts_pre_kube { - sudo cp -f /etc/hosts /etc/hosts-pre-kube - sudo sed -i "/$(hostname)/d" /etc/hosts - sudo sed -i "/127.0.0.1/d" /etc/hosts - sudo sed -i "1 i 127.0.0.1 localhost" /etc/hosts - - host_ip=$(net_default_host_ip) - echo "${host_ip} $(hostname)" | sudo tee -a /etc/hosts -} - -function create_k8s_screen { - # Starts a proxy to the Kubernetes API server in a screen session - sudo screen -S kube_proxy -X quit || true - sudo screen -dmS kube_proxy && sudo screen -S kube_proxy -X screen -t kube_proxy - sudo screen -S kube_proxy -p kube_proxy -X stuff 'kubectl proxy --accept-hosts=".*" --address="0.0.0.0"\n' -} - -function tweak_etcd { - ETCD_VER=v3.2.0 - TMP_DIR=/tmp/etcd - sudo rm -rf $TMP_DIR && mkdir -p $TMP_DIR - curl -L https://github.com/coreos/etcd/releases/download/${ETCD_VER}/etcd-${ETCD_VER}-linux-amd64.tar.gz \ - -o /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz - tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C $TMP_DIR --strip-components=1 - - # Stop etcd service installed by apt - sudo systemctl stop etcd - cp $TMP_DIR/etcd /usr/bin/etcd - cp $TMP_DIR/etcdctl /usr/bin/etcdctl - sudo systemctl start etcd -} - - -sudo apt-get update -y -sudo apt-get install -y --no-install-recommends -qq \ - docker.io \ - jq \ - screen \ - etcd - -net_hosts_pre_kube - -curl -sSO https://cdn.rawgit.com/Mirantis/kubeadm-dind-cluster/master/fixed/dind-cluster-v1.8.sh -sudo chmod +x dind-cluster-v1.8.sh -sudo ./dind-cluster-v1.8.sh clean || true -sudo ./dind-cluster-v1.8.sh up -echo 'export PATH="$HOME/.kubeadm-dind-cluster:$PATH"' >> 
$HOME/.bashrc - -# Treak etcd service, the default etcd version installed by apt is v2, we need -# v3 instead. -tweak_etcd - -# Starts a proxy to the Kubernetes API server in a screen session -create_k8s_screen diff --git a/tools/gate/kubeadm/funcs/common.sh b/tools/gate/kubeadm/funcs/common.sh deleted file mode 100644 index 145c9644..00000000 --- a/tools/gate/kubeadm/funcs/common.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -function base_install { - if [ "x$HOST_OS" == "xubuntu" ]; then - sudo apt-add-repository --yes ppa:ansible/ansible - sudo apt-get update -y - sudo apt-get install -y --no-install-recommends \ - iproute2 \ - iptables \ - ipcalc \ - nmap \ - lshw \ - screen \ - ansible - elif [ "x$HOST_OS" == "xcentos" ]; then - sudo yum install -y \ - epel-release - # ipcalc is in the initscripts package - sudo yum install -y \ - iproute \ - iptables \ - initscripts \ - nmap \ - lshw \ - screen \ - ansible - elif [ "x$HOST_OS" == "xfedora" ]; then - sudo dnf install -y \ - iproute \ - iptables \ - ipcalc \ - nmap \ - lshw \ - ansible - fi -} - -function gate_base_setup { - # Install base requirements - base_install -} - -function create_k8s_screen { - # Starts a proxy to the Kubernetes API server in a screen session - sudo screen -S kube_proxy -X quit || true - sudo screen -dmS kube_proxy - sudo screen -S kube_proxy -p bash -X stuff 'kubectl proxy --accept-hosts=".*" --address="0.0.0.0"\n' -} \ No newline at end 
of file diff --git a/tools/gate/kubeadm/funcs/network.sh b/tools/gate/kubeadm/funcs/network.sh deleted file mode 100644 index 10b1b7f6..00000000 --- a/tools/gate/kubeadm/funcs/network.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -function net_default_iface { - sudo ip -4 route list 0/0 | awk '{ print $5; exit }' -} - -function net_default_host_addr { - sudo ip addr | awk "/inet / && /$(net_default_iface)/{print \$2; exit }" -} - -function net_default_host_ip { - echo $(net_default_host_addr) | awk -F '/' '{ print $1; exit }' -} - -function net_resolv_pre_kube { - sudo cp -f /etc/resolv.conf /etc/resolv-pre-kube.conf - cat << EOF | sudo tee -a /etc/resolv.conf -nameserver ${UPSTREAM_DNS} -EOF -} - -function net_resolv_post_kube { - sudo cp -f /etc/resolv-pre-kube.conf /etc/resolv.conf -} - -function net_hosts_pre_kube { - sudo cp -f /etc/hosts /etc/hosts-pre-kube - sudo sed -i "/$(hostname)/d" /etc/hosts - sudo sed -i "/127.0.0.1/d" /etc/hosts - sudo sed -i "1 i 127.0.0.1 localhost" /etc/hosts - - host_ip=$(net_default_host_ip) - echo "${host_ip} $(hostname)" | sudo tee -a /etc/hosts -} - -function net_hosts_post_kube { - sudo cp -f /etc/hosts-pre-kube /etc/hosts -} \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/deploy_k8s.yaml b/tools/gate/kubeadm/playbook/deploy_k8s.yaml deleted file mode 100644 index 5b02aa5f..00000000 --- a/tools/gate/kubeadm/playbook/deploy_k8s.yaml +++ /dev/null @@ -1,21 
+0,0 @@ ---- -- name: Deploy k8s cluster - hosts: localhost - become: true - become_method: sudo - gather_facts: false - - pre_tasks: - - name: Install packages - package: name={{ item }} state=present update_cache=yes - with_items: - - python - - unzip - - - name: Gather facts after python installation - setup: filter=ansible_* - - roles: - - docker - - k8s_cli - - kube_master \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/roles/docker/defaults/main.yml b/tools/gate/kubeadm/playbook/roles/docker/defaults/main.yml deleted file mode 100644 index 7367bf20..00000000 --- a/tools/gate/kubeadm/playbook/roles/docker/defaults/main.yml +++ /dev/null @@ -1,2 +0,0 @@ ---- -docker_version: 17.03.2~ce-0~ubuntu-xenial \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/roles/docker/tasks/centos.yml b/tools/gate/kubeadm/playbook/roles/docker/tasks/centos.yml deleted file mode 100644 index e176d66d..00000000 --- a/tools/gate/kubeadm/playbook/roles/docker/tasks/centos.yml +++ /dev/null @@ -1,26 +0,0 @@ ---- -- name: Set up docker stable repository on CentOS - yum_repository: - name: docker-ce-stable - description: Docker CE Stable - $basearch - baseurl: https://download.docker.com/linux/centos/7/$basearch/stable - enabled: yes - gpgcheck: yes - gpgkey: https://download.docker.com/linux/centos/gpg - -- name: Install docker-ce - # yum: - # name: docker-ce-17.03.2.ce - # state: present - # update_cache: yes - # NOTE(huntxu): This workaround is required for docker-ce-17.03.2 as its - # dependency docker-ce-selinux is marked as obsolete. Hopefully this can - # be removed once a newer version of docker can be used. 
- shell: yum install -y --setopt=obsoletes=0 \ - docker-ce-17.03.2.ce-1.el7.centos \ - docker-ce-selinux-17.03.2.ce-1.el7.centos - -- name: Start docker service - systemd: - name: docker - state: started diff --git a/tools/gate/kubeadm/playbook/roles/docker/tasks/main.yml b/tools/gate/kubeadm/playbook/roles/docker/tasks/main.yml deleted file mode 100644 index 46f5e9a4..00000000 --- a/tools/gate/kubeadm/playbook/roles/docker/tasks/main.yml +++ /dev/null @@ -1,14 +0,0 @@ ---- -- name: Install packages - package: name={{ item }} state=present update_cache=yes - with_items: - - ca-certificates - - curl - -- include_tasks: ubuntu.yml - when: - ansible_pkg_mgr == 'apt' - -- include_tasks: centos.yml - when: - ansible_pkg_mgr == 'yum' diff --git a/tools/gate/kubeadm/playbook/roles/docker/tasks/ubuntu.yml b/tools/gate/kubeadm/playbook/roles/docker/tasks/ubuntu.yml deleted file mode 100644 index c5cae83a..00000000 --- a/tools/gate/kubeadm/playbook/roles/docker/tasks/ubuntu.yml +++ /dev/null @@ -1,22 +0,0 @@ ---- -- name: Install APT transport-https - apt: - name: apt-transport-https - state: present - update_cache: yes - -- name: Add Docker's official GPG key - apt_key: - url: https://download.docker.com/linux/ubuntu/gpg - state: present - -- name: Set up the stable repository - apt_repository: - repo: 'deb https://download.docker.com/linux/ubuntu xenial stable' - state: present - -- name: Install docker-ce - apt: - name: docker-ce={{ docker_version }} - state: present - update_cache: yes diff --git a/tools/gate/kubeadm/playbook/roles/k8s_cli/defaults/main.yaml b/tools/gate/kubeadm/playbook/roles/k8s_cli/defaults/main.yaml deleted file mode 100644 index c295bcfc..00000000 --- a/tools/gate/kubeadm/playbook/roles/k8s_cli/defaults/main.yaml +++ /dev/null @@ -1,3 +0,0 @@ ---- -k8s_version: "1.13.6" -kube_prompt_version: "v1.0.5" \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/centos.yml 
b/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/centos.yml deleted file mode 100644 index 46dc2182..00000000 --- a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/centos.yml +++ /dev/null @@ -1,36 +0,0 @@ ---- -- name: Set up Kubernetes repository - yum_repository: - name: kubernetes - description: Kubernetes - baseurl: https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 - enabled: yes - gpgcheck: yes - # Got "repomd.xml signature could not be verified for kubernetes" on zuul - repo_gpgcheck: no - gpgkey: - - https://packages.cloud.google.com/yum/doc/yum-key.gpg - - https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg - -- name: Install kube CLIs - package: - name: "{{ item }}" - state: present - update_cache: yes - with_items: - - kubelet-{{ k8s_version }} - - kubectl-{{ k8s_version }} - - kubeadm-{{ k8s_version }} - -# On CentOS 7, the kubeadm package adds a configuration file that overrides -# kubelet's cgroup driver to systemd, we want to use the default driver -# cgroupfs to get the same cgroup paths for resource limiting. 
-# -# $ rpm -qf /etc/systemd/system/kubelet.service.d/10-kubeadm.conf -# kubeadm-1.9.3-0.x86_64 -# -- name: Change kubelet cgroup driver - replace: - path: /etc/systemd/system/kubelet.service.d/10-kubeadm.conf - regexp: '(KUBELET_CGROUP_ARGS=)[^"]+' - replace: '\1' diff --git a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/install_kube_prompt_tasks.yml b/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/install_kube_prompt_tasks.yml deleted file mode 100644 index 725915a6..00000000 --- a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/install_kube_prompt_tasks.yml +++ /dev/null @@ -1,25 +0,0 @@ ---- -- name: Create temporary directory - file: - state: directory - path: ~/kube-prompt - register: dir - -- name: Download kube-prompt - get_url: - url: https://github.com/c-bata/kube-prompt/releases/download/{{ kube_prompt_version }}/kube-prompt_{{ kube_prompt_version }}_linux_amd64.zip - dest: "{{ dir.path }}/kube-prompt.zip" - -- name: Unarchive kube-prompt - unarchive: - src: "{{ dir.path }}/kube-prompt.zip" - dest: "{{ dir.path }}" - remote_src: yes - creates: "{{ dir.path }}/kube-prompt" - -- name: Copy kube-prompt to bin - copy: - dest: /usr/bin/kprompt - src: "{{ dir.path }}/kube-prompt" - mode: u+x - remote_src: yes \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/main.yml b/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/main.yml deleted file mode 100644 index bf49c6ba..00000000 --- a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/main.yml +++ /dev/null @@ -1,16 +0,0 @@ ---- -- include_tasks: ubuntu.yml - when: - ansible_pkg_mgr == 'apt' - -- include_tasks: centos.yml - when: - ansible_pkg_mgr == 'yum' - -- name: Start kubelet - systemd: - name: kubelet - state: started - daemon_reload: yes - -- import_tasks: install_kube_prompt_tasks.yml diff --git a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/ubuntu.yml b/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/ubuntu.yml deleted file mode 100644 index 08c33111..00000000 --- 
a/tools/gate/kubeadm/playbook/roles/k8s_cli/tasks/ubuntu.yml +++ /dev/null @@ -1,20 +0,0 @@ ---- -- name: Add Kubernetes GPG key - apt_key: - url: https://packages.cloud.google.com/apt/doc/apt-key.gpg - state: present - -- name: Set up Kubernetes repository - apt_repository: - repo: 'deb http://apt.kubernetes.io/ kubernetes-xenial main' - state: present - -- name: Install kube CLIs - package: - name: "{{ item }}" - state: present - update_cache: yes - with_items: - - kubelet={{ k8s_version }}-00 - - kubectl={{ k8s_version }}-00 - - kubeadm={{ k8s_version }}-00 diff --git a/tools/gate/kubeadm/playbook/roles/kube_master/defaults/main.yaml b/tools/gate/kubeadm/playbook/roles/kube_master/defaults/main.yaml deleted file mode 100644 index 77dc2929..00000000 --- a/tools/gate/kubeadm/playbook/roles/kube_master/defaults/main.yaml +++ /dev/null @@ -1,2 +0,0 @@ ---- -k8s_version: "1.13.6" \ No newline at end of file diff --git a/tools/gate/kubeadm/playbook/roles/kube_master/tasks/main.yml b/tools/gate/kubeadm/playbook/roles/kube_master/tasks/main.yml deleted file mode 100644 index d95c8540..00000000 --- a/tools/gate/kubeadm/playbook/roles/kube_master/tasks/main.yml +++ /dev/null @@ -1,43 +0,0 @@ ---- -- name: disable swap - shell: swapoff -a - -- name: drain the node if needed - shell: kubectl drain {{ ansible_nodename }} --delete-local-data --force --ignore-daemonsets - ignore_errors: True - -- name: delete node if needed - shell: kubectl delete node {{ ansible_nodename }} - ignore_errors: True - -- name: reset kubeadm setup - shell: kubeadm reset -f - ignore_errors: True - -- name: Init master - vars: - extra_opts: "{{ lookup('env', 'EXTRA_KUBEADM_INIT_OPTS') }}" - shell: kubeadm init --pod-network-cidr=192.168.0.0/16 --kubernetes-version=v{{ k8s_version }} {{ extra_opts }} - register: kubeadm_init - -- fail: - msg: "kubeadm init command failed." 
- when: kubeadm_init.stdout.find ("kubeadm join") == -1 - -- name: Store kubeadm join string - set_fact: - join_str: "{{ kubeadm_init.stdout | regex_search('kubeadm join(.*)$') }}" - -- name: Prepare kube config - shell: mkdir -p {{ item }}/.kube && cp -a /etc/kubernetes/admin.conf {{ item }}/.kube/config && chmod 644 {{ item }}/.kube/config - with_items: - - "{{ ansible_env.HOME }}" - -- name: Allow pod on master - shell: kubectl taint nodes --all node-role.kubernetes.io/master- - -- name: Install Calico - shell: kubectl apply -f {{ item }} - loop: - - https://docs.projectcalico.org/v3.3/getting-started/kubernetes/installation/hosted/rbac-kdd.yaml - - https://docs.projectcalico.org/v3.3/getting-started/kubernetes/installation/hosted/kubernetes-datastore/calico-networking/1.7/calico.yaml diff --git a/tools/gate/kubeadm/setup_gate.sh b/tools/gate/kubeadm/setup_gate.sh deleted file mode 100755 index 2f8b37af..00000000 --- a/tools/gate/kubeadm/setup_gate.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -ex -export WORK_DIR=$(pwd) -source ${WORK_DIR}/tools/gate/kubeadm/vars.sh -source ${WORK_DIR}/tools/gate/kubeadm/funcs/common.sh -source ${WORK_DIR}/tools/gate/kubeadm/funcs/network.sh - -# Do the basic node setup for running the gate -gate_base_setup -net_resolv_pre_kube -net_hosts_pre_kube - -# Setup the K8s Cluster -ansible-playbook ${WORK_DIR}/tools/gate/kubeadm/playbook/deploy_k8s.yaml - -# waits until kubectl can access the api server -mkdir -p ${HOME}/.kube -sudo cp /etc/kubernetes/admin.conf ${HOME}/.kube/config -sudo chown $(id -u):$(id -g) ${HOME}/.kube/config -end=$(($(date +%s) + 600)) -READY="False" -while true; do - READY=$(kubectl get nodes --no-headers=true | awk "{ print \$2 }" | head -1) - [ "$READY" == "Ready" ] && break || true - sleep 2 - now=$(date +%s) - [ $now -gt $end ] && echo "Failed to setup kubernetes cluster in time" && exit -1 -done - -if [ "$QINLING_K8S_APISERVER_TLS" != "True" ]; then - # Kubernetes proxy is needed if we don't use secure connections. - create_k8s_screen -fi - -#net_hosts_post_kube -#net_resolv_post_kube diff --git a/tools/gate/kubeadm/vars.sh b/tools/gate/kubeadm/vars.sh deleted file mode 100644 index a3529b65..00000000 --- a/tools/gate/kubeadm/vars.sh +++ /dev/null @@ -1,16 +0,0 @@ -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Set work dir if not already done -: ${WORK_DIR:="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"} - -# Get Host OS -source /etc/os-release -export HOST_OS=${HOST_OS:="${ID}"} - -# Set Upstream DNS -export UPSTREAM_DNS=${UPSTREAM_DNS:-"8.8.8.8"} diff --git a/tools/gate/minikube/README.md b/tools/gate/minikube/README.md deleted file mode 100644 index 42c9e622..00000000 --- a/tools/gate/minikube/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Minikube installation script for Qinling devstack gate - -Those sciprts locate here just for backlog purpose. A known issue is the -application in the pod can not talk to Qinling service, failed with "No route -to host" error. diff --git a/tools/gate/minikube/funcs/common.sh b/tools/gate/minikube/funcs/common.sh deleted file mode 100644 index 8f359c96..00000000 --- a/tools/gate/minikube/funcs/common.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -function base_install { - if [ "x$HOST_OS" == "xubuntu" ]; then - sudo apt-get update -y - sudo apt-get install -y --no-install-recommends \ - iproute2 \ - iptables \ - ipcalc \ - nmap \ - lshw \ - screen - elif [ "x$HOST_OS" == "xcentos" ]; then - sudo yum install -y \ - epel-release - # ipcalc is in the initscripts package - sudo yum install -y \ - iproute \ - iptables \ - initscripts \ - nmap \ - lshw - elif [ "x$HOST_OS" == "xfedora" ]; then - sudo dnf install -y \ - iproute \ - iptables \ - ipcalc \ - nmap \ - lshw - fi -} - -function gate_base_setup { - # Install base requirements - base_install -} - -function create_k8s_screen { - # Starts a proxy to the Kubernetes API server in a screen session - sudo screen -S kube_proxy -X quit || true - sudo screen -dmS kube_proxy && sudo screen -S kube_proxy -X screen -t kube_proxy - sudo screen -S kube_proxy -p kube_proxy -X stuff 'kubectl proxy --accept-hosts=".*" --address="0.0.0.0"\n' -} \ No newline at end of file diff --git a/tools/gate/minikube/funcs/network.sh b/tools/gate/minikube/funcs/network.sh deleted file mode 100644 index 9f22a5d7..00000000 --- a/tools/gate/minikube/funcs/network.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -function net_default_iface { - sudo ip -4 route list 0/0 | awk '{ print $5; exit }' -} - -function net_default_host_addr { - sudo ip addr | awk "/inet / && /$(net_default_iface)/{print \$2; exit }" -} - -function net_default_host_ip { - echo $(net_default_host_addr) | awk -F '/' '{ print $1; exit }' -} - -function net_resolv_pre_kube { - sudo cp -f /etc/resolv.conf /etc/resolv-pre-kube.conf - sudo rm -f /etc/resolv.conf - cat << EOF | sudo tee /etc/resolv.conf -nameserver ${UPSTREAM_DNS} -EOF -} - -function net_resolv_post_kube { - sudo cp -f /etc/resolv-pre-kube.conf /etc/resolv.conf -} - -function net_hosts_pre_kube { - sudo cp -f /etc/hosts /etc/hosts-pre-kube - sudo sed -i "/$(hostname)/d" /etc/hosts - sudo sed -i "/127.0.0.1/d" /etc/hosts - sudo sed -i "1 i 127.0.0.1 localhost" /etc/hosts - - host_ip=$(net_default_host_ip) - echo "${host_ip} $(hostname)" | sudo tee -a /etc/hosts -} - -function net_hosts_post_kube { - sudo cp -f /etc/hosts-pre-kube /etc/hosts -} \ No newline at end of file diff --git a/tools/gate/minikube/setup_gate.sh b/tools/gate/minikube/setup_gate.sh deleted file mode 100755 index 6c46b5dc..00000000 --- a/tools/gate/minikube/setup_gate.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -ex -export WORK_DIR=$(pwd) -source ${WORK_DIR}/tools/gate/vars.sh -source ${WORK_DIR}/tools/gate/funcs/common.sh -source ${WORK_DIR}/tools/gate/funcs/network.sh - -# Do the basic node setup for running the gate -gate_base_setup -net_resolv_pre_kube -net_hosts_pre_kube - -# Setup the K8s Cluster -source ${WORK_DIR}/tools/gate/minikube/setup_minikube.sh -create_k8s_screen - -#net_hosts_post_kube -#net_resolv_post_kube \ No newline at end of file diff --git a/tools/gate/minikube/setup_minikube.sh b/tools/gate/minikube/setup_minikube.sh deleted file mode 100755 index ac6cdb81..00000000 --- a/tools/gate/minikube/setup_minikube.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash -set -xe - -sudo apt-get install -y --no-install-recommends -qq jq - -TMP_DIR=$(mktemp -d) - -curl -sSL https://storage.googleapis.com/kubernetes-release/release/${KUBE_VERSION}/bin/linux/amd64/kubectl -o ${TMP_DIR}/kubectl -chmod +x ${TMP_DIR}/kubectl -sudo mv ${TMP_DIR}/kubectl /usr/local/bin/kubectl - -curl -sSL https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64 -o ${TMP_DIR}/minikube -chmod +x ${TMP_DIR}/minikube -sudo mv ${TMP_DIR}/minikube /usr/local/bin/minikube - -curl -fsSL get.docker.com -o ${TMP_DIR}/get-docker.sh -sudo sh ${TMP_DIR}/get-docker.sh - -rm -rf ${TMP_DIR} - -export MINIKUBE_WANTUPDATENOTIFICATION=false -export MINIKUBE_WANTREPORTERRORPROMPT=false -export MINIKUBE_HOME=$HOME -export CHANGE_MINIKUBE_NONE_USER=true - -rm -rf $HOME/.kube -mkdir $HOME/.kube || true -touch $HOME/.kube/config - -export KUBECONFIG=$HOME/.kube/config -sudo minikube delete || true -sudo -E minikube start --vm-driver=none --kubernetes-version ${KUBE_VERSION} --loglevel 0 - -# waits until kubectl can access the api server that Minikube has created -end=$(($(date +%s) + 600)) -READY="False" -while true; do - kubectl get po &> /dev/null - if [ $? -ne 1 ]; then - READY="True" - echo "Kubernetes cluster is ready!" 
- fi - [ $READY == "True" ] && break || true - sleep 2 - now=$(date +%s) - [ $now -gt $end ] && echo "Failed to setup kubernetes cluster in time" && exit -1 -done diff --git a/tools/gate/minikube/vars.sh b/tools/gate/minikube/vars.sh deleted file mode 100644 index 94e27ce7..00000000 --- a/tools/gate/minikube/vars.sh +++ /dev/null @@ -1,19 +0,0 @@ -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Set work dir if not already done -: ${WORK_DIR:="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"} - -# Get Host OS -source /etc/os-release -export HOST_OS=${HOST_OS:="${ID}"} - -# Set versions of K8s to use -export KUBE_VERSION=${KUBE_VERSION:-"v1.8.0"} - -# Set Upstream DNS -export UPSTREAM_DNS=${UPSTREAM_DNS:-"8.8.8.8"} \ No newline at end of file diff --git a/tools/vagrant/Vagrantfile b/tools/vagrant/Vagrantfile deleted file mode 100644 index f522b967..00000000 --- a/tools/vagrant/Vagrantfile +++ /dev/null @@ -1,78 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -VAGRANTFILE_API_VERSION = "2" - -Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| - config.vm.box = "ubuntu/xenial64" - config.vm.hostname = "qinling" - - config.vm.network "private_network", ip: "192.168.33.18" - config.vm.network "forwarded_port", guest: 7070, host: 7070 - - config.vm.provider "virtualbox" do |vb| - vb.customize ["modifyvm", :id, "--memory", "1024"] - vb.customize ["modifyvm", :id, "--cpus", "1"] - vb.gui = false - end - - config.vm.provision "shell", privileged: false, inline: <<-SHELL - - #!/usr/bin/env bash - sudo apt-get update - sudo apt-get -y upgrade - sudo apt-get -y install python-dev python-setuptools libffi-dev \ - libxslt1-dev libxml2-dev libyaml-dev libssl-dev rabbitmq-server git - - # Install 
mysql and initialize database. - echo mysql-server-5.5 mysql-server/root_password password password | sudo debconf-set-selections - echo mysql-server-5.5 mysql-server/root_password_again password password | sudo debconf-set-selections - echo mysql-server-5.5 mysql-server/start_on_boot boolean true | sudo debconf-set-selections - - sudo apt-get -y install mysql-server python-mysqldb - sudo sed -i 's/127.0.0.1/0.0.0.0/g' /etc/mysql/my.cnf - sudo sed -i '44 i skip-name-resolve' /etc/mysql/my.cnf - sudo service mysql restart - - HOSTNAME="127.0.0.1" - PORT="3306" - USERNAME="root" - PASSWORD="password" - DBNAME="qinling" - create_db_sql="create database IF NOT EXISTS ${DBNAME}" - mysql -h${HOSTNAME} -P${PORT} -u${USERNAME} -p${PASSWORD} -e "${create_db_sql}" - - # Change rabbitmq credential. - sudo rabbitmqctl change_password guest password - - # Install pip. - curl -O https://bootstrap.pypa.io/get-pip.py && sudo python get-pip.py - sudo pip install httpie - - # Install Qinling. - git clone https://github.com/LingxianKong/qinling.git - cd qinling - sudo pip install -e . - cd .. - - # Install python-qinlingclient - git clone https://github.com/LingxianKong/python-qinlingclient.git - cd python-qinlingclient - sudo pip install -e . - cd .. - - # Initialize Qinling configuration. - sudo mkdir -p /vagrant/etc/qinling - sudo mkdir -p /vagrant/log - sudo mkdir -p /opt/qinling/function - sudo chown ubuntu:ubuntu /opt/qinling/function - cp /vagrant/qinling.conf.sample /vagrant/etc/qinling/qinling.conf - - # Qinling db migration. - qinling-db-manage --config-file /vagrant/etc/qinling/qinling.conf upgrade head - - # Start Qinling service. 
- qinling-server --server api,engine --config-file /vagrant/etc/qinling/qinling.conf & - - SHELL -end diff --git a/tools/vagrant/qinling.conf.sample b/tools/vagrant/qinling.conf.sample deleted file mode 100644 index 46456017..00000000 --- a/tools/vagrant/qinling.conf.sample +++ /dev/null @@ -1,26 +0,0 @@ -[DEFAULT] -debug=True -verbose=False -log_file=/vagrant/log/qinling.log -logging_default_format_string=%(asctime)s %(process)d %(levelname)s %(message)s %(resource)s (%(name)s) [-] -logging_context_format_string=%(asctime)s %(process)d %(levelname)s %(message)s %(resource)s (%(name)s) [%(request_id)s %(user_identity)s] -logging_user_identity_format=%(user)s %(tenant)s - -[api] -api_workers=1 - -[engine] -function_service_expiration = 86400 - -[database] -connection=mysql+pymysql://root:password@localhost:3306/qinling - -[oslo_messaging_rabbit] -rabbit_password=password - -[pecan] -auth_enable = false - -[kubernetes] -kube_host = KUBERNETES_API_HOST:KUBERNETES_API_PORT -qinling_service_address = QINLING_API_ADDRESS diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 7b86faca..00000000 --- a/tox.ini +++ /dev/null @@ -1,79 +0,0 @@ -[tox] -minversion = 3.1.1 -envlist = py38,pep8 -skipsdist = True -ignore_basepython_conflict = True - -[testenv] -basepython = python3 -usedevelop = True -install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} {opts} {packages} -setenv = - VIRTUAL_ENV={envdir} - PYTHONWARNINGS=default::DeprecationWarning -deps = -r{toxinidir}/test-requirements.txt -commands = - find . -type f -name "*.pyc" -delete - stestr run {posargs} -whitelist_externals = - rm - find - -[testenv:pep8] -commands = flake8 {posargs} - -[testenv:genconfig] -commands = - oslo-config-generator --config-file tools/config/config-generator.qinling.conf \ - --output-file etc/qinling.conf.sample - -#set PYTHONHASHSEED=0 to prevent oslo_policy.sphinxext from randomly failing. 
-[testenv:venv] -setenv = PYTHONHASHSEED=0 -commands = {posargs} - -[testenv:cover] -setenv = - {[testenv]setenv} - PYTHON=coverage run --source qinling --parallel-mode -commands = - {toxinidir}/tools/cover.sh {posargs} - -[testenv:docs] -deps = -r{toxinidir}/doc/requirements.txt -whitelist_externals = rm -commands = - rm -rf doc/build - sphinx-build -W -b html doc/source doc/build/html - -[testenv:releasenotes] -deps = -r{toxinidir}/doc/requirements.txt -commands = - sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html - -[testenv:debug] -commands = oslo_debug_helper {posargs} - -[flake8] -ignore = -show-source = true -builtins = _ -exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools,build,example -# H106: Don't put vim configuration in source files -# H203: Use assertIs(Not)None to check for None -# H204: Use assert(Not)Equal to check for equality -# H205: Use assert(Greater|Less)(Equal) for comparison -# H904: Delay string interpolations at logging calls -enable-extensions=H106,H203,H204,H205,H904 - -[testenv:lower-constraints] -deps = - -c{toxinidir}/lower-constraints.txt - -r{toxinidir}/test-requirements.txt - -r{toxinidir}/requirements.txt - -[testenv:api-ref] -deps = -r{toxinidir}/doc/requirements.txt -commands = - rm -rf api-ref/build - sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html