diff --git a/.bowerrc b/.bowerrc deleted file mode 100644 index deb30db8..00000000 --- a/.bowerrc +++ /dev/null @@ -1,3 +0,0 @@ -{ - "directory": "refstack-ui/app/assets/lib" -} diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index c6a42acb..00000000 --- a/.dockerignore +++ /dev/null @@ -1,12 +0,0 @@ -*.egg* -*.py[cod] -.coverage -.testrepository/ -.tox/ -.venv/ -AUTHORS -ChangeLog -build/ -cover/ -dist -.git/ \ No newline at end of file diff --git a/.eslintignore b/.eslintignore deleted file mode 100644 index 8f118fb0..00000000 --- a/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -refstack-ui/app/assets/lib \ No newline at end of file diff --git a/.eslintrc b/.eslintrc deleted file mode 100644 index f95bae01..00000000 --- a/.eslintrc +++ /dev/null @@ -1,72 +0,0 @@ -{ - // For a detailed list of all options, please see here: - // http://eslint.org/docs/configuring/ - "ecmaFeatures": { - "arrowFunctions": false, - "binaryLiterals": false, - "blockBindings": false, - "defaultParams": false, - "forOf": false, - "generators": false, - "objectLiteralComputedProperties": false, - "objectLiteralDuplicateProperties": false, - "objectLiteralShorthandProperties": false, - "octalLiterals": false, - "regexUFlag": false, - "superInFunctions": false, - "templateStrings": false, - "unicodeCodePointEscapes": false, - "globalReturn": false, - "jsx": false - }, - - "env": { - "browser": true, - "node": false, - "amd": false, - "mocha": false, - "jasmine": true, - "phantomjs": false, - "jquery": false, - "prototypejs": false, - "shelljs": false, - "es6": true - }, - - "extends": "openstack", - - "globals": { - "require": false, - "exports": false, - "angular": false, // AngularJS - "module": false, - "inject": false, - "element": false, - "by": false, - "browser": false - }, - - "plugins": [ - "angular" - ], - - "rules": { - "quotes": [2, "single"], - "eol-last": 2, - "no-trailing-spaces": 2, - "camelcase": 0, - "no-extra-boolean-cast": 0, - "operator-linebreak": 0, - "require-jsdoc": 2, - "quote-props": 0, - "valid-jsdoc": 0, - - // Stylistic - "indent": [2, 4, {SwitchCase: 1}], - "max-len": [2, 80], - "no-undefined": 2, - - // Angular Plugin - "angular/controller-as-vm": [1, "ctrl"] - } -} diff --git a/.gitignore b/.gitignore deleted file mode 100755 index b541f1d5..00000000 --- a/.gitignore +++ /dev/null @@ -1,17 +0,0 @@ -*.egg* -*.py[cod] -.coverage -.stestr -.tox/ -.venv/ -AUTHORS -ChangeLog -build/ -cover/ -dist - -.tmp -node_modules -npm-debug.log -refstack-ui/app/assets/lib -refstack-ui/app/config.json diff --git a/.stestr.conf b/.stestr.conf deleted file mode 100644 index 672b5eba..00000000 --- a/.stestr.conf +++ /dev/null @@ -1,3 +0,0 @@ -[DEFAULT] -test_path=./refstack/tests/unit -top_dir=./ diff --git a/.zuul.yaml b/.zuul.yaml deleted file mode 100644 index 6d5a33d9..00000000 --- a/.zuul.yaml +++ /dev/null @@ -1,38 +0,0 @@ -- project: - templates: - - nodejs18-jobs - - openstack-cover-jobs - check: - jobs: - - openstack-tox-pep8 - - openstack-tox-py38 - - openstack-tox-py39 - - openstack-tox-py310 - - openstack-tox-py311 - - refstack-tox-functional - - opendev-tox-docs - gate: - jobs: - - openstack-tox-pep8 - - openstack-tox-py38 - - openstack-tox-py39 - - openstack-tox-py310 - - openstack-tox-py311 - - refstack-tox-functional - - opendev-tox-docs - promote: - jobs: - - opendev-promote-docs - -- job: - name: refstack-tox-functional - parent: openstack-tox-with-sudo - description: | - Run functional tests for an OpenStack Python project under cPython 3. 
- Uses tox with the ``functional`` environment. - irrelevant-files: - - ^.*\.rst$ - - ^doc/.*$ - - ^releasenotes/.*$ - vars: - tox_envlist: functional diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index cb05dcc5..00000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,34 +0,0 @@ -The source repository for this project can be found at: - - https://opendev.org/openinfra/refstack - -To start contributing to OpenStack, follow the steps in the contribution guide -to set up and use Gerrit: - - https://docs.openstack.org/contributors/code-and-documentation/quick-start.html - -Documentation of the project can be found at: - - https://docs.opendev.org/openinfra/refstack/latest/ - -Bugs should be filed on Storyboard: - - https://storyboard.openstack.org/#!/project/openinfra/refstack - -Patches against this project can be found at: - - https://review.opendev.org/q/project:openinfra/refstack - -To communicate with us you may use one of the following means: - -**Mailing List:** -Get in touch with us via `email `_. -Use [refstack] in your subject. - -**IRC:** -We're at #refstack channel on OFTC network. -`Setup IRC `_ - -**Meetings:** -`Visit this link `_ -for the meeting information. diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 68c771a0..00000000 --- a/LICENSE +++ /dev/null @@ -1,176 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - diff --git a/README.rst b/README.rst index 976103a3..b72ee4f4 100644 --- a/README.rst +++ b/README.rst @@ -1,65 +1,6 @@ -======== -RefStack -======== +This project is no longer maintained. -What is RefStack? -################# - -- Toolset for testing interoperability between OpenStack clouds. -- Database backed website supporting collection and publication of - Community Test results for OpenStack. -- User interface to display individual test run results. - -RefStack intends on being THE source of tools for interoperability testing -of OpenStack clouds. - -RefStack provides users in the OpenStack community with a Tempest wrapper, -refstack-client, that helps to verify interoperability of their cloud -with other OpenStack clouds. It does so by validating any cloud -implementation against the OpenStack Tempest API tests. - -Refstack's Use Case -################### - -**RefStack and Interop Working Group** - The prototypical use case for RefStack -provides the Interop Working Group - formerly known as DefCore committee - the -tools for vendors and other users to run API tests against their clouds to -provide the WG with a reliable overview of what APIs and capabilities are -being used in the marketplace. This will help to guide the Interop -Working Group defined capabilities and help ensure interoperability across -the entire OpenStack ecosystem. It can also be used to validate clouds -against existing capability lists, giving you assurance that your cloud -faithfully implements OpenStack standards. - -**Value add for openstack distro or solution vendors** - Vendors can use -RefStack to demonstrate that their distros, and/or their customers' installed -clouds remain OpenStack compliant after their software has been incorporated -into the distro or cloud. - -**RefStack consists of two parts:** - -* **refstack-api** - Our API isn't just for us to collect data from private and public cloud - vendors. It can be used by vendors in house to compare interoperability - data over time. 
- - * documentation: https://docs.opendev.org/openinfra/refstack/latest/ - * repository: https://opendev.org/openinfra/refstack - * reviews: https://review.opendev.org/#/q/status:open+project:openinfra/refstack - * bugs: https://storyboard.openstack.org/#!/project/openinfra/refstack - * Web-site: https://refstack.openstack.org - -* **refstack-client** - refstack-client contains the tools you will need to run the - Interop Working Group tests. - - * documentation: https://docs.opendev.org/openinfra/refstack-client/latest/ - * repository: https://opendev.org/openinfra/refstack-client - * reviews: https://review.opendev.org/#/q/status:open+project:openinfra/refstack-client - * bugs: https://storyboard.openstack.org/#!/project/openinfra/refstack-client - -Get Involved! -############# - -See the `CONTRIBUTING `_ -guide on how to get involved. +The contents of this repository are still available in the Git +source code management system. To see the contents of this +repository before it reached its end of life, please check out the +previous commit with "git checkout HEAD^1". diff --git a/bin/refstack-api b/bin/refstack-api deleted file mode 100755 index 04968f60..00000000 --- a/bin/refstack-api +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Command-line launcher for Refstack API -""" - -import sys - -from pecan.commands import serve - -from refstack.api import config as api_config - - -def get_pecan_config(): - """Get path to pecan configuration file""" - filename = api_config.__file__.replace('.pyc', '.py') - return filename - - -if __name__ == '__main__': - config_path = get_pecan_config() - sys.argv.append(config_path) - serve.gunicorn_run() diff --git a/bin/refstack-manage b/bin/refstack-manage deleted file mode 100755 index 28b00d69..00000000 --- a/bin/refstack-manage +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-""" -Command-line utility for database manage -""" - -import sys - -from oslo_config import cfg -from oslo_log import log - -from refstack.db import migration - -CONF = cfg.CONF - -log.register_options(CONF) - - -class DatabaseManager(object): - - def version(self): - print(migration.version()) - - def upgrade(self): - migration.upgrade(CONF.command.revision) - - def downgrade(self): - migration.downgrade(CONF.command.revision) - - def stamp(self): - migration.stamp(CONF.command.revision) - - def revision(self): - migration.revision(CONF.command.message, CONF.command.autogenerate) - - -def add_command_parsers(subparsers): - db_manager = DatabaseManager() - - parser = subparsers.add_parser('version', - help='show current database version') - parser.set_defaults(func=db_manager.version) - - parser = subparsers.add_parser('upgrade', - help='upgrade database to ' - 'the specified version') - parser.set_defaults(func=db_manager.upgrade) - parser.add_argument('--revision', nargs='?', - help='desired database version') - - parser = subparsers.add_parser('downgrade', - help='downgrade database ' - 'to the specified version') - parser.set_defaults(func=db_manager.downgrade) - parser.add_argument('--revision', nargs='?', - help='desired database version') - - parser = subparsers.add_parser('stamp', - help='stamp database with provided ' - 'revision. Don\'t run any migrations') - parser.add_argument('--revision', nargs='?', - help='should match one from repository or head - ' - 'to stamp database with most recent revision') - parser.set_defaults(func=db_manager.stamp) - - parser = subparsers.add_parser('revision', - help='create template for migration') - parser.add_argument('-m', '--message', - help='text that will be used for migration title') - parser.add_argument('--autogenerate', action='store_true', - help='if True - generates diff based ' - 'on current database state (True by default)') - parser.set_defaults(func=db_manager.revision) - -command_opt = cfg.SubCommandOpt('command', - title='Available commands', - handler=add_command_parsers) - -CONF.register_cli_opt(command_opt) - -if __name__ == '__main__': - CONF(sys.argv[1:], project='refstack') - log.setup(CONF, 'refstack') - CONF.command.func() diff --git a/bindep.txt b/bindep.txt deleted file mode 100644 index dd5d96c6..00000000 --- a/bindep.txt +++ /dev/null @@ -1,8 +0,0 @@ -# This is a cross-platform list tracking distribution packages needed for install and tests; -# see https://docs.openstack.org/infra/bindep/ for additional information. 
- -gcc [compile test] -mariadb-client [test platform:dpkg] -mariadb-server [test platform:dpkg] -postgresql [test] -postgresql-client [test platform:dpkg] diff --git a/bower.json b/bower.json deleted file mode 100644 index a0cc152a..00000000 --- a/bower.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "refstack-ui", - "version": "0.0.1", - "description": "Refstack user interface", - "dependencies": { - "angular": "1.3.15", - "angular-ui-router": "0.2.13", - "angular-resource": "1.3.15", - "angular-bootstrap": "0.14.3", - "angular-busy": "4.1.3", - "angular-confirm-modal": "1.2.3", - "bootstrap": "3.3.2" - }, - "devDependencies": { - "angular-mocks": "1.3.15" - }, - "resolutions": { - "angular": "1.3.15" - } -} diff --git a/doc/requirements.txt b/doc/requirements.txt deleted file mode 100644 index e1b45749..00000000 --- a/doc/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -sphinx>=1.6.2 -openstackdocstheme>=1.11.0 # Apache-2.0 diff --git a/doc/source/conf.py b/doc/source/conf.py deleted file mode 100644 index cc94f093..00000000 --- a/doc/source/conf.py +++ /dev/null @@ -1,336 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Refstack documentation build configuration file, created by -# sphinx-quickstart on Fri Aug 5 01:41:59 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = ['openstackdocstheme'] - -# Add any paths that contain templates here, relative to this directory. -#templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Refstack' -copyright = u'2016, OpenStack Foundation' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '1.0' -# The full version, including alpha/beta/rc tags. -release = '1.0' - -# openstackdocstheme options -openstackdocs_repo_name = 'openinfra/refstack' -openstackdocs_bug_project = '878' -openstackdocs_bug_tag = '' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ['specs/prior/*', 'specs/README.rst', 'specs/template.rst'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' -# So that we can enable "log-a-bug" links from each output HTML page, this -# variable must be set to a format that includes year, month, day, hours and -# minutes. -html_last_updated_fmt = '%Y-%m-%d %H:%M' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
-#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'Refstackdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'Refstack.tex', u'Refstack Documentation', - u'OpenStack Foundation', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'refstack', u'Refstack Documentation', - [u'OpenStack Foundation'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'Refstack', u'Refstack Documentation', - u'OpenStack Foundation', 'Refstack', 'Toolset for testing interoperability' - ' between OpenStack clouds.', 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = u'Refstack' -epub_publisher = u'OpenStack Foundation' -epub_copyright = u'2016, OpenStack Foundation' - -# The basename for the epub file. It defaults to the project name. -#epub_basename = u'Refstack' - -# The HTML theme for the epub output. Since the default themes are not optimized -# for small screen space, using the same theme for HTML and epub output is -# usually not wise. This defaults to 'epub', a theme designed to save visual -# space. -#epub_theme = 'epub' - -# The language of the text. 
It defaults to the language option -# or en if the language is not set. -#epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -#epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -#epub_identifier = '' - -# A unique identification for the text. -#epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -#epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -#epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -#epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -#epub_tocdepth = 3 - -# Allow duplicate toc entries. -#epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -#epub_tocscope = 'default' - -# Fix unsupported image types using the PIL. -#epub_fix_images = False - -# Scale large images. -#epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#epub_show_urls = 'inline' - -# If false, no index is generated. -#epub_use_index = True diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst deleted file mode 100644 index 1f5ca23c..00000000 --- a/doc/source/contributing.rst +++ /dev/null @@ -1,5 +0,0 @@ -============ -Contributing -============ - -.. include:: ../../CONTRIBUTING.rst diff --git a/doc/source/index.rst b/doc/source/index.rst deleted file mode 100644 index b98ca97e..00000000 --- a/doc/source/index.rst +++ /dev/null @@ -1,21 +0,0 @@ -.. Refstack documentation master file, created by - sphinx-quickstart on Fri Aug 5 01:41:59 2016. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -==================================== -Welcome to RefStack's documentation! -==================================== - -Content: --------- -.. toctree:: - :maxdepth: 2 - - refstack_setup_via_ansible - overview - contributing - refstack - vendor_product_management/index - uploading_private_results - test_result_management diff --git a/doc/source/overview.rst b/doc/source/overview.rst deleted file mode 100644 index 165b7dd0..00000000 --- a/doc/source/overview.rst +++ /dev/null @@ -1,2 +0,0 @@ -.. include:: ../../README.rst - diff --git a/doc/source/refstack.rst b/doc/source/refstack.rst deleted file mode 100644 index 325e0247..00000000 --- a/doc/source/refstack.rst +++ /dev/null @@ -1,294 +0,0 @@ -=================== -RefStack Quickstart -=================== - -You can use docker for `one-click setup `__ or follow -step-by-step instructions below. These instructions have been tested on -Ubuntu 14 and 16 LTS. 
- -Install API dependencies -^^^^^^^^^^^^^^^^^^^^^^^^ -:: - - sudo apt-get install git python-dev python-virtualenv libssl-dev build-essential libffi-dev - sudo apt-get install mysql-server python-mysqldb - -Install RefStack UI dependencies -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -:: - - curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash - - curl -sL https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - - echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list - sudo apt-get update && sudo apt-get install -y nodejs yarn - -Setup the RefStack database -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -**Log into MySQL**:: - - mysql -u root -p - -**After authentication, create the database**:: - - CREATE DATABASE refstack; - -**Create a refstack user**:: - - CREATE USER 'refstack'@'localhost' IDENTIFIED BY ''; - -**or using hash value for your password**:: - - CREATE USER 'refstack'@'localhost' IDENTIFIED BY PASSWORD '=0.6.2,!=0.6.4 # python mysql connector - -Install RefStack application -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -:: - - pip install . - -Install needed RefStack UI library dependencies -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -:: - - yarn - -API configuration file preparation -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Make a copy of the sample config file (etc/refstack.conf.sample) and -update it with the correct information of your environment. Examples -of the config parameters with default values are included in the -sample config file. - -You should ensure that the following values in the config file are -noted and properly set: - -``connection`` field in the ``[database]``\ section. - -For example, if the backend database is MySQL then update: - -``#connection = `` to -``connection = mysql+pymysql://refstack:@x.x.x.x/refstack`` - -``ui_url`` field in the ``[DEFAULT]`` section. - - This should be the URL that the UI can be accessed from. This will - likely be in the form ``http://:8000`` (8000 being - the default port RefStack is hosted on). For example: - - ``http://192.168.56.101:8000`` - -``api_url`` field in the ``[api]`` section. - - This should be the URL that the API can be accessed from. This, in - most cases, will be the same as the value for ``ui_url`` above. - -``app_dev_mode`` field in the ``[api]`` section. - - Set this field to true if you aren't creating a production-level - RefStack deployment and are just trying things out or developing. - Setting this field to true will allow you to quickly bring up both - the API and UI together, with the UI files being served by a simple - file server that comes with Pecan. - -Create UI config file -^^^^^^^^^^^^^^^^^^^^^ - -From the RefStack project root directory, create a config.json file and -specify your API endpoint inside this file. This will be something like -{"refstackApiUrl": "http://192.168.56.101:8000/v1"}:: - - cp refstack-ui/app/config.json.sample refstack-ui/app/config.json - -Openstack OpenID endpoint configuration (optional) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If you are only interested in the uploading and viewing of result sets, -then this section can be ignored. However, in order for user accounts -and authentication to work, you need to make sure you are properly -configured with an OpenStack OpenID endpoint. 
There are two options: - -- Use the official endpoint - `openstackid.org `__ -- Host your own openstackid endpoint - -Since openstackid checks for valid top-level domains, in both options -you will likely have to edit the hosts file of the system where your -web-browser for viewing the RefStack site resides. On Linux systems, you -would modify ``/etc/hosts``, adding a line like the following: - -`` `` - -Example: - -``192.168.56.101 myrefstack.com`` - -On Windows, you would do the same in -``%SystemRoot%\System32\drivers\etc\hosts``. Alternatively, you can add -a custom DNS record with the domain name mapping if possible. - -Note that doing this requires you to modify the config.json file and the -``api_url`` and ``ui_url`` fields in refstack.conf to use this domain -name instead of the IP. - -**Option 1 - Use Official Endpoint** - -Using the official site is probably the easiest as no additional configuration -is needed besides the hosts file modifications as noted above. RefStack, by -default, points to this endpoint. - -**Option 2 - Use Local Endpoint** - -Instructions for setting this up are outside of the scope of this doc, -but you can get started at -`Openstackid project `__ . -You would then need to modify the ``openstack_openid_endpoint`` field in -the ``[osid]`` section in refstack.conf to match the local endpoint. - -Database sync -^^^^^^^^^^^^^ - -**Check current revision**:: - - refstack-manage --config-file /path/to/refstack.conf version - -The response will show the current database revision. If the revision is -``None`` (indicating a clear database), the following command should be -performed to upgrade the database to the latest revision: - -**Upgrade database to latest revision**:: - - refstack-manage --config-file /path/to/refstack.conf upgrade --revision head - -**Check current revision**:: - - refstack-manage --config-file /path/to/refstack.conf version - -:: - - Now it should be some revision number other than `None`. - -(Optional) Generate About Page Content -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The RefStack About page is populated with HTML templates generated from -our RST documentation files. If you want this information displayed, then -run the following command from the root of the project. - -:: - - ./tools/convert-docs.py -o ./refstack-ui/app/components/about/templates ./doc/source/*.rst - -Ignore any unknown directive errors. - -Start RefStack -^^^^^^^^^^^^^^ - -A simple way to start refstack is to just kick off gunicorn using the -``refstack-api`` executable:: - - refstack-api --env REFSTACK_OSLO_CONFIG=/path/to/refstack.conf - -If ``app_dev_mode`` is set to true, this will launch both the UI and -API. - -Now available: - -- ``http://:8000/v1/results`` with response JSON - including records consisting of ```` and - ```` of the test runs. The default response is limited - to one page of the most recent uploaded test run records. The number - of records per page is configurable via the RefStack configuration - file. Filtering parameters such as page, start\_date, and end\_date - can also be used to specify the desired records. For example: GET - ``http://:8000/v1/results?page=n`` will return page - *n* of the data. - -- ``http://:8000/v1/results/`` with - response JSON including the detail test results of the specified - ```` - -(Optional) Configure Foundation organization and group -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Overall RefStack admin access is given to users belonging to a -"Foundation" organization. 
To become a Foundation admin, first a -"Foundation" organization must be created. Note that you must have -logged into RefStack at least once so that a user record for your -account is created. - -**Log into MySQL**:: - - mysql -u root -p - -**Create a group for the "Foundation" organization**:: - - INSERT INTO refstack.group (id, name, created_at) VALUES (UUID(), 'Foundation Group', NOW()); - -**Get the group ID for the group you just created**:: - - SELECT id from refstack.group WHERE name = 'Foundation Group'; - -**Get your OpenID**:: - - SELECT openid from refstack.user WHERE email = ''; - -**Add your user account to the previously created "Foundation" group.** - -Replace ```` and ```` with the values -retrieved in the two previous steps:: - - INSERT INTO refstack.user_to_group (created_by_user, user_openid, group_id, created_at) VALUES ('', '', '', NOW()); - -**Create the actual "Foundation" organization using this group**:: - - INSERT INTO refstack.organization (id, type, name, group_id, created_by_user, created_at) VALUES (UUID(), 0, 'Foundation', '', '', NOW()); - -(Optional) Build documentation -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The RefStack documentation can be build using following commands:: - - cd ~/refstack; source .venv/bin/activate - sudo apt-get install python3-dev python-tox - tox -e docs - -The documentation files will be build under ``~/refstack/build/sphinx``. - diff --git a/doc/source/refstack_setup_via_ansible.rst b/doc/source/refstack_setup_via_ansible.rst deleted file mode 100644 index daba2340..00000000 --- a/doc/source/refstack_setup_via_ansible.rst +++ /dev/null @@ -1,99 +0,0 @@ -============================================================= -Use Ansible playbook to set up a local refstack instance -============================================================= -These steps are meant for RefStack developers to help them with setting up -a local refstack instance. - -In production RefStack server is managed by a set of playbooks and Ansible roles -defined in `system-config `__ -repository. Below instructions use these Ansible capabilities. - -The RefStack server runs on Ubuntu 20.04 LTS in the production. - -You can find an Ansible playbook in ``playbooks`` directory which semi-automates -the process of running refstack server in a container. - -Execute the playbook by:: - - $ ansible-playbook playbooks/run-refstack-in-container.yaml - -In order to avoid setting certificates and https protocol (it's simpler and more -than enough for a testing instance), edit -``/etc/apache2/sites-enabled/000-default.conf`` like following: - -* remove VirtualHost section for the port 80 and change the port of VirtualHost from 443 to 80 -* Turn off the SSLEngine (`SSLEngine on -> SSLEngine off`) -* Remove SSLCertificate lines - -and then restart the apache service so that it loads the new configuration:: - - $ systemctl restart apache2 - -How to edit refstack files within the container ------------------------------------------------ - -List the running container by:: - - $ docker container list - -You can enter the container by:: - - $ sudo docker exec -it /bin/bash - -If you wanna install new packages like f.e. 
vim, do the following:: - $ apt update - $ apt install vim - -Edit what's needed, backend is installed under -``/usr/local/lib/python3.7/site-packages/refstack/`` and frontend source files -can be found at ``/refstack-ui`` - -After you made the changes, make pecan to reload the files served:: - - $ apt install procps # to install pkill command - $ pkill pecan - -Killing pecan will kick you out of the container, however, pecan serves the -edited files now and you may re-enter the container. - -Installing refstack with changes put for a review -------------------------------------------------- - -In order to do this, you will need to rebuild the refstack image built by the -playbook. - -Go to the location where the playbook downloaded system-config, default in -``/tmp/refstack-docker`` and edit the refstack's Dockerfile:: - - $ cd /tmp/refstack-docker - $ vim ./refstack-docker-files/Dockerfile - -Replace:: - - $ RUN git clone https://opendev.org/openinfra/refstack /tmp/src - -by:: - - $ RUN git clone https://opendev.org/openinfra/refstack.git /tmp/src \ - && cd /tmp/src && git fetch "https://review.opendev.org/openinfra/refstack" \ - refs/changes/37/ && git checkout -b \ - change-- FETCH_HEAD - -Then rebuild the image:: - - $ docker image build -f Dockerfile -t . - -Edit the ``docker-compose.yaml`` stored (by default) in -``/etc/refstack-docker/docker-compose.yaml`` and change the the image -(under `refstack-api`) to your image name and tag you set in the previous step. - -After then spin a new container using the new image:: - - $ cd /etc/refstack-docker - $ docker-compose down # if refstack container is already running - $ docker-compose up -d - -To see the server's logs use the following command:: - - $ docker container logs -f - diff --git a/doc/source/test_result_management.rst b/doc/source/test_result_management.rst deleted file mode 100755 index be40c8c4..00000000 --- a/doc/source/test_result_management.rst +++ /dev/null @@ -1,18 +0,0 @@ -====================== -Test result management -====================== - -Test result to product version association ------------------------------------------- - -Test results uploaded by users can be associated to a version of a product. To -perform this association, the user must be both the one who uploaded the result -and also an admin of the vendor which owns the product. Once a test result is -associated to a product, all admins of the vendor which owns the product can -manage the test result. - -Mark or unmark a test results as verified ------------------------------------------ - -Only Foundation admins can mark and un-mark a test as verified. A verified -test result can not be updated or deleted. diff --git a/doc/source/uploading_private_results.rst b/doc/source/uploading_private_results.rst deleted file mode 100644 index bdc02a0c..00000000 --- a/doc/source/uploading_private_results.rst +++ /dev/null @@ -1,120 +0,0 @@ -====================================== -How to upload test results to RefStack -====================================== - -RefStack allows test results contributors to submit test results and -have them displayed either anonymously, or identified with a vendor. As -such, test results should be uploaded with validated users. Users will -first log into RefStack with their OpenStack ID to upload their public -keys. RefStack test results need to be uploaded to RefStack using the -corresponding private key. 
By default, the uploaded data isn't shared, -but authorized users can decide to share the results with the community -anonymously. - -The following is a quick guide outlining the steps needed to upload your -first set of test results. - -Register an OpenStack ID -^^^^^^^^^^^^^^^^^^^^^^^^ - -The RefStack server uses OpenStack OpenID for user authentication. -Therefore, the RefStack server requires that anyone who wants to upload -test data to have an OpenStack ID. As you click on the Sign In/Sign Up -link on the `RefStack pages `__, you -will be redirected to the official OpenStack user log in page where you -can either log in with your OpenStack ID or register for one. -The registration page can also be found directly through: -https://www.openstack.org/join/register. - -Generate ssh keys locally -^^^^^^^^^^^^^^^^^^^^^^^^^ - -You will need to generate ssh keys locally. If your operating system is -a Linux distro, then you can use the following instructions. - -First check for existing keys with command:: - - $ ls -al ~/.ssh - -If you see you already have existing public and private keys that you -want to use, you can skip this step; otherwise:: - - $ ssh-keygen -m PEM -t rsa -b 4096 -C "youropenstackid" - -The `youropenstackid` string is the username you chose when you -registered for your OpenStack ID account. Enter the file name in which -to save the key (``/home/you/.ssh/id\_rsa``), then press enter. You will be -asked to enter a passphrase. Just press enter again as passphrase -protected keys currently aren't supported. Your ssh keys will then be -generated. - -Sign Key with RefStack Client -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -**IMPORTANT** You must have the RefStack client on you computer to -complete this step. -Follow `its README `__ on how to -install it. - -Generate a signature for your public key using your private key with -`refstack-client `__:: - - $ ./refstack-client sign /path-of-sshkey-folder/key-file-name - -The ``/path-of-sshkey-folder`` string is the path of the folder where the -generated ssh keys are stored locally. The 'key-file-name' portion -refers to the private key file name. If the command runs correctly, -there will be output like below: - -:: - - Public key: - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDSGo2xNDcII1ZaM3H2uKh3iXBzvKIOa5W/5HxKF23yrzwho7nR7td0kgFtZ/4fe0zmkkUuKdUhOACCD3QVyi1N5wIhKAYN1fGt0/305jk7VJ+yYhUPlvo... - - Self signature: - 19c28fc07e3fbe1085578bd6db2f1f75611dcd2ced068a2195bbca60ae98af7e27faa5b6968c3c5aef58b3fa91bae3df3... - -Upload the ssh public key and the signature -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Sign into https://refstack.openstack.org with your OpenStack ID. Click -the `Profile` link in the upper right corner. Now click the `Import -public key` button on your profile page. A popup window with two entry -fields will appear. Just copy and paste the key and signature generated -in the previous step into the corresponding textboxes. - -Note that the literal strings `Public key:` and `Self signature:` from -the ``refstack-client sign`` command output **should not** be copied/pasted -into the text boxes. Otherwise you will get an error like:: - - Bad Request Request doesn't correspond to schema - -Once complete, click the `Import public key` button. 
- -Upload the test result with refstack-client -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The results can be uploaded using the ``refstack-client`` by:: - - $ ./refstack-client upload /path_to_testresult_json_file \ - --url https://refstack.openstack.org/api -i ~/.ssh/id_rsa - -**NOTE** Users may need to add the `--insecure` optional argument -to the command string if certificate validation issues occur when -uploading test result. Usage with the insecure argument:: - - $ ./refstack-client upload --insecure /path_to_testresult_json_file \ - --url https://refstack.openstack.org/api -i ~/.ssh/id_rsa`` - -The ``path_to_testresult_json_file`` there is the json file of the test result. -By default, it's in ``.tempest/.stestr/.json``. If the command -runs correctly, there will be output like below: - -:: - - Test results will be uploaded to https://refstack.openstack.org/api. Ok? (yes/y): y - Test results uploaded! - URL: https://refstack.openstack.org/#/results/88a1e6f4-707d-4627-b658-b14b7e6ba70d. - -You can find your uploaded test results by clicking the `My Results` -link on the RefStack website. diff --git a/doc/source/vendor_product_management/ProductEntity.rst b/doc/source/vendor_product_management/ProductEntity.rst deleted file mode 100644 index e08b60e2..00000000 --- a/doc/source/vendor_product_management/ProductEntity.rst +++ /dev/null @@ -1,36 +0,0 @@ -============== -Product entity -============== - -Any user who has successfully authenticated to the RefStack server can create -product entities. The minimum information needed to create a product entity is -as follows: - -- Name - - This is the name of the product entity being created. - -- Product type: - - Product types are defined by OpenStack as shown on the OpenStack Marketplace - ( https://www.openstack.org/marketplace/ ). Currently, there are three types - of products, namely: Distro & Appliances, Hosted Private Clouds and Public - Clouds. - -- Vendor - - This is the vendor which owns the product. A default vendor will be created - for the user if no vendor entity exists for this user. - -Whenever a product is created, by default, it is a private product and is only -visible to its vendor users. Vendor users can make a product publicly visible -as needed later. However, only products that are owned by official vendors can -be made publicly visible. - -Product version -~~~~~~~~~~~~~~~ - -A default version is created whenever a product is created. The name of the -default version is blank. The default version is used for products that have -no version. Users can add new product versions to the product as needed. - diff --git a/doc/source/vendor_product_management/VendorEntity.rst b/doc/source/vendor_product_management/VendorEntity.rst deleted file mode 100644 index 94c2410e..00000000 --- a/doc/source/vendor_product_management/VendorEntity.rst +++ /dev/null @@ -1,49 +0,0 @@ -============= -Vendor entity -============= - -Any user who has successfully authenticated to the RefStack server can create -vendor entities. The minimum required information to create a vendor is the -vendor name. Users can update the rest of the vendor related information at a -later time. - -Vendor admin -~~~~~~~~~~~~~ - -Whenever a user creates a vendor, this user will be added as the vendor's first -vendor admin. Subsequently, any admin of the vendor can add additional users to -the vendor. In RefStack, the "OpenStack User ID" of users are used as the -identities for adding users to vendors. 
At the time this document is written, -RefStack has not implemented user roles, and as such, all users of a vendor are -admin users. - -Vendor types -~~~~~~~~~~~~~ - -There are four types of vendor entities in RefStack: - -- Foundation: - - This is a special entity representing the OpenStack Foundation. Users belong - to this entity are the Foundation admins. Foundation admins have visibility - to all vendors and products. - -- Private: - - A vendor will always be created with type "private". Vendors of this type - are only visible to their own users and Foundation admins. Vendor users can - initiate a registration request to the Foundation to change its type from - "private" to "official". - -- Pending - - Once a registration request is submitted, the vendor type will be changed - automatically from type "private" to "pending". Vendors of this type are - still only visible to their own users and Foundation admins. - -- Official - - Once a vendor registration request is approved by the Foundation. The vendor - type will be changed from "pending" to "official". Official vendors are - visible to all RefStack users. - diff --git a/doc/source/vendor_product_management/index.rst b/doc/source/vendor_product_management/index.rst deleted file mode 100644 index 3fc73afd..00000000 --- a/doc/source/vendor_product_management/index.rst +++ /dev/null @@ -1,15 +0,0 @@ -Vendor and product management -============================= - -RefStack has implemented a vendor and product registration process so that test -results can be associated to products of vendors. The creation and management -of vendor and product entities can be done using the RefStack Server UI or -RefStack APIs. The following is a quick guide outlining the information related -to the creation and management of those entities. - -.. toctree:: - :maxdepth: 1 - :includehidden: - - VendorEntity - ProductEntity diff --git a/etc/refstack.conf.sample b/etc/refstack.conf.sample deleted file mode 100644 index c37ae671..00000000 --- a/etc/refstack.conf.sample +++ /dev/null @@ -1,392 +0,0 @@ -[DEFAULT] - -# -# From oslo.log -# - -# If set to true, the logging level will be set to DEBUG instead of -# the default INFO level. (boolean value) -# Note: This option can be changed without restarting. -#debug = false - -# The name of a logging configuration file. This file is appended to -# any existing logging configuration files. For details about logging -# configuration files, see the Python logging module documentation. -# Note that when logging configuration files are used then all logging -# configuration is set in the configuration file and other logging -# configuration options are ignored (for example, log-date-format). -# (string value) -# Note: This option can be changed without restarting. -# Deprecated group/name - [DEFAULT]/log_config -#log_config_append = - -# Defines the format string for %%(asctime)s in log records. Default: -# %(default)s . This option is ignored if log_config_append is set. -# (string value) -#log_date_format = %Y-%m-%d %H:%M:%S - -# (Optional) Name of log file to send logging output to. If no default -# is set, logging will go to stderr as defined by use_stderr. This -# option is ignored if log_config_append is set. (string value) -# Deprecated group/name - [DEFAULT]/logfile -#log_file = - -# (Optional) The base directory used for relative log_file paths. -# This option is ignored if log_config_append is set. 
(string value) -# Deprecated group/name - [DEFAULT]/logdir -#log_dir = - -# Uses logging handler designed to watch file system. When log file is -# moved or removed this handler will open a new log file with -# specified path instantaneously. It makes sense only if log_file -# option is specified and Linux platform is used. This option is -# ignored if log_config_append is set. (boolean value) -#watch_log_file = false - -# Use syslog for logging. Existing syslog format is DEPRECATED and -# will be changed later to honor RFC5424. This option is ignored if -# log_config_append is set. (boolean value) -#use_syslog = false - -# Enable journald for logging. If running in a systemd environment you -# may wish to enable journal support. Doing so will use the journal -# native protocol which includes structured metadata in addition to -# log messages.This option is ignored if log_config_append is set. -# (boolean value) -#use_journal = false - -# Syslog facility to receive log lines. This option is ignored if -# log_config_append is set. (string value) -#syslog_log_facility = LOG_USER - -# Use JSON formatting for logging. This option is ignored if -# log_config_append is set. (boolean value) -#use_json = false - -# Log output to standard error. This option is ignored if -# log_config_append is set. (boolean value) -#use_stderr = false - -# Log output to Windows Event Log. (boolean value) -#use_eventlog = false - -# The amount of time before the log files are rotated. This option is -# ignored unless log_rotation_type is set to "interval". (integer -# value) -#log_rotate_interval = 1 - -# Rotation interval type. The time of the last file change (or the -# time when the service was started) is used when scheduling the next -# rotation. (string value) -# Possible values: -# Seconds - -# Minutes - -# Hours - -# Days - -# Weekday - -# Midnight - -#log_rotate_interval_type = days - -# Maximum number of rotated log files. (integer value) -#max_logfile_count = 30 - -# Log file maximum size in MB. This option is ignored if -# "log_rotation_type" is not set to "size". (integer value) -#max_logfile_size_mb = 200 - -# Log rotation type. (string value) -# Possible values: -# interval - Rotate logs at predefined time intervals. -# size - Rotate logs once they reach a predefined size. -# none - Do not rotate log files. -#log_rotation_type = none - -# Format string to use for log messages with context. Used by -# oslo_log.formatters.ContextFormatter (string value) -#logging_context_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s - -# Format string to use for log messages when context is undefined. -# Used by oslo_log.formatters.ContextFormatter (string value) -#logging_default_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s - -# Additional data to append to log message when logging level for the -# message is DEBUG. Used by oslo_log.formatters.ContextFormatter -# (string value) -#logging_debug_format_suffix = %(funcName)s %(pathname)s:%(lineno)d - -# Prefix each line of exception output with this format. Used by -# oslo_log.formatters.ContextFormatter (string value) -#logging_exception_prefix = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s - -# Defines the format string for %(user_identity)s that is used in -# logging_context_format_string. 
Used by -# oslo_log.formatters.ContextFormatter (string value) -#logging_user_identity_format = %(user)s %(tenant)s %(domain)s %(user_domain)s %(project_domain)s - -# List of package logging levels in logger=LEVEL pairs. This option is -# ignored if log_config_append is set. (list value) -#default_log_levels = amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,oslo.messaging=INFO,oslo_messaging=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN,urllib3.connectionpool=WARN,websocket=WARN,requests.packages.urllib3.util.retry=WARN,urllib3.util.retry=WARN,keystonemiddleware=WARN,routes.middleware=WARN,stevedore=WARN,taskflow=WARN,keystoneauth=WARN,oslo.cache=INFO,oslo_policy=INFO,dogpile.core.dogpile=INFO - -# Enables or disables publication of error events. (boolean value) -#publish_errors = false - -# The format for an instance that is passed with the log message. -# (string value) -#instance_format = "[instance: %(uuid)s] " - -# The format for an instance UUID that is passed with the log message. -# (string value) -#instance_uuid_format = "[instance: %(uuid)s] " - -# Interval, number of seconds, of log rate limiting. (integer value) -#rate_limit_interval = 0 - -# Maximum number of logged messages per rate_limit_interval. (integer -# value) -#rate_limit_burst = 0 - -# Log level name used by rate limiting: CRITICAL, ERROR, INFO, -# WARNING, DEBUG or empty string. Logs with level greater or equal to -# rate_limit_except_level are not filtered. An empty string means that -# all levels are filtered. (string value) -#rate_limit_except_level = CRITICAL - -# Enables or disables fatal status of deprecations. (boolean value) -#fatal_deprecations = false - -# -# From refstack -# - -# Url of user interface for RefStack. Need for redirects after sign in -# and sign out. (string value) -#ui_url = https://refstack.openstack.org - -# The backend to use for database. (string value) -#db_backend = sqlalchemy - -# The alembic version table name to use within the database. To allow -# RefStack to upload and store the full set of subunit data, set this -# option to refstack_alembic_version. (string value) -#version_table = alembic_version - - -[api] - -# -# From refstack -# - -# Url of public RefStack API. (string value) -#api_url = https://refstack.openstack.org/api - -# The directory where your static files can be found. Pecan comes -# with middleware that can be used to serve static files (like CSS and -# Javascript files) during development. Here, a special variable -# %(project_root)s can be used to point to the root directory of the -# Refstack project's module, so paths can be specified relative to -# that. (string value) -#static_root = refstack-ui/app - -# Points to the directory where your template files live. Here, a -# special variable %(project_root)s can be used to point to the root -# directory of the Refstack project's main module, so paths can be -# specified relative to that. (string value) -#template_path = refstack-ui/app - -# List of sites allowed cross-site resource access. If this is empty, -# only same-origin requests are allowed. (list value) -#allowed_cors_origins = - -# Switch Refstack app into debug mode. Helpful for development. In -# debug mode static file will be served by pecan application. Also, -# server responses will contain some details with debug information. -# (boolean value) -#app_dev_mode = false - -# Template for test result url. 
(string value) -#test_results_url = /#/results/%s - -# The GitHub API URL of the repository and location of the Interop -# Working Group capability files. This URL is used to get a listing of -# all capability files. (string value) -#opendev_api_capabilities_url = https://opendev.org/api/v1/repos/openinfra/interop/contents/guidelines - -# The GitHub API URL of the repository and location of any additional -# guideline sources which will need to be parsed by the refstack API. -# (string value) -#additional_capability_urls = https://opendev.org/api/v1/repos/openinfra/interop/contents/add-ons/guidelines - -# This is the base URL that is used for retrieving specific capability -# files. Capability file names will be appended to this URL to get the -# contents of that file. (string value) -#opendev_raw_base_url = https://opendev.org/api/v1/repos/openinfra/interop/raw/ - -# Enable or disable anonymous uploads. If set to False, all clients -# will need to authenticate and sign with a public/private keypair -# previously uploaded to their user account. (boolean value) -#enable_anonymous_upload = true - -# Number of results for one page (integer value) -#results_per_page = 20 - -# The format for start_date and end_date parameters (string value) -#input_date_format = %Y-%m-%d %H:%M:%S - - -[database] - -# -# From oslo.db -# - -# If True, SQLite uses synchronous mode. (boolean value) -#sqlite_synchronous = true - -# The back end to use for the database. (string value) -# Deprecated group/name - [DEFAULT]/db_backend -#backend = sqlalchemy - -# The SQLAlchemy connection string to use to connect to the database. -# (string value) -# Deprecated group/name - [DEFAULT]/sql_connection -# Deprecated group/name - [DATABASE]/sql_connection -# Deprecated group/name - [sql]/connection -#connection = - -# The SQLAlchemy connection string to use to connect to the slave -# database. (string value) -#slave_connection = - -# The SQL mode to be used for MySQL sessions. This option, including -# the default, overrides any server-set SQL mode. To use whatever SQL -# mode is set by the server configuration, set this to no value. -# Example: mysql_sql_mode= (string value) -#mysql_sql_mode = TRADITIONAL - -# If True, transparently enables support for handling MySQL Cluster -# (NDB). (boolean value) -#mysql_enable_ndb = false - -# Connections which have been present in the connection pool longer -# than this number of seconds will be replaced with a new one the next -# time they are checked out from the pool. (integer value) -# Deprecated group/name - [DATABASE]/idle_timeout -# Deprecated group/name - [database]/idle_timeout -# Deprecated group/name - [DEFAULT]/sql_idle_timeout -# Deprecated group/name - [DATABASE]/sql_idle_timeout -# Deprecated group/name - [sql]/idle_timeout -#connection_recycle_time = 3600 - -# Maximum number of SQL connections to keep open in a pool. Setting a -# value of 0 indicates no limit. (integer value) -#max_pool_size = 5 - -# Maximum number of database connection retries during startup. Set to -# -1 to specify an infinite retry count. (integer value) -# Deprecated group/name - [DEFAULT]/sql_max_retries -# Deprecated group/name - [DATABASE]/sql_max_retries -#max_retries = 10 - -# Interval between retries of opening a SQL connection. (integer -# value) -# Deprecated group/name - [DEFAULT]/sql_retry_interval -# Deprecated group/name - [DATABASE]/reconnect_interval -#retry_interval = 10 - -# If set, use this value for max_overflow with SQLAlchemy. 
(integer -# value) -# Deprecated group/name - [DEFAULT]/sql_max_overflow -# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow -#max_overflow = 50 - -# Verbosity of SQL debugging information: 0=None, 100=Everything. -# (integer value) -# Minimum value: 0 -# Maximum value: 100 -# Deprecated group/name - [DEFAULT]/sql_connection_debug -#connection_debug = 0 - -# Add Python stack traces to SQL as comment strings. (boolean value) -# Deprecated group/name - [DEFAULT]/sql_connection_trace -#connection_trace = false - -# If set, use this value for pool_timeout with SQLAlchemy. (integer -# value) -# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout -#pool_timeout = - -# Enable the experimental use of database reconnect on connection -# lost. (boolean value) -#use_db_reconnect = false - -# Seconds between retries of a database transaction. (integer value) -#db_retry_interval = 1 - -# If True, increases the interval between retries of a database -# operation up to db_max_retry_interval. (boolean value) -#db_inc_retry_interval = true - -# If db_inc_retry_interval is set, the maximum seconds between retries -# of a database operation. (integer value) -#db_max_retry_interval = 10 - -# Maximum retries in case of connection error or deadlock error before -# error is raised. Set to -1 to specify an infinite retry count. -# (integer value) -#db_max_retries = 20 - -# Optional URL parameters to append onto the connection URL at connect -# time; specify as param1=value1&param2=value2&... (string value) -#connection_parameters = - - -[osid] - -# -# From refstack -# - -# OpenStackID Auth Server URI. (string value) -#openstack_openid_endpoint = https://openstackid.org/accounts/openid2 - -# OpenStackID logout URI. (string value) -#openid_logout_endpoint = https://openstackid.org/accounts/user/logout - -# Interaction mode. Specifies whether Openstack Id IdP may interact -# with the user to determine the outcome of the request. (string -# value) -#openid_mode = checkid_setup - -# Protocol version. Value identifying the OpenID protocol version -# being used. This value should be "http://specs.openid.net/auth/2.0". -# (string value) -#openid_ns = http://specs.openid.net/auth/2.0 - -# Return endpoint in Refstack's API. Value indicating the endpoint -# where the user should be returned to after signing in. Openstack Id -# Idp only supports HTTPS address types. (string value) -#openid_return_to = /v1/auth/signin_return - -# Claimed identifier. This value must be set to -# "http://specs.openid.net/auth/2.0/identifier_select" or to user -# claimed identity (user local identifier or user owned identity [ex: -# custom html hosted on an owned domain set to html discover]). (string -# value) -#openid_claimed_id = http://specs.openid.net/auth/2.0/identifier_select - -# Alternate identifier. This value must be set to -# http://specs.openid.net/auth/2.0/identifier_select. (string value) -#openid_identity = http://specs.openid.net/auth/2.0/identifier_select - -# Indicates request for user attribute information. This value must be -# set to "http://openid.net/extensions/sreg/1.1". (string value) -#openid_ns_sreg = http://openid.net/extensions/sreg/1.1 - -# Comma-separated list of field names which, if absent from the -# response, will prevent the Consumer from completing the registration -# without End User interaction. The field names are those that are -# specified in the Response Format, with the "openid.sreg." prefix -# removed.
Valid values include: "country", "email", "firstname", -# "language", "lastname" (string value) -#openid_sreg_required = email,fullname diff --git a/package.json b/package.json deleted file mode 100644 index d0f737f7..00000000 --- a/package.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "version": "0.5.0", - "private": true, - "name": "refstack-ui", - "description": "A user interface for RefStack", - "license": "Apache2", - "devDependencies": { - "eslint": "^3.0.0", - "eslint-config-openstack": "4.0.1", - "eslint-plugin-angular": "1.4.0", - "jasmine-core": "2.8.0", - "karma": "^1.7.1", - "karma-chrome-launcher": "^2.2.0", - "karma-cli": "1.0.1", - "karma-jasmine": "^1.1.0", - "angular-mocks": "^1.3.15" - }, - "scripts": { - "prestart": "yarn", - "pretest": "yarn", - "test": "if [ -z $CHROME_BIN ];then export CHROME_BIN=/usr/bin/chromium-browser;fi && karma start ./refstack-ui/tests/karma.conf.js --single-run", - "test-auto-watch": "if [ -z $CHROME_BIN ];then export CHROME_BIN=/usr/bin/chromium-browser;fi && karma start ./refstack-ui/tests/karma.conf.js --auto-watch", - "lint": "eslint -c ./.eslintrc --no-color ./refstack-ui", - "postinstall": "node -e \"try { require('fs').symlinkSync(require('path').resolve('node_modules/@bower_components'), 'refstack-ui/app/assets/lib', 'junction') } catch (e) { }\"" - }, - "dependencies": { - "bower-away": "^1.1.2", - "@bower_components/angular": "angular/bower-angular#1.3.15", - "@bower_components/angular-animate": "angular/bower-angular-animate#~1.3", - "@bower_components/angular-bootstrap": "angular-ui/bootstrap-bower#0.14.3", - "@bower_components/angular-busy": "cgross/angular-busy#4.1.3", - "@bower_components/angular-confirm-modal": "Schlogen/angular-confirm#1.2.3", - "@bower_components/angular-mocks": "angular/bower-angular-mocks#1.3.15", - "@bower_components/angular-resource": "angular/bower-angular-resource#1.3.15", - "@bower_components/angular-ui-router": "angular-ui/angular-ui-router-bower#0.2.13", - "@bower_components/bootstrap": "twbs/bootstrap#3.3.2", - "@bower_components/jquery": "jquery/jquery-dist#>= 1.9.1" - }, - "engines": { - "yarn": ">= 1.0.0" - } -} diff --git a/playbooks/README.rst b/playbooks/README.rst deleted file mode 100644 index 1eba4b72..00000000 --- a/playbooks/README.rst +++ /dev/null @@ -1,12 +0,0 @@ -Playbook for running refstack locally -###################################### - -The playbook is meant for developers to help them with debugging and -reviewing new changes in the refstack project. - -The playbook semi-automates running the refstack server on the localhost. -It downloads refstack role and templates from -`system-config `__ repository -which is used for deploying and maintaining upstream servers, one of which is -refstack. Then it builds the refstack image and spins a container using the -refstack role. 
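
For reference, a minimal invocation might look like the one below. This is only a
sketch: it assumes Ansible is installed on the machine, and that the variables
defined in the playbook shown next (for example ``refstack_url``) are supplied,
either by editing the playbook or via ``--extra-vars``::

    $ ansible-playbook playbooks/run-refstack-in-container.yml \
        -e refstack_url=http://<your-host-ip>

Because the play targets ``localhost`` with ``become: true``, it needs to be run
by a user that can escalate to root (or with ``--ask-become-pass``).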
diff --git a/playbooks/run-refstack-in-container.yml b/playbooks/run-refstack-in-container.yml deleted file mode 100644 index c6d26580..00000000 --- a/playbooks/run-refstack-in-container.yml +++ /dev/null @@ -1,52 +0,0 @@ ---- -- hosts: localhost - become: true - gather_facts: true - vars: - # dir where refstack files for buidling an image, running a container - # will be stored - refstack_dir: /tmp/refstack-docker - refstack_openid_endpoint: '' - # ip address of the machine you're running this on - refstack_url: # 'http://' - # the default credentials for the refstack database - refstack_db_username: refstack - refstack_db_password: Jz4ooq9TL7nc3hX3 - refstack_root_db_password: KbgY3r9HYnEYpgRP - tasks: - - name: Clone system-config repository - git: - repo: https://opendev.org/opendev/system-config.git - dest: "{{ refstack_dir }}/system-config" - - - name: Extract docker files - copy: - src: "{{ refstack_dir }}/system-config/docker/refstack/" - dest: "{{ refstack_dir }}/refstack-docker-files" - remote_src: yes - - - name: Extract refstack role - copy: - src: "{{ refstack_dir }}/system-config/playbooks/roles/refstack/" - dest: "{{ refstack_dir }}/refstack-role" - remote_src: yes - - - name: Delete the rest of system-config content - file: - state: absent - path: "{{ refstack_dir }}/system-config" - - - name: Install Docker - apt: - name: - - docker - - docker-compose - state: present - - - name: Build refstack image - command: docker image build -f Dockerfile -t refstack:1 . - args: - chdir: "{{ refstack_dir }}/refstack-docker-files" - - - include_role: - name: "{{ refstack_dir }}/refstack-role" diff --git a/refstack-ui/app/app.js b/refstack-ui/app/app.js deleted file mode 100644 index 4d03f18a..00000000 --- a/refstack-ui/app/app.js +++ /dev/null @@ -1,222 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - /** Main app module where application dependencies are listed. */ - angular - .module('refstackApp', [ - 'ui.router','ui.bootstrap', 'cgBusy', - 'ngResource', 'angular-confirm' - ]); - - angular - .module('refstackApp') - .config(configureRoutes); - - configureRoutes.$inject = ['$stateProvider', '$urlRouterProvider']; - - /** - * Handle application routing. Specific templates and controllers will be - * used based on the URL route. - */ - function configureRoutes($stateProvider, $urlRouterProvider) { - $urlRouterProvider.otherwise('/'); - $stateProvider. - state('home', { - url: '/', - templateUrl: '/components/home/home.html' - }). - state('about', { - url: '/about', - templateUrl: '/components/about/about.html', - controller: 'AboutController as ctrl' - - }). - state('guidelines', { - url: '/guidelines', - templateUrl: '/components/guidelines/guidelines.html', - controller: 'GuidelinesController as ctrl' - }). - state('communityResults', { - url: '/community_results', - templateUrl: '/components/results/results.html', - controller: 'ResultsController as ctrl' - }). 
- state('userResults', { - url: '/user_results', - templateUrl: '/components/results/results.html', - controller: 'ResultsController as ctrl' - }). - state('resultsDetail', { - url: '/results/:testID', - templateUrl: '/components/results-report' + - '/resultsReport.html', - controller: 'ResultsReportController as ctrl' - }). - state('profile', { - url: '/profile', - templateUrl: '/components/profile/profile.html', - controller: 'ProfileController as ctrl' - }). - state('authFailure', { - url: '/auth_failure', - templateUrl: '/components/home/home.html', - controller: 'AuthFailureController as ctrl' - }). - state('logout', { - url: '/logout', - templateUrl: '/components/logout/logout.html', - controller: 'LogoutController as ctrl' - }). - state('userVendors', { - url: '/user_vendors', - templateUrl: '/components/vendors/vendors.html', - controller: 'VendorsController as ctrl' - }). - state('publicVendors', { - url: '/public_vendors', - templateUrl: '/components/vendors/vendors.html', - controller: 'VendorsController as ctrl' - }). - state('vendor', { - url: '/vendor/:vendorID', - templateUrl: '/components/vendors/vendor.html', - controller: 'VendorController as ctrl' - }). - state('userProducts', { - url: '/user_products', - templateUrl: '/components/products/products.html', - controller: 'ProductsController as ctrl' - }). - state('publicProducts', { - url: '/public_products', - templateUrl: '/components/products/products.html', - controller: 'ProductsController as ctrl' - }). - state('cloud', { - url: '/cloud/:id', - templateUrl: '/components/products/cloud.html', - controller: 'ProductController as ctrl' - }). - state('distro', { - url: '/distro/:id', - templateUrl: '/components/products/distro.html', - controller: 'ProductController as ctrl' - }); - } - - angular - .module('refstackApp') - .config(disableHttpCache); - - disableHttpCache.$inject = ['$httpProvider']; - - /** - * Disable caching in $http requests. This is primarily for IE, as it - * tends to cache Angular IE requests. - */ - function disableHttpCache($httpProvider) { - if (!$httpProvider.defaults.headers.get) { - $httpProvider.defaults.headers.get = {}; - } - $httpProvider.defaults.headers.get['Cache-Control'] = 'no-cache'; - $httpProvider.defaults.headers.get.Pragma = 'no-cache'; - } - - angular - .module('refstackApp') - .run(setup); - - setup.$inject = [ - '$http', '$rootScope', '$window', '$state', 'refstackApiUrl' - ]; - - /** - * Set up the app with injections into $rootscope. This is mainly for auth - * functions. - */ - function setup($http, $rootScope, $window, $state, refstackApiUrl) { - - $rootScope.auth = {}; - $rootScope.auth.doSignIn = doSignIn; - $rootScope.auth.doSignOut = doSignOut; - $rootScope.auth.doSignCheck = doSignCheck; - - var sign_in_url = refstackApiUrl + '/auth/signin'; - var sign_out_url = refstackApiUrl + '/auth/signout'; - var profile_url = refstackApiUrl + '/profile'; - - /** This function initiates a sign in. */ - function doSignIn() { - $window.location.href = sign_in_url; - } - - /** This function will initate a sign out. */ - function doSignOut() { - $rootScope.auth.currentUser = null; - $rootScope.auth.isAuthenticated = false; - $window.location.href = sign_out_url; - } - - /** - * This function checks to see if a user is logged in and - * authenticated. - */ - function doSignCheck() { - return $http.get(profile_url, {withCredentials: true}). - success(function (data) { - $rootScope.auth.currentUser = data; - $rootScope.auth.isAuthenticated = true; - }). 
- error(function () { - $rootScope.auth.currentUser = null; - $rootScope.auth.isAuthenticated = false; - }); - } - - $rootScope.auth.doSignCheck(); - } - - angular - .element(document) - .ready(loadConfig); - - /** - * Load config and start up the angular application. - */ - function loadConfig() { - - var $http = angular.injector(['ng']).get('$http'); - - /** - * Store config variables as constants, and start the app. - */ - function startApp(config) { - // Add config options as constants. - angular.forEach(config, function(value, key) { - angular.module('refstackApp').constant(key, value); - }); - - angular.bootstrap(document, ['refstackApp']); - } - - $http.get('config.json').success(function (data) { - startApp(data); - }).error(function () { - startApp({}); - }); - } -})(); diff --git a/refstack-ui/app/assets/css/style.css b/refstack-ui/app/assets/css/style.css deleted file mode 100644 index eea5de35..00000000 --- a/refstack-ui/app/assets/css/style.css +++ /dev/null @@ -1,251 +0,0 @@ -body { - background: white; - color: black; - font-family: 'Helvetica Neue', 'Helvetica', 'Verdana', sans-serif; -} - -a { - text-decoration: none; - cursor: pointer; -} - -a:hover { - text-decoration: underline; -} - -.heading { - font-size: 3em; - font-weight: bold; - margin-bottom: 10px; - margin-top: 10px; -} - -.heading img { - height: 50px; - vertical-align: text-bottom; -} - -form { - margin: 0; - padding: 0; - border: 0; -} - -fieldset { - border: 0; -} - -input.error { - background: #FAFF78; -} - -h1, h2, h3, h4, h5, h6 { - font-family: 'Futura-CondensedExtraBold', 'Futura', 'Helvetica', sans-serif; -} - -.footer { - background: none repeat scroll 0% 0% #333; -} - -.required { - color: #1D6503; -} - -.advisory { - color: #9F8501; -} - -.deprecated { - color: #B03838; -} - -.removed { - color: #801601; -} - -.checkbox { - word-spacing: 20px; - background: #F8F8F8; - padding: 10px; -} - -.checkbox-test-list { - word-spacing: normal; - background: none; -} - -.checkbox-test-list .info-hover { - font-size: 12px; - color: #878787; - cursor: help; -} - -.checkbox-verified { - border: 1px solid #A9A9A9; - text-align: center; - width: 150px; -} - -.capabilities { - color: #4B4B4B; -} - -.capabilities .capability-list-item { - border-bottom: 2px solid #AFAFAF; - padding-bottom: .6em; -} - -.capabilities .capability-name { - font-size: 1.3em; - font-weight: bold; - color: black; -} - -#criteria { - color: #4B4B4B; -} - -.criterion-name { - font-size: 1.1em; - font-weight: bold; -} - -.list-inline li:before { - content: '\00BB'; -} - -.program-about { - font-size: .8em; - padding-top: .3em; - float: right; -} - -.jumbotron .left { - width: 70%; -} - -.container .jumbotron { - background: #F6F6F6; - border-top: 2px solid #C9C9C9; - border-bottom: 2px solid #C9C9C9; - border-radius: 0; -} - -.jumbotron .right { - width: 30%; -} - -.jumbotron img { - width: 90%; - height: 70%; -} - -.jumbotron .openstack-intro__logo { - width: 100%; -} - -.result-filters { - padding-bottom: 10px; - border-top: 2px solid #C9C9C9; - border-bottom: 2px solid #C9C9C9; - margin-bottom: 15px; -} - -@media (min-width: 450px) { - .jumbotron .openstack-intro__logo { - width: 30%; - } - .openstack-intro__logo img { - float: right; - } - .openstack-intro__content > *:first-child { - margin-top: 0; - } - .openstack-intro__content > *:last-child { - margin-bottom: 0; - } -} -@media (min-width: 768px) { - .jumbotron.openstack-intro { - padding: 40px; - } -} - -.yes { - background: #1A911E; - color: white; - padding-left: .5em; - 
padding-right: .5em; -} - -.no { - background: #BC0505; - color: white; - padding-left: .5em; - padding-right: .5em; -} - -.button-margin { - margin-bottom: 1em; -} - -.tests-modal-content { - overflow: auto; - max-height: calc(100vh - 100px); -} - -.tests-modal-content textarea { - font-size: .9em; - resize: none; -} - -.test-detail { - padding-left: 10px; -} - -.test-detail ul { - padding-left: 20px; -} - -.test-detail-report { - font-size: .9em; -} - -a.glyphicon { - text-decoration: none; -} - -.test-list-dl { - word-spacing: normal; -} - -.test-list-dl:hover { - text-decoration: none; -} - -.modal-body .row { - margin-bottom: 10px; -} - -.about-sidebar { - width: 20%; - float: left; - padding-right: 2px; - padding-top: 25px; -} - -.about-content { - width: 80%; - float: left; - padding-left: 5%; - -} - -.about-option { - padding: 5px 5px 5px 10px; -} - -.about-active { - background: #f2f2f2; - border-left: 2px solid orange; -} \ No newline at end of file diff --git a/refstack-ui/app/assets/img/OpenStack_Project_Refstack_mascot_90x90.png b/refstack-ui/app/assets/img/OpenStack_Project_Refstack_mascot_90x90.png deleted file mode 100755 index 4695090d..00000000 Binary files a/refstack-ui/app/assets/img/OpenStack_Project_Refstack_mascot_90x90.png and /dev/null differ diff --git a/refstack-ui/app/assets/img/openstack-logo.png b/refstack-ui/app/assets/img/openstack-logo.png deleted file mode 100644 index 826bf2e5..00000000 Binary files a/refstack-ui/app/assets/img/openstack-logo.png and /dev/null differ diff --git a/refstack-ui/app/assets/img/refstack-logo.png b/refstack-ui/app/assets/img/refstack-logo.png deleted file mode 100755 index fc45f3ee..00000000 Binary files a/refstack-ui/app/assets/img/refstack-logo.png and /dev/null differ diff --git a/refstack-ui/app/components/about/about.html b/refstack-ui/app/components/about/about.html deleted file mode 100644 index 348318cd..00000000 --- a/refstack-ui/app/components/about/about.html +++ /dev/null @@ -1,13 +0,0 @@ -
- -
-
-
diff --git a/refstack-ui/app/components/about/aboutController.js b/refstack-ui/app/components/about/aboutController.js deleted file mode 100644 index 2c81b6dd..00000000 --- a/refstack-ui/app/components/about/aboutController.js +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('AboutController', AboutController); - - AboutController.$inject = ['$location']; - - /** - * RefStack About Controller - * This controller handles the about page and the multiple templates - * associated to the page. - */ - function AboutController($location) { - var ctrl = this; - - ctrl.selectOption = selectOption; - ctrl.getHash = getHash; - - ctrl.options = { - 'about' : { - 'title': 'About RefStack', - 'template': 'components/about/templates/overview.html', - 'order': 1 - }, - 'uploading-your-results': { - 'title': 'Uploading Your Results', - 'template': 'components/about/templates/' + - 'uploading_private_results.html', - 'order': 2 - }, - 'managing-results': { - 'title': 'Managing Results', - 'template': 'components/about/templates/' + - 'test_result_management.html', - 'order': 3 - }, - 'vendors': { - 'title': 'Vendors', - 'template': 'components/about/templates/VendorEntity.html', - 'order': 4 - }, - 'products': { - 'title': 'Products', - 'template': 'components/about/templates/ProductEntity.html', - 'order': 5 - } - }; - - /** - * Given an option key, mark it as selected and set the corresponding - * template and URL hash. - */ - function selectOption(key) { - ctrl.selected = key; - ctrl.template = ctrl.options[key].template; - $location.hash(key); - } - - /** - * Get the hash in the URL and select it if possible. - */ - function getHash() { - var hash = $location.hash(); - if (hash && hash in ctrl.options) { - ctrl.selectOption(hash); - } else { - ctrl.selectOption('about'); - } - } - - ctrl.getHash(); - } -})(); diff --git a/refstack-ui/app/components/auth-failure/authFailureController.js b/refstack-ui/app/components/auth-failure/authFailureController.js deleted file mode 100644 index 1a9bb734..00000000 --- a/refstack-ui/app/components/auth-failure/authFailureController.js +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('AuthFailureController', AuthFailureController); - - AuthFailureController.$inject = ['$location', '$state', 'raiseAlert']; - /** - * Refstack Auth Failure Controller - * This controller handles messages from Refstack API if user auth fails. - */ - function AuthFailureController($location, $state, raiseAlert) { - var ctrl = this; - ctrl.message = $location.search().message; - raiseAlert('danger', 'Authentication Failure:', ctrl.message); - $state.go('home'); - } -})(); diff --git a/refstack-ui/app/components/guidelines/guidelines.html b/refstack-ui/app/components/guidelines/guidelines.html deleted file mode 100644 index 8592da45..00000000 --- a/refstack-ui/app/components/guidelines/guidelines.html +++ /dev/null @@ -1,85 +0,0 @@ -

OpenStack Powered™ Guidelines

- - -
-
- Version: - - -
-
- Target Program: - About - -
-
- -
-
- Guideline Status: - {{ctrl.guidelineStatus | capitalize}} -
- -
- Corresponding OpenStack Releases: -
    -
  • - {{release | capitalize}} -
  • -
-
- -Capability Status: -
- - - - - - - Test List - -
- - -

Tests marked with are tests flagged by Interop Working Group.

- - -
-
- - -
- - diff --git a/refstack-ui/app/components/guidelines/guidelinesController.js b/refstack-ui/app/components/guidelines/guidelinesController.js deleted file mode 100644 index 0ab6dd44..00000000 --- a/refstack-ui/app/components/guidelines/guidelinesController.js +++ /dev/null @@ -1,392 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('GuidelinesController', GuidelinesController); - - GuidelinesController.$inject = - ['$filter', '$http', '$uibModal', 'refstackApiUrl']; - - /** - * RefStack Guidelines Controller - * This controller is for the '/guidelines' page where a user can browse - * through tests belonging to Interop WG defined capabilities. - */ - function GuidelinesController($filter ,$http, $uibModal, refstackApiUrl) { - var ctrl = this; - - ctrl.getVersionList = getVersionList; - ctrl.update = update; - ctrl.updateTargetCapabilities = updateTargetCapabilities; - ctrl.filterStatus = filterStatus; - ctrl.getObjectLength = getObjectLength; - ctrl.openTestListModal = openTestListModal; - ctrl.updateVersionList = updateVersionList; - ctrl.gl_type = 'powered'; - - /** The target OpenStack marketing program to show capabilities for. */ - ctrl.target = 'platform'; - - /** The various possible capability statuses. */ - ctrl.status = { - required: true, - advisory: false, - deprecated: false, - removed: false - }; - - /** - * The template to load for displaying capability details. - */ - ctrl.detailsTemplate = 'components/guidelines/partials/' + - 'guidelineDetails.html'; - - /** - * Update the array of dictionary objects which stores data - * pertaining to each guideline, sorting them in descending - * order by guideline name. After these are sorted, the - * function to update the capabilities is called. - */ - function updateVersionList() { - let gl_files = ctrl.guidelineData[ctrl.gl_type]; - ctrl.versionList = $filter('orderBy')(gl_files, 'name', true); - // Default to the first approved guideline which is expected - // to be at index 1. - ctrl.version = ctrl.versionList[1]; - update(); - } - - /** - * Retrieve a dictionary object comprised of available guideline types - * and and an array of dictionary objects containing file info about - * each guideline file pertaining to that particular guideline type. - * After a successful API call, the function to sort and update the - * version list is called. - */ - function getVersionList() { - var content_url = refstackApiUrl + '/guidelines'; - ctrl.versionsRequest = - $http.get(content_url).success(function (data) { - ctrl.guidelineData = data; - updateVersionList(); - }).error(function (error) { - ctrl.showError = true; - ctrl.error = 'Error retrieving version list: ' + - angular.toJson(error); - }); - } - - /** - * This will contact the Refstack API server to retrieve the JSON - * content of the guideline file corresponding to the selected - * version. 
- */ - function update() { - ctrl.content_url = refstackApiUrl + '/guidelines/' - + ctrl.version.file; - let get_params = {'gl_file': ctrl.version.file}; - ctrl.capsRequest = - $http.get(ctrl.content_url, get_params).success( - function (data) { - ctrl.guidelines = data; - if ('metadata' in data && data.metadata.schema >= '2.0') { - ctrl.schema = data.metadata.schema; - ctrl.criteria = data.metadata.scoring.criteria; - ctrl.releases = - data.metadata.os_trademark_approval.releases; - ctrl.guidelineStatus = - data.metadata.os_trademark_approval.status; - } else { - ctrl.schema = data.schema; - ctrl.criteria = data.criteria; - ctrl.releases = data.releases; - ctrl.guidelineStatus = data.status; - } - ctrl.updateTargetCapabilities(); - - }).error(function (error) { - ctrl.showError = true; - ctrl.guidelines = null; - ctrl.error = 'Error retrieving guideline content: ' + - angular.toJson(error); - }); - } - - /** - * This will update the scope's 'targetCapabilities' object with - * capabilities belonging to the selected OpenStack marketing program - * (programs typically correspond to 'components' in the Interop WG - * schema). Each capability will have its status mapped to it. - */ - function updateTargetCapabilities() { - ctrl.targetCapabilities = {}; - var components = ctrl.guidelines.components; - var targetCaps = ctrl.targetCapabilities; - var targetComponents = null; - - var old_type = ctrl.gl_type; - if (ctrl.target === 'dns' || - ctrl.target === 'orchestration' || - ctrl.target === 'shared_file_system' || - ctrl.target === 'load_balancer' || - ctrl.target === 'key_manager' - ) { - ctrl.gl_type = ctrl.target; - } else { - ctrl.gl_type = 'powered'; - } - // If it has not been updated since the last program type change, - // will need to update the list - if (old_type !== ctrl.gl_type) { - updateVersionList(); - return; - } - - // The 'platform' target is comprised of multiple components, so - // we need to get the capabilities belonging to each of its - // components. - if (ctrl.target === 'platform' || ctrl.schema >= '2.0') { - if ('add-ons' in ctrl.guidelines) { - targetComponents = ['os_powered_' + ctrl.target]; - } else if (ctrl.schema >= '2.0') { - var platformsMap = { - 'platform': 'OpenStack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Storage' - }; - - targetComponents = ctrl.guidelines.platforms[ - platformsMap[ctrl.target]].components.map( - function(c) { - return c.name; - } - ); - } else { - targetComponents = ctrl.guidelines.platform.required; - } - - // This will contain status priority values, where lower - // values mean higher priorities. - var statusMap = { - required: 1, - advisory: 2, - deprecated: 3, - removed: 4 - }; - - // For each component required for the platform program. - angular.forEach(targetComponents, function (component) { - // Get each capability list belonging to each status. - var componentList = components[component]; - if (ctrl.schema >= '2.0') { - componentList = componentList.capabilities; - } - angular.forEach(componentList, - function (caps, status) { - // For each capability. - angular.forEach(caps, function(cap) { - // If the capability has already been added. - if (cap in targetCaps) { - // If the status priority value is less - // than the saved priority value, update - // the value. 
- if (statusMap[status] < - statusMap[targetCaps[cap]]) { - targetCaps[cap] = status; - } - } else { - targetCaps[cap] = status; - } - }); - }); - }); - } else { - angular.forEach(components[ctrl.target], - function (caps, status) { - angular.forEach(caps, function(cap) { - targetCaps[cap] = status; - }); - }); - } - } - - /** - * This filter will check if a capability's status corresponds - * to a status that is checked/selected in the UI. This filter - * is meant to be used with the ng-repeat directive. - * @param {Object} capability - * @returns {Boolean} True if capability's status is selected - */ - function filterStatus(capability) { - var caps = ctrl.targetCapabilities; - return ctrl.status.required && - caps[capability.id] === 'required' || - ctrl.status.advisory && - caps[capability.id] === 'advisory' || - ctrl.status.deprecated && - caps[capability.id] === 'deprecated' || - ctrl.status.removed && - caps[capability.id] === 'removed'; - } - - /** - * This function will get the length of an Object/dict based on - * the number of keys it has. - * @param {Object} object - * @returns {Number} length of object - */ - function getObjectLength(object) { - return Object.keys(object).length; - } - - /** - * This will open the modal that will show a list of all tests - * belonging to capabilities with the selected status(es). - */ - function openTestListModal() { - $uibModal.open({ - templateUrl: '/components/guidelines/partials' + - '/testListModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'TestListModalController as modal', - size: 'lg', - resolve: { - version: function () { - return ctrl.version.name.slice(0, -5); - }, - version_file: function() { - return ctrl.version.file; - }, - target: function () { - return ctrl.target; - }, - status: function () { - return ctrl.status; - } - } - }); - } - ctrl.getVersionList(); - } - - angular - .module('refstackApp') - .controller('TestListModalController', TestListModalController); - - TestListModalController.$inject = [ - '$uibModalInstance', '$http', 'version', - 'version_file', 'target', 'status', - 'refstackApiUrl' - ]; - - /** - * Test List Modal Controller - * This controller is for the modal that appears if a user wants to see the - * test list corresponding to Interop WG capabilities with the selected - * statuses. - */ - function TestListModalController($uibModalInstance, $http, version, - version_file, target, status, refstackApiUrl) { - - var ctrl = this; - - ctrl.version = version; - ctrl.version_file = version_file; - ctrl.target = target; - ctrl.status = status; - ctrl.close = close; - ctrl.updateTestListString = updateTestListString; - - ctrl.aliases = true; - ctrl.flagged = false; - - // Check if the API URL is absolute or relative. - if (refstackApiUrl.indexOf('http') > -1) { - ctrl.url = refstackApiUrl; - } else { - ctrl.url = location.protocol + '//' + location.host + - refstackApiUrl; - } - - /** - * This function will close/dismiss the modal. - */ - function close() { - $uibModalInstance.dismiss('exit'); - } - - /** - * This function will return a list of statuses based on which ones - * are selected. - */ - function getStatusList() { - var statusList = []; - angular.forEach(ctrl.status, function(value, key) { - if (value) { - statusList.push(key); - } - }); - return statusList; - } - - /** - * This will get the list of tests from the API and update the - * controller's test list string variable. 
- */ - function updateTestListString() { - var statuses = getStatusList(); - if (!statuses.length) { - ctrl.error = 'No tests matching selected criteria.'; - return; - } - ctrl.testListUrl = [ - ctrl.url, '/guidelines/', ctrl.version_file, '/tests?', - 'target=', ctrl.target, '&', - 'type=', statuses.join(','), '&', - 'alias=', ctrl.aliases.toString(), '&', - 'flag=', ctrl.flagged.toString() - ].join(''); - ctrl.testListRequest = - $http.get(ctrl.testListUrl). - then(function successCallback(response) { - ctrl.error = null; - ctrl.testListString = response.data; - if (!ctrl.testListString) { - ctrl.testListCount = 0; - } else { - ctrl.testListCount = - ctrl.testListString.split('\n').length; - } - }, function errorCallback(response) { - ctrl.testListString = null; - ctrl.testListCount = null; - if (angular.isObject(response.data) && - response.data.message) { - ctrl.error = 'Error retrieving test list: ' + - response.data.message; - } else { - ctrl.error = 'Unknown error retrieving test list.'; - } - }); - } - - updateTestListString(); - } -})(); diff --git a/refstack-ui/app/components/guidelines/partials/guidelineDetails.html b/refstack-ui/app/components/guidelines/partials/guidelineDetails.html deleted file mode 100644 index 2b345867..00000000 --- a/refstack-ui/app/components/guidelines/partials/guidelineDetails.html +++ /dev/null @@ -1,50 +0,0 @@ - - -
    -
  1. - {{capability.id}}
    - {{capability.description}}
    - Status: {{ctrl.targetCapabilities[capability.id]}}
    - Project: {{capability.project | capitalize}}
    - Achievements ({{capability.achievements.length}})
    -
      -
    1. - {{achievement}} -
    2. -
    - - Tests ({{ctrl.getObjectLength(capability.tests)}}) -
      -
    • - - {{test}} -
    • -
    • - - {{testName}} -
      - Aliases: -
      • {{alias}}
      -
      -
    • -
    -
  2. -
- -
-
-

Criteria

-
-
    -
  • - {{criterion.name}}
    - {{criterion.Description || criterion.description}}
    - Weight: {{criterion.weight}} -
  • -
-
-
diff --git a/refstack-ui/app/components/guidelines/partials/testListModal.html b/refstack-ui/app/components/guidelines/partials/testListModal.html deleted file mode 100644 index 7fe85717..00000000 --- a/refstack-ui/app/components/guidelines/partials/testListModal.html +++ /dev/null @@ -1,46 +0,0 @@ - diff --git a/refstack-ui/app/components/home/home.html b/refstack-ui/app/components/home/home.html deleted file mode 100644 index 6a45173a..00000000 --- a/refstack-ui/app/components/home/home.html +++ /dev/null @@ -1,38 +0,0 @@ -
- -
-

OpenStack Interoperability

-

RefStack is a source of tools for - OpenStack interoperability - testing.

-
-
-
- -
-
-

What is RefStack?

-
    -
  • Toolset for testing interoperability between OpenStack clouds.
  • -
  • Database backed website supporting collection and publication of - community test results for OpenStack.
  • -
  • User interface to display individual test run results.
  • -
-
- -
-

OpenStack Marketing Programs

-
    -
  • OpenStack Powered Platform
  • -
  • OpenStack Powered Compute
  • -
  • OpenStack Powered Object Storage
  • -
  • OpenStack with DNS
  • -
  • OpenStack with Orchestration
  • -
  • OpenStack with Shared File System
  • -
  • OpenStack with Load Balancer
  • -
  • OpenStack with Key Manager
  • -
-
-
diff --git a/refstack-ui/app/components/logout/logout.html b/refstack-ui/app/components/logout/logout.html deleted file mode 100644 index 38a5c369..00000000 --- a/refstack-ui/app/components/logout/logout.html +++ /dev/null @@ -1 +0,0 @@ -
diff --git a/refstack-ui/app/components/logout/logoutController.js b/refstack-ui/app/components/logout/logoutController.js deleted file mode 100644 index 86acb4af..00000000 --- a/refstack-ui/app/components/logout/logoutController.js +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('LogoutController', LogoutController); - - LogoutController.$inject = [ - '$location', '$window', '$timeout' - ]; - - /** - * Refstack Logout Controller - * This controller handles logging out. In order to fully logout, the - * openstackid_session cookie must also be removed. The way to do that - * is to have the user's browser make a request to the openstackid logout - * page. We do this by placing the logout link as the src for an html - * image. After some time, the user is redirected home. - */ - function LogoutController($location, $window, $timeout) { - var ctrl = this; - - ctrl.openid_logout_url = $location.search().openid_logout; - var img = new Image(0, 0); - img.src = ctrl.openid_logout_url; - ctrl.redirectWait = $timeout(function() { - $window.location.href = '/'; - }, 500); - } -})(); diff --git a/refstack-ui/app/components/products/cloud.html b/refstack-ui/app/components/products/cloud.html deleted file mode 100644 index e85220c6..00000000 --- a/refstack-ui/app/components/products/cloud.html +++ /dev/null @@ -1,34 +0,0 @@ -

Cloud Product

-
-
-
-
-
- Name: {{ctrl.product.name}}
- Product ID: {{ctrl.id}}
- Description: {{ctrl.product.description}}
- CPID: {{ctrl.nullVersion.cpid}}
- Publicity: {{ctrl.product.public ? 'Public' : 'Private'}}
- Vendor Name: {{ctrl.vendor.name}}
-
- Properties: -
    -
  • - {{key}}: {{value}} -
  • -
-
-
-
-
-
-
-
-
-
-
- diff --git a/refstack-ui/app/components/products/distro.html b/refstack-ui/app/components/products/distro.html deleted file mode 100644 index 12ab93e6..00000000 --- a/refstack-ui/app/components/products/distro.html +++ /dev/null @@ -1,34 +0,0 @@ -

Distro Product

-
-
-
-
-
- Name: {{ctrl.product.name}}
- Product ID: {{ctrl.id}}
- Description: {{ctrl.product.description}}
- CPID: {{ctrl.nullVersion.cpid}}
- Publicity: {{ctrl.product.public ? 'Public' : 'Private'}}
- Vendor Name: {{ctrl.vendor.name}}
-
- Properties: -
    -
  • - {{key}}: {{value}} -
  • -
-
-
-
-
-
-
-
-
-
-
- diff --git a/refstack-ui/app/components/products/partials/management.html b/refstack-ui/app/components/products/partials/management.html deleted file mode 100644 index b41cf300..00000000 --- a/refstack-ui/app/components/products/partials/management.html +++ /dev/null @@ -1,18 +0,0 @@ - diff --git a/refstack-ui/app/components/products/partials/productEditModal.html b/refstack-ui/app/components/products/partials/productEditModal.html deleted file mode 100644 index cd1e7459..00000000 --- a/refstack-ui/app/components/products/partials/productEditModal.html +++ /dev/null @@ -1,75 +0,0 @@ - diff --git a/refstack-ui/app/components/products/partials/testsTable.html b/refstack-ui/app/components/products/partials/testsTable.html deleted file mode 100644 index e1d93a38..00000000 --- a/refstack-ui/app/components/products/partials/testsTable.html +++ /dev/null @@ -1,142 +0,0 @@ -

Test Runs on Product

-
- - - - - - - - - - - - - - - - - - - - - - - - - -
Upload DateTest Run IDProduct VersionShared
- - - - - {{result.created_at}}{{result.id}}{{result.product_version.version}} - -
- Publicly Shared: - Yes - - No - - - - -
- - Associated Guideline: - - None - - - {{result.meta.guideline.slice(0, -5)}} - - - - - -
- - Associated Target Program: - - None - - - {{ctrl.targetMappings[result.meta.target]}} - - - - - -
-
- - - Unassociate test result from product - - -
- -
- - -
- - diff --git a/refstack-ui/app/components/products/partials/versions.html b/refstack-ui/app/components/products/partials/versions.html deleted file mode 100644 index 6f601558..00000000 --- a/refstack-ui/app/components/products/partials/versions.html +++ /dev/null @@ -1,29 +0,0 @@ -Version(s) Available: - - - {{item.version}} - - {{item.version}} - -  - - - -
-
-
- - - - -
-
-
diff --git a/refstack-ui/app/components/products/partials/versionsModal.html b/refstack-ui/app/components/products/partials/versionsModal.html deleted file mode 100644 index 26daf87c..00000000 --- a/refstack-ui/app/components/products/partials/versionsModal.html +++ /dev/null @@ -1,51 +0,0 @@ - diff --git a/refstack-ui/app/components/products/productController.js b/refstack-ui/app/components/products/productController.js deleted file mode 100644 index be9dd67d..00000000 --- a/refstack-ui/app/components/products/productController.js +++ /dev/null @@ -1,522 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('ProductController', ProductController); - - ProductController.$inject = [ - '$scope', '$http', '$state', '$stateParams', '$window', '$uibModal', - 'refstackApiUrl', 'raiseAlert' - ]; - - /** - * RefStack Product Controller - * This controller is for the '/product/' details page where owner can - * view details of the product. - */ - function ProductController($scope, $http, $state, $stateParams, - $window, $uibModal, refstackApiUrl, raiseAlert) { - var ctrl = this; - - ctrl.getProduct = getProduct; - ctrl.getProductVersions = getProductVersions; - ctrl.deleteProduct = deleteProduct; - ctrl.deleteProductVersion = deleteProductVersion; - ctrl.getProductTests = getProductTests; - ctrl.switchProductPublicity = switchProductPublicity; - ctrl.associateTestMeta = associateTestMeta; - ctrl.getGuidelineVersionList = getGuidelineVersionList; - ctrl.addProductVersion = addProductVersion; - ctrl.unassociateTest = unassociateTest; - ctrl.openVersionModal = openVersionModal; - ctrl.openProductEditModal = openProductEditModal; - - /** The product id extracted from the URL route. */ - ctrl.id = $stateParams.id; - ctrl.productVersions = []; - - if (!$scope.auth.isAuthenticated) { - $state.go('home'); - } - - /** Mappings of Interop WG components to marketing program names. */ - ctrl.targetMappings = { - 'platform': 'Openstack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Object Storage', - 'dns': 'OpenStack with DNS', - 'orchestration': 'OpenStack with Orchestration', - 'shared_file_system': 'OpenStack with Shared File System', - 'load_balancer': 'OpenStack with Load Balancer', - 'key_manager': 'OpenStack with Key Manager' - }; - - // Pagination controls. - ctrl.currentPage = 1; - ctrl.itemsPerPage = 20; - ctrl.maxSize = 5; - - ctrl.getProduct(); - ctrl.getProductVersions(); - ctrl.getProductTests(); - - /** - * This will contact the Refstack API to get a product information. 
- */ - function getProduct() { - ctrl.showError = false; - ctrl.product = null; - var content_url = refstackApiUrl + '/products/' + ctrl.id; - ctrl.productRequest = $http.get(content_url).success( - function(data) { - ctrl.product = data; - ctrl.productProperties = - angular.fromJson(data.properties); - } - ).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving from server: ' + - angular.toJson(error); - }).then(function() { - var url = refstackApiUrl + '/vendors/' + - ctrl.product.organization_id; - $http.get(url).success(function(data) { - ctrl.vendor = data; - }).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving from server: ' + - angular.toJson(error); - }); - }); - } - - /** - * This will contact the Refstack API to get product versions. - */ - function getProductVersions() { - ctrl.showError = false; - var content_url = refstackApiUrl + '/products/' + ctrl.id + - '/versions'; - ctrl.productVersionsRequest = $http.get(content_url).success( - function(data) { - ctrl.productVersions = data; - - // Determine the null version. - for (var i = 0; i < data.length; i++) { - if (data[i].version === null) { - ctrl.nullVersion = data[i]; - break; - } - } - } - ).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving versions from server: ' + - angular.toJson(error); - }); - } - - /** - * This will delete the product. - */ - function deleteProduct() { - var url = [refstackApiUrl, '/products/', ctrl.id].join(''); - $http.delete(url).success(function () { - $window.location.href = '/'; - }).error(function (error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * This will delete the given product versions. - */ - function deleteProductVersion(versionId) { - var url = [ - refstackApiUrl, '/products/', ctrl.id, - '/versions/', versionId ].join(''); - $http.delete(url).success(function () { - ctrl.getProductVersions(); - }).error(function (error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * Set a POST request to the API server to add a new version for - * the product. - */ - function addProductVersion() { - var url = [refstackApiUrl, '/products/', ctrl.id, - '/versions'].join(''); - ctrl.addVersionRequest = $http.post(url, - {'version': ctrl.newProductVersion}) - .success(function (data) { - ctrl.productVersions.push(data); - ctrl.newProductVersion = ''; - ctrl.showNewVersionInput = false; - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * Get tests runs associated with the current product. - */ - function getProductTests() { - ctrl.showTestsError = false; - var content_url = refstackApiUrl + '/results' + - '?page=' + ctrl.currentPage + '&product_id=' - + ctrl.id; - - ctrl.testsRequest = $http.get(content_url).success( - function(data) { - ctrl.testsData = data.results; - ctrl.totalItems = data.pagination.total_pages * - ctrl.itemsPerPage; - ctrl.currentPage = data.pagination.current_page; - } - ).error(function(error) { - ctrl.showTestsError = true; - ctrl.testsError = - 'Error retrieving tests from server: ' + - angular.toJson(error); - }); - } - - /** - * This will switch public/private property of the product. 
- */ - function switchProductPublicity() { - var url = [refstackApiUrl, '/products/', ctrl.id].join(''); - $http.put(url, {public: !ctrl.product.public}).success( - function (data) { - ctrl.product = data; - ctrl.productProperties = angular.fromJson(data.properties); - }).error(function (error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * This will send an API request in order to associate a metadata - * key-value pair with the given testId - * @param {Number} index - index of the test object in the results list - * @param {String} key - metadata key - * @param {String} value - metadata value - */ - function associateTestMeta(index, key, value) { - var testId = ctrl.testsData[index].id; - var metaUrl = [ - refstackApiUrl, '/results/', testId, '/meta/', key - ].join(''); - - var editFlag = key + 'Edit'; - if (value) { - ctrl.associateRequest = $http.post(metaUrl, value) - .success(function () { - ctrl.testsData[index][editFlag] = false; - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } else { - ctrl.unassociateRequest = $http.delete(metaUrl) - .success(function () { - ctrl.testsData[index][editFlag] = false; - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - } - - /** - * Retrieve an array of available capability files from the Refstack - * API server, sort this array reverse-alphabetically, and store it in - * a scoped variable. - * Sample API return array: ["2015.03.json", "2015.04.json"] - */ - function getGuidelineVersionList() { - if (ctrl.versionList) { - return; - } - var content_url = refstackApiUrl + '/guidelines'; - ctrl.versionsRequest = - $http.get(content_url).success(function (data) { - ctrl.versionList = data.sort().reverse(); - }).error(function (error) { - raiseAlert('danger', error.title, - 'Unable to retrieve version list'); - }); - } - - /** - * Send a PUT request to the API server to unassociate a product with - * a test result. - */ - function unassociateTest(index) { - var testId = ctrl.testsData[index].id; - var url = refstackApiUrl + '/results/' + testId; - ctrl.associateRequest = $http.put(url, {'product_version_id': null}) - .success(function () { - ctrl.testsData.splice(index, 1); - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * This will open the modal that will allow a product version - * to be managed. - */ - function openVersionModal(version) { - $uibModal.open({ - templateUrl: '/components/products/partials' + - '/versionsModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'ProductVersionModalController as modal', - size: 'lg', - resolve: { - version: function () { - return version; - }, - parent: function () { - return ctrl; - } - } - }); - } - - /** - * This will open the modal that will allow product details - * to be edited. 
- */ - function openProductEditModal() { - $uibModal.open({ - templateUrl: '/components/products/partials' + - '/productEditModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'ProductEditModalController as modal', - size: 'lg', - resolve: { - product: function () { - return ctrl.product; - }, - version: function () { - return ctrl.nullVersion; - } - } - }); - - } - } - - angular - .module('refstackApp') - .controller('ProductVersionModalController', - ProductVersionModalController); - - ProductVersionModalController.$inject = [ - '$uibModalInstance', '$http', 'refstackApiUrl', 'version', 'parent' - ]; - - /** - * Product Version Modal Controller - * This controller is for the modal that appears if a user wants to - * manage a product version. - */ - function ProductVersionModalController($uibModalInstance, $http, - refstackApiUrl, version, parent) { - - var ctrl = this; - - ctrl.version = angular.copy(version); - ctrl.parent = parent; - - ctrl.close = close; - ctrl.deleteProductVersion = deleteProductVersion; - ctrl.saveChanges = saveChanges; - - /** - * This function will close/dismiss the modal. - */ - function close() { - $uibModalInstance.dismiss('exit'); - } - - /** - * Call the parent function to delete a version, then close the modal. - */ - function deleteProductVersion() { - ctrl.parent.deleteProductVersion(ctrl.version.id); - ctrl.close(); - } - - /** - * This will update the current version, saving changes. - */ - function saveChanges() { - ctrl.showSuccess = false; - ctrl.showError = false; - var url = [ - refstackApiUrl, '/products/', ctrl.version.product_id, - '/versions/', ctrl.version.id ].join(''); - var content = {'cpid': ctrl.version.cpid}; - $http.put(url, content).success(function() { - // Update the original version object. - version.cpid = ctrl.version.cpid; - ctrl.showSuccess = true; - }).error(function(error) { - ctrl.showError = true; - ctrl.error = error.detail; - }); - } - - } - - angular - .module('refstackApp') - .controller('ProductEditModalController', ProductEditModalController); - - ProductEditModalController.$inject = [ - '$uibModalInstance', '$http', '$state', 'product', - 'version', 'refstackApiUrl' - ]; - - /** - * Product Edit Modal Controller - * This controls the modal that allows editing a product. - */ - function ProductEditModalController($uibModalInstance, $http, - $state, product, version, refstackApiUrl) { - - var ctrl = this; - - ctrl.close = close; - ctrl.addField = addField; - ctrl.saveChanges = saveChanges; - ctrl.removeProperty = removeProperty; - - ctrl.product = angular.copy(product); - ctrl.productName = product.name; - ctrl.productProperties = []; - ctrl.productVersion = angular.copy(version); - ctrl.originalCpid = version ? version.cpid : null; - - parseProductProperties(); - - /** - * Close the product edit modal. - */ - function close() { - $uibModalInstance.dismiss('exit'); - } - - /** - * Push a blank property key-value pair into the productProperties - * array. This will spawn new input boxes. - */ - function addField() { - ctrl.productProperties.push({'key': '', 'value': ''}); - } - - /** - * Send a PUT request to the server with the changes. 
- */ - function saveChanges() { - ctrl.showError = false; - ctrl.showSuccess = false; - var url = [refstackApiUrl, '/products/', ctrl.product.id].join(''); - var properties = propertiesToJson(); - var content = {'description': ctrl.product.description, - 'properties': properties}; - if (ctrl.productName !== ctrl.product.name) { - content.name = ctrl.product.name; - } - - // Request for product detail updating. - $http.put(url, content).success(function() { - - // Request for product version CPID update if it has changed. - if (ctrl.productVersion && - ctrl.originalCpid !== ctrl.productVersion.cpid) { - - url = url + '/versions/' + ctrl.productVersion.id; - content = {'cpid': ctrl.productVersion.cpid}; - $http.put(url, content).success(function() { - ctrl.showSuccess = true; - ctrl.originalCpid = ctrl.productVersion.cpid; - $state.reload(); - }).error(function(error) { - ctrl.showError = true; - ctrl.error = error.detail; - }); - } else { - ctrl.showSuccess = true; - $state.reload(); - } - }).error(function(error) { - ctrl.showError = true; - ctrl.error = error.detail; - }); - } - - /** - * Remove a property from the productProperties array at the given - * index. - */ - function removeProperty(index) { - ctrl.productProperties.splice(index, 1); - } - - /** - * Parse the product properties and put them in a format more suitable - * for forms. - */ - function parseProductProperties() { - var props = angular.fromJson(ctrl.product.properties); - angular.forEach(props, function(value, key) { - ctrl.productProperties.push({'key': key, 'value': value}); - }); - } - - /** - * Convert the list of property objects to a dict containing the - * each key-value pair. - */ - function propertiesToJson() { - if (!ctrl.productProperties.length) { - return null; - } - var properties = {}; - for (var i = 0, len = ctrl.productProperties.length; i < len; i++) { - var prop = ctrl.productProperties[i]; - if (prop.key && prop.value) { - properties[prop.key] = prop.value; - } - } - return properties; - } - } -})(); diff --git a/refstack-ui/app/components/products/products.html b/refstack-ui/app/components/products/products.html deleted file mode 100644 index 9b103ecc..00000000 --- a/refstack-ui/app/components/products/products.html +++ /dev/null @@ -1,85 +0,0 @@ -

{{ctrl.pageHeader}}

-

{{ctrl.pageParagraph}}

- -
- -
- - - - - - - - - - - - - - - - - - - - - - -
Name | Product Type | Description | Vendor | Visibility
{{product.name}}{{product.name}}{{product.name}}{{ctrl.getProductTypeDescription(product.product_type)}}{{product.description}}{{ctrl.allVendors[product.organization_id].name}}{{product.public ? 'Public' : 'Private'}}
-
- -
-
-

Add New Product

-
-
- -

- -

-
-
- -

- -

-
-
- - -
-
- - -
-
- -
-
-
- - Success: - Product successfully created. -
-
-
- -
-
-
- - diff --git a/refstack-ui/app/components/products/productsController.js b/refstack-ui/app/components/products/productsController.js deleted file mode 100644 index fbd408a9..00000000 --- a/refstack-ui/app/components/products/productsController.js +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('ProductsController', ProductsController); - - ProductsController.$inject = [ - '$rootScope', '$scope', '$http', '$state', 'refstackApiUrl' - ]; - - /** - * RefStack Products Controller - */ - function ProductsController($rootScope, $scope, $http, $state, - refstackApiUrl) { - var ctrl = this; - - ctrl.update = update; - ctrl.updateData = updateData; - ctrl._filterProduct = _filterProduct; - ctrl.addProduct = addProduct; - ctrl.updateVendors = updateVendors; - ctrl.getProductTypeDescription = getProductTypeDescription; - - /** Check to see if this page should display user-specific products. */ - ctrl.isUserProducts = $state.current.name === 'userProducts'; - /** Show private products in list for foundation admin */ - ctrl.withPrivate = false; - - /** Properties for adding new products */ - ctrl.name = ''; - ctrl.description = ''; - ctrl.organizationId = ''; - - // Should only be on user-products-page if authenticated. - if (ctrl.isUserProducts && !$scope.auth.isAuthenticated) { - $state.go('home'); - } - - ctrl.pageHeader = ctrl.isUserProducts ? - 'My Products' : 'Public Products'; - - ctrl.pageParagraph = ctrl.isUserProducts ? - 'Your added products are listed here.' : - 'Public products are listed here.'; - - if (ctrl.isUserProducts) { - ctrl.authRequest = $scope.auth.doSignCheck() - .then(ctrl.updateVendors) - .then(ctrl.update); - } else { - ctrl.updateVendors(); - ctrl.update(); - } - - ctrl.rawData = null; - ctrl.allVendors = {}; - ctrl.isAdminView = $rootScope.auth - && $rootScope.auth.currentUser - && $rootScope.auth.currentUser.is_admin; - - /** - * This will contact the Refstack API to get a listing of products. - */ - function update() { - ctrl.showError = false; - // Construct the API URL based on user-specified filters. - var contentUrl = refstackApiUrl + '/products'; - if (typeof ctrl.rawData === 'undefined' - || ctrl.rawData === null) { - ctrl.productsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.rawData = data; - ctrl.updateData(); - }).error(function (error) { - ctrl.rawData = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving Products listing from server: ' + - angular.toJson(error); - }); - } else { - ctrl.updateData(); - } - } - - /** - * This will update data for view with current settings on page. 
- */ - function updateData() { - ctrl.data = {}; - ctrl.data.products = ctrl.rawData.products.filter(function(s) { - return ctrl._filterProduct(s); - }); - ctrl.data.products.sort(function(a, b) { - return a.name.localeCompare(b.name); - }); - } - - /** - * Returns true if a specific product can be displayed on this page. - */ - function _filterProduct(product) { - if (!ctrl.isUserProducts) { - return product.public; - } - - if ($rootScope.auth.currentUser.is_admin) { - // TO-DO: filter out non-admin's items - // because public is not a correct flag for this - return product.public || ctrl.withPrivate; - } - - return product.can_manage; - } - - /** - * Get the product type description given the type integer. - */ - function getProductTypeDescription(product_type) { - switch (product_type) { - case 0: - return 'Distro'; - case 1: - return 'Public Cloud'; - case 2: - return 'Hosted Private Cloud'; - default: - return 'Unknown'; - } - } - - /** - * This will contact the Refstack API to get a listing of - * available vendors that can be used to associate with products. - */ - function updateVendors() { - // Construct the API URL based on user-specified filters. - var contentUrl = refstackApiUrl + '/vendors'; - ctrl.vendorsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.vendors = Array(); - ctrl.allVendors = {}; - data.vendors.forEach(function(vendor) { - ctrl.allVendors[vendor.id] = vendor; - if (vendor.can_manage) { - ctrl.vendors.push(vendor); - } - }); - ctrl.vendors.sort(function(a, b) { - return a.name.localeCompare(b.name); - }); - if (ctrl.vendors.length === 0) { - ctrl.vendors.push({name: 'Create New...', id: ''}); - } - ctrl.organizationId = ctrl.vendors[0].id; - }).error(function (error) { - ctrl.vendors = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving vendor listing from server: ' + - angular.toJson(error); - }); - } - - /** - * This will add new Product record. - */ - function addProduct() { - ctrl.showSuccess = false; - ctrl.showError = false; - var url = refstackApiUrl + '/products'; - var data = { - name: ctrl.name, - description: ctrl.description, - organization_id: ctrl.organizationId, - product_type: parseInt(ctrl.productType, 10) - }; - $http.post(url, data).success(function () { - ctrl.rawData = null; - ctrl.showSuccess = true; - ctrl.name = ''; - ctrl.description = ''; - ctrl.productType = null; - ctrl.update(); - }).error(function (error) { - ctrl.showError = true; - ctrl.error = - 'Error adding new Product: ' + angular.toJson(error); - }); - } - } -})(); diff --git a/refstack-ui/app/components/profile/importPubKeyModal.html b/refstack-ui/app/components/profile/importPubKeyModal.html deleted file mode 100644 index 5b20a9f7..00000000 --- a/refstack-ui/app/components/profile/importPubKeyModal.html +++ /dev/null @@ -1,27 +0,0 @@ - - diff --git a/refstack-ui/app/components/profile/profile.html b/refstack-ui/app/components/profile/profile.html deleted file mode 100644 index dc97c41e..00000000 --- a/refstack-ui/app/components/profile/profile.html +++ /dev/null @@ -1,37 +0,0 @@ -

User profile

-
-
- - - - - - -
User name {{auth.currentUser.fullname}}
User OpenId {{auth.currentUser.openid}}
Email {{auth.currentUser.email}}
-
-
-
-
-
-

User Public Keys

-
-
- -
-
-
- -
- - - - - - - - -
{{pubKey.format}} | {{pubKey.shortKey}} | {{pubKey.comment}}
-
-
diff --git a/refstack-ui/app/components/profile/profileController.js b/refstack-ui/app/components/profile/profileController.js deleted file mode 100644 index 6428e5e1..00000000 --- a/refstack-ui/app/components/profile/profileController.js +++ /dev/null @@ -1,219 +0,0 @@ -/* - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .factory('PubKeys', PubKeys); - - PubKeys.$inject = ['$resource', 'refstackApiUrl']; - - /** - * This is a provider for the user's uploaded public keys. - */ - function PubKeys($resource, refstackApiUrl) { - return $resource(refstackApiUrl + '/profile/pubkeys/:id', null, null); - } - - angular - .module('refstackApp') - .controller('ProfileController', ProfileController); - - ProfileController.$inject = [ - '$scope', '$http', 'refstackApiUrl', 'PubKeys', - '$uibModal', 'raiseAlert', '$state' - ]; - - /** - * RefStack Profile Controller - * This controller handles user's profile page, where a user can view - * account-specific information. - */ - function ProfileController($scope, $http, refstackApiUrl, - PubKeys, $uibModal, raiseAlert, $state) { - - var ctrl = this; - - ctrl.updatePubKeys = updatePubKeys; - ctrl.openImportPubKeyModal = openImportPubKeyModal; - ctrl.openShowPubKeyModal = openShowPubKeyModal; - - // Must be authenticated to view this page. - if (!$scope.auth.isAuthenticated) { - $state.go('home'); - } - - /** - * This function will fetch all the user's public keys from the - * server and store them in an array. - */ - function updatePubKeys() { - var keys = PubKeys.query(function() { - ctrl.pubkeys = []; - angular.forEach(keys, function (key) { - ctrl.pubkeys.push({ - 'resource': key, - 'format': key.format, - 'shortKey': [ - key.pubkey.slice(0, 10), - '.', - key.pubkey.slice(-10) - ].join('.'), - 'pubkey': key.pubkey, - 'comment': key.comment - }); - }); - }); - } - - /** - * This function will open the modal that will give the user a form - * for importing a public key. - */ - function openImportPubKeyModal() { - $uibModal.open({ - templateUrl: '/components/profile/importPubKeyModal.html', - backdrop: true, - windowClass: 'modal', - controller: 'ImportPubKeyModalController as modal' - }).result.finally(function() { - ctrl.updatePubKeys(); - }); - } - - /** - * This function will open the modal that will give the full - * information regarding a specific public key. 
- * @param {Object} pubKey resource - */ - function openShowPubKeyModal(pubKey) { - $uibModal.open({ - templateUrl: '/components/profile/showPubKeyModal.html', - backdrop: true, - windowClass: 'modal', - controller: 'ShowPubKeyModalController as modal', - resolve: { - pubKey: function() { - return pubKey; - } - } - }).result.finally(function() { - ctrl.updatePubKeys(); - }); - } - - ctrl.authRequest = $scope.auth.doSignCheck().then(ctrl.updatePubKeys); - } - - angular - .module('refstackApp') - .controller('ImportPubKeyModalController', ImportPubKeyModalController); - - ImportPubKeyModalController.$inject = [ - '$uibModalInstance', 'PubKeys', 'raiseAlert' - ]; - - /** - * Import Pub Key Modal Controller - * This controller is for the modal that appears if a user wants to import - * a public key. - */ - function ImportPubKeyModalController($uibModalInstance, - PubKeys, raiseAlert) { - - var ctrl = this; - - ctrl.importPubKey = importPubKey; - ctrl.cancel = cancel; - - /** - * This function will save a new public key resource to the API server. - */ - function importPubKey() { - var newPubKey = new PubKeys( - {raw_key: ctrl.raw_key, self_signature: ctrl.self_signature} - ); - newPubKey.$save( - function(newPubKey_) { - raiseAlert('success', '', 'Public key saved successfully'); - $uibModalInstance.close(newPubKey_); - }, - function(httpResp) { - raiseAlert('danger', - httpResp.statusText, httpResp.data.title); - ctrl.cancel(); - } - ); - } - - /** - * This function will dismiss the modal. - */ - function cancel() { - $uibModalInstance.dismiss('cancel'); - } - } - - angular - .module('refstackApp') - .controller('ShowPubKeyModalController', ShowPubKeyModalController); - - ShowPubKeyModalController.$inject = [ - '$uibModalInstance', 'raiseAlert', 'pubKey' - ]; - - /** - * Show Pub Key Modal Controller - * This controller is for the modal that appears if a user wants to see the - * full details of one of their public keys. - */ - function ShowPubKeyModalController($uibModalInstance, raiseAlert, pubKey) { - var ctrl = this; - - ctrl.deletePubKey = deletePubKey; - ctrl.cancel = cancel; - - ctrl.pubKey = pubKey.resource; - ctrl.rawKey = [pubKey.format, pubKey.pubkey, pubKey.comment].join('\n'); - - /** - * This function will delete a public key resource. - */ - function deletePubKey() { - ctrl.pubKey.$remove( - {id: ctrl.pubKey.id}, - function() { - raiseAlert('success', - '', 'Public key deleted successfully'); - $uibModalInstance.close(ctrl.pubKey.id); - }, - function(httpResp) { - raiseAlert('danger', - httpResp.statusText, httpResp.data.title); - ctrl.cancel(); - } - ); - } - - /** - * This method will dismiss the modal. 
- */ - function cancel() { - $uibModalInstance.dismiss('cancel'); - } - } -})(); diff --git a/refstack-ui/app/components/profile/showPubKeyModal.html b/refstack-ui/app/components/profile/showPubKeyModal.html deleted file mode 100644 index 5f63a5ef..00000000 --- a/refstack-ui/app/components/profile/showPubKeyModal.html +++ /dev/null @@ -1,11 +0,0 @@ - - diff --git a/refstack-ui/app/components/results-report/partials/editTestModal.html b/refstack-ui/app/components/results-report/partials/editTestModal.html deleted file mode 100644 index b559508b..00000000 --- a/refstack-ui/app/components/results-report/partials/editTestModal.html +++ /dev/null @@ -1,70 +0,0 @@ - diff --git a/refstack-ui/app/components/results-report/partials/fullTestListModal.html b/refstack-ui/app/components/results-report/partials/fullTestListModal.html deleted file mode 100644 index 6db198b0..00000000 --- a/refstack-ui/app/components/results-report/partials/fullTestListModal.html +++ /dev/null @@ -1,13 +0,0 @@ - diff --git a/refstack-ui/app/components/results-report/partials/reportDetails.html b/refstack-ui/app/components/results-report/partials/reportDetails.html deleted file mode 100644 index 517e569c..00000000 --- a/refstack-ui/app/components/results-report/partials/reportDetails.html +++ /dev/null @@ -1,87 +0,0 @@ - - - - - {{status | capitalize}} - - (Total: {{ctrl.caps[status].caps.length}} capabilities, {{ctrl.caps[status].count}} tests) - - ({{ctrl.testStatus | capitalize}}: {{ctrl.getStatusTestCount(status)}} tests) - - - - - -
    -
  1. - - - {{capability.id}} - - - [{{ctrl.getCapabilityTestCount(capability)}}] - - - [{{capability.passedTests.length}}/{{capability.passedTests.length + - capability.notPassedTests.length}}] - - -
      - -
    • - - - - - {{test}} - — - [Aliases] -
      -
      • {{alias}}
      -
      -
      -
    • - - - -
    • - - - - - {{test}} - — - [Aliases] -
      -
      • {{alias}}
      -
      -
      -
    • - -
    -
  2. -
-
diff --git a/refstack-ui/app/components/results-report/resultsReport.html b/refstack-ui/app/components/results-report/resultsReport.html deleted file mode 100644 index 2df4a9bb..00000000 --- a/refstack-ui/app/components/results-report/resultsReport.html +++ /dev/null @@ -1,190 +0,0 @@ -

Test Run Results

- -
-
-
-
- Test ID: {{ctrl.testId}}
-
Cloud ID: {{ctrl.resultsData.cpid}}
- Upload Date: {{ctrl.resultsData.created_at}} UTC
- Duration: {{ctrl.resultsData.duration_seconds}} seconds
- Total Number of Passed Tests: - - {{ctrl.resultsData.results.length}} - -
-
-
- Publicly Shared: - Yes - No -
-
-
- Product: - {{ctrl.resultsData.product_version.product_info.name}} - - ({{ctrl.resultsData.product_version.version}}) -
-
-
- Associated Guideline: - {{ctrl.resultsData.meta.guideline.slice(0, -5)}} -
-
- Associated Target Program: - {{ctrl.targetMappings[ctrl.resultsData.meta.target]}} -
-
- Verified: - YES -
-
-
- -
-
- - -
-
-
-
- -
-
-
-
-
- -
-

See how these results stack up against Interop Working Group capabilities and OpenStack - target marketing programs. -

- - -
-
- Guideline Version: - - -
-
- Target Program: - -
-
- - -
-
- Guideline Status: - {{ctrl.guidelineStatus | capitalize}} -
- - Corresponding OpenStack Releases: -
    -
  • - {{release | capitalize}} -
  • -
-
- -
- Status: -

This cloud passes {{ctrl.requiredPassPercent | number:1}}% - ({{ctrl.caps.required.passedCount}}/{{ctrl.caps.required.count}}) - of the tests in the {{ctrl.version.slice(0, -5)}} required capabilities for the - {{ctrl.targetMappings[target]}} program.
- Excluding flagged tests, this cloud passes - {{ctrl.nonFlagRequiredPassPercent | number:1}}% - ({{ctrl.nonFlagPassCount}}/{{ctrl.totalNonFlagCount}}) - of the required tests. -

- -

Compliance with {{ctrl.version.slice(0, -5)}}: - - YES - NO - -

- -
-

Capability Overview

- - Test Filters:
-
- - - - -
- - - - - -
- - -
- - -
- - -
-
-
- -
-
-
-
-
- - diff --git a/refstack-ui/app/components/results-report/resultsReportController.js b/refstack-ui/app/components/results-report/resultsReportController.js deleted file mode 100644 index 2aa6d7e1..00000000 --- a/refstack-ui/app/components/results-report/resultsReportController.js +++ /dev/null @@ -1,942 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('ResultsReportController', ResultsReportController); - - ResultsReportController.$inject = [ - '$http', '$stateParams', '$window', - '$uibModal', 'refstackApiUrl', 'raiseAlert' - ]; - - /** - * RefStack Results Report Controller - * This controller is for the '/results/' page where a user can - * view details for a specific test run. - */ - function ResultsReportController($http, $stateParams, $window, - $uibModal, refstackApiUrl, raiseAlert) { - - var ctrl = this; - - ctrl.getVersionList = getVersionList; - ctrl.getResults = getResults; - ctrl.isResultAdmin = isResultAdmin; - ctrl.isShared = isShared; - ctrl.shareTestRun = shareTestRun; - ctrl.deleteTestRun = deleteTestRun; - ctrl.updateVerificationStatus = updateVerificationStatus; - ctrl.updateGuidelines = updateGuidelines; - ctrl.getTargetCapabilities = getTargetCapabilities; - ctrl.buildCapabilityV1_2 = buildCapabilityV1_2; - ctrl.buildCapabilityV1_3 = buildCapabilityV1_3; - ctrl.buildCapabilitiesObject = buildCapabilitiesObject; - ctrl.isTestFlagged = isTestFlagged; - ctrl.getFlaggedReason = getFlaggedReason; - ctrl.isCapabilityShown = isCapabilityShown; - ctrl.isTestShown = isTestShown; - ctrl.getCapabilityTestCount = getCapabilityTestCount; - ctrl.getStatusTestCount = getStatusTestCount; - ctrl.openFullTestListModal = openFullTestListModal; - ctrl.openEditTestModal = openEditTestModal; - getVersionList(); - - /** The testID extracted from the URL route. */ - ctrl.testId = $stateParams.testID; - - /** The target OpenStack marketing program to compare against. */ - ctrl.target = 'platform'; - - /** Mappings of Interop WG components to marketing program names. */ - ctrl.targetMappings = { - 'platform': 'Openstack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Object Storage', - 'dns': 'OpenStack with DNS', - 'orchestration': 'OpenStack with orchestration', - 'shared_file_system': 'OpenStack with Shared File System', - 'load_balancer': 'OpenStack with Load Balancer', - 'key_manager': 'OpenStack with Key Manager' - }; - - /** The schema version of the currently selected guideline data. */ - ctrl.schemaVersion = null; - - /** The selected test status used for test filtering. */ - ctrl.testStatus = 'total'; - - /** The HTML template that all accordian groups will use. */ - ctrl.detailsTemplate = 'components/results-report/partials/' + - 'reportDetails.html'; - - /** - * Retrieve an array of available guideline files from the Refstack - * API server, sort this array reverse-alphabetically, and store it in - * a scoped variable. 
The scope's selected version is initialized to - * the latest (i.e. first) version here as well. After a successful API - * call, the function to update the capabilities is called. - * Sample API return array: ["2015.03.json", "2015.04.json"] - */ - function getVersionList() { - if (ctrl.target === 'dns' || - ctrl.target === 'orchestration' || - ctrl.target === 'shared_file_system' || - ctrl.target === 'load_balancer' || - ctrl.target === 'key_manager' - ) { - ctrl.gl_type = ctrl.target; - - } else { - ctrl.gl_type = 'powered'; - } - var content_url = refstackApiUrl + '/guidelines'; - ctrl.versionsRequest = - $http.get(content_url).success(function (data) { - let gl_files = data[ctrl.gl_type]; - let gl_names = gl_files.map((gl_obj) => gl_obj.name); - ctrl.versionList = gl_names.sort().reverse(); - let file_names = gl_files.map((gl_obj) => gl_obj.file); - ctrl.fileList = file_names.sort().reverse(); - - if (!ctrl.version) { - // Default to the first approved guideline which is - // expected to be at index 1. - ctrl.version = ctrl.versionList[1]; - ctrl.versionFile = ctrl.fileList[1]; - } else { - let versionIndex = - ctrl.versionList.indexOf(ctrl.version); - ctrl.versionFile = ctrl.fileList[versionIndex]; - } - ctrl.updateGuidelines(); - }).error(function (error) { - ctrl.showError = true; - ctrl.error = 'Error retrieving version list: ' + - angular.toJson(error); - }); - } - - /** - * Retrieve results from the Refstack API server based on the test - * run id in the URL. This function is the first function that will - * be called from the controller. Upon successful retrieval of results, - * the function that gets the version list will be called. - */ - function getResults() { - var content_url = refstackApiUrl + '/results/' + ctrl.testId; - ctrl.resultsRequest = - $http.get(content_url).success(function (data) { - ctrl.resultsData = data; - ctrl.version = ctrl.resultsData.meta.guideline; - ctrl.isVerified = ctrl.resultsData.verification_status; - if (ctrl.resultsData.meta.target) { - ctrl.target = ctrl.resultsData.meta.target; - } - getVersionList(); - }).error(function (error) { - ctrl.showError = true; - ctrl.resultsData = null; - ctrl.error = 'Error retrieving results from server: ' + - angular.toJson(error); - }); - } - - /** - * This tells you whether the current user has administrative - * privileges for the test result. - * @returns {Boolean} true if the user has admin privileges. - */ - function isResultAdmin() { - return Boolean(ctrl.resultsData && - (ctrl.resultsData.user_role === 'owner' || - ctrl.resultsData.user_role === 'foundation')); - } - /** - * This tells you whether the current results are shared with the - * community or not. - * @returns {Boolean} true if the results are shared - */ - function isShared() { - return Boolean(ctrl.resultsData && - 'shared' in ctrl.resultsData.meta); - } - - /** - * This will send an API request in order to share or unshare the - * current results based on the passed in shareState. - * @param {Boolean} shareState - Whether to share or unshare results. 
- */ - function shareTestRun(shareState) { - var content_url = [ - refstackApiUrl, '/results/', ctrl.testId, '/meta/shared' - ].join(''); - if (shareState) { - ctrl.shareRequest = - $http.post(content_url, 'true').success(function () { - ctrl.resultsData.meta.shared = 'true'; - raiseAlert('success', '', 'Test run shared!'); - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } else { - ctrl.shareRequest = - $http.delete(content_url).success(function () { - delete ctrl.resultsData.meta.shared; - raiseAlert('success', '', 'Test run unshared!'); - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - } - - /** - * This will send a request to the API to delete the current - * test results set. - */ - function deleteTestRun() { - var content_url = [ - refstackApiUrl, '/results/', ctrl.testId - ].join(''); - ctrl.deleteRequest = - $http.delete(content_url).success(function () { - $window.history.back(); - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * This will send a request to the API to delete the current - * test results set. - */ - function updateVerificationStatus() { - var content_url = [ - refstackApiUrl, '/results/', ctrl.testId - ].join(''); - var data = {'verification_status': ctrl.isVerified}; - ctrl.updateRequest = - $http.put(content_url, data).success( - function () { - ctrl.resultsData.verification_status = ctrl.isVerified; - raiseAlert('success', '', - 'Verification status changed!'); - }).error(function (error) { - ctrl.isVerified = ctrl.resultsData.verification_status; - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * This will contact the Refstack API server to retrieve the JSON - * content of the guideline file corresponding to the selected - * version. A function to construct an object from the capability - * data will be called upon successful retrieval. - */ - function updateGuidelines() { - ctrl.guidelineData = null; - ctrl.showError = false; - - ctrl.content_url = refstackApiUrl + '/guidelines/' + - ctrl.versionFile; - let getparams = {'gl_file': ctrl.versionFile}; - ctrl.capsRequest = - $http.get(ctrl.content_url, getparams).success(function (data) { - ctrl.guidelineData = data; - if ('metadata' in data && data.metadata.schema >= '2.0') { - ctrl.schemaVersion = data.metadata.schema; - ctrl.guidelineStatus = - data.metadata.os_trademark_approval.status; - ctrl.releases = - data.metadata.os_trademark_approval.releases; - } else { - ctrl.schemaVersion = data.schema; - ctrl.guidelineStatus = data.status; - ctrl.releases = data.releases; - } - ctrl.buildCapabilitiesObject(); - }).error(function (error) { - ctrl.showError = true; - ctrl.guidelineData = null; - ctrl.error = 'Error retrieving guideline date: ' + - angular.toJson(error); - }); - } - - /** - * This will get all the capabilities relevant to the target and - * their corresponding statuses. 
- * @returns {Object} Object containing each capability and their status - */ - function getTargetCapabilities() { - var components = ctrl.guidelineData.components; - var targetCaps = {}; - var targetComponents = null; - var old_type = ctrl.gl_type; - if (ctrl.target === 'dns' || - ctrl.target === 'orchestration' || - ctrl.target === 'shared_file_system' || - ctrl.target === 'load_balancer' || - ctrl.target === 'key_manager' - ) { - ctrl.gl_type = ctrl.target; - } else { - ctrl.gl_type = 'powered'; - } - // If it has not been updated since the last program type change, - // will need to update the list - if (old_type !== ctrl.gl_type) { - ctrl.getVersionList(); - return false; - } - - // The 'platform' target is comprised of multiple components, so - // we need to get the capabilities belonging to each of its - // components. - if (ctrl.target === 'platform' || ctrl.schemaVersion >= '2.0') { - if ('add-ons' in ctrl.guidelineData) { - targetComponents = ['os_powered_' + ctrl.target]; - } else if (ctrl.schemaVersion >= '2.0') { - var platformsMap = { - 'platform': 'OpenStack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Storage', - }; - targetComponents = ctrl.guidelineData.platforms[ - platformsMap[ctrl.target]].components.map( - function(c) { - return c.name; - } - ); - } else { - targetComponents = ctrl.guidelineData.platform.required; - } - - // This will contain status priority values, where lower - // values mean higher priorities. - var statusMap = { - required: 1, - advisory: 2, - deprecated: 3, - removed: 4 - }; - - // For each component required for the platform program. - angular.forEach(targetComponents, function (component) { - var componentList = components[component]; - if (ctrl.schemaVersion >= '2.0') { - componentList = componentList.capabilities; - } - // Get each capability list belonging to each status. - angular.forEach(componentList, - function (caps, status) { - // For each capability. - angular.forEach(caps, function(cap) { - // If the capability has already been added. - if (cap in targetCaps) { - // If the status priority value is less - // than the saved priority value, update - // the value. - if (statusMap[status] < - statusMap[targetCaps[cap]]) { - targetCaps[cap] = status; - } - } else { - targetCaps[cap] = status; - } - }); - }); - }); - } else { - angular.forEach(components[ctrl.target], - function (caps, status) { - angular.forEach(caps, function(cap) { - targetCaps[cap] = status; - }); - }); - } - return targetCaps; - } - - /** - * This will build the a capability object for schema version 1.2. - * This object will contain the information needed to form a report in - * the HTML template. - * @param {String} capId capability ID - */ - function buildCapabilityV1_2(capId) { - var cap = { - 'id': capId, - 'passedTests': [], - 'notPassedTests': [], - 'passedFlagged': [], - 'notPassedFlagged': [] - }; - var capDetails = ctrl.guidelineData.capabilities[capId]; - // Loop through each test belonging to the capability. - angular.forEach(capDetails.tests, - function (testId) { - // If the test ID is in the results' test list, add - // it to the passedTests array. 
- if (ctrl.resultsData.results.indexOf(testId) > -1) { - cap.passedTests.push(testId); - if (capDetails.flagged.indexOf(testId) > -1) { - cap.passedFlagged.push(testId); - } - } else { - cap.notPassedTests.push(testId); - if (capDetails.flagged.indexOf(testId) > -1) { - cap.notPassedFlagged.push(testId); - } - } - }); - return cap; - } - - /** - * This will build the a capability object for schema version 1.3 and - * above. This object will contain the information needed to form a - * report in the HTML template. - * @param {String} capId capability ID - */ - function buildCapabilityV1_3(capId) { - var cap = { - 'id': capId, - 'passedTests': [], - 'notPassedTests': [], - 'passedFlagged': [], - 'notPassedFlagged': [] - }; - - // For cases where a capability listed in components is not - // in the capabilities object. - if (!(capId in ctrl.guidelineData.capabilities)) { - return cap; - } - - // Loop through each test belonging to the capability. - angular.forEach(ctrl.guidelineData.capabilities[capId].tests, - function (details, testId) { - var passed = false; - - // If the test ID is in the results' test list. - if (ctrl.resultsData.results.indexOf(testId) > -1) { - passed = true; - } else if ('aliases' in details) { - var len = details.aliases.length; - for (var i = 0; i < len; i++) { - var alias = details.aliases[i]; - if (ctrl.resultsData.results.indexOf(alias) > -1) { - passed = true; - break; - } - } - } - - // Add to correct array based on whether the test was - // passed or not. - if (passed) { - cap.passedTests.push(testId); - if ('flagged' in details) { - cap.passedFlagged.push(testId); - } - } else { - cap.notPassedTests.push(testId); - if ('flagged' in details) { - cap.notPassedFlagged.push(testId); - } - } - }); - return cap; - } - - /** - * This will check the schema version of the current capabilities file, - * and will call the correct method to build an object based on the - * capability data retrieved from the Refstack API server. - */ - function buildCapabilitiesObject() { - // This is the object template where 'count' is the number of - // total tests that fall under the given status, and 'passedCount' - // is the number of tests passed. The 'caps' array will contain - // objects with details regarding each capability. - ctrl.caps = { - 'required': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'advisory': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'deprecated': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'removed': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0} - }; - var capMethod = null; - - switch (ctrl.schemaVersion) { - case '1.2': - capMethod = 'buildCapabilityV1_2'; - break; - case '1.3': - case '1.4': - case '1.5': - case '1.6': - case '2.0': - capMethod = 'buildCapabilityV1_3'; - break; - default: - ctrl.showError = true; - ctrl.guidelineData = null; - ctrl.error = 'The schema version for the guideline ' + - 'file selected (' + ctrl.schemaVersion + - ') is currently not supported.'; - return; - } - - // Get test details for each relevant capability and store - // them in the scope's 'caps' object. 
- var targetCaps = ctrl.getTargetCapabilities(); - angular.forEach(targetCaps, function(status, capId) { - var cap = ctrl[capMethod](capId); - ctrl.caps[status].count += - cap.passedTests.length + cap.notPassedTests.length; - ctrl.caps[status].passedCount += cap.passedTests.length; - ctrl.caps[status].flagPassCount += cap.passedFlagged.length; - ctrl.caps[status].flagFailCount += - cap.notPassedFlagged.length; - ctrl.caps[status].caps.push(cap); - }); - - ctrl.requiredPassPercent = ctrl.caps.required.passedCount * - 100 / ctrl.caps.required.count; - - ctrl.totalRequiredFailCount = ctrl.caps.required.count - - ctrl.caps.required.passedCount; - ctrl.totalRequiredFlagCount = - ctrl.caps.required.flagFailCount + - ctrl.caps.required.flagPassCount; - ctrl.totalNonFlagCount = ctrl.caps.required.count - - ctrl.totalRequiredFlagCount; - ctrl.nonFlagPassCount = ctrl.totalNonFlagCount - - (ctrl.totalRequiredFailCount - - ctrl.caps.required.flagFailCount); - - ctrl.nonFlagRequiredPassPercent = ctrl.nonFlagPassCount * - 100 / ctrl.totalNonFlagCount; - } - - /** - * This will check if a given test is flagged. - * @param {String} test ID of the test to check - * @param {Object} capObj capability that test is under - * @returns {Boolean} truthy value if test is flagged - */ - function isTestFlagged(test, capObj) { - if (!capObj) { - return false; - } - return ctrl.schemaVersion === '1.2' && - capObj.flagged.indexOf(test) > -1 || - ctrl.schemaVersion >= '1.3' && - capObj.tests[test].flagged; - } - - /** - * This will return the reason a test is flagged. An empty string - * will be returned if the passed in test is not flagged. - * @param {String} test ID of the test to check - * @param {String} capObj capability that test is under - * @returns {String} reason - */ - function getFlaggedReason(test, capObj) { - if (ctrl.schemaVersion === '1.2' && - ctrl.isTestFlagged(test, capObj)) { - - // Return a generic message since schema 1.2 does not - // provide flag reasons. - return 'Interop Working Group has flagged this test.'; - } else if (ctrl.schemaVersion >= '1.3' && - ctrl.isTestFlagged(test, capObj)) { - - return capObj.tests[test].flagged.reason; - } else { - return ''; - } - } - - /** - * This will check the if a capability should be shown based on the - * test filter selected. If a capability does not have any tests - * belonging under the given filter, it should not be shown. - * @param {Object} capability Built object for capability - * @returns {Boolean} true if capability should be shown - */ - function isCapabilityShown(capability) { - return ctrl.testStatus === 'total' || - ctrl.testStatus === 'passed' && - capability.passedTests.length > 0 || - ctrl.testStatus === 'not passed' && - capability.notPassedTests.length > 0 || - ctrl.testStatus === 'flagged' && - capability.passedFlagged.length + - capability.notPassedFlagged.length > 0; - } - - /** - * This will check the if a test should be shown based on the test - * filter selected. 
- * @param {String} test ID of the test - * @param {Object} capability Built object for capability - * @return {Boolean} true if test should be shown - */ - function isTestShown(test, capability) { - return ctrl.testStatus === 'total' || - ctrl.testStatus === 'passed' && - capability.passedTests.indexOf(test) > -1 || - ctrl.testStatus === 'not passed' && - capability.notPassedTests.indexOf(test) > -1 || - ctrl.testStatus === 'flagged' && - (capability.passedFlagged.indexOf(test) > -1 || - capability.notPassedFlagged.indexOf(test) > -1); - } - - /** - * This will give the number of tests belonging under the selected - * test filter for a given capability. - * @param {Object} capability Built object for capability - * @returns {Number} number of tests under filter - */ - function getCapabilityTestCount(capability) { - if (ctrl.testStatus === 'total') { - return capability.passedTests.length + - capability.notPassedTests.length; - } else if (ctrl.testStatus === 'passed') { - return capability.passedTests.length; - } else if (ctrl.testStatus === 'not passed') { - return capability.notPassedTests.length; - } else if (ctrl.testStatus === 'flagged') { - return capability.passedFlagged.length + - capability.notPassedFlagged.length; - } else { - return 0; - } - } - - /** - * This will give the number of tests belonging under the selected - * test filter for a given status. - * @param {String} capability status - * @returns {Number} number of tests for status under filter - */ - function getStatusTestCount(status) { - if (!ctrl.caps) { - return -1; - } else if (ctrl.testStatus === 'total') { - return ctrl.caps[status].count; - } else if (ctrl.testStatus === 'passed') { - return ctrl.caps[status].passedCount; - } else if (ctrl.testStatus === 'not passed') { - return ctrl.caps[status].count - - ctrl.caps[status].passedCount; - } else if (ctrl.testStatus === 'flagged') { - return ctrl.caps[status].flagFailCount + - ctrl.caps[status].flagPassCount; - } else { - return -1; - } - } - - /** - * This will open the modal that will show the full list of passed - * tests for the current results. - */ - function openFullTestListModal() { - $uibModal.open({ - templateUrl: '/components/results-report/partials' + - '/fullTestListModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'FullTestListModalController as modal', - size: 'lg', - resolve: { - tests: function () { - return ctrl.resultsData.results; - }, - gl_type: function () { - return ctrl.gl_type; - } - } - - }); - } - - /** - * This will open the modal that will all a user to edit test run - * metadata. - */ - function openEditTestModal() { - $uibModal.open({ - templateUrl: '/components/results-report/partials' + - '/editTestModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'EditTestModalController as modal', - size: 'lg', - resolve: { - resultsData: function () { - return ctrl.resultsData; - }, - gl_type: function () { - return ctrl.gl_type; - } - } - }); - } - - getResults(); - } - - angular - .module('refstackApp') - .controller('FullTestListModalController', FullTestListModalController); - - FullTestListModalController.$inject = - ['$uibModalInstance', 'tests', 'gl_type']; - - /** - * Full Test List Modal Controller - * This controller is for the modal that appears if a user wants to see the - * full list of passed tests on a report page. 
- */ - function FullTestListModalController($uibModalInstance, tests, gl_type) { - var ctrl = this; - - ctrl.tests = tests; - ctrl.gl_type = gl_type; - - /** - * This function will close/dismiss the modal. - */ - ctrl.close = function () { - $uibModalInstance.dismiss('exit'); - }; - - /** - * This function will return a string representing the sorted - * tests list separated by newlines. - */ - ctrl.getTestListString = function () { - return ctrl.tests.sort().join('\n'); - }; - } - - angular - .module('refstackApp') - .controller('EditTestModalController', EditTestModalController); - - EditTestModalController.$inject = [ - '$uibModalInstance', '$http', '$state', 'raiseAlert', - 'refstackApiUrl', 'resultsData', 'gl_type' - ]; - - /** - * Edit Test Modal Controller - * This controller is for the modal that appears if a user wants to edit - * test run metadata. - */ - function EditTestModalController($uibModalInstance, $http, $state, - raiseAlert, refstackApiUrl, resultsData, gl_type) { - - var ctrl = this; - - ctrl.getVersionList = getVersionList; - ctrl.getUserProducts = getUserProducts; - ctrl.associateProductVersion = associateProductVersion; - ctrl.getProductVersions = getProductVersions; - ctrl.saveChanges = saveChanges; - - ctrl.resultsData = resultsData; - ctrl.metaCopy = angular.copy(resultsData.meta); - ctrl.prodVersionCopy = angular.copy(resultsData.product_version); - ctrl.gl_type = gl_type; - - ctrl.getVersionList(); - ctrl.getUserProducts(); - - /** - * Retrieve an array of available capability files from the Refstack - * API server, sort this array reverse-alphabetically, and store it in - * a scoped variable. - * Sample API return array: ["2015.03.json", "2015.04.json"] - */ - function getVersionList() { - if (ctrl.versionList) { - return; - } - var content_url = refstackApiUrl + '/guidelines'; - ctrl.versionsRequest = - $http.get(content_url).success(function (data) { - let gl_files = data[ctrl.gl_type]; - let gl_names = gl_files.map((gl_obj) => gl_obj.name); - ctrl.versionList = gl_names.sort().reverse(); - ctrl.version = ctrl.versionList[1]; - }).error(function (error) { - raiseAlert('danger', error.title, - 'Unable to retrieve version list'); - }); - } - - /** - * Get products user has management rights to or all products depending - * on the passed in parameter value. - */ - function getUserProducts() { - var contentUrl = refstackApiUrl + '/products'; - ctrl.productsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.products = {}; - angular.forEach(data.products, function(prod) { - if (prod.can_manage) { - ctrl.products[prod.id] = prod; - } - }); - if (ctrl.prodVersionCopy) { - ctrl.selectedProduct = ctrl.products[ - ctrl.prodVersionCopy.product_info.id - ]; - } - ctrl.getProductVersions(); - }).error(function (error) { - ctrl.products = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving Products listing from server: ' + - angular.toJson(error); - }); - } - - /** - * Send a PUT request to the API server to associate a product with - * a test result. - */ - function associateProductVersion() { - var verId = ctrl.selectedVersion ? - ctrl.selectedVersion.id : null; - var testId = resultsData.id; - var url = refstackApiUrl + '/results/' + testId; - ctrl.associateRequest = $http.put(url, {'product_version_id': - verId}) - .error(function (error) { - ctrl.showError = true; - ctrl.showSuccess = false; - ctrl.error = - 'Error associating product version with test run: ' + - angular.toJson(error); - }); - } - - /** - * Get all versions for a product. 
- */ - function getProductVersions() { - if (!ctrl.selectedProduct) { - ctrl.productVersions = []; - ctrl.selectedVersion = null; - return; - } - - var url = refstackApiUrl + '/products/' + - ctrl.selectedProduct.id + '/versions'; - ctrl.getVersionsRequest = $http.get(url) - .success(function (data) { - ctrl.productVersions = data; - if (ctrl.prodVersionCopy && - ctrl.prodVersionCopy.product_info.id === - ctrl.selectedProduct.id) { - ctrl.selectedVersion = ctrl.prodVersionCopy; - } else { - angular.forEach(data, function(ver) { - if (!ver.version) { - ctrl.selectedVersion = ver; - } - }); - } - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * Send a PUT request to the server with the changes. - */ - function saveChanges() { - ctrl.showError = false; - ctrl.showSuccess = false; - var metaBaseUrl = [ - refstackApiUrl, '/results/', resultsData.id, '/meta/' - ].join(''); - var metaFields = ['target', 'guideline', 'shared']; - var meta = ctrl.metaCopy; - angular.forEach(metaFields, function(field) { - var oldMetaValue = field in ctrl.resultsData.meta ? - ctrl.resultsData.meta[field] : ''; - if (field in meta && oldMetaValue !== meta[field]) { - var metaUrl = metaBaseUrl + field; - if (meta[field]) { - ctrl.assocRequest = $http.post(metaUrl, meta[field]) - .success(function() { - ctrl.resultsData.meta[field] = meta[field]; - }) - .error(function (error) { - ctrl.showError = true; - ctrl.showSuccess = false; - ctrl.error = - 'Error associating metadata with ' + - 'test run: ' + angular.toJson(error); - }); - } else { - ctrl.unassocRequest = $http.delete(metaUrl) - .success(function () { - delete ctrl.resultsData.meta[field]; - delete meta[field]; - }) - .error(function (error) { - ctrl.showError = true; - ctrl.showSuccess = false; - ctrl.error = - 'Error associating metadata with ' + - 'test run: ' + angular.toJson(error); - }); - } - } - }); - ctrl.associateProductVersion(); - if (!ctrl.showError) { - ctrl.showSuccess = true; - $state.reload(); - } - } - - /** - * This function will close/dismiss the modal. - */ - ctrl.close = function () { - $uibModalInstance.dismiss('exit'); - }; - } -})(); diff --git a/refstack-ui/app/components/results/results.html b/refstack-ui/app/components/results/results.html deleted file mode 100644 index 6445ade4..00000000 --- a/refstack-ui/app/components/results/results.html +++ /dev/null @@ -1,252 +0,0 @@ -

{{ctrl.pageHeader}}

-

{{ctrl.pageParagraph}}

- -
-

Filters

-
-
- -

- - - - -

-
-
- -

- - - - -

-
-
- - -
-
-
- -
-
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Upload Date | Test Run ID | Vendor | Product (version) | Target Program | Guideline | Verified | Shared
- - - - - {{result.created_at}} - {{result.id.slice(0, 8)}}...{{result.id.slice(-8)}} - - - {{ctrl.vendors[result.product_version.product_info.organization_id].name || '-'}} - {{result.product_version.product_info.name || '-'}} - - ({{result.product_version.version}}) - - {{ctrl.targetMappings[result.meta.target] || '-'}}{{result.meta.guideline.slice(0, -5) || '-'}} - - - - - - -
- Publicly Shared: - Yes - - No - - - - -
- - Associated Guideline: - - None - - - {{result.meta.guideline.slice(0, -5)}} - - - - - -
- - Associated Target Program: - - None - - - {{ctrl.targetMappings[result.meta.target]}} - - - - - -
- - Associated Product: - - None - - - - - {{ctrl.products[result.product_version.product_info.id].name}} - - ({{result.product_version.version}}) - - - - - - {{ctrl.products[result.product_version.product_info.id].name}} - - ({{result.product_version.version}}) - - - - - - - - - - Version: - - - - - - - -
-
- -
- - -
-
- - diff --git a/refstack-ui/app/components/results/resultsController.js b/refstack-ui/app/components/results/resultsController.js deleted file mode 100644 index 0501ab69..00000000 --- a/refstack-ui/app/components/results/resultsController.js +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('ResultsController', ResultsController); - - ResultsController.$inject = [ - '$scope', '$http', '$filter', '$state', 'refstackApiUrl','raiseAlert' - ]; - - /** - * RefStack Results Controller - * This controller is for the '/results' page where a user can browse - * a listing of community uploaded results. - */ - function ResultsController($scope, $http, $filter, $state, refstackApiUrl, - raiseAlert) { - var ctrl = this; - - ctrl.update = update; - ctrl.open = open; - ctrl.clearFilters = clearFilters; - ctrl.associateMeta = associateMeta; - ctrl.getVersionList = getVersionList; - ctrl.getUserProducts = getUserProducts; - ctrl.getVendors = getVendors; - ctrl.associateProductVersion = associateProductVersion; - ctrl.getProductVersions = getProductVersions; - ctrl.prepVersionEdit = prepVersionEdit; - if (ctrl.target === 'dns' || - ctrl.target === 'orchestration' || - ctrl.target === 'shared_file_system' || - ctrl.target === 'load_balancer' || - ctrl.target === 'key_manager' - ) { - ctrl.gl_type = ctrl.target; - } else { - ctrl.gl_type = 'powered'; - } - - /** Mappings of Interop WG components to marketing program names. */ - ctrl.targetMappings = { - 'platform': 'Openstack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Object Storage', - 'dns': 'OpenStack with DNS', - 'orchestration': 'OpenStack with Orchestration', - 'shared_file_system': 'OpenStack with Shared File System', - 'load_balancer': 'OpenStack with Load Balancer', - 'key_manager': 'OpenStack with Key Manager' - }; - - /** Initial page to be on. */ - ctrl.currentPage = 1; - - /** - * How many results should display on each page. Since pagination - * is server-side implemented, this value should match the - * 'results_per_page' configuration of the Refstack server which - * defaults to 20. - */ - ctrl.itemsPerPage = 20; - - /** - * How many page buttons should be displayed at max before adding - * the '...' button. - */ - ctrl.maxSize = 5; - - /** The upload date lower limit to be used in filtering results. */ - ctrl.startDate = ''; - - /** The upload date upper limit to be used in filtering results. */ - ctrl.endDate = ''; - - /** The date format for the date picker. */ - ctrl.format = 'yyyy-MM-dd'; - - /** Check to see if this page should display user-specific results. */ - ctrl.isUserResults = $state.current.name === 'userResults'; - - // Should only be on user-results-page if authenticated. - if (ctrl.isUserResults && !$scope.auth.isAuthenticated) { - $state.go('home'); - } - - ctrl.pageHeader = ctrl.isUserResults ? 
- 'Private test results' : 'Community test results'; - - ctrl.pageParagraph = ctrl.isUserResults ? - 'Your most recently uploaded test results are listed here.' : - 'The most recently uploaded community test results are listed ' + - 'here.'; - - if (ctrl.isUserResults) { - ctrl.authRequest = $scope.auth.doSignCheck() - .then(ctrl.update); - ctrl.getUserProducts(); - } else { - ctrl.update(); - } - - ctrl.getVendors(); - - /** - * This will contact the Refstack API to get a listing of test run - * results. - */ - function update() { - ctrl.showError = false; - // Construct the API URL based on user-specified filters. - var content_url = refstackApiUrl + '/results' + - '?page=' + ctrl.currentPage; - var start = $filter('date')(ctrl.startDate, 'yyyy-MM-dd'); - if (start) { - content_url = - content_url + '&start_date=' + start + ' 00:00:00'; - } - var end = $filter('date')(ctrl.endDate, 'yyyy-MM-dd'); - if (end) { - content_url = content_url + '&end_date=' + end + ' 23:59:59'; - } - if (ctrl.isUserResults) { - content_url = content_url + '&signed'; - } - ctrl.resultsRequest = - $http.get(content_url).success(function (data) { - ctrl.data = data; - ctrl.totalItems = ctrl.data.pagination.total_pages * - ctrl.itemsPerPage; - ctrl.currentPage = ctrl.data.pagination.current_page; - }).error(function (error) { - ctrl.data = null; - ctrl.totalItems = 0; - ctrl.showError = true; - ctrl.error = - 'Error retrieving results listing from server: ' + - angular.toJson(error); - }); - } - - /** - * This is called when the date filter calendar is opened. It - * does some event handling, and sets a scope variable so the UI - * knows which calendar was opened. - * @param {Object} $event - The Event object - * @param {String} openVar - Tells which calendar was opened - */ - function open($event, openVar) { - $event.preventDefault(); - $event.stopPropagation(); - ctrl[openVar] = true; - } - - /** - * This function will clear all filters and update the results - * listing. - */ - function clearFilters() { - ctrl.startDate = null; - ctrl.endDate = null; - ctrl.update(); - } - - /** - * This will send an API request in order to associate a metadata - * key-value pair with the given testId - * @param {Number} index - index of the test object in the results list - * @param {String} key - metadata key - * @param {String} value - metadata value - */ - function associateMeta(index, key, value) { - var testId = ctrl.data.results[index].id; - var metaUrl = [ - refstackApiUrl, '/results/', testId, '/meta/', key - ].join(''); - - var editFlag = key + 'Edit'; - if (value) { - ctrl.associateRequest = $http.post(metaUrl, value) - .success(function () { - ctrl.data.results[index][editFlag] = false; - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } else { - ctrl.unassociateRequest = $http.delete(metaUrl) - .success(function () { - ctrl.data.results[index][editFlag] = false; - }).error(function (error) { - if (error.code === 404) { - // Key doesn't exist, so count it as a success, - // and don't raise an alert. - ctrl.data.results[index][editFlag] = false; - } else { - raiseAlert('danger', error.title, error.detail); - } - }); - } - } - - /** - * Retrieve an array of available capability files from the Refstack - * API server, sort this array reverse-alphabetically, and store it in - * a scoped variable. 
- * Sample API return array: ["2015.03.json", "2015.04.json"] - */ - function getVersionList() { - if (ctrl.versionList) { - return; - } - var content_url = refstackApiUrl + '/guidelines'; - ctrl.versionsRequest = - $http.get(content_url).success(function (data) { - // NEED TO sort after grabbing the GL_TYPE DATA - let gl_files = data[ctrl.gl_type]; - ctrl.versionList = gl_files.map((gl_obj) => gl_obj.name); - ctrl.version = ctrl.versionList[1]; - }).error(function (error) { - raiseAlert('danger', error.title, - 'Unable to retrieve version list'); - }); - } - - /** - * Get products user has management rights to or all products depending - * on the passed in parameter value. - */ - function getUserProducts() { - if (ctrl.products) { - return; - } - var contentUrl = refstackApiUrl + '/products'; - ctrl.productsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.products = {}; - angular.forEach(data.products, function(prod) { - if (prod.can_manage) { - ctrl.products[prod.id] = prod; - } - }); - }).error(function (error) { - ctrl.products = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving Products listing from server: ' + - angular.toJson(error); - }); - } - - /** - * This will contact the Refstack API to get a listing of - * vendors. - */ - function getVendors() { - var contentUrl = refstackApiUrl + '/vendors'; - ctrl.vendorsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.vendors = {}; - data.vendors.forEach(function(vendor) { - ctrl.vendors[vendor.id] = vendor; - }); - }).error(function (error) { - ctrl.vendors = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving vendor listing from server: ' + - angular.toJson(error); - }); - } - - /** - * Send a PUT request to the API server to associate a product with - * a test result. - */ - function associateProductVersion(result) { - var verId = result.selectedVersion ? - result.selectedVersion.id : null; - var testId = result.id; - var url = refstackApiUrl + '/results/' + testId; - ctrl.associateRequest = $http.put(url, {'product_version_id': - verId}) - .success(function () { - result.product_version = result.selectedVersion; - if (result.selectedVersion) { - result.product_version.product_info = - result.selectedProduct; - } - result.productEdit = false; - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * Get all versions for a product. - */ - function getProductVersions(result) { - if (!result.selectedProduct) { - result.productVersions = []; - result.selectedVersion = null; - return; - } - - var url = refstackApiUrl + '/products/' + - result.selectedProduct.id + '/versions'; - ctrl.getVersionsRequest = $http.get(url) - .success(function (data) { - result.productVersions = data; - - // If the test result isn't already associated to a - // version, default it to the null version. - if (!result.product_version) { - angular.forEach(data, function(ver) { - if (!ver.version) { - result.selectedVersion = ver; - } - }); - } - }).error(function (error) { - raiseAlert('danger', error.title, error.detail); - }); - } - - /** - * Instantiate variables needed for editing product/version - * associations. 
- */ - function prepVersionEdit(result) { - result.productEdit = true; - if (result.product_version) { - result.selectedProduct = - ctrl.products[result.product_version.product_info.id]; - } - result.selectedVersion = result.product_version; - ctrl.getProductVersions(result); - } - - } -})(); diff --git a/refstack-ui/app/components/vendors/partials/vendorEditModal.html b/refstack-ui/app/components/vendors/partials/vendorEditModal.html deleted file mode 100644 index 23eded96..00000000 --- a/refstack-ui/app/components/vendors/partials/vendorEditModal.html +++ /dev/null @@ -1,62 +0,0 @@ - diff --git a/refstack-ui/app/components/vendors/vendor.html b/refstack-ui/app/components/vendors/vendor.html deleted file mode 100644 index 63002ed0..00000000 --- a/refstack-ui/app/components/vendors/vendor.html +++ /dev/null @@ -1,115 +0,0 @@ -
- Vendor ID: {{ctrl.vendorId}}
- Type: OpenStack / Private / Pending Approval / Official
- Name: {{ctrl.vendor.name}}
- Description: {{ctrl.vendor.description || '-'}}
- Properties:
  • {{key}}: {{value}}
- Registration decline reason: {{ctrl.vendorProperties.registration_decline_reason}}
Vendor Users
Open ID | Full Name | E-Mail
{{user.openid}} | {{user.fullname}} | {{user.email}} | Remove
Vendor Products
Name | Product Type | Description | Visibility
{{product.name}} | {{ctrl.getProductTypeDescription(product.product_type)}} | {{product.description}} | {{product.public ? 'Public' : 'Private'}}
- - diff --git a/refstack-ui/app/components/vendors/vendorController.js b/refstack-ui/app/components/vendors/vendorController.js deleted file mode 100644 index ec128c26..00000000 --- a/refstack-ui/app/components/vendors/vendorController.js +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('VendorController', VendorController); - - VendorController.$inject = [ - '$rootScope', '$scope', '$http', '$state', '$stateParams', '$window', - '$uibModal', 'refstackApiUrl', 'raiseAlert', 'confirmModal' - ]; - - /** - * RefStack Vendor Controller - * This controller is for the '/vendor/' details page where owner can - * view details of the Vendor and manage users. - */ - function VendorController($rootScope, $scope, $http, $state, $stateParams, - $window, $uibModal, refstackApiUrl, raiseAlert, confirmModal) { - var ctrl = this; - - ctrl.getVendor = getVendor; - ctrl.getVendorUsers = getVendorUsers; - ctrl.getVendorProducts = getVendorProducts; - ctrl.getProductTypeDescription = getProductTypeDescription; - ctrl.registerVendor = registerVendor; - ctrl.approveVendor = approveVendor; - ctrl.declineVendor = declineVendor; - ctrl.deleteVendor = deleteVendor; - ctrl.removeUserFromVendor = removeUserFromVendor; - ctrl.addUserToVendor = addUserToVendor; - ctrl.openVendorEditModal = openVendorEditModal; - - /** The vendor id extracted from the URL route. */ - ctrl.vendorId = $stateParams.vendorID; - - // Should only be on user-vendors-page if authenticated. - if (!$scope.auth.isAuthenticated) { - $state.go('home'); - } - - ctrl.getVendor(); - ctrl.getVendorUsers(); - ctrl.getVendorProducts(); - - /** - * This will contact the Refstack API to get a vendor information. - */ - function getVendor() { - ctrl.showError = false; - ctrl.vendor = null; - // Construct the API URL based on user-specified filters. - var contentUrl = refstackApiUrl + '/vendors/' + ctrl.vendorId; - ctrl.vendorRequest = - $http.get(contentUrl).success(function(data) { - ctrl.vendor = data; - var isAdmin = $rootScope.auth.currentUser.is_admin; - ctrl.vendor.canDelete = ctrl.vendor.canEdit = - ctrl.vendor.type !== 0 - && (ctrl.vendor.can_manage || isAdmin); - ctrl.vendor.canRegister = - ctrl.vendor.type === 1; - ctrl.vendor.canApprove = isAdmin; - ctrl.vendorProperties = angular.fromJson(data.properties); - }).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving from server: ' + - angular.toJson(error); - }); - } - - /** - * This will 'send' application for registration. - */ - function registerVendor() { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId, - '/action'].join(''); - $http.post(url, {register: null}).success(function() { - ctrl.getVendor(); - }).error(function(error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * This will approve application for registration. 
- */ - function approveVendor() { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId, - '/action'].join(''); - $http.post(url, {approve: null}).success(function() { - ctrl.getVendor(); - }).error(function(error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * This will decline a vendor's application for registration. - */ - function declineVendor() { - confirmModal('Please input decline reason', function(reason) { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId, - '/action'].join(''); - var content = {deny: null, registration_decline_reason: reason}; - $http.post(url, content).success( - function() { - ctrl.getVendor(); - }).error(function(error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - }); - } - - /** - * Delete the current vendor. - */ - function deleteVendor() { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId].join(''); - $http.delete(url).success(function () { - $window.location.href = '/'; - }).error(function (error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * Updates list of users in the vendor's group - */ - function getVendorUsers() { - ctrl.showError = false; - var contentUrl = refstackApiUrl + '/vendors/' + ctrl.vendorId - + '/users'; - ctrl.usersRequest = - $http.get(contentUrl).success(function(data) { - ctrl.vendorUsers = data; - ctrl.currentUser = $rootScope.auth.currentUser.openid; - }).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving from server: ' + - angular.toJson(error); - }); - } - - /** - * Updates list of users in the vendor's group - */ - function getVendorProducts() { - ctrl.showError = false; - var contentUrl = refstackApiUrl + '/products?organization_id=' - + ctrl.vendorId; - ctrl.productsRequest = - $http.get(contentUrl).success(function(data) { - ctrl.vendorProducts = data.products; - }).error(function(error) { - ctrl.showError = true; - ctrl.error = - 'Error retrieving from server: ' + - angular.toJson(error); - }); - } - - /** - * Get the product type description given the type integer. - */ - function getProductTypeDescription(product_type) { - switch (product_type) { - case 0: - return 'Distro'; - case 1: - return 'Public Cloud'; - case 2: - return 'Hosted Private Cloud'; - default: - return 'Unknown'; - } - } - - /** - * Removes user with specific openid from vendor's group - * @param {Object} openid - */ - function removeUserFromVendor(openid) { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId, - '/users/', btoa(openid)].join(''); - $http.delete(url).success(function () { - ctrl.getVendorUsers(); - }).error(function (error) { - raiseAlert('danger', 'Error: ', error.detail); - }); - } - - /** - * Adds a user to a vendor group given an Open ID. - * @param {Object} openid - */ - function addUserToVendor(openid) { - var url = [refstackApiUrl, '/vendors/', ctrl.vendorId, - '/users/', btoa(openid)].join(''); - $http.put(url).success(function() { - ctrl.userToAdd = ''; - ctrl.getVendorUsers(); - }).error(function(error) { - raiseAlert('danger', 'Problem adding user. ' + - 'Is the Open ID correct? 
Error: ', - error.detail); - }); - } - - /** - * This will open the modal that will allow a user to edit - */ - function openVendorEditModal() { - $uibModal.open({ - templateUrl: '/components/vendors/partials' + - '/vendorEditModal.html', - backdrop: true, - windowClass: 'modal', - animation: true, - controller: 'VendorEditModalController as modal', - size: 'lg', - resolve: { - vendor: function () { - return ctrl.vendor; - } - } - }); - } - } - - angular - .module('refstackApp') - .controller('VendorEditModalController', VendorEditModalController); - - VendorEditModalController.$inject = [ - '$rootScope', - '$uibModalInstance', '$http', '$state', 'vendor', 'refstackApiUrl' - ]; - - /** - * Vendor Edit Modal Controller - * This controls the modal that allows editing a vendor. - */ - function VendorEditModalController($rootScope, $uibModalInstance, $http, - $state, vendor, refstackApiUrl) { - - var ctrl = this; - - ctrl.close = close; - ctrl.addField = addField; - ctrl.saveChanges = saveChanges; - ctrl.removeProperty = removeProperty; - - ctrl.vendor = angular.copy(vendor); - ctrl.vendorName = vendor.name; - ctrl.vendorProperties = []; - ctrl.isAdmin = $rootScope.auth.currentUser.is_admin; - - parseVendorProperties(); - - /** - * Close the vendor edit modal. - */ - function close() { - $uibModalInstance.dismiss('exit'); - } - - /** - * Push a blank property key-value pair into the vendorProperties - * array. This will spawn new input boxes. - */ - function addField() { - ctrl.vendorProperties.push({'key': '', 'value': ''}); - } - - /** - * Send a PUT request to the server with the changes. - */ - function saveChanges() { - ctrl.showError = false; - ctrl.showSuccess = false; - var url = [refstackApiUrl, '/vendors/', ctrl.vendor.id].join(''); - var properties = propertiesToJson(); - var content = {'description': ctrl.vendor.description, - 'properties': properties}; - if (ctrl.vendorName !== ctrl.vendor.name) { - content.name = ctrl.vendor.name; - } - $http.put(url, content).success(function() { - ctrl.showSuccess = true; - $state.reload(); - }).error(function(error) { - ctrl.showError = true; - ctrl.error = error.detail; - }); - } - - /** - * Remove a property from the vendorProperties array at the given index. - */ - function removeProperty(index) { - ctrl.vendorProperties.splice(index, 1); - } - - /** - * Parse the vendor properties and put them in a format more suitable - * for forms. - */ - function parseVendorProperties() { - var props = angular.fromJson(ctrl.vendor.properties); - angular.forEach(props, function(value, key) { - ctrl.vendorProperties.push({'key': key, 'value': value}); - }); - } - - /** - * Convert the list of property objects to a dict containing the - * each key-value pair.. - */ - function propertiesToJson() { - var properties = {}; - for (var i = 0, len = ctrl.vendorProperties.length; i < len; i++) { - var prop = ctrl.vendorProperties[i]; - if (prop.key && prop.value) { - properties[prop.key] = prop.value; - } - } - return properties; - } - } -})(); diff --git a/refstack-ui/app/components/vendors/vendors.html b/refstack-ui/app/components/vendors/vendors.html deleted file mode 100644 index a336b174..00000000 --- a/refstack-ui/app/components/vendors/vendors.html +++ /dev/null @@ -1,75 +0,0 @@ -

{{ctrl.pageHeader}}
{{ctrl.pageParagraph}}
Name | Description | Type
{{vendor.name}} | {{vendor.description || '-'}} | OpenStack / Private / Pending Approval / Official

Add New Vendor
Creating a vendor allows you to associate test results to specific vendors/products. Created vendors are private, but vendors can be registered with the Foundation to become public and official. This will require approval by a Foundation administrator.
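The create, register, and approve steps described above map onto three API calls that the removed vendorsController.js and vendorController.js made against the Refstack API. The following is a minimal standalone sketch of that flow and is not part of the original template: it assumes the API base URL from config.json.sample, substitutes plain fetch() for the removed AngularJS $http wrappers, assumes the create call returns the new vendor record (including its id), and assumes an authenticated session with Foundation-admin rights for the approval step.

// Hypothetical sketch of the vendor lifecycle. Endpoint paths and payloads
// mirror addVendor(), registerVendor() and approveVendor() in the controllers
// deleted by this change; the base URL and response shapes are assumptions.
const apiUrl = 'https://refstack.openstack.org/api/v1';

async function post(path, body) {
    const resp = await fetch(apiUrl + path, {
        method: 'POST',
        headers: {'Content-Type': 'application/json'},
        body: JSON.stringify(body),
        credentials: 'include'  // the vendor endpoints require an authenticated session
    });
    if (!resp.ok) {
        throw new Error('POST ' + path + ' failed with HTTP ' + resp.status);
    }
    return resp;
}

async function createAndRegisterVendor() {
    // 1. Create a vendor; it starts out private.
    const created = await post('/vendors', {
        name: 'Example Vendor',
        description: 'Example description'
    });
    const vendor = await created.json();  // assumed to include the new vendor's id
    // 2. Apply for official registration with the Foundation.
    await post('/vendors/' + vendor.id + '/action', {register: null});
    // 3. A Foundation administrator then approves (or declines) the application.
    await post('/vendors/' + vendor.id + '/action', {approve: null});
}

createAndRegisterVendor().catch(console.error);

In the deleted UI these steps were split between the Add New Vendor form on this page and the register/approve actions handled on the vendor detail page.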
Success: Vendor successfully created.
- - diff --git a/refstack-ui/app/components/vendors/vendorsController.js b/refstack-ui/app/components/vendors/vendorsController.js deleted file mode 100644 index 2ed14b18..00000000 --- a/refstack-ui/app/components/vendors/vendorsController.js +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('VendorsController', VendorsController); - - VendorsController.$inject = [ - '$rootScope', '$scope', '$http', '$state', 'refstackApiUrl']; - - /** - * RefStack Vendors Controller - * This controller is for the '/user_vendors' or '/public_vendors' page - * where a user can browse a listing of his/her vendors or public vendors. - */ - function VendorsController($rootScope, $scope, $http, $state, - refstackApiUrl) { - var ctrl = this; - - ctrl.update = update; - ctrl.updateData = updateData; - ctrl._filterVendor = _filterVendor; - ctrl.addVendor = addVendor; - - /** Check to see if this page should display user-specific vendors. */ - ctrl.isUserVendors = $state.current.name === 'userVendors'; - - /** Show private vendors in list for foundation admin */ - ctrl.withPrivate = false; - - /** Properties for adding new vendor */ - ctrl.name = ''; - ctrl.description = ''; - - // Should only be on user-vendors-page if authenticated. - if (ctrl.isUserVendors && !$scope.auth.isAuthenticated) { - $state.go('home'); - } - - ctrl.pageHeader = ctrl.isUserVendors ? - 'My Vendors' : 'Public Vendors'; - - ctrl.pageParagraph = ctrl.isUserVendors ? - 'Your added vendors are listed here.' : - 'Public Vendors approved by the OpenStack Foundation are ' + - 'listed here.'; - - if (ctrl.isUserVendors) { - ctrl.authRequest = $scope.auth.doSignCheck() - .then(ctrl.update); - } else { - ctrl.update(); - } - - ctrl.rawData = null; - ctrl.isAdminView = $rootScope.auth - && $rootScope.auth.currentUser - && $rootScope.auth.currentUser.is_admin; - - /** - * This will contact the Refstack API to get a listing of vendors - */ - function update() { - ctrl.showError = false; - ctrl.data = null; - // Construct the API URL based on user-specified filters. - var contentUrl = refstackApiUrl + '/vendors'; - if (typeof ctrl.rawData === 'undefined' - || ctrl.rawData === null) { - ctrl.vendorsRequest = - $http.get(contentUrl).success(function (data) { - ctrl.rawData = data; - ctrl.updateData(); - }).error(function (error) { - ctrl.rawData = null; - ctrl.showError = true; - ctrl.error = - 'Error retrieving vendors listing from server: ' + - angular.toJson(error); - }); - } else { - ctrl.updateData(); - } - } - - /** - * This will update data for view with current settings on page. 
- */ - function updateData() { - ctrl.data = {}; - ctrl.data.vendors = ctrl.rawData.vendors.filter(function(vendor) { - return ctrl._filterVendor(vendor); - }); - ctrl.data.vendors.sort(function(a, b) { - if (a.type > b.type) { - return 1; - } - if (a.type < b.type) { - return -1; - } - return a.name.localeCompare(b.name); - }); - } - - /** - * Returns true if vendor can be displayed on this page. - */ - function _filterVendor(vendor) { - if (!ctrl.isUserVendors) { - return vendor.type === 0 || vendor.type === 3; - } - - if (!$rootScope.auth || !$rootScope.auth.currentUser) { - return false; - } - - if ($rootScope.auth.currentUser.is_admin) { - return vendor.type !== 1 || ctrl.withPrivate; - } - - return vendor.can_manage; - } - - /** - * This will add a new vendor record. - */ - function addVendor() { - ctrl.showSuccess = false; - ctrl.showError = false; - var url = refstackApiUrl + '/vendors'; - var data = { - name: ctrl.name, - description: ctrl.description - }; - $http.post(url, data).success(function () { - ctrl.showSuccess = true; - ctrl.name = ''; - ctrl.description = ''; - ctrl.rawData = null; - ctrl.update(); - }).error(function (error) { - ctrl.showError = true; - ctrl.error = - 'Error adding new vendor: ' + angular.toJson(error); - }); - } - } -})(); diff --git a/refstack-ui/app/config.json.sample b/refstack-ui/app/config.json.sample deleted file mode 100644 index 7cdc52b6..00000000 --- a/refstack-ui/app/config.json.sample +++ /dev/null @@ -1 +0,0 @@ -{"refstackApiUrl": "https://refstack.openstack.org/api/v1"} diff --git a/refstack-ui/app/favicon-16x16.png b/refstack-ui/app/favicon-16x16.png deleted file mode 100755 index e08c8a15..00000000 Binary files a/refstack-ui/app/favicon-16x16.png and /dev/null differ diff --git a/refstack-ui/app/favicon-32x32.png b/refstack-ui/app/favicon-32x32.png deleted file mode 100755 index 7bf57e2f..00000000 Binary files a/refstack-ui/app/favicon-32x32.png and /dev/null differ diff --git a/refstack-ui/app/favicon.ico b/refstack-ui/app/favicon.ico deleted file mode 100644 index 156019aa..00000000 Binary files a/refstack-ui/app/favicon.ico and /dev/null differ diff --git a/refstack-ui/app/index.html b/refstack-ui/app/index.html deleted file mode 100644 index 0df25635..00000000 --- a/refstack-ui/app/index.html +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - - Refstack - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- - diff --git a/refstack-ui/app/robots.txt b/refstack-ui/app/robots.txt deleted file mode 100644 index 93c44208..00000000 --- a/refstack-ui/app/robots.txt +++ /dev/null @@ -1,4 +0,0 @@ -# robotstxt.org - -User-agent: * - diff --git a/refstack-ui/app/shared/alerts/alertModal.html b/refstack-ui/app/shared/alerts/alertModal.html deleted file mode 100644 index 59fd5001..00000000 --- a/refstack-ui/app/shared/alerts/alertModal.html +++ /dev/null @@ -1,8 +0,0 @@ - diff --git a/refstack-ui/app/shared/alerts/alertModalFactory.js b/refstack-ui/app/shared/alerts/alertModalFactory.js deleted file mode 100644 index 63dfbe5c..00000000 --- a/refstack-ui/app/shared/alerts/alertModalFactory.js +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .factory('raiseAlert', raiseAlert); - - raiseAlert.$inject = ['$uibModal']; - - /** - * This allows alert pop-ups to be raised. Just inject it as a dependency - * in the calling controller. - */ - function raiseAlert($uibModal) { - return function(mode, title, text) { - $uibModal.open({ - templateUrl: '/shared/alerts/alertModal.html', - controller: 'RaiseAlertModalController as alert', - backdrop: true, - keyboard: true, - backdropClick: true, - size: 'md', - resolve: { - data: function () { - return { - mode: mode, - title: title, - text: text - }; - } - } - }); - }; - } - - angular - .module('refstackApp') - .controller('RaiseAlertModalController', RaiseAlertModalController); - - RaiseAlertModalController.$inject = ['$uibModalInstance', 'data']; - - /** - * This is the controller for the alert pop-up. - */ - function RaiseAlertModalController($uibModalInstance, data) { - var ctrl = this; - - ctrl.close = close; - ctrl.data = data; - - /** - * This method will close the alert modal. The modal will close - * when the user clicks the close button or clicks outside of the - * modal. 
- */ - function close() { - $uibModalInstance.close(); - } - } -})(); diff --git a/refstack-ui/app/shared/alerts/confirmModal.html b/refstack-ui/app/shared/alerts/confirmModal.html deleted file mode 100644 index 82478a51..00000000 --- a/refstack-ui/app/shared/alerts/confirmModal.html +++ /dev/null @@ -1,13 +0,0 @@ - - - diff --git a/refstack-ui/app/shared/alerts/confirmModalFactory.js b/refstack-ui/app/shared/alerts/confirmModalFactory.js deleted file mode 100644 index 91e568ec..00000000 --- a/refstack-ui/app/shared/alerts/confirmModalFactory.js +++ /dev/null @@ -1,67 +0,0 @@ -(function () { - 'use strict'; - - angular - .module('refstackApp') - .factory('confirmModal', confirmModal); - - confirmModal.$inject = ['$uibModal']; - - /** - * Opens confirm modal dialog with input textbox - */ - function confirmModal($uibModal) { - return function(text, successHandler) { - $uibModal.open({ - templateUrl: '/shared/alerts/confirmModal.html', - controller: 'CustomConfirmModalController as confirmModal', - size: 'md', - resolve: { - data: function () { - return { - text: text, - successHandler: successHandler - }; - } - } - }); - }; - } - - angular - .module('refstackApp') - .controller('CustomConfirmModalController', - CustomConfirmModalController); - - CustomConfirmModalController.$inject = ['$uibModalInstance', 'data']; - - /** - * This is the controller for the alert pop-up. - */ - function CustomConfirmModalController($uibModalInstance, data) { - var ctrl = this; - - ctrl.confirm = confirm; - ctrl.cancel = cancel; - - ctrl.data = angular.copy(data); - - /** - * Initiate confirmation and call the success handler with the - * input text. - */ - function confirm() { - $uibModalInstance.close(); - if (angular.isDefined(ctrl.data.successHandler)) { - ctrl.data.successHandler(ctrl.inputText); - } - } - - /** - * Close the confirm modal without initiating changes. - */ - function cancel() { - $uibModalInstance.dismiss('cancel'); - } - } -})(); diff --git a/refstack-ui/app/shared/filters.js b/refstack-ui/app/shared/filters.js deleted file mode 100644 index e38789f5..00000000 --- a/refstack-ui/app/shared/filters.js +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - /** - * Convert an object of objects to an array of objects to use with ng-repeat - * filters. - */ - angular - .module('refstackApp') - .filter('arrayConverter', arrayConverter); - - /** - * Convert an object of objects to an array of objects to use with ng-repeat - * filters. - */ - function arrayConverter() { - return function (objects) { - var array = []; - angular.forEach(objects, function (object, key) { - if (!('id' in object)) { - object.id = key; - } - array.push(object); - }); - return array; - }; - } - - angular - .module('refstackApp') - .filter('capitalize', capitalize); - - /** - * Angular filter that will capitalize the first letter of a string. 
- */ - function capitalize() { - return function (string) { - return string.substring(0, 1).toUpperCase() + string.substring(1); - }; - } -})(); diff --git a/refstack-ui/app/shared/header/header.html b/refstack-ui/app/shared/header/header.html deleted file mode 100644 index d94390fb..00000000 --- a/refstack-ui/app/shared/header/header.html +++ /dev/null @@ -1,51 +0,0 @@ -
RefStack
- diff --git a/refstack-ui/app/shared/header/headerController.js b/refstack-ui/app/shared/header/headerController.js deleted file mode 100644 index ca1ce558..00000000 --- a/refstack-ui/app/shared/header/headerController.js +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -(function () { - 'use strict'; - - angular - .module('refstackApp') - .controller('HeaderController', HeaderController); - - HeaderController.$inject = ['$location']; - - /** - * Refstack Header Controller - * This controller is for the header template which contains the site - * navigation. - */ - function HeaderController($location) { - var ctrl = this; - - ctrl.isActive = isActive; - ctrl.isCatalogActive = isCatalogActive; - - /** Whether the Navbar is collapsed for small displays. */ - ctrl.navbarCollapsed = true; - - /** - * This determines whether a button should be in the active state based - * on the URL. - */ - function isActive(viewLocation) { - var path = $location.path().substr(0, viewLocation.length); - if (path === viewLocation) { - // Make sure "/" only matches when viewLocation is "/". - if (!($location.path().substr(0).length > 1 && - viewLocation.length === 1)) { - return true; - } - } - return false; - } - - /** This determines the active state for the catalog dropdown. Type - * parameter should be passed in to specify if the catalog is the - * public or user one. - */ - function isCatalogActive(type) { - return ctrl.isActive('/' + type + '_vendors') - || ctrl.isActive('/' + type + '_products'); - } - } -})(); diff --git a/refstack-ui/tests/karma.conf.js b/refstack-ui/tests/karma.conf.js deleted file mode 100644 index 8c486122..00000000 --- a/refstack-ui/tests/karma.conf.js +++ /dev/null @@ -1,44 +0,0 @@ -module.exports = function (config) { - 'use strict'; - - config.set({ - - basePath: '../', - - files: [ - // Angular libraries. - 'app/assets/lib/angular/angular.js', - 'app/assets/lib/angular-ui-router/release/angular-ui-router.js', - 'app/assets/lib/angular-mocks/angular-mocks.js', - 'app/assets/lib/angular-bootstrap/ui-bootstrap-tpls.min.js', - 'app/assets/lib/angular-busy/dist/angular-busy.min.js', - 'app/assets/lib/angular-resource/angular-resource.min.js', - 'app/assets/lib/angular-confirm-modal/angular-confirm.js', - // JS files. - 'app/app.js', - 'app/components/**/*.js', - 'app/shared/*.js', - 'app/shared/**/*.js', - - // Test Specs. 
- 'tests/unit/*.js' - ], - - autoWatch: true, - - frameworks: ['jasmine'], - - browsers: ['Chrome'], - - plugins: [ - 'karma-chrome-launcher', - 'karma-jasmine' - ], - - junitReporter: { - outputFile: 'test_out/unit.xml', - suite: 'unit' - } - - }); -}; diff --git a/refstack-ui/tests/unit/AuthSpec.js b/refstack-ui/tests/unit/AuthSpec.js deleted file mode 100644 index 549deff2..00000000 --- a/refstack-ui/tests/unit/AuthSpec.js +++ /dev/null @@ -1,39 +0,0 @@ -describe('Auth', function () { - 'use strict'; - - var fakeApiUrl = 'http://foo.bar/v1'; - var $window, $rootScope, $httpBackend; - beforeEach(function () { - $window = {location: { href: jasmine.createSpy()} }; - module(function ($provide) { - $provide.constant('refstackApiUrl', fakeApiUrl); - $provide.value('$window', $window); - }); - module('refstackApp'); - inject(function (_$httpBackend_, _$rootScope_) { - $httpBackend = _$httpBackend_; - $rootScope = _$rootScope_; - }); - $httpBackend.whenGET('/components/home/home.html') - .respond('
mock template
'); - }); - it('should show signin url for signed user', function () { - $httpBackend.expectGET(fakeApiUrl + - '/profile').respond({'openid': 'foo@bar.com', - 'email': 'foo@bar.com', - 'fullname': 'foo' }); - $httpBackend.flush(); - $rootScope.auth.doSignIn(); - expect($window.location.href).toBe(fakeApiUrl + '/auth/signin'); - expect($rootScope.auth.isAuthenticated).toBe(true); - }); - - it('should show signout url for not signed user', function () { - $httpBackend.expectGET(fakeApiUrl + - '/profile').respond(401); - $httpBackend.flush(); - $rootScope.auth.doSignOut(); - expect($window.location.href).toBe(fakeApiUrl + '/auth/signout'); - expect($rootScope.auth.isAuthenticated).toBe(false); - }); -}); diff --git a/refstack-ui/tests/unit/ControllerSpec.js b/refstack-ui/tests/unit/ControllerSpec.js deleted file mode 100644 index 74210a8f..00000000 --- a/refstack-ui/tests/unit/ControllerSpec.js +++ /dev/null @@ -1,1758 +0,0 @@ -/** Jasmine specs for Refstack controllers */ -describe('Refstack controllers', function () { - 'use strict'; - - var fakeApiUrl = 'http://foo.bar/v1'; - var $httpBackend; - beforeEach(function () { - module(function ($provide) { - $provide.constant('refstackApiUrl', fakeApiUrl); - }); - module('refstackApp'); - inject(function(_$httpBackend_) { - $httpBackend = _$httpBackend_; - }); - $httpBackend.whenGET(fakeApiUrl + '/profile').respond(401); - $httpBackend.whenGET('/components/home/home.html') - .respond('
mock template
'); - }); - - describe('HeaderController', function () { - var $location, ctrl; - - beforeEach(inject(function ($controller, _$location_) { - $location = _$location_; - ctrl = $controller('HeaderController', {}); - })); - - it('should set "navbarCollapsed" to true', function () { - expect(ctrl.navbarCollapsed).toBe(true); - }); - - it('should have a function to check if the URL path is active', - function () { - $location.path('/'); - expect($location.path()).toBe('/'); - expect(ctrl.isActive('/')).toBe(true); - expect(ctrl.isActive('/about')).toBe(false); - - $location.path('/results?cpid=123&foo=bar'); - expect($location.path()).toBe('/results?cpid=123&foo=bar'); - expect(ctrl.isActive('/results')).toBe(true); - }); - }); - - describe('LogoutController', function () { - var $location, ctrl; - - beforeEach(inject(function ($controller, _$location_) { - $location = _$location_; - $location.url('/logout?openid_logout=some_url'); - ctrl = $controller('LogoutController', {}); - })); - - it('should set the openID logout URL based on query string', - function () { - expect($location.url()).toBe('/logout?openid_logout=some_url'); - expect(ctrl.openid_logout_url).toBe('some_url'); - }); - }); - - describe('AboutController', function () { - var $location, ctrl; - - beforeEach(inject(function ($controller, _$location_) { - $location = _$location_; - ctrl = $controller('AboutController', {}); - ctrl.options = { - 'about' : { - 'title': 'About RefStack', - 'template': 'about-template' - }, - 'option1' : { - 'title': 'Option One', - 'template': 'template-1' - } - }; - })); - - it('should have a function to select an option', - function () { - ctrl.selectOption('option1'); - expect(ctrl.selected).toBe('option1'); - expect(ctrl.template).toBe('template-1'); - expect($location.hash()).toBe('option1'); - }); - - it('should have a function to get the URL hash and select it', - function () { - // Test existing option. - $location.url('/about#option1'); - ctrl.getHash(); - expect(ctrl.selected).toBe('option1'); - expect(ctrl.template).toBe('template-1'); - - // Test nonexistent option. - $location.url('/about#foobar'); - ctrl.getHash(); - expect(ctrl.selected).toBe('about'); - expect(ctrl.template).toBe('about-template'); - }); - - }); - - describe('GuidelinesController', function () { - var ctrl; - - beforeEach(inject(function ($controller) { - ctrl = $controller('GuidelinesController', {}); - })); - - it('should set default states', function () { - expect(ctrl.target).toBe('platform'); - expect(ctrl.status).toEqual({ - required: true, advisory: false, - deprecated: false, removed: false - }); - }); - - it('should fetch the selected capabilities version and sort a ' + - 'program\'s capabilities into an object', - function () { - var fakeCaps = { - 'schema': '1.3', - 'status': 'approved', - 'platform': {'required': ['compute']}, - 'components': { - 'compute': { - 'required': ['cap_id_1'], - 'advisory': ['cap_id_2'], - 'deprecated': ['cap_id_3'], - 'removed': ['cap_id_4'] - } - } - }; - let get_gl_resp = { - 'powered': [ - {'name': 'next.json', 'file': 'next.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'}, - {'name': '2015.03.json', 'file': '2015.03.json'} - ] - }; - $httpBackend.expectGET(fakeApiUrl + '/guidelines').respond( - get_gl_resp); - // Should call request with latest version. - $httpBackend.expectGET( - fakeApiUrl + '/guidelines/2015.04.json').respond(fakeCaps); - $httpBackend.flush(); - // The version list should be sorted latest first. 
- let expected_version_list = [ - {'name': 'next.json', 'file': 'next.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'}, - {'name': '2015.03.json', 'file': '2015.03.json'} - ]; - expect(ctrl.versionList).toEqual(expected_version_list); - - expect(ctrl.guidelines).toEqual(fakeCaps); - // The guideline status should be approved. - expect(ctrl.guidelineStatus).toEqual('approved'); - var expectedTargetCaps = { - 'cap_id_1': 'required', - 'cap_id_2': 'advisory', - 'cap_id_3': 'deprecated', - 'cap_id_4': 'removed' - }; - expect(ctrl.targetCapabilities).toEqual(expectedTargetCaps); - }); - - it('should be able to handle guidelines using schema 2.0', - function () { - var fakeCaps = { - 'metadata': { - 'id': '2017.08', - 'schema': '2.0', - 'scoring': {}, - 'os_trademark_approval': { - 'target_approval': '2017.08', - 'replaces': '2017.01', - 'releases': ['newton', 'ocata', 'pike'], - 'status': 'approved' - } - }, - 'platforms': { - 'OpenStack Powered Platform': { - 'description': 'foo bar', - 'components': [ - { 'name': 'os_powered_compute' }, - { 'name': 'os_powered_storage' } - ] - } - }, - 'components': { - 'os_powered_compute': { - 'capabilities': { - 'required': ['cap_id_1'], - 'advisory': ['cap_id_2'], - 'deprecated': ['cap_id_3'], - 'removed': ['cap_id_4'] - } - }, - 'os_powered_storage': { - 'capabilities': { - 'required': ['cap_id_5'], - 'advisory': ['cap_id_6'], - 'deprecated': ['cap_id_7'], - 'removed': ['cap_id_8'] - } - } - } - }; - - $httpBackend.expectGET(fakeApiUrl + - '/guidelines').respond({ - 'powered': [ - {'name': 'next.json', 'file': 'next.json'}, - {'name': '2015.03.json', 'file': '2015.03.json'}, - {'name': '2017.08.json', 'file': '2017.08.json'} - ] - }); - - // Should call request with latest version. - $httpBackend.expectGET(fakeApiUrl + - '/guidelines/2017.08.json').respond(fakeCaps); - $httpBackend.flush(); - - ctrl.update(); - // The version list should be sorted latest first. - expect(ctrl.guidelines).toEqual(fakeCaps); - // The guideline status should be approved. - expect(ctrl.guidelineStatus).toEqual('approved'); - var expectedTargetCaps = { - 'cap_id_1': 'required', - 'cap_id_2': 'advisory', - 'cap_id_3': 'deprecated', - 'cap_id_4': 'removed', - 'cap_id_5': 'required', - 'cap_id_6': 'advisory', - 'cap_id_7': 'deprecated', - 'cap_id_8': 'removed' - }; - expect(ctrl.targetCapabilities).toEqual(expectedTargetCaps); - }); - - it('should have a function to check if a capability status is selected', - function () { - ctrl.targetCapabilities = { - 'cap_id_1': 'required', - 'cap_id_2': 'advisory', - 'cap_id_3': 'deprecated', - 'cap_id_4': 'removed' - }; - - // Expect only the required capability to return true. - expect(ctrl.filterStatus({'id': 'cap_id_1'})).toBe(true); - expect(ctrl.filterStatus({'id': 'cap_id_2'})).toBe(false); - expect(ctrl.filterStatus({'id': 'cap_id_3'})).toBe(false); - expect(ctrl.filterStatus({'id': 'cap_id_4'})).toBe(false); - - ctrl.status = { - required: true, - advisory: true, - deprecated: true, - removed: true - }; - - // Every capability should return true now. 
- expect(ctrl.filterStatus({'id': 'cap_id_1'})).toBe(true); - expect(ctrl.filterStatus({'id': 'cap_id_2'})).toBe(true); - expect(ctrl.filterStatus({'id': 'cap_id_3'})).toBe(true); - expect(ctrl.filterStatus({'id': 'cap_id_4'})).toBe(true); - }); - - it('should have a function to get the length of an object/dict', - function () { - var testObject = { - 'test_id_1': { - 'idempotent_id': 'id-1234' - }, - 'test_id_2': { - 'idempotent_id': 'id-5678' - } - }; - expect(ctrl.getObjectLength(testObject)).toBe(2); - }); - - it('should have a method to open a modal for the relevant test list', - function () { - var modal; - inject(function ($uibModal) { - modal = $uibModal; - }); - spyOn(modal, 'open'); - ctrl.openTestListModal(); - expect(modal.open).toHaveBeenCalled(); - }); - }); - - describe('TestListModalController', function () { - var modalInstance, ctrl; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - ctrl = $controller('TestListModalController', - {$uibModalInstance: modalInstance, - target: 'platform', - version: '2016.01', - version_file: '2016.01.json', - status: {required: true, advisory: false}} - ); - })); - - it('should have a method to close the modal', - function () { - ctrl.close(); - expect(modalInstance.dismiss).toHaveBeenCalledWith('exit'); - }); - - it('should have a method to download the test list string', - function () { - var fakeResp = 'test1\ntest2\ntest3'; - $httpBackend.expectGET(fakeApiUrl + - '/guidelines/2016.01.json/tests?target=platform&' + - 'type=required&alias=true&flag=false').respond(fakeResp); - $httpBackend.flush(); - ctrl.updateTestListString(); - expect(ctrl.testListCount).toBe(3); - }); - }); - - describe('ResultsController', function () { - var scope, ctrl; - var fakeResponse = { - 'pagination': {'current_page': 1, 'total_pages': 2}, - 'results': [{ - 'created_at': '2015-03-09 01:23:45', - 'id': 'some-id', - 'cpid': 'some-cpid' - }] - }; - var fakeVendorResp = { - 'vendors': [{'id': 'fakeid', 'name': 'Foo Vendor'}] - }; - - beforeEach(inject(function ($rootScope, $controller) { - scope = $rootScope.$new(); - ctrl = $controller('ResultsController', {$scope: scope}); - $httpBackend.when('GET', fakeApiUrl + - '/results?page=1').respond(fakeResponse); - $httpBackend.when('GET', fakeApiUrl + - '/vendors').respond(fakeVendorResp); - })); - - it('should fetch the first page of results with proper URL args', - function () { - // Initial results should be page 1 of all results. - $httpBackend.expectGET(fakeApiUrl + '/results?page=1') - .respond(fakeResponse); - $httpBackend.flush(); - expect(ctrl.data).toEqual(fakeResponse); - expect(ctrl.currentPage).toBe(1); - - // Simulate the user adding date filters. 
- ctrl.startDate = new Date('2015-03-10T11:51:00'); - ctrl.endDate = new Date('2015-04-10T11:51:00'); - ctrl.update(); - $httpBackend.expectGET(fakeApiUrl + - '/results?page=1' + - '&start_date=2015-03-10 00:00:00' + - '&end_date=2015-04-10 23:59:59') - .respond(fakeResponse); - $httpBackend.flush(); - expect(ctrl.data).toEqual(fakeResponse); - expect(ctrl.currentPage).toBe(1); - }); - - it('should set an error when results cannot be retrieved', function () { - $httpBackend.expectGET(fakeApiUrl + '/results?page=1').respond(404, - {'detail': 'Not Found'}); - $httpBackend.flush(); - expect(ctrl.data).toBe(null); - expect(ctrl.error).toEqual('Error retrieving results listing ' + - 'from server: {"detail":"Not Found"}'); - expect(ctrl.totalItems).toBe(0); - expect(ctrl.showError).toBe(true); - }); - - it('should have an function to clear filters and update the view', - function () { - ctrl.startDate = 'some date'; - ctrl.endDate = 'some other date'; - ctrl.clearFilters(); - expect(ctrl.startDate).toBe(null); - expect(ctrl.endDate).toBe(null); - }); - - it('should have a function to associate metadata to a test run', - function () { - $httpBackend.expectGET(fakeApiUrl + '/results?page=1') - .respond(fakeResponse); - ctrl.data = fakeResponse; - ctrl.data.results[0].targetEdit = true; - ctrl.associateMeta(0, 'target', 'platform'); - $httpBackend.expectPOST( - fakeApiUrl + '/results/some-id/meta/target', - 'platform') - .respond(201, ''); - $httpBackend.flush(); - expect(ctrl.data.results[0].targetEdit).toBe(false); - }); - - it('should have a function to delete metadata from a test run', - function () { - $httpBackend.expectGET(fakeApiUrl + '/results?page=1') - .respond(fakeResponse); - ctrl.data = fakeResponse; - ctrl.data.results[0].targetEdit = true; - ctrl.associateMeta(0, 'target', ''); - $httpBackend.expectDELETE( - fakeApiUrl + '/results/some-id/meta/target') - .respond(200, ''); - $httpBackend.flush(); - expect(ctrl.data.results[0].targetEdit).toBe(false); - }); - - it('should have a function to get guideline versions', - function () { - $httpBackend.expectGET(fakeApiUrl + '/results?page=1') - .respond(fakeResponse); - var expectedResponse = { - 'powered': [ - {'name': '2015.03.json', 'file': '2015.03.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'} - ] - }; - $httpBackend.expectGET(fakeApiUrl + - '/guidelines').respond(expectedResponse); - ctrl.getVersionList(); - $httpBackend.flush(); - // Expect the list to have the latest guideline first. 
- let gl_names = - expectedResponse.powered.map((gl_obj) => gl_obj.name); - let expectedVersionList = - gl_names.sort(); - if (typeof ctrl.versionList !== 'undefined') { - expect(ctrl.versionList).toEqual(expectedVersionList); - } - }); - - it('should have a function to get products manageable by a user', - function () { - var prodResp = {'products': [ - {'id': 'abc', 'can_manage': true}, - {'id': 'foo', 'can_manage': false}]}; - ctrl.products = null; - $httpBackend.expectGET(fakeApiUrl + '/products') - .respond(200, prodResp); - ctrl.getUserProducts(); - $httpBackend.flush(); - var expected = {'abc': {'id': 'abc', 'can_manage': true}}; - expect(ctrl.products).toEqual(expected); - }); - - it('should have a function to get a listing of vendors', - function () { - $httpBackend.expectGET(fakeApiUrl + '/vendors') - .respond(fakeVendorResp); - ctrl.getVendors(); - $httpBackend.flush(); - var expected = fakeVendorResp.vendors[0]; - expect(ctrl.vendors.fakeid).toEqual(expected); - }); - - it('should have a function to associate a product version to a test', - function () { - var result = {'id': 'bar', - 'selectedVersion': {'id': 'foo'}, - 'selectedProduct': {'id': 'prod'}}; - ctrl.products = null; - $httpBackend.expectPUT(fakeApiUrl + '/results/bar') - .respond(201); - ctrl.associateProductVersion(result); - $httpBackend.flush(); - var expected = {'id': 'foo', 'product_info': {'id': 'prod'}}; - expect(result.product_version).toEqual(expected); - }); - - it('should have a function to get product versions', - function () { - var result = {'id': 'bar', - 'selectedProduct': {'id': 'prod'}}; - var verResp = [{'id': 'ver1', 'version': '1.0'}, - {'id': 'ver2', 'version': null}]; - ctrl.products = null; - $httpBackend.expectGET(fakeApiUrl + '/products/prod/versions') - .respond(200, verResp); - ctrl.getProductVersions(result); - $httpBackend.flush(); - expect(result.productVersions).toEqual(verResp); - var expected = {'id': 'ver2', 'version': null}; - expect(result.selectedVersion).toEqual(expected); - }); - }); - - describe('ResultsReportController', function () { - var stateparams, ctrl; - var fakeResultResponse = {'results': ['test_id_1'], 'meta': { - 'public_key': 'ssh-rsa', 'guideline': '2015.04.json', 'target': - 'object' - }}; - var fakeCapabilityResponse = { - 'platform': {'required': ['compute']}, - 'schema': '1.2', - 'status': 'approved', - 'components': { - 'compute': { - 'required': ['cap_id_1'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - } - }, - 'capabilities': { - 'cap_id_1': { - 'flagged': [ 'test_id_1'], - 'tests': ['test_id_1', 'test_id_2'] - } - } - }; - var fakeGuidelinesListResponse = { - 'powered': [ - {'name': 'next.json', 'file': 'next.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'}, - {'name': '2015.03.json', 'file': '2015.03.json'} - ] - }; - - beforeEach(inject(function ($controller) { - stateparams = {testID: 1234}; - ctrl = $controller('ResultsReportController', - {$stateParams: stateparams} - ); - $httpBackend.when('GET', fakeApiUrl + - '/results/1234').respond(fakeResultResponse); - $httpBackend.when('GET', fakeApiUrl + - '/guidelines').respond(fakeGuidelinesListResponse); - $httpBackend.when('GET', fakeApiUrl + - '/guidelines/2015.04.json').respond(fakeCapabilityResponse); - })); - - it('should make all necessary API requests to get results ' + - 'and guidelines', - function () { - $httpBackend.expectGET(fakeApiUrl + - '/results/1234').respond(fakeResultResponse); - $httpBackend.expectGET(fakeApiUrl + - '/guidelines').respond({ - 'powered': [ - 
{'name': '2015.03.json', 'file': '2015.03.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'} - ] - }); - // Should call request with latest version. - $httpBackend.expectGET(fakeApiUrl + - '/guidelines/2015.04.json').respond(fakeCapabilityResponse); - $httpBackend.flush(); - expect(ctrl.resultsData).toEqual(fakeResultResponse); - // The version list should be sorted latest first. - let expected_version_list = ['2015.04.json', '2015.03.json']; - expect(ctrl.versionList).toEqual(expected_version_list); - expect(ctrl.guidelineData).toEqual(fakeCapabilityResponse); - // The guideline status should be approved. - expect(ctrl.guidelineData.status).toEqual('approved'); - expect(ctrl.schemaVersion).toEqual('1.2'); - }); - - it('should have a method that creates an object containing each ' + - 'relevant capability and its highest priority status', - function () { - ctrl.guidelineData = { - 'schema': '1.3', - 'platform': {'required': ['compute', 'object']}, - 'components': { - 'compute': { - 'required': ['cap_id_1'], - 'advisory': ['cap_id_2'], - 'deprecated': ['cap_id_3'], - 'removed': [] - }, - 'object': { - 'required': ['cap_id_2'], - 'advisory': ['cap_id_1', 'cap_id_3'], - 'deprecated': [], - 'removed': [] - } - } - }; - var expected = { - 'cap_id_1': 'required', - 'cap_id_2': 'required', - 'cap_id_3': 'advisory' - }; - expect(ctrl.getTargetCapabilities()).toEqual(expected); - }); - - it('should be able create an object containing each relevant' + - 'capability and its highest priority status for schema 2.0', - function () { - ctrl.schemaVersion = '2.0'; - ctrl.guidelineData = { - 'metadata': { - 'id': '2017.08', - 'schema': '2.0', - 'scoring': {}, - 'os_trademark_approval': { - 'target_approval': '2017.08', - 'replaces': '2017.01', - 'releases': ['newton', 'ocata', 'pike'], - 'status': 'approved' - } - }, - 'platforms': { - 'OpenStack Powered Platform': { - 'description': 'foo bar', - 'components': [ - { 'name': 'os_powered_compute' }, - { 'name': 'os_powered_storage' } - ] - } - }, - 'components': { - 'os_powered_compute': { - 'capabilities': { - 'required': ['cap_id_1'], - 'advisory': ['cap_id_2'], - 'deprecated': ['cap_id_3'], - 'removed': ['cap_id_4'] - } - }, - 'os_powered_storage': { - 'capabilities': { - 'required': ['cap_id_5'], - 'advisory': ['cap_id_6'], - 'deprecated': ['cap_id_7'], - 'removed': ['cap_id_8'] - } - } - } - }; - var expected = { - 'cap_id_1': 'required', - 'cap_id_2': 'advisory', - 'cap_id_3': 'deprecated', - 'cap_id_4': 'removed', - 'cap_id_5': 'required', - 'cap_id_6': 'advisory', - 'cap_id_7': 'deprecated', - 'cap_id_8': 'removed' - }; - expect(ctrl.getTargetCapabilities()).toEqual(expected); - }); - - it('should be able to sort the results into a capability object for ' + - 'schema version 1.2', - function () { - ctrl.resultsData = fakeResultResponse; - ctrl.guidelineData = fakeCapabilityResponse; - ctrl.schemaVersion = '1.2'; - ctrl.buildCapabilitiesObject(); - var expectedCapsObject = { - 'required': { - 'caps': [{ - 'id': 'cap_id_1', - 'passedTests': ['test_id_1'], - 'notPassedTests': ['test_id_2'], - 'passedFlagged': ['test_id_1'], - 'notPassedFlagged': [] - }], - 'count': 2, 'passedCount': 1, - 'flagFailCount': 0, 'flagPassCount': 1 - }, - 'advisory': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'deprecated': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'removed': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0} - }; - 
expect(ctrl.caps).toEqual(expectedCapsObject); - expect(ctrl.requiredPassPercent).toEqual(50); - expect(ctrl.nonFlagPassCount).toEqual(0); - }); - - it('should be able to sort the results into a capability object for ' + - 'schema version 1.3 and above', - function () { - ctrl.resultsData = {'results': ['test_id_1', - 'old_test_id_3', - 'test_id_4'] - }; - ctrl.guidelineData = { - 'platform': {'required': ['compute']}, - 'schema': '1.4', - 'components': { - 'compute': { - 'required': ['cap_id_1'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - } - }, - 'capabilities': { - 'cap_id_1': { - 'tests': { - 'test_id_1': { - 'flagged': { - 'action': 'foo', - 'date': '2015-03-24', - 'reason': 'bar' - }, - 'idempotent_id': 'id-1234' - }, - 'test_id_2': { - 'idempotent_id': 'id-5678' - }, - 'test_id_3': { - 'idempotent_id': 'id-5679', - 'aliases': ['old_test_id_3'] - }, - 'test_id_4': { - 'idempotent_id': 'id-5680' - } - } - } - } - }; - ctrl.schemaVersion = '1.4'; - ctrl.buildCapabilitiesObject(); - var expectedCapsObject = { - 'required': { - 'caps': [{ - 'id': 'cap_id_1', - 'passedTests': ['test_id_1', - 'test_id_3', - 'test_id_4'], - 'notPassedTests': ['test_id_2'], - 'passedFlagged': ['test_id_1'], - 'notPassedFlagged': [] - }], - 'count': 4, 'passedCount': 3, - 'flagFailCount': 0, 'flagPassCount': 1 - }, - 'advisory': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'deprecated': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0}, - 'removed': {'caps': [], 'count': 0, 'passedCount': 0, - 'flagFailCount': 0, 'flagPassCount': 0} - }; - expect(ctrl.caps).toEqual(expectedCapsObject); - expect(ctrl.requiredPassPercent).toEqual(75); - expect(ctrl.nonFlagPassCount).toEqual(2); - - // Test case where a component capability isn't listed in - // the capabilities object. - ctrl.guidelineData.components.compute.removed = ['fake_cap']; - ctrl.buildCapabilitiesObject(); - expectedCapsObject.removed.caps = [{ - 'id': 'fake_cap', - 'passedTests': [], - 'notPassedTests': [], - 'passedFlagged': [], - 'notPassedFlagged': [] - }]; - expect(ctrl.caps).toEqual(expectedCapsObject); - }); - - it('should have a method to determine if a test is flagged', - function () { - var capObj = {'flagged': [ 'test1'], - 'tests': ['test1', 'test2']}; - - ctrl.schemaVersion = '1.2'; - expect(ctrl.isTestFlagged('test1', capObj)).toEqual(true); - expect(ctrl.isTestFlagged('test2', capObj)).toEqual(false); - - capObj = { - 'tests': { - 'test1': { - 'flagged': { - 'action': 'foo', - 'date': '2015-03-24', - 'reason': 'bar' - }, - 'idempotent_id': 'id-1234' - }, - 'test2': { - 'idempotent_id': 'id-5678' - } - } - }; - - ctrl.schemaVersion = '1.3'; - expect(ctrl.isTestFlagged('test1', capObj)).toBeTruthy(); - expect(ctrl.isTestFlagged('test2', capObj)).toBeFalsy(); - - expect(ctrl.isTestFlagged('test2', null)).toEqual(false); - }); - - it('should have a method to get the reason a flagged test is flagged', - function () { - var capObj = {'flagged': [ 'test1'], - 'tests': ['test1', 'test2']}; - - ctrl.schemaVersion = '1.2'; - expect(ctrl.getFlaggedReason('test1', capObj)).toEqual( - 'Interop Working Group has flagged this test.'); - - // Check that non-flagged test returns empty string. 
- expect(ctrl.getFlaggedReason('test2', capObj)).toEqual(''); - - capObj = { - 'tests': { - 'test1': { - 'flagged': { - 'action': 'foo', - 'date': '2015-03-24', - 'reason': 'bar' - }, - 'idempotent_id': 'id-1234' - } - } - }; - - ctrl.schemaVersion = '1.3'; - expect(ctrl.getFlaggedReason('test1', capObj)).toEqual('bar'); - }); - - it('should have a method to determine whether a capability should ' + - 'be shown', - function () { - var caps = [{'id': 'cap_id_1', - 'passedTests': ['test_id_1'], - 'notPassedTests': [], - 'passedFlagged': ['test_id_1'], - 'notPassedFlagged': [] - }, - {'id': 'cap_id_2', - 'passedTests': [], - 'notPassedTests': ['test_id_4'], - 'passedFlagged': [], - 'notPassedFlagged': [] - }]; - - // Check that all capabilities are shown by default. - expect(ctrl.isCapabilityShown(caps[0])).toEqual(true); - expect(ctrl.isCapabilityShown(caps[1])).toEqual(true); - - // Check that only capabilities with passed tests are shown. - ctrl.testStatus = 'passed'; - expect(ctrl.isCapabilityShown(caps[0])).toEqual(true); - expect(ctrl.isCapabilityShown(caps[1])).toEqual(false); - - // Check that only capabilities with passed tests are shown. - ctrl.testStatus = 'not passed'; - expect(ctrl.isCapabilityShown(caps[0])).toEqual(false); - expect(ctrl.isCapabilityShown(caps[1])).toEqual(true); - - // Check that only capabilities with flagged tests are shown. - ctrl.testStatus = 'flagged'; - expect(ctrl.isCapabilityShown(caps[0])).toEqual(true); - expect(ctrl.isCapabilityShown(caps[1])).toEqual(false); - }); - - it('should have a method to determine whether a test should be shown', - function () { - var cap = {'id': 'cap_id_1', - 'passedTests': ['test_id_1'], - 'notPassedTests': [], - 'passedFlagged': ['test_id_1'], - 'notPassedFlagged': [] - }; - - expect(ctrl.isTestShown('test_id_1', cap)).toEqual(true); - ctrl.testStatus = 'passed'; - expect(ctrl.isTestShown('test_id_1', cap)).toEqual(true); - ctrl.testStatus = 'not passed'; - expect(ctrl.isTestShown('test_id_1', cap)).toEqual(false); - ctrl.testStatus = 'flagged'; - expect(ctrl.isTestShown('test_id_1', cap)).toEqual(true); - }); - - it('should have a method to determine how many tests in a ' + - 'capability belong under the current test filter', - function () { - var cap = {'id': 'cap_id_1', - 'passedTests': ['t1', 't2', 't3'], - 'notPassedTests': ['t4', 't5', 't6', 't7'], - 'passedFlagged': ['t1'], - 'notPassedFlagged': ['t3', 't4'] - }; - - // Should return the count of all tests. - expect(ctrl.getCapabilityTestCount(cap)).toEqual(7); - - // Should return the count of passed tests. - ctrl.testStatus = 'passed'; - expect(ctrl.getCapabilityTestCount(cap)).toEqual(3); - - // Should return the count of failed tests. - ctrl.testStatus = 'not passed'; - expect(ctrl.getCapabilityTestCount(cap)).toEqual(4); - - // Should return the count of flagged tests. - ctrl.testStatus = 'flagged'; - expect(ctrl.getCapabilityTestCount(cap)).toEqual(3); - }); - - it('should have a method to determine how many tests in a status ' + - 'belong under the current test filter', - function () { - ctrl.caps = {'required': {'caps': [], 'count': 10, - 'passedCount': 6, 'flagFailCount': 3, - 'flagPassCount': 2}}; - - // Should return the count of all tests (count). - expect(ctrl.getStatusTestCount('required')).toEqual(10); - - // Should return the count of passed tests (passedCount). - ctrl.testStatus = 'passed'; - expect(ctrl.getStatusTestCount('required')).toEqual(6); - - // Should return the count of failed tests - // (count - passedCount). 
- ctrl.testStatus = 'not passed'; - expect(ctrl.getStatusTestCount('required')).toEqual(4); - - // Should return the count of flagged tests - // (flagFailCount + flagPassCount). - ctrl.testStatus = 'flagged'; - expect(ctrl.getStatusTestCount('required')).toEqual(5); - - // Test when caps has not been set yet. - ctrl.caps = null; - expect(ctrl.getStatusTestCount('required')).toEqual(-1); - }); - - it('should have a method to update the verification status of a test', - function () { - $httpBackend.expectGET(fakeApiUrl + - '/guidelines').respond(200, { - 'powered': [ - {'name': '2015.03.json', 'file': '2015.03.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'} - ] - }); - $httpBackend.expectGET(fakeApiUrl + - '/guidelines/2015.03.json').respond(fakeCapabilityResponse); - $httpBackend.flush(); - ctrl.isVerified = 1; - $httpBackend.expectPUT(fakeApiUrl + '/results/1234', - {'verification_status': ctrl.isVerified}).respond(204, ''); - $httpBackend.when('GET', /\.html$/).respond(200); - ctrl.updateVerificationStatus(); - $httpBackend.flush(); - expect(ctrl.resultsData.verification_status).toEqual(1); - - }); - - it('should have a method to open a modal for the full passed test list', - function () { - var modal; - inject(function ($uibModal) { - modal = $uibModal; - }); - spyOn(modal, 'open'); - ctrl.openFullTestListModal(); - expect(modal.open).toHaveBeenCalled(); - }); - - it('should have a method to open a modal for editing test metadata', - function () { - var modal; - inject(function ($uibModal) { - modal = $uibModal; - }); - spyOn(modal, 'open'); - ctrl.openEditTestModal(); - expect(modal.open).toHaveBeenCalled(); - - }); - }); - - describe('FullTestListModalController', function () { - var modalInstance, ctrl; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - ctrl = $controller('FullTestListModalController', - {$uibModalInstance: modalInstance, tests: ['t1', 't2'], - gl_type: 'powered'} - ); - })); - - it('should set a scope variable to the passed in tests', function () { - expect(ctrl.tests).toEqual(['t1', 't2']); - }); - - it('should have a method to close the modal', - function () { - ctrl.close(); - expect(modalInstance.dismiss).toHaveBeenCalledWith('exit'); - }); - - it('should have a method to convert the tests to a string', - function () { - ctrl.tests = ['t2', 't1', 't3']; - var expectedString = 't1\nt2\nt3'; - expect(ctrl.getTestListString()).toEqual(expectedString); - }); - }); - - describe('EditTestModalController', function () { - var modalInstance, ctrl, state; - var fakeResultsData = { - 'results': ['test_id_1'], - 'id': 'some-id', - 'meta': { - 'public_key': 'ssh-rsa', 'guideline': '2015.04.json', - 'target': 'object' - } - }; - var fake_gl_type = 'powered'; - var fakeVersionResp = [{'id': 'ver1', 'version': '1.0'}, - {'id': 'ver2', 'version': null}]; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - state = { - reload: jasmine.createSpy('state.reload') - }; - ctrl = $controller('EditTestModalController', - {$uibModalInstance: modalInstance, $state: state, - resultsData: fakeResultsData, gl_type: fake_gl_type} - ); - $httpBackend.when('GET', fakeApiUrl + - '/guidelines').respond({ - 'powered': [ - {'name': '2015.03.json', 'file': '2015.03.json'}, - {'name': '2015.04.json', 'file': '2015.04.json'} - ] - }); - - $httpBackend.when('GET', fakeApiUrl + '/products') - .respond(200, fakeResultsData); - 
$httpBackend.when('GET', fakeApiUrl + - '/products/1234/versions').respond(fakeVersionResp); - })); - - it('should be able to get product versions', function () { - ctrl.selectedProduct = {'id': '1234'}; - ctrl.products = null; - $httpBackend.expectGET(fakeApiUrl + '/products/1234/versions') - .respond(200, fakeVersionResp); - ctrl.getProductVersions(); - $httpBackend.flush(); - expect(ctrl.productVersions).toEqual(fakeVersionResp); - var expected = {'id': 'ver2', 'version': null}; - expect(ctrl.selectedVersion).toEqual(expected); - }); - - it('should have a method to save all changes made.', function () { - ctrl.metaCopy.target = 'platform'; - ctrl.metaCopy.shared = 'true'; - ctrl.selectedVersion = {'id': 'ver2', 'version': null}; - ctrl.saveChanges(); - // Only meta changed should send a POST request. - $httpBackend.expectPOST( - fakeApiUrl + '/results/some-id/meta/target', - 'platform') - .respond(201, ''); - $httpBackend.expectPOST( - fakeApiUrl + '/results/some-id/meta/shared', - 'true') - .respond(201, ''); - $httpBackend.expectPUT(fakeApiUrl + '/results/some-id', - {'product_version_id': 'ver2'}) - .respond(201); - $httpBackend.flush(); - }); - }); - - describe('TestRaiseAlertModalController', function() { - var data, modalInstance, ctrl; - - data = { - mode: 'success', - title: '', - text: 'operation successful' - }; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss'), - close: jasmine.createSpy('modalInstance.close') - }; - ctrl = $controller('RaiseAlertModalController', - {$uibModalInstance: modalInstance, data: data} - ); - })); - - it('should close', - function () { - ctrl.close(); - expect(modalInstance.close).toHaveBeenCalledWith(); - }); - }); - - describe('TestCustomConfirmModalController', function() { - var data, someFunc, modalInstance, ctrl; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss'), - close: jasmine.createSpy('modalInstance.close') - }; - - someFunc = jasmine.createSpy('someFunc'); - data = { - text: 'Some input', - successHandler: someFunc - }; - - ctrl = $controller('CustomConfirmModalController', - {$uibModalInstance: modalInstance, data: data} - ); - })); - - it('should have a function to confirm', - function () { - ctrl.inputText = 'foo'; - ctrl.confirm(); - expect(someFunc).toHaveBeenCalledWith('foo'); - }); - - it('should have a function to dismiss the modal', - function () { - ctrl.cancel(); - expect(modalInstance.dismiss).toHaveBeenCalledWith('cancel'); - }); - }); - - describe('AuthFailureController', function() { - var $location, ctrl; - - beforeEach(inject(function ($controller, _$location_) { - $location = _$location_; - $location.url('/auth_failure?message=some_error_message'); - ctrl = $controller('AuthFailureController', {}); - })); - - it('should set the authentication failure url based on error message', - function () { - expect($location.url()).toBe('/auth_failure?message=' + - 'some_error_message'); - expect(ctrl.message).toBe('some_error_message'); - }); - }); - - describe('VendorController', function() { - var rootScope, scope, stateParams, ctrl; - var confirmModal = jasmine.createSpy('confirmModal'); - var fakeResp = {'id': 'fake-id', 'type': 1, - 'can_manage': true, 'properties' : {}}; - var fakeUsersResp = [{'openid': 'foo'}]; - var fakeProdResp = {'products': [{'id': 123}]}; - var fakeWindow = { - location: { - href: '' - } - }; - - beforeEach(inject(function ($controller, $rootScope) { - scope = 
$rootScope.$new(); - rootScope = $rootScope.$new(); - rootScope.auth = {'currentUser' : {'is_admin': false, - 'openid': 'foo'} - }; - stateParams = {vendorID: 1234}; - ctrl = $controller('VendorController', - {$rootScope: rootScope, $scope: scope, - $stateParams: stateParams, $window: fakeWindow, - confirmModal: confirmModal} - ); - - $httpBackend.when('GET', fakeApiUrl + - '/vendors/1234').respond(fakeResp); - $httpBackend.when('GET', fakeApiUrl + - '/products?organization_id=1234').respond(fakeProdResp); - $httpBackend.when('GET', fakeApiUrl + - '/vendors/1234/users').respond(fakeUsersResp); - })); - - it('should have a function to get vendor info from API', - function () { - ctrl.getVendor(); - $httpBackend.flush(); - expect(ctrl.vendor.id).toEqual('fake-id'); - expect(ctrl.vendor.can_manage).toEqual(true); - expect(ctrl.vendor.canDelete).toEqual(true); - expect(ctrl.vendor.canRegister).toEqual(true); - expect(ctrl.vendor.canApprove).toEqual(false); - }); - - it('should have a function to get vendor users', - function () { - ctrl.getVendorUsers(); - $httpBackend.flush(); - expect(ctrl.vendorUsers).toEqual(fakeUsersResp); - expect(ctrl.currentUser).toEqual('foo'); - }); - - it('should have a function to get vendor products', - function () { - ctrl.vendorProducts = null; - ctrl.getVendorProducts(); - $httpBackend.flush(); - expect(ctrl.vendorProducts).toEqual(fakeProdResp.products); - }); - - it('should have a function to register a vendor', - function () { - $httpBackend.expectPOST( - fakeApiUrl + '/vendors/1234/action', - {'register': null}) - .respond(201, ''); - ctrl.registerVendor(); - $httpBackend.flush(); - }); - - it('should have a function to approve a vendor', - function () { - $httpBackend.expectPOST( - fakeApiUrl + '/vendors/1234/action', - {'approve': null}) - .respond(201, ''); - ctrl.approveVendor(); - $httpBackend.flush(); - }); - - it('a confirmation modal should come up when declining a vendor', - function () { - ctrl.declineVendor(); - expect(confirmModal).toHaveBeenCalled(); - }); - - it('should have a function to delete a vendor', - function () { - $httpBackend.expectDELETE( - fakeApiUrl + '/vendors/1234').respond(202, ''); - ctrl.deleteVendor(); - $httpBackend.flush(); - expect(fakeWindow.location.href).toEqual('/'); - }); - - it('should have a function to remove a user from a vendor', - function () { - var fakeId = 'fake-id'; - $httpBackend.expectDELETE( - fakeApiUrl + '/vendors/1234/users/' + btoa(fakeId)) - .respond(202, ''); - ctrl.removeUserFromVendor(fakeId); - $httpBackend.flush(); - }); - - it('should have a function to add a user to a vendor', - function () { - var fakeId = 'fake-id'; - $httpBackend.expectPUT( - fakeApiUrl + '/vendors/1234/users/' + btoa(fakeId)) - .respond(204, ''); - ctrl.addUserToVendor(fakeId); - $httpBackend.flush(); - }); - }); - - describe('VendorEditModalController', function() { - var rootScope, ctrl, modalInstance, state; - var fakeVendor = {'name': 'Foo', 'description': 'Bar', 'id': '1234', - 'properties': {'key1': 'value1', 'key2': 'value2'}}; - - beforeEach(inject(function ($controller, $rootScope) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - state = { - reload: jasmine.createSpy('state.reload') - }; - rootScope = $rootScope.$new(); - rootScope.auth = {'currentUser' : {'is_admin': true, - 'openid': 'foo'} - }; - ctrl = $controller('VendorEditModalController', - {$rootScope: rootScope, - $uibModalInstance: modalInstance, $state: state, - vendor: fakeVendor} - ); - - })); - - it('should 
be able to add/remove properties', - function () { - var expected = [{'key': 'key1', 'value': 'value1'}, - {'key': 'key2', 'value': 'value2'}]; - expect(ctrl.vendorProperties).toEqual(expected); - ctrl.removeProperty(0); - expected = [{'key': 'key2', 'value': 'value2'}]; - expect(ctrl.vendorProperties).toEqual(expected); - ctrl.addField(); - expected = [{'key': 'key2', 'value': 'value2'}, - {'key': '', 'value': ''}]; - expect(ctrl.vendorProperties).toEqual(expected); - }); - - it('should have a function to save changes', - function () { - var expectedContent = { - 'name': 'Foo1', 'description': 'Bar', - 'properties': {'key1': 'value1', 'key2': 'value2'} - }; - $httpBackend.expectPUT( - fakeApiUrl + '/vendors/1234', expectedContent) - .respond(200, ''); - ctrl.vendor.name = 'Foo1'; - ctrl.saveChanges(); - $httpBackend.flush(); - }); - - it('should have a function to exit the modal', - function () { - ctrl.close(); - expect(modalInstance.dismiss).toHaveBeenCalledWith('exit'); - }); - }); - - describe('VendorsController', function () { - var rootScope, scope, ctrl; - var fakeResp = {'vendors': [{'can_manage': true, - 'type': 3, - 'name': 'Foo'}, - {'can_manage': true, - 'type': 3, - 'name': 'Bar'}]}; - beforeEach(inject(function ($controller, $rootScope) { - scope = $rootScope.$new(); - rootScope = $rootScope.$new(); - rootScope.auth = {'currentUser' : {'is_admin': false, - 'openid': 'foo'} - }; - ctrl = $controller('VendorsController', - {$rootScope: rootScope, $scope: scope} - ); - $httpBackend.when('GET', fakeApiUrl + - '/vendors').respond(fakeResp); - })); - - it('should have a function to get a listing of all vendors', - function () { - $httpBackend.expectGET(fakeApiUrl + '/vendors') - .respond(fakeResp); - ctrl.update(); - $httpBackend.flush(); - expect(ctrl.rawData).toEqual(fakeResp); - }); - - it('should have a function to update/sort data based on settings', - function () { - ctrl.rawData = fakeResp; - ctrl.updateData(); - var expectedResponse = {'vendors': [{'can_manage': true, - 'type': 3, - 'name' : 'Bar'}, - {'can_manage': true, - 'type': 3, - 'name': 'Foo'}]}; - expect(ctrl.data).toEqual(expectedResponse); - }); - - it('should have a function to determine if a vendor should be shown', - function () { - var fakeVendor = {'type': 0, 'can_manage': false}; - expect(ctrl._filterVendor(fakeVendor)).toEqual(true); - ctrl.isUserVendors = true; - expect(ctrl._filterVendor(fakeVendor)).toEqual(false); - ctrl.isUserVendors = false; - rootScope.auth.currentUser.is_admin = true; - expect(ctrl._filterVendor(fakeVendor)).toEqual(true); - }); - - it('should have a function to add a new vendor', - function () { - ctrl.name = 'New Vendor'; - ctrl.description = 'A description'; - $httpBackend.expectPOST( - fakeApiUrl + '/vendors', - {name: ctrl.name, description: ctrl.description}) - .respond(200, fakeResp); - ctrl.addVendor(); - $httpBackend.flush(); - }); - }); - - describe('ProductsController', function() { - var rootScope, scope, ctrl; - var vendResp = {'vendors': [{'can_manage': true, - 'type': 3, - 'name': 'Foo', - 'id': '123'}]}; - var prodResp = {'products': [{'id': 'abc', - 'product_type': 1, - 'public': 1, - 'name': 'Foo Product', - 'organization_id': '123'}]}; - - beforeEach(inject(function ($controller, $rootScope) { - scope = $rootScope.$new(); - rootScope = $rootScope.$new(); - rootScope.auth = {'currentUser' : {'is_admin': false, - 'openid': 'foo'} - }; - ctrl = $controller('ProductsController', - {$rootScope: rootScope, $scope: scope} - ); - $httpBackend.when('GET', fakeApiUrl + - 
'/vendors').respond(vendResp); - $httpBackend.when('GET', fakeApiUrl + - '/products').respond(prodResp); - })); - - it('should have a function to get/update vendors', - function () { - $httpBackend.flush(); - var newVendResp = {'vendors': [{'name': 'Foo', - 'id': '123', - 'can_manage': true}, - {'name': 'Bar', - 'id': '345', - 'can_manage': false}]}; - $httpBackend.expectGET(fakeApiUrl + '/vendors') - .respond(200, newVendResp); - ctrl.updateVendors(); - $httpBackend.flush(); - expect(ctrl.allVendors).toEqual({'123': {'name': 'Foo', - 'id': '123', - 'can_manage': true}, - '345': {'name': 'Bar', - 'id': '345', - 'can_manage': false}}); - expect(ctrl.vendors).toEqual([{'name': 'Foo', - 'id': '123', - 'can_manage': true}]); - }); - - it('should have a function to get products', - function () { - $httpBackend.expectGET(fakeApiUrl + '/products') - .respond(200, prodResp); - ctrl.update(); - $httpBackend.flush(); - expect(ctrl.rawData).toEqual(prodResp); - }); - - it('should have a function to update the view', - function () { - $httpBackend.flush(); - ctrl.allVendors = {'123': {'name': 'Foo', - 'id': '123', - 'can_manage': true}}; - ctrl.updateData(); - var expectedData = {'products': [{'id': 'abc', - 'product_type': 1, - 'public': 1, - 'name': 'Foo Product', - 'organization_id': '123'}]}; - expect(ctrl.data).toEqual(expectedData); - }); - - it('should have a function to map product types with descriptions', - function () { - expect(ctrl.getProductTypeDescription(0)).toEqual('Distro'); - expect(ctrl.getProductTypeDescription(1)) - .toEqual('Public Cloud'); - expect(ctrl.getProductTypeDescription(2)) - .toEqual('Hosted Private Cloud'); - expect(ctrl.getProductTypeDescription(5)).toEqual('Unknown'); - }); - }); - - describe('ProductController', function() { - var rootScope, scope, stateParams, ctrl; - var fakeProdResp = {'product_type': 1, - 'product_ref_id': null, - 'name': 'Good Stuff', - 'created_at': '2016-01-01 01:02:03', - 'updated_at': '2016-06-15 01:02:04', - 'properties': null, - 'organization_id': 'fake-org-id', - 'public': true, - 'can_manage': true, - 'created_by_user': 'fake-open-id', - 'type': 0, - 'id': '1234', - 'description': 'some description'}; - var fakeVersionResp = [{'id': 'asdf', - 'cpid': null, - 'version': '1.0', - 'product_id': '1234'}]; - var fakeTestsResp = {'pagination': {'current_page': 1, - 'total_pages': 1}, - 'results':[{'id': 'foo-test'}]}; - var fakeVendorResp = {'id': 'fake-org-id', - 'type': 3, - 'can_manage': true, - 'properties' : {}, - 'name': 'Foo Vendor', - 'description': 'foo bar'}; - var fakeWindow = { - location: { - href: '' - } - }; - - beforeEach(inject(function ($controller, $rootScope) { - scope = $rootScope.$new(); - rootScope = $rootScope.$new(); - stateParams = {id: 1234}; - rootScope.auth = {'currentUser' : {'is_admin': false, - 'openid': 'foo'} - }; - ctrl = $controller('ProductController', - {$rootScope: rootScope, $scope: scope, - $stateParams: stateParams, $window: fakeWindow} - ); - $httpBackend.when('GET', fakeApiUrl + - '/products/1234').respond(fakeProdResp); - $httpBackend.when('GET', fakeApiUrl + - '/products/1234/versions').respond(fakeVersionResp); - $httpBackend.when('GET', fakeApiUrl + - '/results?page=1&product_id=1234').respond(fakeTestsResp); - $httpBackend.when('GET', fakeApiUrl + - '/vendors/fake-org-id').respond(fakeVendorResp); - })); - - it('should have a function to get product information', - function () { - $httpBackend.expectGET(fakeApiUrl + '/products/1234') - .respond(200, fakeProdResp); - 
$httpBackend.expectGET(fakeApiUrl + '/vendors/fake-org-id') - .respond(200, fakeVendorResp); - ctrl.getProduct(); - $httpBackend.flush(); - expect(ctrl.product).toEqual(fakeProdResp); - expect(ctrl.vendor).toEqual(fakeVendorResp); - }); - - it('should have a function to get a list of product versions', - function () { - $httpBackend - .expectGET(fakeApiUrl + '/products/1234/versions') - .respond(200, fakeVersionResp); - ctrl.getProductVersions(); - $httpBackend.flush(); - expect(ctrl.productVersions).toEqual(fakeVersionResp); - }); - - it('should have a function to delete a product', - function () { - $httpBackend.expectDELETE(fakeApiUrl + '/products/1234') - .respond(202, ''); - ctrl.deleteProduct(); - $httpBackend.flush(); - expect(fakeWindow.location.href).toEqual('/'); - }); - - it('should have a function to delete a product version', - function () { - $httpBackend - .expectDELETE(fakeApiUrl + '/products/1234/versions/abc') - .respond(204, ''); - ctrl.deleteProductVersion('abc'); - $httpBackend.flush(); - }); - - it('should have a function to add a product version', - function () { - ctrl.newProductVersion = 'abc'; - $httpBackend.expectPOST( - fakeApiUrl + '/products/1234/versions', - {version: 'abc'}) - .respond(200, {'id': 'foo'}); - ctrl.addProductVersion(); - $httpBackend.flush(); - }); - - it('should have a function to get tests on a product', - function () { - ctrl.getProductTests(); - $httpBackend.flush(); - expect(ctrl.testsData).toEqual(fakeTestsResp.results); - expect(ctrl.currentPage).toEqual(1); - }); - - it('should have a function to unassociate a test from a product', - function () { - ctrl.testsData = [{'id': 'foo-test'}]; - $httpBackend.expectPUT( - fakeApiUrl + '/results/foo-test', - {product_version_id: null}) - .respond(200, {'id': 'foo-test'}); - ctrl.unassociateTest(0); - $httpBackend.flush(); - expect(ctrl.testsData).toEqual([]); - }); - - it('should have a function to switch the publicity of a project', - function () { - ctrl.product = {'public': true}; - $httpBackend.expectPUT(fakeApiUrl + '/products/1234', - {'public': false}) - .respond(200, fakeProdResp); - ctrl.switchProductPublicity(); - $httpBackend.flush(); - }); - - it('should have a method to open a modal for version management', - function () { - var modal; - inject(function ($uibModal) { - modal = $uibModal; - }); - spyOn(modal, 'open'); - ctrl.openVersionModal(); - expect(modal.open).toHaveBeenCalled(); - }); - - it('should have a method to open a modal for product editing', - function () { - var modal; - inject(function ($uibModal) { - modal = $uibModal; - }); - spyOn(modal, 'open'); - ctrl.openProductEditModal(); - expect(modal.open).toHaveBeenCalled(); - }); - }); - - describe('ProductVersionModalController', function() { - - var ctrl, modalInstance, state, parent; - var fakeVersion = {'id': 'asdf', 'cpid': null, - 'version': '1.0','product_id': '1234'}; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - parent = { - deleteProductVersion: jasmine.createSpy('deleteProductVersion') - }; - ctrl = $controller('ProductVersionModalController', - {$uibModalInstance: modalInstance, $state: state, - version: fakeVersion, parent: parent} - ); - })); - - it('should have a function to prompt a version deletion', - function () { - ctrl.deleteProductVersion(); - expect(parent.deleteProductVersion) - .toHaveBeenCalledWith('asdf'); - expect(modalInstance.dismiss).toHaveBeenCalledWith('exit'); - }); - - it('should have a function to save 
changes', - function () { - ctrl.version.cpid = 'some-cpid'; - var expectedContent = { 'cpid': 'some-cpid'}; - $httpBackend.expectPUT( - fakeApiUrl + '/products/1234/versions/asdf', - expectedContent).respond(200, ''); - ctrl.saveChanges(); - $httpBackend.flush(); - }); - }); - - describe('ProductEditModalController', function() { - var ctrl, modalInstance, state; - var fakeProduct = {'name': 'Foo', 'description': 'Bar', 'id': '1234', - 'properties': {'key1': 'value1'}}; - var fakeVersion = {'version': null, 'product_id': '1234', - 'cpid': null, 'id': 'asdf'}; - - beforeEach(inject(function ($controller) { - modalInstance = { - dismiss: jasmine.createSpy('modalInstance.dismiss') - }; - state = { - reload: jasmine.createSpy('state.reload') - }; - ctrl = $controller('ProductEditModalController', - {$uibModalInstance: modalInstance, $state: state, - product: fakeProduct, - version: fakeVersion} - ); - })); - - it('should be able to add/remove properties', - function () { - var expected = [{'key': 'key1', 'value': 'value1'}]; - expect(ctrl.productProperties).toEqual(expected); - ctrl.removeProperty(0); - expect(ctrl.productProperties).toEqual([]); - ctrl.addField(); - expected = [{'key': '', 'value': ''}]; - expect(ctrl.productProperties).toEqual(expected); - }); - - it('should have a function to save changes', - function () { - var expectedContent = { - 'name': 'Foo1', 'description': 'Bar', - 'properties': {'key1': 'value1'} - }; - var verContent = {'cpid': 'abc'}; - $httpBackend.expectPUT( - fakeApiUrl + '/products/1234', expectedContent) - .respond(200, ''); - $httpBackend.expectPUT( - fakeApiUrl + '/products/1234/versions/asdf', verContent) - .respond(200, ''); - ctrl.productVersion.cpid = 'abc'; - ctrl.product.name = 'Foo1'; - ctrl.saveChanges(); - $httpBackend.flush(); - }); - - it('should have a function to exit the modal', - function () { - ctrl.close(); - expect(modalInstance.dismiss).toHaveBeenCalledWith('exit'); - }); - }); -}); diff --git a/refstack-ui/tests/unit/FilterSpec.js b/refstack-ui/tests/unit/FilterSpec.js deleted file mode 100644 index b88a08ec..00000000 --- a/refstack-ui/tests/unit/FilterSpec.js +++ /dev/null @@ -1,42 +0,0 @@ -/** Jasmine specs for Refstack filters */ -describe('Refstack filters', function () { - 'use strict'; - - var fakeApiUrl = 'http://foo.bar/v1'; - beforeEach(function () { - module(function ($provide) { - $provide.constant('refstackApiUrl', fakeApiUrl); - }); - module('refstackApp'); - }); - - describe('Filter: arrayConverter', function () { - var $filter; - beforeEach(inject(function (_$filter_) { - $filter = _$filter_('arrayConverter'); - })); - - it('should convert dict to array of dict values', function () { - var object = {'id1': {'key1': 'value1'}, 'id2': {'key2': 'value2'}}; - var expected = [{'key1': 'value1', 'id': 'id1'}, - {'key2': 'value2', 'id': 'id2'}]; - expect($filter(object)).toEqual(expected); - }); - }); - - describe('Filter: capitalize', function() { - var $filter; - beforeEach(inject(function(_$filter_) { - $filter = _$filter_('capitalize'); - })); - - it('should capitalize the first letter', function () { - var string1 = 'somestring'; - var string2 = 'someString'; - var string3 = 'SOMESTRING'; - expect($filter(string1)).toEqual('Somestring'); - expect($filter(string2)).toEqual('SomeString'); - expect($filter(string3)).toEqual(string3); - }); - }); -}); diff --git a/refstack/__init__.py b/refstack/__init__.py deleted file mode 100644 index c70611c0..00000000 --- a/refstack/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 
(c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Refstack package.""" diff --git a/refstack/api/__init__.py b/refstack/api/__init__.py deleted file mode 100644 index dbe30ac6..00000000 --- a/refstack/api/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Refstack API package.""" diff --git a/refstack/api/app.py b/refstack/api/app.py deleted file mode 100644 index 8b05f908..00000000 --- a/refstack/api/app.py +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""App factory.""" - -import json -import logging -import os - -from beaker.middleware import SessionMiddleware -from oslo_config import cfg -from oslo_log import log -import pecan -import webob - -from refstack.api import constants as const -from refstack.api import exceptions as api_exc -from refstack.api import utils as api_utils -from refstack import db - -LOG = log.getLogger(__name__) - -PROJECT_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), - os.pardir) -UI_OPTS = [ - cfg.StrOpt('ui_url', - default='https://refstack.openstack.org', - help='Url of user interface for RefStack. Need for redirects ' - 'after sign in and sign out.' - ), -] - -API_OPTS = [ - cfg.StrOpt('api_url', - default='https://refstack.openstack.org/api', - help='Url of public RefStack API.' - ), - cfg.StrOpt('static_root', - default='refstack-ui/app', - help='The directory where your static files can be found. ' - 'Pecan comes with middleware that can be used to serve ' - 'static files (like CSS and Javascript files) during ' - 'development. Here, a special variable %(project_root)s ' - 'can be used to point to the root directory of the ' - 'Refstack project\'s module, so paths can be specified ' - 'relative to that.' 
- ), - cfg.StrOpt('template_path', - default='refstack-ui/app', - help='Points to the directory where your template files live. ' - 'Here, a special variable %(project_root)s can be used to ' - 'point to the root directory of the Refstack project\'s ' - 'main module, so paths can be specified relative to that.' - ), - cfg.ListOpt('allowed_cors_origins', - default=[], - help='List of sites allowed cross-site resource access. If ' - 'this is empty, only same-origin requests are allowed.' - ), - cfg.BoolOpt('app_dev_mode', - default=False, - help='Switch Refstack app into debug mode. Helpful for ' - 'development. In debug mode static file will be served ' - 'by pecan application. Also, server responses will ' - 'contain some details with debug information.' - ), - cfg.StrOpt('test_results_url', - default='/#/results/%s', - help='Template for test result url.' - ), - cfg.StrOpt('opendev_api_capabilities_url', - default='https://opendev.org/api/v1/repos/openinfra/interop/' - 'contents/guidelines', - help='The GitHub API URL of the repository and location of the ' - 'Interop Working Group capability files. This URL is used ' - 'to get a listing of all capability files.' - ), - cfg.StrOpt('additional_capability_urls', - default='https://opendev.org/api/v1/repos/openinfra/interop/' - 'contents/add-ons/guidelines', - help=('The GitHub API URL of the repository and location of ' - 'any additional guideline sources which will need to ' - 'be parsed by the refstack API.')), - cfg.StrOpt('opendev_raw_base_url', - default='https://opendev.org/api/v1/repos/openinfra/interop/' - 'raw/', - help='This is the base URL that is used for retrieving ' - 'specific capability files. Capability file names will ' - 'be appended to this URL to get the contents of that file.' - ), - cfg.BoolOpt('enable_anonymous_upload', - default=True, - help='Enable or disable anonymous uploads. If set to False, ' - 'all clients will need to authenticate and sign with a ' - 'public/private keypair previously uploaded to their ' - 'user account.' 
- ) -] - -CONF = cfg.CONF - -opt_group = cfg.OptGroup(name='api', - title='Options for the Refstack API') - -CONF.register_opts(UI_OPTS) - -CONF.register_group(opt_group) -CONF.register_opts(API_OPTS, opt_group) - -log.register_options(CONF) - - -class JSONErrorHook(pecan.hooks.PecanHook): - """A pecan hook that translates webob HTTP errors into a JSON format.""" - - def __init__(self): - """Hook init.""" - self.debug = CONF.api.app_dev_mode - - def on_error(self, state, exc): - """Request error handler.""" - if isinstance(exc, webob.exc.HTTPRedirection): - return - elif isinstance(exc, webob.exc.HTTPError): - return webob.Response( - body=json.dumps({'code': exc.status_int, - 'title': exc.title, - 'detail': exc.detail}), - status=exc.status_int, - charset='UTF-8', - content_type='application/json' - ) - title = None - if isinstance(exc, api_exc.ValidationError): - status_code = 400 - elif isinstance(exc, api_exc.ParseInputsError): - status_code = 400 - elif isinstance(exc, db.NotFound): - status_code = 404 - elif isinstance(exc, db.Duplication): - status_code = 409 - else: - LOG.exception(exc) - status_code = 500 - title = 'Internal Server Error' - - body = {'title': title or exc.args[0], 'code': status_code} - if self.debug: - body['detail'] = str(exc) - return webob.Response( - body=json.dumps(body), - status=status_code, - charset='UTF-8', - content_type='application/json' - ) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level): - """Init the WritableLogger by getting logger and log level.""" - self.logger = logger - self.level = level - - def write(self, msg): - """Invoke logger with log level and message.""" - self.logger.log(self.level, msg.rstrip()) - - -class CORSHook(pecan.hooks.PecanHook): - """A pecan hook that handles Cross-Origin Resource Sharing.""" - - def __init__(self): - """Init the hook by getting the allowed origins.""" - self.allowed_origins = getattr(CONF.api, 'allowed_cors_origins', []) - - def after(self, state): - """Add CORS headers to the response. - - If the request's origin is in the list of allowed origins, add the - CORS headers to the response. 
- """ - origin = state.request.headers.get('Origin', None) - if origin in self.allowed_origins: - state.response.headers['Access-Control-Allow-Origin'] = origin - state.response.headers['Access-Control-Allow-Methods'] = \ - 'GET, OPTIONS, PUT, POST' - state.response.headers['Access-Control-Allow-Headers'] = \ - 'origin, authorization, accept, content-type' - state.response.headers['Access-Control-Allow-Credentials'] = 'true' - - -class JWTAuthHook(pecan.hooks.PecanHook): - """A pecan hook that handles authentication with JSON Web Tokens.""" - - def on_route(self, state): - """Check signature in request headers.""" - token = api_utils.decode_token(state.request) - if token: - state.request.environ[const.JWT_TOKEN_ENV] = token - - -def setup_app(config): - """App factory.""" - # By default we expect path to oslo config file in environment variable - # REFSTACK_OSLO_CONFIG (option for testing and development) - # If it is empty we look up those config files - # in the following directories: - # ~/.${project}/ - # ~/ - # /etc/${project}/ - # /etc/ - - default_config_files = ((os.getenv('REFSTACK_OSLO_CONFIG'), ) - if os.getenv('REFSTACK_OSLO_CONFIG') - else cfg.find_config_files('refstack')) - CONF('', - project='refstack', - default_config_files=default_config_files) - - log.setup(CONF, 'refstack') - CONF.log_opt_values(LOG, logging.DEBUG) - - template_path = CONF.api.template_path % {'project_root': PROJECT_ROOT} - static_root = CONF.api.static_root % {'project_root': PROJECT_ROOT} - app_conf = dict(config.app) - app = pecan.make_app( - app_conf.pop('root'), - debug=CONF.api.app_dev_mode, - static_root=static_root, - template_path=template_path, - hooks=[ - JWTAuthHook(), JSONErrorHook(), CORSHook(), - pecan.hooks.RequestViewerHook( - {'items': ['status', 'method', 'controller', 'path', 'body']}, - headers=False, writer=WritableLogger(LOG, logging.DEBUG) - ) - ] - ) - - beaker_conf = { - 'session.key': 'refstack', - 'session.type': 'ext:database', - 'session.url': CONF.database.connection, - 'session.timeout': 604800, - 'session.validate_key': api_utils.get_token(), - 'session.sa.pool_recycle': 600 - } - app = SessionMiddleware(app, beaker_conf) - - if CONF.api.app_dev_mode: - LOG.debug('\n\n <<< Refstack UI is available at %s >>>\n\n', - CONF.ui_url) - - return app diff --git a/refstack/api/app.wsgi b/refstack/api/app.wsgi deleted file mode 100644 index 683e71f5..00000000 --- a/refstack/api/app.wsgi +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from refstack.api import app -from refstack.api import config as api_config - -application = app.setup_app(api_config) \ No newline at end of file diff --git a/refstack/api/config.py b/refstack/api/config.py deleted file mode 100644 index a80655f1..00000000 --- a/refstack/api/config.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Configuration for running API. - -Custom Configurations must be in Python dictionary format: - -foo = {'bar':'baz'} - -All configurations are accessible at: -pecan.conf -""" - -# Server Specific Configurations -server = { - 'port': '8000', - 'host': '0.0.0.0', - 'protocol': 'http' -} - -# Pecan Application Configurations -app = { - 'root': 'refstack.api.controllers.root.RootController', - 'modules': ['refstack.api'], -} diff --git a/refstack/api/constants.py b/refstack/api/constants.py deleted file mode 100644 index 1ed32d97..00000000 --- a/refstack/api/constants.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Constants for Refstack API.""" - -# Names of input parameters for request -START_DATE = 'start_date' -END_DATE = 'end_date' -CPID = 'cpid' -PAGE = 'page' -SIGNED = 'signed' -VERIFICATION_STATUS = 'verification_status' -PRODUCT_ID = 'product_id' -ALL_PRODUCT_TESTS = 'all_product_tests' -OPENID = 'openid' -USER_PUBKEYS = 'pubkeys' - -# Guidelines tests requests parameters -ALIAS = 'alias' -FLAG = 'flag' -TYPE = 'type' -TARGET = 'target' - -# OpenID parameters -OPENID_MODE = 'openid.mode' -OPENID_NS = 'openid.ns' -OPENID_RETURN_TO = 'openid.return_to' -OPENID_CLAIMED_ID = 'openid.claimed_id' -OPENID_IDENTITY = 'openid.identity' -OPENID_REALM = 'openid.realm' -OPENID_NS_SREG = 'openid.ns.sreg' -OPENID_NS_SREG_REQUIRED = 'openid.sreg.required' -OPENID_NS_SREG_EMAIL = 'openid.sreg.email' -OPENID_NS_SREG_FULLNAME = 'openid.sreg.fullname' -OPENID_ERROR = 'openid.error' - -# User session parameters -CSRF_TOKEN = 'csrf_token' -USER_OPENID = 'user_openid' - -# Test metadata fields -USER = 'user' -SHARED_TEST_RUN = 'shared' - -# Test verification statuses -TEST_NOT_VERIFIED = 0 -TEST_VERIFIED = 1 - -# Roles -ROLE_USER = 'user' -ROLE_OWNER = 'owner' -ROLE_FOUNDATION = 'foundation' - -# Organization types. -# OpenStack Foundation -FOUNDATION = 0 -# User's private unofficial Vendor (allows creation and testing -# of user's products) -PRIVATE_VENDOR = 1 -# Vendor applied and waiting for official status. -PENDING_VENDOR = 2 -# Official Vendor approved by the Foundation. -OFFICIAL_VENDOR = 3 - -# Product object types. -CLOUD = 0 -SOFTWARE = 1 - -# Product specific types. 
-DISTRO = 0 -PUBLIC_CLOUD = 1 -HOSTED_PRIVATE_CLOUD = 2 - -JWT_TOKEN_HEADER = 'Authorization' -JWT_TOKEN_ENV = 'jwt.token' -JWT_VALIDATION_LEEWAY = 42 diff --git a/refstack/api/controllers/__init__.py b/refstack/api/controllers/__init__.py deleted file mode 100644 index 7fef83c1..00000000 --- a/refstack/api/controllers/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""API controllers package.""" - -from oslo_config import cfg - -from refstack.api import constants as const - -CTRLS_OPTS = [ - cfg.IntOpt('results_per_page', - default=20, - help='Number of results for one page'), - cfg.StrOpt('input_date_format', - default='%Y-%m-%d %H:%M:%S', - help='The format for %(start)s and %(end)s parameters' % { - 'start': const.START_DATE, - 'end': const.END_DATE - }) -] - -CONF = cfg.CONF - -CONF.register_opts(CTRLS_OPTS, group='api') diff --git a/refstack/api/controllers/auth.py b/refstack/api/controllers/auth.py deleted file mode 100644 index 295ba867..00000000 --- a/refstack/api/controllers/auth.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Authentication controller.""" - -from urllib import parse - -from oslo_config import cfg -import pecan -from pecan import rest - -from refstack.api import constants as const -from refstack.api import utils as api_utils -from refstack import db - - -OPENID_OPTS = [ - cfg.StrOpt('openstack_openid_endpoint', - default='https://openstackid.org/accounts/openid2', - help='OpenStackID Auth Server URI.' - ), - cfg.StrOpt('openid_logout_endpoint', - default='https://openstackid.org/accounts/user/logout', - help='OpenStackID logout URI.' - ), - cfg.StrOpt('openid_mode', - default='checkid_setup', - help='Interaction mode. Specifies whether Openstack Id ' - 'IdP may interact with the user to determine the ' - 'outcome of the request.' - ), - cfg.StrOpt('openid_ns', - default='http://specs.openid.net/auth/2.0', - help='Protocol version. Value identifying the OpenID ' - 'protocol version being used. This value should ' - 'be "http://specs.openid.net/auth/2.0".' - ), - cfg.StrOpt('openid_return_to', - default='/v1/auth/signin_return', - help='Return endpoint in Refstack\'s API. Value indicating ' - 'the endpoint where the user should be returned to after ' - 'signing in. Openstack Id Idp only supports HTTPS ' - 'address types.' 
- ), - cfg.StrOpt('openid_claimed_id', - default='http://specs.openid.net/auth/2.0/identifier_select', - help='Claimed identifier. This value must be set to ' - '"http://specs.openid.net/auth/2.0/identifier_select" ' - 'or to user claimed identity (user local identifier ' - 'or user owned identity [ex: custom html hosted on an ' - 'owned domain set to html discover]).' - ), - cfg.StrOpt('openid_identity', - default='http://specs.openid.net/auth/2.0/identifier_select', - help='Alternate identifier. This value must be set to ' - 'http://specs.openid.net/auth/2.0/identifier_select.' - ), - cfg.StrOpt('openid_ns_sreg', - default='http://openid.net/extensions/sreg/1.1', - help='Indicates request for user attribute information. ' - 'This value must be set to ' - '"http://openid.net/extensions/sreg/1.1".' - ), - cfg.StrOpt('openid_sreg_required', - default='email,fullname', - help='Comma-separated list of field names which, ' - 'if absent from the response, will prevent the ' - 'Consumer from completing the registration without ' - 'End User interaction. The field names are those that ' - 'are specified in the Response Format, with the ' - '"openid.sreg." prefix removed. Valid values include: ' - '"country", "email", "firstname", "language", "lastname"' - ) -] - -CONF = cfg.CONF -opt_group = cfg.OptGroup(name='osid', - title='Options for the Refstack OpenID 2.0 through ' - 'Openstack Authentication Server') -CONF.register_group(opt_group) -CONF.register_opts(OPENID_OPTS, opt_group) - - -class AuthController(rest.RestController): - """Controller provides user authentication in OpenID 2.0 IdP.""" - - _custom_actions = { - "signin": ["GET"], - "signin_return": ["GET"], - "signout": ["GET"] - } - - def _auth_failure(self, message): - params = { - 'message': message - } - url = parse.urljoin(CONF.ui_url, - '/#/auth_failure?' 
+ parse.urlencode(params)) - pecan.redirect(url) - - @pecan.expose() - def signin(self): - """Handle signin request.""" - session = api_utils.get_user_session() - if api_utils.is_authenticated(): - pecan.redirect(CONF.ui_url) - else: - api_utils.delete_params_from_user_session([const.USER_OPENID]) - - csrf_token = api_utils.get_token() - session[const.CSRF_TOKEN] = csrf_token - session.save() - return_endpoint = parse.urljoin(CONF.api.api_url, - CONF.osid.openid_return_to) - return_to = api_utils.set_query_params(return_endpoint, - {const.CSRF_TOKEN: csrf_token}) - - params = { - const.OPENID_MODE: CONF.osid.openid_mode, - const.OPENID_NS: CONF.osid.openid_ns, - const.OPENID_RETURN_TO: return_to, - const.OPENID_CLAIMED_ID: CONF.osid.openid_claimed_id, - const.OPENID_IDENTITY: CONF.osid.openid_identity, - const.OPENID_REALM: CONF.api.api_url, - const.OPENID_NS_SREG: CONF.osid.openid_ns_sreg, - const.OPENID_NS_SREG_REQUIRED: CONF.osid.openid_sreg_required, - } - url = CONF.osid.openstack_openid_endpoint - url = api_utils.set_query_params(url, params) - pecan.redirect(location=url) - - @pecan.expose() - def signin_return(self): - """Handle returned request from OpenID 2.0 IdP.""" - session = api_utils.get_user_session() - if pecan.request.GET.get(const.OPENID_ERROR): - api_utils.delete_params_from_user_session([const.CSRF_TOKEN]) - self._auth_failure(pecan.request.GET.get(const.OPENID_ERROR)) - - if pecan.request.GET.get(const.OPENID_MODE) == 'cancel': - api_utils.delete_params_from_user_session([const.CSRF_TOKEN]) - self._auth_failure('Authentication canceled.') - - session_token = session.get(const.CSRF_TOKEN) - request_token = pecan.request.GET.get(const.CSRF_TOKEN) - if request_token != session_token: - api_utils.delete_params_from_user_session([const.CSRF_TOKEN]) - self._auth_failure('Authentication failed. Please try again.') - - api_utils.verify_openid_request(pecan.request) - user_info = { - 'openid': pecan.request.GET.get(const.OPENID_CLAIMED_ID), - 'email': pecan.request.GET.get(const.OPENID_NS_SREG_EMAIL), - 'fullname': pecan.request.GET.get(const.OPENID_NS_SREG_FULLNAME) - } - user = db.user_save(user_info) - - api_utils.delete_params_from_user_session([const.CSRF_TOKEN]) - session[const.USER_OPENID] = user.openid - session.save() - - pecan.redirect(CONF.ui_url) - - @pecan.expose('json') - def signout(self): - """Handle signout request.""" - if api_utils.is_authenticated(): - api_utils.delete_params_from_user_session([const.USER_OPENID]) - - params = { - 'openid_logout': CONF.osid.openid_logout_endpoint - } - url = parse.urljoin(CONF.ui_url, - '/#/logout?' + parse.urlencode(params)) - pecan.redirect(url) diff --git a/refstack/api/controllers/guidelines.py b/refstack/api/controllers/guidelines.py deleted file mode 100755 index 07ad1f72..00000000 --- a/refstack/api/controllers/guidelines.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Interop WG guidelines controller.""" - -import pecan -from pecan import rest - -from refstack.api import constants as const -from refstack.api import guidelines -from refstack.api import utils as api_utils - - -class TestsController(rest.RestController): - """v1/guidelines//tests handler. - - This will allow users to retrieve specific test lists from specific - guidelines for use with refstack-client. - """ - - @pecan.expose(content_type='text/plain') - def get(self, version): - """Get the plain-text test list of the specified guideline version.""" - # Remove the .json from version if it is there. - version.replace('.json', '') - g = guidelines.Guidelines() - json = g.get_guideline_contents(version) - - if not json: - return 'Error getting JSON content for version: ' + version - - if pecan.request.GET.get(const.TYPE): - types = pecan.request.GET.get(const.TYPE).split(',') - else: - types = None - - if pecan.request.GET.get('alias'): - alias = api_utils.str_to_bool(pecan.request.GET.get('alias')) - else: - alias = True - - if pecan.request.GET.get('flag'): - flag = api_utils.str_to_bool(pecan.request.GET.get('flag')) - else: - flag = True - - target = pecan.request.GET.get('target', 'platform') - try: - target_caps = g.get_target_capabilities(json, types, target) - test_list = g.get_test_list(json, target_caps, alias, flag) - except KeyError: - return 'Invalid target: ' + target - - return '\n'.join(test_list) - - -class GuidelinesController(rest.RestController): - """/v1/guidelines handler. - - This acts as a proxy for retrieving guideline files - from the openstack/interop Github repository. - """ - - tests = TestsController() - - @pecan.expose('json') - def get(self): - """Get a list of all available guidelines.""" - g = guidelines.Guidelines() - version_list = g.get_guideline_list() - if version_list is None: - pecan.abort(500, 'The server was unable to get a list of ' - 'guidelines from the external source.') - else: - return version_list - - @pecan.expose('json') - def get_one(self, file_name): - """Handler for getting contents of specific guideline file.""" - g = guidelines.Guidelines() - json = g.get_guideline_contents(file_name) - if json: - return json - else: - pecan.abort(500, 'The server was unable to get the JSON ' - 'content for the specified guideline file.') diff --git a/refstack/api/controllers/products.py b/refstack/api/controllers/products.py deleted file mode 100644 index 43cf13c4..00000000 --- a/refstack/api/controllers/products.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Product controller.""" - -import json -import uuid - -from oslo_db.exception import DBReferenceError -from oslo_log import log -import pecan -from pecan.secure import secure - -from refstack.api import constants as const -from refstack.api.controllers import validation -from refstack.api import utils as api_utils -from refstack.api import validators -from refstack import db - -LOG = log.getLogger(__name__) - - -class VersionsController(validation.BaseRestControllerWithValidation): - """/v1/products//versions handler.""" - - __validator__ = validators.ProductVersionValidator - - @pecan.expose('json') - def get(self, id): - """Get all versions for a product.""" - product = db.get_product(id) - vendor_id = product['organization_id'] - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not product['public'] and not is_admin: - pecan.abort(403, 'Forbidden.') - - allowed_keys = ['id', 'product_id', 'version', 'cpid'] - return db.get_product_versions(id, allowed_keys=allowed_keys) - - @pecan.expose('json') - def get_one(self, id, version_id): - """Get specific version information.""" - product = db.get_product(id) - vendor_id = product['organization_id'] - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not product['public'] and not is_admin: - pecan.abort(403, 'Forbidden.') - allowed_keys = ['id', 'product_id', 'version', 'cpid'] - return db.get_product_version(version_id, allowed_keys=allowed_keys) - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def post(self, id): - """'secure' decorator doesn't work at store_item. it must be here.""" - self.product_id = id - return super(VersionsController, self).post() - - @pecan.expose('json') - def store_item(self, version_info): - """Add a new version for the product.""" - if (not api_utils.check_user_is_product_admin(self.product_id) and - not api_utils.check_user_is_foundation_admin()): - pecan.abort(403, 'Forbidden.') - - creator = api_utils.get_user_id() - pecan.response.status = 201 - allowed_keys = ['id', 'product_id', 'version', 'cpid'] - return db.add_product_version(self.product_id, version_info['version'], - creator, version_info.get('cpid'), - allowed_keys) - - @secure(api_utils.is_authenticated) - @pecan.expose('json', method='PUT') - def put(self, id, version_id, **kw): - """Update details for a specific version. - - Endpoint: /v1/products//versions/ - """ - if (not api_utils.check_user_is_product_admin(id) and - not api_utils.check_user_is_foundation_admin()): - pecan.abort(403, 'Forbidden.') - - version_info = {'id': version_id} - if 'cpid' in kw: - version_info['cpid'] = kw['cpid'] - version = db.update_product_version(version_info) - pecan.response.status = 200 - return version - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def delete(self, id, version_id): - """Delete a product version. - - Endpoint: /v1/products//versions/ - """ - if (not api_utils.check_user_is_product_admin(id) and - not api_utils.check_user_is_foundation_admin()): - - pecan.abort(403, 'Forbidden.') - try: - version = db.get_product_version(version_id, - allowed_keys=['version']) - if not version['version']: - pecan.abort(400, 'Can not delete the empty version as it is ' - 'used for basic product/test association. 
' - 'This version was implicitly created with ' - 'the product, and so it cannot be deleted ' - 'explicitly.') - - db.delete_product_version(version_id) - except DBReferenceError: - pecan.abort(400, 'Unable to delete. There are still tests ' - 'associated to this product version.') - pecan.response.status = 204 - - -class ProductsController(validation.BaseRestControllerWithValidation): - """/v1/products handler.""" - - __validator__ = validators.ProductValidator - - _custom_actions = { - "action": ["POST"], - } - - versions = VersionsController() - - @pecan.expose('json') - def get(self): - """Get information of all products.""" - filters = api_utils.parse_input_params(['organization_id']) - - allowed_keys = ['id', 'name', 'description', 'product_ref_id', 'type', - 'product_type', 'public', 'organization_id'] - user = api_utils.get_user_id() - is_admin = user in db.get_foundation_users() - try: - if is_admin: - products = db.get_products(allowed_keys=allowed_keys, - filters=filters) - for s in products: - s['can_manage'] = True - else: - result = dict() - filters['public'] = True - - products = db.get_products(allowed_keys=allowed_keys, - filters=filters) - for s in products: - _id = s['id'] - result[_id] = s - result[_id]['can_manage'] = False - - filters.pop('public') - products = db.get_products_by_user(user, - allowed_keys=allowed_keys, - filters=filters) - for s in products: - _id = s['id'] - if _id not in result: - result[_id] = s - result[_id]['can_manage'] = True - products = list(result.values()) - except Exception as ex: - LOG.exception('An error occurred during ' - 'operation with database: %s', ex) - pecan.abort(400) - - products.sort(key=lambda x: x['name']) - return {'products': products} - - @pecan.expose('json') - def get_one(self, id): - """Get information about product.""" - allowed_keys = ['id', 'name', 'description', - 'product_ref_id', 'product_type', - 'public', 'properties', 'created_at', 'updated_at', - 'organization_id', 'created_by_user', 'type'] - product = db.get_product(id, allowed_keys=allowed_keys) - vendor_id = product['organization_id'] - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not is_admin and not product['public']: - pecan.abort(403, 'Forbidden.') - if not is_admin: - admin_only_keys = ['created_by_user', 'created_at', 'updated_at', - 'properties'] - for key in list(product): - if key in admin_only_keys: - product.pop(key) - - product['can_manage'] = is_admin - return product - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def post(self): - """'secure' decorator doesn't work at store_item. it must be here.""" - return super(ProductsController, self).post() - - @pecan.expose('json') - def store_item(self, product): - """Handler for storing item. 
Should return new item id.""" - creator = api_utils.get_user_id() - product['type'] = (const.SOFTWARE - if product['product_type'] == const.DISTRO - else const.CLOUD) - if product['type'] == const.SOFTWARE: - product['product_ref_id'] = str(uuid.uuid4()) - vendor_id = product.pop('organization_id', None) - if not vendor_id: - # find or create default vendor for new product - # TODO(andrey-mp): maybe just fill with info here and create - # at DB layer in one transaction - default_vendor_name = 'vendor_' + creator - vendors = db.get_organizations_by_user(creator) - for v in vendors: - if v['name'] == default_vendor_name: - vendor_id = v['id'] - break - else: - vendor = {'name': default_vendor_name} - vendor = db.add_organization(vendor, creator) - vendor_id = vendor['id'] - product['organization_id'] = vendor_id - product = db.add_product(product, creator) - return {'id': product['id']} - - @secure(api_utils.is_authenticated) - @pecan.expose('json', method='PUT') - def put(self, id, **kw): - """Handler for update item. Should return full info with updates.""" - product = db.get_product(id) - vendor_id = product['organization_id'] - vendor = db.get_organization(vendor_id) - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not is_admin: - pecan.abort(403, 'Forbidden.') - - product_info = {'id': id} - if 'name' in kw: - product_info['name'] = kw['name'] - if 'description' in kw: - product_info['description'] = kw['description'] - if 'product_ref_id' in kw: - product_info['product_ref_id'] = kw['product_ref_id'] - if 'public' in kw: - # user can mark product as public only if - # his/her vendor is public(official) - public = api_utils.str_to_bool(kw['public']) - if (vendor['type'] not in - (const.OFFICIAL_VENDOR, const.FOUNDATION) and public): - pecan.abort(403, 'Forbidden.') - product_info['public'] = public - if 'properties' in kw: - product_info['properties'] = json.dumps(kw['properties']) - db.update_product(product_info) - - pecan.response.status = 200 - product = db.get_product(id) - product['can_manage'] = True - return product - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def delete(self, id): - """Delete product.""" - if (not api_utils.check_user_is_foundation_admin() and - not api_utils.check_user_is_product_admin(id)): - pecan.abort(403, 'Forbidden.') - try: - db.delete_product(id) - except DBReferenceError: - pecan.abort(400, 'Unable to delete. There are still tests ' - 'associated to versions of this product.') - pecan.response.status = 204 diff --git a/refstack/api/controllers/results.py b/refstack/api/controllers/results.py deleted file mode 100644 index cf9f48b8..00000000 --- a/refstack/api/controllers/results.py +++ /dev/null @@ -1,331 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
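# Editor's note: a hedged sketch, not part of the original change, of the
# minimal payload ProductsController.post()/store_item() above accept. Per the
# ProductValidator defined later in this change-set, 'name' and 'product_type'
# are required and 'organization_id' is optional (a default vendor is created
# when it is omitted). The base URL, the bearer-token header, and the numeric
# product_type value are placeholders/assumptions.
import requests

BASE = 'https://refstack.example.org/api/v1'
resp = requests.post(
    BASE + '/products',
    json={'name': 'Example Cloud', 'product_type': 1},  # 1: assumed enum value
    headers={'Authorization': 'Bearer <signed-jwt>'},    # placeholder credential
    timeout=10,
)
print(resp.status_code, resp.json())   # expects 201 and {'id': '<product id>'}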
- -"""Test results controller.""" -import functools -from urllib import parse - -from oslo_config import cfg -from oslo_log import log -import pecan -from pecan import rest - -from refstack.api import constants as const -from refstack.api.controllers import validation -from refstack.api import utils as api_utils -from refstack.api import validators -from refstack import db - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - - -class MetadataController(rest.RestController): - """/v1/results//meta handler.""" - - rw_access_keys = ('shared', 'guideline', 'target',) - - def _check_key(func): - """Decorator to check that a specific key has write access.""" - @functools.wraps(func) - def wrapper(*args, **kwargs): - meta_key = args[2] - if meta_key not in args[0].rw_access_keys: - pecan.abort(403) - return func(*args, **kwargs) - return wrapper - - @pecan.expose('json') - def get(self, test_id): - """Get test run metadata.""" - test_info = db.get_test_result(test_id) - role = api_utils.get_user_role(test_id) - if role in (const.ROLE_FOUNDATION, const.ROLE_OWNER): - return test_info['meta'] - elif role in (const.ROLE_USER): - return {k: v for k, v in test_info['meta'].items() - if k in self.rw_access_keys} - pecan.abort(403) - - @pecan.expose('json') - def get_one(self, test_id, key): - """Get value for key from test run metadata.""" - role = api_utils.get_user_role(test_id) - if role in (const.ROLE_FOUNDATION, const.ROLE_OWNER): - return db.get_test_result_meta_key(test_id, key) - elif role in (const.ROLE_USER) and key in self.rw_access_keys: - return db.get_test_result_meta_key(test_id, key) - pecan.abort(403) - - @_check_key - @api_utils.check_permissions(level=const.ROLE_OWNER) - @pecan.expose('json') - def post(self, test_id, key): - """Save value for key in test run metadata.""" - test = db.get_test_result(test_id) - if test['verification_status'] == const.TEST_VERIFIED: - pecan.abort(403, 'Can not add/alter a new metadata key for a ' - 'verified test run.') - db.save_test_result_meta_item(test_id, key, pecan.request.body) - pecan.response.status = 201 - - @_check_key - @api_utils.check_permissions(level=const.ROLE_OWNER) - @pecan.expose('json') - def delete(self, test_id, key): - """Delete key from test run metadata.""" - test = db.get_test_result(test_id) - if test['verification_status'] == const.TEST_VERIFIED: - pecan.abort(403, 'Can not delete a metadata key for a ' - 'verified test run.') - db.delete_test_result_meta_item(test_id, key) - pecan.response.status = 204 - - -class ResultsController(validation.BaseRestControllerWithValidation): - """/v1/results handler.""" - - __validator__ = validators.TestResultValidator - - meta = MetadataController() - - def _check_authentication(self): - x_public_key = pecan.request.headers.get('X-Public-Key') - if x_public_key: - public_key = x_public_key.strip().split()[1] - stored_public_key = db.get_pubkey(public_key) - if not stored_public_key: - pecan.abort(401, 'User with specified key not found. ' - 'Please log into the RefStack server to ' - 'upload your key.') - else: - stored_public_key = None - - if not CONF.api.enable_anonymous_upload and not stored_public_key: - pecan.abort(401, 'Anonymous result uploads are disabled. 
' - 'Please create a user account and an api ' - 'key at https://refstack.openstack.org/#/') - - return stored_public_key - - def _auto_version_associate(self, test, test_, pubkey): - if test.get('cpid'): - version = db.get_product_version_by_cpid( - test['cpid'], allowed_keys=['id', 'product_id']) - # Only auto-associate if there is a single product version - # with the given cpid. - if len(version) == 1: - is_foundation = api_utils.check_user_is_foundation_admin( - pubkey.openid) - is_product_admin = api_utils.check_user_is_product_admin( - version[0]['product_id'], pubkey.openid) - if is_foundation or is_product_admin: - test_['product_version_id'] = version[0]['id'] - return test_ - - @pecan.expose('json') - @api_utils.check_permissions(level=const.ROLE_USER) - def get_one(self, test_id): - """Handler for getting item.""" - user_role = api_utils.get_user_role(test_id) - if user_role in (const.ROLE_FOUNDATION, const.ROLE_OWNER): - test_info = db.get_test_result( - test_id, allowed_keys=['id', 'cpid', 'created_at', - 'duration_seconds', 'meta', - 'product_version', - 'verification_status'] - ) - else: - test_info = db.get_test_result(test_id) - test_list = db.get_test_results(test_id) - test_name_list = [test_dict['name'] for test_dict in test_list] - test_info.update({'results': test_name_list, - 'user_role': user_role}) - - if user_role not in (const.ROLE_FOUNDATION, const.ROLE_OWNER): - # Don't expose product information if product is not public. - if (test_info.get('product_version') and - not test_info['product_version'] - ['product_info']['public']): - - test_info['product_version'] = None - - test_info['meta'] = { - k: v for k, v in test_info['meta'].items() - if k in MetadataController.rw_access_keys - } - return test_info - - def store_item(self, test): - """Handler for storing item. Should return new item id.""" - # If we need a key, or the key isn't available, this will throw - # an exception with a 401 - pubkey = self._check_authentication() - test_ = test.copy() - if pubkey: - if 'meta' not in test_: - test_['meta'] = {} - test_['meta'][const.USER] = pubkey.openid - test_ = self._auto_version_associate(test, test_, pubkey) - - test_id = db.store_test_results(test_) - return {'test_id': test_id, - 'url': parse.urljoin(CONF.ui_url, - CONF.api.test_results_url) % test_id} - - @pecan.expose('json') - @api_utils.check_permissions(level=const.ROLE_OWNER) - def delete(self, test_id): - """Delete test run.""" - test = db.get_test_result(test_id) - if test['verification_status'] == const.TEST_VERIFIED: - pecan.abort(403, 'Can not delete a verified test run.') - - db.delete_test_result(test_id) - pecan.response.status = 204 - - @pecan.expose('json') - def get(self): - """Get information of all uploaded test results. - - Get information of all uploaded test results in descending - chronological order. Make it possible to specify some - input parameters for filtering. - For example: - /v1/results?page=&cpid=1234. - By default, page is set to page number 1, - if the page parameter is not specified. 
- """ - expected_input_params = [ - const.START_DATE, - const.END_DATE, - const.CPID, - const.SIGNED, - const.VERIFICATION_STATUS, - const.PRODUCT_ID - ] - - filters = api_utils.parse_input_params(expected_input_params) - - if const.PRODUCT_ID in filters: - product = db.get_product(filters[const.PRODUCT_ID]) - vendor_id = product['organization_id'] - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if is_admin: - filters[const.ALL_PRODUCT_TESTS] = True - elif not product['public']: - pecan.abort(403, 'Forbidden.') - - records_count = db.get_test_result_records_count(filters) - page_number, total_pages_number = \ - api_utils.get_page_number(records_count) - - try: - per_page = CONF.api.results_per_page - results = db.get_test_result_records( - page_number, per_page, filters) - is_foundation = api_utils.check_user_is_foundation_admin() - for result in results: - - if not (api_utils.check_owner(result['id']) or is_foundation): - - # Don't expose product info if the product is not public. - if (result.get('product_version') and not - result['product_version']['product_info'] - ['public']): - - result['product_version'] = None - # Only show all metadata if the user is the owner or a - # member of the Foundation group. - result['meta'] = { - k: v for k, v in result['meta'].items() - if k in MetadataController.rw_access_keys - } - result.update({'url': parse.urljoin( - CONF.ui_url, CONF.api.test_results_url - ) % result['id']}) - - page = {'results': results, - 'pagination': { - 'current_page': page_number, - 'total_pages': total_pages_number - }} - except Exception as ex: - LOG.debug('An error occurred during ' - 'operation with database: %s', str(ex)) - pecan.abort(500) - - return page - - @api_utils.check_permissions(level=const.ROLE_OWNER) - @pecan.expose('json') - def put(self, test_id, **kw): - """Update a test result.""" - test_info = {'id': test_id} - is_foundation_admin = api_utils.check_user_is_foundation_admin() - - if 'product_version_id' in kw: - test = db.get_test_result(test_id) - if test['verification_status'] == const.TEST_VERIFIED: - pecan.abort(403, 'Can not update product_version_id for a ' - 'verified test run.') - - if kw['product_version_id']: - # Verify that the user is a member of the product's vendor. - version = db.get_product_version(kw['product_version_id'], - allowed_keys=['product_id']) - is_vendor_admin = ( - api_utils - .check_user_is_product_admin(version['product_id']) - ) - else: - # No product vendor to check membership for, so just set - # is_vendor_admin to True. - is_vendor_admin = True - kw['product_version_id'] = None - - if not is_vendor_admin and not is_foundation_admin: - pecan.abort(403, 'Forbidden.') - - test_info['product_version_id'] = kw['product_version_id'] - - if 'verification_status' in kw: - if not is_foundation_admin: - pecan.abort(403, 'You do not have permission to change a ' - 'verification status.') - - if kw['verification_status'] not in (0, 1): - pecan.abort(400, 'Invalid verification_status value: %d' % - kw['verification_status']) - - # Check pre-conditions are met to mark a test verified. 
- if (kw['verification_status'] == 1 and - not (db.get_test_result_meta_key(test_id, 'target') and - db.get_test_result_meta_key(test_id, 'guideline') and - db.get_test_result_meta_key(test_id, - const.SHARED_TEST_RUN))): - - pecan.abort(403, 'In order to mark a test verified, the ' - 'test must be shared and have been ' - 'associated to a guideline and target ' - 'program.') - - test_info['verification_status'] = kw['verification_status'] - - test = db.update_test_result(test_info) - pecan.response.status = 201 - return test diff --git a/refstack/api/controllers/root.py b/refstack/api/controllers/root.py deleted file mode 100644 index e82aeb80..00000000 --- a/refstack/api/controllers/root.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Root controller.""" - -from oslo_config import cfg -from pecan import expose - -from refstack.api.controllers import v1 - -CONF = cfg.CONF - - -class RootController(object): - """Root handler.""" - - v1 = v1.V1Controller() - - if CONF.api.app_dev_mode: - @expose(generic=True, template='index.html') - def index(self): - """Return index.html in development mode. - - It allows to run both API and UI with pecan serve. - Template path should point into UI app folder - """ - return dict() diff --git a/refstack/api/controllers/user.py b/refstack/api/controllers/user.py deleted file mode 100644 index 38996ccb..00000000 --- a/refstack/api/controllers/user.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
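# Editor's note: a hedged sketch, not part of the original change, of a signed
# test-result upload as ResultsController._check_authentication() and the
# TestResultValidator later in this change-set expect it: the raw JSON body is
# signed with the user's RSA key, the hex-encoded signature travels in the
# X-Signature header and the matching OpenSSH public key in X-Public-Key.
# The base URL, key file path, cpid and test name are placeholders.
import binascii
import json

import requests
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding

with open('refstack_rsa', 'rb') as f:            # hypothetical PEM private key
    key = serialization.load_pem_private_key(f.read(), password=None)

body = json.dumps({
    'cpid': 'example-cloud-provider-id',
    'duration_seconds': 1800,
    'results': [{'name': 'tempest.api.identity.v3.test_tokens'
                         '.TokensV3Test.test_create_token'}],
}).encode('utf-8')

headers = {
    'Content-Type': 'application/json',
    'X-Signature': binascii.b2a_hex(
        key.sign(body, padding.PKCS1v15(), hashes.SHA256())).decode(),
    'X-Public-Key': key.public_key().public_bytes(
        serialization.Encoding.OpenSSH,
        serialization.PublicFormat.OpenSSH).decode(),
}
resp = requests.post('https://refstack.example.org/api/v1/results',
                     data=body, headers=headers, timeout=10)
print(resp.json())   # {'test_id': ..., 'url': ...} on success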
- -"""User profile controller.""" - -import pecan -from pecan import rest -from pecan.secure import secure - -from refstack.api.controllers import validation -from refstack.api import utils as api_utils -from refstack.api import validators -from refstack import db - - -class PublicKeysController(validation.BaseRestControllerWithValidation): - """/v1/profile/pubkeys handler.""" - - __validator__ = validators.PubkeyValidator - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def post(self, ): - """Handler for uploading public pubkeys.""" - return super(PublicKeysController, self).post() - - def store_item(self, body): - """Handler for storing item.""" - pubkey = {'openid': api_utils.get_user_id()} - parts = body['raw_key'].strip().split() - if len(parts) == 2: - parts.append('') - pubkey['format'], pubkey['pubkey'], pubkey['comment'] = parts - pubkey_id = db.store_pubkey(pubkey) - return pubkey_id - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def get(self): - """Retrieve all user's public pubkeys.""" - return api_utils.get_user_public_keys() - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def delete(self, pubkey_id): - """Delete public key.""" - pubkeys = api_utils.get_user_public_keys() - for key in pubkeys: - if key['id'] == pubkey_id: - db.delete_pubkey(pubkey_id) - pecan.response.status = 204 - return - else: - pecan.abort(404) - - -class ProfileController(rest.RestController): - """Controller provides user information in OpenID 2.0 IdP. - - /v1/profile handler - """ - - pubkeys = PublicKeysController() - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def get(self): - """Handle get request on user info.""" - user = api_utils.get_user() - return { - "openid": user.openid, - "email": user.email, - "fullname": user.fullname, - "is_admin": api_utils.check_user_is_foundation_admin() - } diff --git a/refstack/api/controllers/v1.py b/refstack/api/controllers/v1.py deleted file mode 100644 index d820bb9c..00000000 --- a/refstack/api/controllers/v1.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Version 1 of the API.""" - -from refstack.api.controllers import auth -from refstack.api.controllers import guidelines -from refstack.api.controllers import products -from refstack.api.controllers import results -from refstack.api.controllers import user -from refstack.api.controllers import vendors - - -class V1Controller(object): - """Version 1 API controller root.""" - - results = results.ResultsController() - guidelines = guidelines.GuidelinesController() - auth = auth.AuthController() - profile = user.ProfileController() - products = products.ProductsController() - vendors = vendors.VendorsController() diff --git a/refstack/api/controllers/validation.py b/refstack/api/controllers/validation.py deleted file mode 100644 index 6b30861f..00000000 --- a/refstack/api/controllers/validation.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Base for controllers with validation.""" - -import json - -import pecan -from pecan import rest - - -class BaseRestControllerWithValidation(rest.RestController): - """Rest controller with validation. - - Controller provides validation for POSTed data - exposed endpoints: - POST base_url/ - GET base_url/ - GET base_url/schema - """ - - __validator__ = None - - _custom_actions = { - "schema": ["GET"], - } - - def __init__(self): # pragma: no cover - """Init.""" - if self.__validator__: - self.validator = self.__validator__() - else: - raise ValueError("__validator__ is not defined") - - def store_item(self, item_in_json): # pragma: no cover - """Handler for storing item. Should return new item id.""" - raise NotImplementedError - - @pecan.expose('json') - def schema(self): - """Return validation schema.""" - return self.validator.schema - - @pecan.expose('json') - def post(self, ): - """POST handler.""" - self.validator.validate(pecan.request) - item = json.loads(pecan.request.body.decode('utf-8')) - item_id = self.store_item(item) - pecan.response.status = 201 - return item_id diff --git a/refstack/api/controllers/vendors.py b/refstack/api/controllers/vendors.py deleted file mode 100644 index 1dba7cb6..00000000 --- a/refstack/api/controllers/vendors.py +++ /dev/null @@ -1,310 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Vendors controller.""" - -import base64 -import json - -from oslo_config import cfg -from oslo_db.exception import DBReferenceError -from oslo_log import log -import pecan -from pecan import rest -from pecan.secure import secure - -from refstack.api import constants as const -from refstack.api.controllers import validation -from refstack.api import exceptions as api_exc -from refstack.api import utils as api_utils -from refstack.api import validators -from refstack import db - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - - -def _check_is_not_foundation(vendor_id): - vendor = db.get_organization(vendor_id) - if vendor['type'] == const.FOUNDATION: - pecan.abort(403, 'Forbidden.') - - -class UsersController(rest.RestController): - """/v1/vendors//users handler.""" - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def get(self, vendor_id): - """Return list of users in the vendor's group.""" - if not (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)): - return None - - org_users = db.get_organization_users(vendor_id) - return [x for x in org_users.values()] - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def put(self, vendor_id, openid): - """Add user to vendor group.""" - openid = base64.b64decode(openid) - - if not (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)): - pecan.abort(403, 'Forbidden.') - - vendor = db.get_organization(vendor_id) - creator = api_utils.get_user_id() - db.add_user_to_group(openid, vendor['group_id'], creator) - pecan.response.status = 204 - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def delete(self, vendor_id, openid): - """Remove user from vendor group.""" - openid = base64.b64decode(openid) - - if not (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)): - pecan.abort(403, 'Forbidden.') - - vendor = db.get_organization(vendor_id) - db.remove_user_from_group(openid, vendor['group_id']) - pecan.response.status = 204 - - -class VendorsController(validation.BaseRestControllerWithValidation): - """/v1/vendors handler.""" - - users = UsersController() - - __validator__ = validators.VendorValidator - - _custom_actions = { - "action": ["POST"], - } - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def post(self): - """'secure' decorator doesn't work at store_item. it must be here.""" - return super(VendorsController, self).post() - - @pecan.expose('json') - def store_item(self, vendor): - """Handler for create item. Should return new item id.""" - creator = api_utils.get_user_id() - vendor = db.add_organization(vendor, creator) - return {'id': vendor['id']} - - @secure(api_utils.is_authenticated) - @pecan.expose('json', method='PUT') - def put(self, vendor_id, **kw): - """Handler for update item. 
Should return full info with updates.""" - is_foundation_admin = api_utils.check_user_is_foundation_admin() - is_admin = (is_foundation_admin or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not is_admin: - pecan.abort(403, 'Forbidden.') - vendor_info = {'id': vendor_id} - vendor = db.get_organization(vendor_id) - if 'name' in kw: - if (vendor['type'] == const.OFFICIAL_VENDOR and - not is_foundation_admin): - pecan.abort( - 403, 'Name change for an official vendor is not allowed.') - vendor_info['name'] = kw['name'] - if 'description' in kw: - vendor_info['description'] = kw['description'] - if 'properties' in kw: - vendor_info['properties'] = json.dumps(kw['properties']) - vendor = db.update_organization(vendor_info) - - pecan.response.status = 200 - vendor['can_manage'] = True - return vendor - - @pecan.expose('json') - def get(self): - """Get information of vendors.""" - allowed_keys = ['id', 'type', 'name', 'description'] - user = api_utils.get_user_id() - try: - is_admin = api_utils.check_user_is_foundation_admin() - if is_admin: - vendors = db.get_organizations(allowed_keys=allowed_keys) - for vendor in vendors: - vendor['can_manage'] = True - else: - result = dict() - types = [const.FOUNDATION, const.OFFICIAL_VENDOR] - vendors = db.get_organizations_by_types( - types, allowed_keys=allowed_keys) - for vendor in vendors: - _id = vendor['id'] - result[_id] = vendor - result[_id]['can_manage'] = False - - vendors = db.get_organizations_by_user( - user, allowed_keys=allowed_keys) - for vendor in vendors: - _id = vendor['id'] - if _id not in result: - result[_id] = vendor - result[_id]['can_manage'] = True - vendors = list(result.values()) - except Exception as ex: - LOG.exception('An error occurred during ' - 'operation with database: %s', ex) - pecan.abort(400) - return {'vendors': vendors} - - @pecan.expose('json') - def get_one(self, vendor_id): - """Get information about vendor.""" - allowed_keys = None - is_admin = (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)) - if not is_admin: - allowed_keys = ['id', 'type', 'name', 'description'] - - vendor = db.get_organization(vendor_id, allowed_keys=allowed_keys) - - allowed_types = [const.FOUNDATION, const.OFFICIAL_VENDOR] - if not is_admin and vendor['type'] not in allowed_types: - pecan.abort(403, 'Forbidden.') - - vendor['can_manage'] = is_admin - return vendor - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def delete(self, vendor_id): - """Delete vendor.""" - if not (api_utils.check_user_is_foundation_admin() or - api_utils.check_user_is_vendor_admin(vendor_id)): - pecan.abort(403, 'Forbidden.') - _check_is_not_foundation(vendor_id) - - try: - db.delete_organization(vendor_id) - except DBReferenceError: - pecan.abort(400, 'Unable to delete. 
There are still tests ' - 'associated to products for this vendor.') - pecan.response.status = 204 - - @secure(api_utils.is_authenticated) - @pecan.expose('json') - def action(self, vendor_id, **kw): - """Handler for action on Vendor object.""" - params = list() - for param in ('register', 'approve', 'deny', 'cancel'): - if param in kw: - params.append(param) - if len(params) != 1: - raise api_exc.ValidationError( - 'Invalid actions in the body: ' + str(params)) - - vendor = db.get_organization(vendor_id) - if 'register' in params: - self.register(vendor) - elif 'approve' in params: - self.approve(vendor) - elif 'cancel' in params: - self.cancel(vendor) - else: - self.deny(vendor, kw.get('registration_decline_reason')) - - def register(self, vendor): - """Handler for applying for registration with Foundation.""" - if not api_utils.check_user_is_vendor_admin(vendor['id']): - pecan.abort(403, 'Forbidden.') - _check_is_not_foundation(vendor['id']) - - if vendor['type'] != const.PRIVATE_VENDOR: - raise api_exc.ValidationError( - 'Invalid organization state for this action.') - - # change vendor type to pending - org_info = { - 'id': vendor['id'], - 'type': const.PENDING_VENDOR} - db.update_organization(org_info) - - def approve(self, vendor): - """Handler for making vendor official.""" - if not api_utils.check_user_is_foundation_admin(): - pecan.abort(403, 'Forbidden.') - _check_is_not_foundation(vendor['id']) - - if vendor['type'] != const.PENDING_VENDOR: - raise api_exc.ValidationError( - 'Invalid organization state for this action.') - - # change vendor type to public - props = vendor.get('properties') - props = json.loads(props) if props else {} - props.pop('registration_decline_reason', None) - org_info = { - 'id': vendor['id'], - 'type': const.OFFICIAL_VENDOR, - 'properties': json.dumps(props)} - db.update_organization(org_info) - - def cancel(self, vendor): - """Handler for canceling registration. - - This action available to user. It allows him to cancel - registrationand move state of his vendor from pending - to private. - """ - if not api_utils.check_user_is_vendor_admin(vendor['id']): - pecan.abort(403, 'Forbidden.') - _check_is_not_foundation(vendor['id']) - - if vendor['type'] != const.PENDING_VENDOR: - raise api_exc.ValidationError( - 'Invalid organization state for this action.') - - # change vendor type back to private - org_info = { - 'id': vendor['id'], - 'type': const.PRIVATE_VENDOR} - db.update_organization(org_info) - - def deny(self, vendor, reason): - """Handler for denying a vendor.""" - if not api_utils.check_user_is_foundation_admin(): - pecan.abort(403, 'Forbidden.') - _check_is_not_foundation(vendor['id']) - - if not reason: - raise api_exc.ValidationError( - 'The decline reason can not be empty') - if vendor['type'] != const.PENDING_VENDOR: - raise api_exc.ValidationError( - 'Invalid organization state for this action.') - - props = vendor.get('properties') - props = json.loads(props) if props else {} - props['registration_decline_reason'] = reason - - # change vendor type back to private - org_info = { - 'id': vendor['id'], - 'type': const.PRIVATE_VENDOR, - 'properties': json.dumps(props)} - db.update_organization(org_info) diff --git a/refstack/api/exceptions.py b/refstack/api/exceptions.py deleted file mode 100644 index c214dcd8..00000000 --- a/refstack/api/exceptions.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# All Rights Reserved. 
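# Editor's note: an illustration, not part of the original change, of the small
# state machine over the vendor 'type' field that the register/approve/cancel/
# deny handlers above implement. The numeric constant values are invented here;
# the real ones live in refstack.api.constants, which is not shown in this hunk.
PRIVATE_VENDOR, PENDING_VENDOR, OFFICIAL_VENDOR, FOUNDATION = range(4)

TRANSITIONS = {
    'register': (PRIVATE_VENDOR, PENDING_VENDOR),   # vendor admin applies
    'approve': (PENDING_VENDOR, OFFICIAL_VENDOR),   # foundation admin accepts
    'deny': (PENDING_VENDOR, PRIVATE_VENDOR),       # foundation admin declines
    'cancel': (PENDING_VENDOR, PRIVATE_VENDOR),     # vendor admin withdraws
}


def next_vendor_type(action, current_type):
    """Return the new vendor type, mirroring the checks in the handlers."""
    required_type, new_type = TRANSITIONS[action]
    if current_type != required_type:
        raise ValueError('Invalid organization state for this action.')
    return new_type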
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Refstack API exceptions.""" - - -class ParseInputsError(Exception): - """Raise if input params are invalid.""" - - pass - - -class ValidationError(Exception): - """Raise if request doesn't pass trough validation process.""" - - def __init__(self, title, exc=None): - """Init.""" - super(ValidationError, self).__init__(title) - self.exc = exc - self.title = title - self.details = "%s(%s: %s)" % (self.title, - self.exc.__class__.__name__, - str(self.exc)) \ - if self.exc else self.title - - def __repr__(self): - """Repr method.""" - return self.details - - def __str__(self): - """Str method.""" - return self.__repr__() diff --git a/refstack/api/guidelines.py b/refstack/api/guidelines.py deleted file mode 100755 index 58387294..00000000 --- a/refstack/api/guidelines.py +++ /dev/null @@ -1,254 +0,0 @@ -# Copyright (c) 2016 IBM, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Class for retrieving Interop WG guideline information.""" - -import itertools -from operator import itemgetter -import os -import re - -import requests -import requests_cache - -from oslo_config import cfg -from oslo_log import log - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# Cached requests will expire after 12 hours. -requests_cache.install_cache(cache_name='opendev_cache', - backend='memory', - expire_after=43200) - - -class Guidelines: - """This class handles guideline/capability listing and retrieval.""" - - def __init__(self, - repo_url=None, - raw_url=None, - additional_capability_urls=None): - """Initialize class with needed URLs. - - The URL for the guidelines repository is specified with 'repo_url'. - The URL for where raw files are served is specified with 'raw_url'. - These values will default to the values specified in the RefStack - config file. - """ - self.guideline_sources = list() - if additional_capability_urls: - self.additional_urls = additional_capability_urls.split(',') - else: - self.additional_urls = \ - CONF.api.additional_capability_urls.split(',') - [self.guideline_sources.append(url) for url in self.additional_urls] - if repo_url: - self.repo_url = repo_url - else: - self.repo_url = CONF.api.opendev_api_capabilities_url - if self.repo_url and self.repo_url not in self.guideline_sources: - self.guideline_sources.append(self.repo_url) - if raw_url: - self.raw_url = raw_url - else: - self.raw_url = CONF.api.opendev_raw_base_url - - def get_guideline_list(self): - """Return a list of a guideline files. 
- - The repository url specificed in class instantiation is checked - for a list of JSON guideline files. A list of these is returned. - """ - capability_files = {} - capability_list = [] - powered_files = [] - addon_files = [] - for src_url in self.guideline_sources: - try: - resp = requests.get(src_url) - - LOG.debug("Response Status: %s / Used Requests Cache: %s", - resp.status_code, - getattr(resp, 'from_cache', False)) - if resp.status_code == 200: - regex = re.compile(r'([0-9]{4}\.[0-9]{2}|next)\.json') - for rfile in resp.json(): - if rfile["type"] == "file" and \ - regex.search(rfile["name"]): - if 'add-ons' in rfile['path'] and \ - rfile[ - 'name'] not in map(itemgetter('name'), - addon_files): - file_dict = {'name': rfile['name']} - addon_files.append(file_dict) - elif 'add-ons' not in rfile['path'] and \ - rfile['name'] not in map(itemgetter('name'), - powered_files): - basename = os.path.basename(rfile['path']) - file_dict = {'name': rfile['name'], - 'file': basename} - powered_files.append(file_dict) - else: - LOG.warning('Guidelines repo URL (%s) returned ' - 'non-success HTTP code: %s', src_url, - resp.status_code) - - except requests.exceptions.RequestException as e: - LOG.warning('An error occurred trying to get repository ' - 'contents through %s: %s', src_url, e) - for k, v in itertools.groupby(addon_files, - key=lambda x: x['name'].split('.')[0]): - values = [{'name': x['name'].split('.', 1)[1], 'file': x['name']} - for x in list(v)] - capability_list.append((k, list(values))) - capability_list.append(('powered', powered_files)) - capability_files = dict((x, y) for x, y in capability_list) - return capability_files - - def get_guideline_contents(self, gl_file): - """Get contents for a given guideline path.""" - if '.json' not in gl_file: - gl_file = '.'.join((gl_file, 'json')) - regex = re.compile(r"[a-z]*\.([0-9]{4}\.[0-9]{2}|next)\.json") - if regex.search(gl_file): - guideline_path = 'add-ons/guidelines/' + gl_file - else: - guideline_path = 'guidelines/' + gl_file - - file_url = ''.join((self.raw_url.rstrip('/'), - '/', guideline_path)) - LOG.debug("file_url: %s", file_url) - try: - response = requests.get(file_url) - LOG.debug("Response Status: %s / Used Requests Cache: %s", - response.status_code, - getattr(response, 'from_cache', False)) - LOG.debug("Response body: %s", str(response.text)) - if response.status_code == 200: - return response.json() - else: - LOG.warning('Raw guideline URL (%s) returned non-success HTTP ' - 'code: %s', self.raw_url, response.status_code) - - return None - except requests.exceptions.RequestException as e: - LOG.warning('An error occurred trying to get raw capability file ' - 'contents from %s: %s', self.raw_url, e) - return None - - def get_target_capabilities(self, guideline_json, types=None, - target='platform'): - """Get list of capabilities that match the given statuses and target. - - If no list of types in given, then capabilities of all types - are given. If not target is specified, then all capabilities are given. 
- """ - components = guideline_json['components'] - if ('metadata' in guideline_json and - guideline_json['metadata']['schema'] >= '2.0'): - schema = guideline_json['metadata']['schema'] - platformsMap = { - 'platform': 'OpenStack Powered Platform', - 'compute': 'OpenStack Powered Compute', - 'object': 'OpenStack Powered Storage', - 'dns': 'OpenStack with DNS', - 'orchestration': 'OpenStack with Orchestration', - 'shared_file_system': 'OpenStack with Shared File System', - 'load_balancer': 'OpenStack with Load Balancer', - 'key_manager': 'OpenStack with Key Manager' - - } - add_ons = ['dns', 'orchestration', 'shared_file_system', - 'load_balancer', 'key_manager'] - if target in add_ons: - targets = ['os_powered_' + target] - else: - comps = \ - guideline_json['platforms'][platformsMap[target] - ]['components'] - targets = (obj['name'] for obj in comps) - else: - schema = guideline_json['schema'] - targets = set() - if target != 'platform': - targets.add(target) - else: - targets.update(guideline_json['platform']['required']) - target_caps = set() - for component in targets: - complist = components[component] - if schema >= '2.0': - complist = complist['capabilities'] - for status, capabilities in complist.items(): - if types is None or status in types: - target_caps.update(capabilities) - return list(target_caps) - - def get_test_list(self, guideline_json, capabilities=[], - alias=True, show_flagged=True): - """Generate a test list based on input. - - A test list is formed from the given guideline JSON data and - list of capabilities. If 'alias' is True, test aliases are - included in the list. If 'show_flagged' is True, flagged tests are - included in the list. - """ - caps = guideline_json['capabilities'] - if ('metadata' in guideline_json and - guideline_json['metadata']['schema'] >= '2.0'): - schema = guideline_json['metadata']['schema'] - else: - schema = guideline_json['schema'] - test_list = [] - for cap, cap_details in caps.items(): - if cap in capabilities: - if schema == '1.2': - for test in cap_details['tests']: - if show_flagged: - test_list.append(test) - elif not show_flagged and \ - test not in cap_details['flagged']: - test_list.append(test) - else: - for test, test_details in cap_details['tests'].items(): - added = False - if test_details.get('flagged'): - if show_flagged: - test_str = '{}[{}]'.format( - test, - test_details.get('idempotent_id', '') - ) - test_list.append(test_str) - added = True - else: - # Make sure the test UUID is in the test string. - test_str = '{}[{}]'.format( - test, - test_details.get('idempotent_id', '') - ) - test_list.append(test_str) - added = True - - if alias and test_details.get('aliases') and added: - for alias in test_details['aliases']: - test_str = '{}[{}]'.format( - alias, - test_details.get('idempotent_id', '') - ) - test_list.append(test_str) - test_list.sort() - return test_list diff --git a/refstack/api/utils.py b/refstack/api/utils.py deleted file mode 100644 index 83d71b14..00000000 --- a/refstack/api/utils.py +++ /dev/null @@ -1,420 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Refstack API's utils.""" -import binascii -import copy -import functools -import random -import string -import types -from urllib import parse - -from cryptography.hazmat import backends -from cryptography.hazmat.primitives import serialization -import jwt -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import pecan -import pecan.rest -import requests - -from refstack.api import constants as const -from refstack.api import exceptions as api_exc -from refstack import db - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -def _get_input_params_from_request(expected_params): - """Get input parameters from request. - - :param expecred_params: (array) Expected input - params specified in constants. - """ - filters = {} - for param in expected_params: - value = pecan.request.GET.get(param) - if value is not None: - filters[param] = value - LOG.debug('Parameter %s has been received ' - 'with value %s', param, value) - return filters - - -def parse_input_params(expected_input_params): - """Parse input parameters from request. - - :param expected_input_params: (array) Expected input - params specified in constants. - """ - raw_filters = _get_input_params_from_request(expected_input_params) - filters = copy.deepcopy(raw_filters) - date_fmt = CONF.api.input_date_format - - for key, value in filters.items(): - if key == const.START_DATE or key == const.END_DATE: - try: - filters[key] = timeutils.parse_strtime(value, date_fmt) - except (ValueError, TypeError) as exc: - raise api_exc.ParseInputsError( - 'Invalid date format: %(exc)s' % {'exc': exc}) - - start_date = filters.get(const.START_DATE) - end_date = filters.get(const.END_DATE) - if start_date and end_date: - if start_date > end_date: - raise api_exc.ParseInputsError( - 'Invalid dates: %(start)s more than %(end)s' - '' % {'start': const.START_DATE, 'end': const.END_DATE}) - if const.SIGNED in filters: - if is_authenticated(): - filters[const.OPENID] = get_user_id() - else: - raise api_exc.ParseInputsError( - 'To see signed test results you need to authenticate') - return filters - - -def str_to_bool(param): - """Check if a string value should be evaluated as True or False.""" - if isinstance(param, bool): - return param - return param.lower() in ("true", "yes", "1") - - -def _calculate_pages_number(per_page, records_count): - """Return pages number. - - :param per_page: (int) results number fot one page. - :param records_count: (int) total records count. - """ - quotient, remainder = divmod(records_count, per_page) - if remainder > 0: - return quotient + 1 - return quotient - - -def get_page_number(records_count): - """Get page number from request. - - :param records_count: (int) total records count. 
- """ - page_number = pecan.request.GET.get(const.PAGE) - per_page = CONF.api.results_per_page - - total_pages = _calculate_pages_number(per_page, records_count) - # The first page exists in any case - if page_number is None: - return (1, total_pages) - try: - page_number = int(page_number) - except (ValueError, TypeError): - raise api_exc.ParseInputsError( - 'Invalid page number: The page number can not be converted to ' - 'an integer') - - if page_number == 1: - return (page_number, total_pages) - - if page_number <= 0: - raise api_exc.ParseInputsError('Invalid page number: ' - 'The page number less or equal zero.') - - if page_number > total_pages: - raise api_exc.ParseInputsError( - 'Invalid page number: ' - 'The page number is greater than the total number of pages.') - - return (page_number, total_pages) - - -def set_query_params(url, params): - """Set params in given query.""" - url_parts = parse.urlparse(url) - url = parse.urlunparse(( - url_parts.scheme, - url_parts.netloc, - url_parts.path, - url_parts.params, - parse.urlencode(params), - url_parts.fragment)) - return url - - -def get_token(length=30): - """Get random token.""" - return ''.join(random.choice(string.ascii_lowercase) - for i in range(length)) - - -def delete_params_from_user_session(params): - """Delete params from user session.""" - session = get_user_session() - for param in params: - if session.get(param): - del session[param] - session.save() - - -def get_user_session(): - """Return user session.""" - return pecan.request.environ['beaker.session'] - - -def get_token_data(): - """Return dict with data encoded from token.""" - return pecan.request.environ.get(const.JWT_TOKEN_ENV) - - -def get_user_id(from_session=True, from_token=True): - """Return authenticated user id.""" - session = get_user_session() - token = get_token_data() - if from_session and session.get(const.USER_OPENID): - return session.get(const.USER_OPENID) - elif from_token and token: - return token.get(const.USER_OPENID) - - -def get_user(user_id=None): - """Return db record for authenticated user.""" - if not user_id: - user_id = get_user_id() - return db.user_get(user_id) - - -def get_user_public_keys(): - """Return public keys for authenticated user.""" - return db.get_user_pubkeys(get_user_id()) - - -def is_authenticated(by_session=True, by_token=True): - """Return True if user is authenticated.""" - user_id = get_user_id(from_session=by_session, from_token=by_token) - if user_id: - try: - if get_user(user_id=user_id): - return True - except db.NotFound: - pass - return False - - -def enforce_permissions(test_id, level): - """Check that user role is required for specified test run.""" - role = get_user_role(test_id) - if not role: - pecan.abort(401) - - if level == const.ROLE_USER: - if role in (const.ROLE_OWNER, const.ROLE_USER, const.ROLE_FOUNDATION): - return - pecan.abort(403) - elif level == const.ROLE_OWNER: - if role in (const.ROLE_OWNER, const.ROLE_FOUNDATION): - return - pecan.abort(403) - elif level == const.ROLE_FOUNDATION: - if role in (const.ROLE_FOUNDATION): - return - else: - raise ValueError('Permission level %s is undefined' % level) - - -def get_user_role(test_id): - """Return user role for current user and specified test run.""" - if check_user_is_foundation_admin(): - return const.ROLE_FOUNDATION - if check_owner(test_id): - return const.ROLE_OWNER - if check_user(test_id): - return const.ROLE_USER - return - - -def check_user(test_id): - """Check that user has access to shared test run.""" - test_owner = 
db.get_test_result_meta_key(test_id, const.USER) - if not test_owner: - return True - elif db.get_test_result_meta_key(test_id, const.SHARED_TEST_RUN): - return True - else: - return check_owner(test_id) - - -def check_owner(test_id): - """Check that user has access to specified test run as owner.""" - if not is_authenticated(): - return False - - test = db.get_test_result(test_id) - # If the test is owned by a product. - if test.get('product_version_id'): - version = db.get_product_version(test['product_version_id']) - return check_user_is_product_admin(version['product_id']) - # Otherwise, check the user ownership. - else: - user = db.get_test_result_meta_key(test_id, const.USER) - return user and user == get_user_id() - - -def check_permissions(level): - """Decorator for checking permissions. - - It checks that user have enough permissions to access and manipulate - an information about selected test run. - Any user has role: const.ROLE_USER. It allows access to unsigned, shared - and own test runs. - Owner role: const.ROLE_OWNER allows access only to user's own results. - """ - def decorator(method_or_class): - - def wrapper(method): - @functools.wraps(method) - def wrapped(*args, **kwargs): - test_id = args[1] - enforce_permissions(test_id, level) - return method(*args, **kwargs) - return wrapped - - if isinstance(method_or_class, types.FunctionType): - return wrapper(method_or_class) - elif issubclass(method_or_class, pecan.rest.RestController): - controller = method_or_class - for method_name in ('get', 'get_all', 'get_one', - 'post', 'put', 'delete'): - if hasattr(controller, method_name): - setattr(controller, method_name, - wrapper(getattr(controller, method_name))) - return controller - - return decorator - - -def verify_openid_request(request): - """Verify OpenID returned request in OpenID.""" - verify_params = dict(request.params.copy()) - verify_params["openid.mode"] = "check_authentication" - - verify_response = requests.post( - CONF.osid.openstack_openid_endpoint, data=verify_params, - verify=not CONF.api.app_dev_mode - ) - - vrc = verify_response.content.decode('utf-8') if isinstance( - verify_response.content, bytes) else verify_response.content - - verify_data_tokens = vrc.split() - verify_dict = dict((token.split(':')[0], token.split(':')[1]) - for token in verify_data_tokens - if len(token.split(':')) > 1) - - if (verify_response.status_code / 100 != 2 or - verify_dict['is_valid'] != 'true'): - pecan.abort(401, 'Authentication is failed. Try again.') - - # Is the data we've received within our required parameters? - required_parameters = { - const.OPENID_NS_SREG_EMAIL: 'Please permit access to ' - 'your email address.', - const.OPENID_NS_SREG_FULLNAME: 'Please permit access to ' - 'your name.', - } - - for name, error in required_parameters.items(): - if name not in verify_params or not verify_params[name]: - pecan.abort(401, 'Authentication is failed. 
%s' % error) - - return True - - -def check_user_is_foundation_admin(user_id=None): - """Check is user in foundation group or not.""" - user = user_id if user_id else get_user_id() - org_users = db.get_foundation_users() - return user in org_users - - -def check_user_is_vendor_admin(vendor_id, user_id=None): - """Check is user in vendor group or not.""" - user = user_id if user_id else get_user_id() - org_users = db.get_organization_users(vendor_id) - return user in org_users - - -def check_user_is_product_admin(product_id, user_id=None): - """Check if the current user is in the vendor group for a product.""" - product = db.get_product(product_id) - vendor_id = product['organization_id'] - return check_user_is_vendor_admin(vendor_id, user_id=user_id) - - -def decode_token(request): - """Validate request signature. - - ValidationError rises if request is not valid. - """ - if not request.headers.get(const.JWT_TOKEN_HEADER): - return - try: - auth_schema, token = request.headers.get( - const.JWT_TOKEN_HEADER).split(' ', 1) - except ValueError: - raise api_exc.ValidationError("Token is not valid") - if auth_schema != 'Bearer': - raise api_exc.ValidationError( - "Authorization schema 'Bearer' should be used") - try: - token_data = jwt.decode(token, algorithms=['RS256'], - options={"verify_signature": False}) - except jwt.InvalidTokenError: - raise api_exc.ValidationError("Token is not valid") - - openid = token_data.get(const.USER_OPENID) - if not openid: - raise api_exc.ValidationError("Token does not contain user's openid") - pubkeys = db.get_user_pubkeys(openid) - for pubkey in pubkeys: - try: - pubkey_string = '%s %s' % (pubkey['format'], pubkey['pubkey']) - pubkey_obj = serialization.load_ssh_public_key( - pubkey_string.encode('utf-8'), - backend=backends.default_backend() - ) - pem_pubkey = pubkey_obj.public_bytes( - serialization.Encoding.PEM, - serialization.PublicFormat.SubjectPublicKeyInfo) - except (ValueError, IndexError, TypeError, binascii.Error): - pass - else: - try: - token_data = jwt.decode( - token, algorithms=['RS256'], key=pem_pubkey, - options={'verify_signature': True, - 'verify_exp': True, - 'require': ['exp']}, - leeway=const.JWT_VALIDATION_LEEWAY) - # NOTE(sslipushenko) If at least one key is valid, let - # the validation pass - return token_data - except jwt.InvalidTokenError: - pass - - # NOTE(sslipushenko) If all user's keys are not valid, the validation fails - raise api_exc.ValidationError("Token is not valid") diff --git a/refstack/api/validators.py b/refstack/api/validators.py deleted file mode 100644 index 83c85324..00000000 --- a/refstack/api/validators.py +++ /dev/null @@ -1,267 +0,0 @@ -# -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
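# Editor's note: a hedged client-side sketch, not part of the original change,
# of minting a token that decode_token() above would accept: an RS256 JWT that
# carries the user's openid plus a mandatory 'exp' claim, signed with the
# private half of a key whose public part was previously uploaded to
# /v1/profile/pubkeys. The claim name ('user_openid'), the header name
# ('Authorization') and the key path are assumptions for illustration.
import datetime

import jwt  # PyJWT
from cryptography.hazmat.primitives import serialization

with open('refstack_rsa', 'rb') as f:            # hypothetical PEM private key
    private_key = serialization.load_pem_private_key(f.read(), password=None)

token = jwt.encode(
    {'user_openid': 'https://id.example.org/alice',     # assumed claim name
     'exp': datetime.datetime.now(datetime.timezone.utc)
            + datetime.timedelta(minutes=5)},
    private_key,
    algorithm='RS256',
)
headers = {'Authorization': 'Bearer ' + token}           # assumed header name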
- -"""Validators module.""" - -import binascii -import json -import uuid - -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat import backends -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.serialization import load_ssh_public_key -import jsonschema - -from refstack.api import exceptions as api_exc - -ext_format_checker = jsonschema.FormatChecker() - - -def is_uuid(inst): - """Check that inst is a uuid_hex string.""" - try: - uuid.UUID(hex=inst) - except (TypeError, ValueError): - return False - return True - - -@jsonschema.FormatChecker.checks(ext_format_checker, - format='uuid_hex', - raises=(TypeError, ValueError)) -def checker_uuid(inst): - """Checker 'uuid_hex' format for jsonschema validator.""" - return is_uuid(inst) - - -class BaseValidator(object): - """Base class for validators.""" - - schema = {} - - def __init__(self): - """Init.""" - jsonschema.Draft4Validator.check_schema(self.schema) - self.validator = jsonschema.Draft4Validator( - self.schema, - format_checker=ext_format_checker - ) - - def validate(self, request): - """Validate request.""" - try: - body = json.loads(request.body.decode('utf-8')) - except (ValueError, TypeError) as e: - raise api_exc.ValidationError('Malformed request', e) - - try: - jsonschema.validate(body, self.schema) - except jsonschema.ValidationError as e: - raise api_exc.ValidationError( - 'Request doesn''t correspond to schema', e) - - def check_emptyness(self, body, keys): - """Check that all values are not empty.""" - for key in keys: - value = body[key] - if isinstance(value, str): - value = value.strip() - if not value: - raise api_exc.ValidationError(key + ' should not be empty') - elif value is None: - raise api_exc.ValidationError(key + ' must be present') - - -class TestResultValidator(BaseValidator): - """Validator for incoming test results.""" - - schema = { - 'type': 'object', - 'properties': { - 'cpid': { - 'type': 'string' - }, - 'duration_seconds': {'type': 'integer'}, - 'results': { - 'type': 'array', - 'items': { - 'type': 'object', - 'properties': { - 'name': {'type': 'string'}, - 'uuid': { - 'type': 'string', - 'format': 'uuid_hex' - } - } - } - } - }, - 'required': ['cpid', 'duration_seconds', 'results'], - 'additionalProperties': False - } - - def validate(self, request): - """Validate uploaded test results.""" - super(TestResultValidator, self).validate(request) - if request.headers.get('X-Signature') or \ - request.headers.get('X-Public-Key'): - try: - sign = binascii.a2b_hex(request.headers.get('X-Signature', '')) - except (binascii.Error, TypeError) as e: - raise api_exc.ValidationError('Malformed signature', e) - - try: - pubkey = request.headers.get('X-Public-Key', '') - try: - pubkey = pubkey.encode('utf-8') - except AttributeError: - # it's already in bytes - pass - key = load_ssh_public_key( - pubkey, - backend=backends.default_backend() - ) - except (binascii.Error, ValueError) as e: - raise api_exc.ValidationError('Malformed public key', e) - - try: - key.verify(sign, request.body, padding.PKCS1v15(), - hashes.SHA256()) - except InvalidSignature: - raise api_exc.ValidationError('Signature verification failed') - if self._is_empty_result(request): - raise api_exc.ValidationError('Uploaded results must contain at ' - 'least one passing test.') - - def _is_empty_result(self, request): - """Check if the test results list is empty.""" - body = json.loads(request.body.decode('utf-8')) - if 
len(body['results']) != 0: - return False - return True - - @staticmethod - def assert_id(_id): - """Check that _id is a valid uuid_hex string.""" - return is_uuid(_id) - - -class PubkeyValidator(BaseValidator): - """Validator for uploaded public pubkeys.""" - - schema = { - 'type': 'object', - 'properties': { - 'raw_key': {'type': 'string'}, - 'self_signature': {'type': 'string'} - }, - 'required': ['raw_key', 'self_signature'], - 'additionalProperties': False - } - - def validate(self, request): - """Validate uploaded test results.""" - super(PubkeyValidator, self).validate(request) - body = json.loads(request.body.decode('utf-8')) - key_format = body['raw_key'].strip().split()[0] - - if key_format not in ('ssh-dss', 'ssh-rsa', - 'pgp-sign-rsa', 'pgp-sign-dss'): - raise api_exc.ValidationError('Public key has unsupported format') - - try: - sign = binascii.a2b_hex(body['self_signature']) - except (binascii.Error, TypeError) as e: - raise api_exc.ValidationError('Malformed signature', e) - - try: - key = load_ssh_public_key(body['raw_key'].encode('utf-8'), - backend=backends.default_backend()) - except (binascii.Error, ValueError) as e: - raise api_exc.ValidationError('Malformed public key', e) - - try: - key.verify(sign, 'signature'.encode('utf-8'), padding.PKCS1v15(), - hashes.SHA256()) - except InvalidSignature: - raise api_exc.ValidationError('Signature verification failed') - - -class VendorValidator(BaseValidator): - """Validator for adding new vendor.""" - - schema = { - 'type': 'object', - 'properties': { - 'name': {'type': 'string'}, - 'description': {'type': 'string'}, - }, - 'required': ['name'], - 'additionalProperties': False - } - - def validate(self, request): - """Validate uploaded vendor data.""" - super(VendorValidator, self).validate(request) - body = json.loads(request.body.decode('utf-8')) - - self.check_emptyness(body, ['name']) - - -class ProductValidator(BaseValidator): - """Validate uploaded product data.""" - - schema = { - 'type': 'object', - 'properties': { - 'name': {'type': 'string'}, - 'description': {'type': 'string'}, - 'product_type': {'type': 'integer'}, - 'organization_id': {'type': 'string', 'format': 'uuid_hex'}, - 'version': {'type': 'string'} - }, - 'required': ['name', 'product_type'], - 'additionalProperties': False - } - - def validate(self, request): - """Validate uploaded test results.""" - super(ProductValidator, self).validate(request) - body = json.loads(request.body.decode('utf-8')) - - self.check_emptyness(body, ['name', 'product_type']) - - -class ProductVersionValidator(BaseValidator): - """Validate adding product versions.""" - - schema = { - 'type': 'object', - 'properties': { - 'version': {'type': 'string'}, - 'cpid': {'type': 'string'} - }, - 'required': ['version'], - 'additionalProperties': False - } - - def validate(self, request): - """Validate product version data.""" - super(ProductVersionValidator, self).validate(request) - body = json.loads(request.body.decode('utf-8')) - - self.check_emptyness(body, ['version']) diff --git a/refstack/db/__init__.py b/refstack/db/__init__.py deleted file mode 100644 index 9b74e2aa..00000000 --- a/refstack/db/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""DB abstraction for Refstack.""" - -from refstack.db.api import * # noqa diff --git a/refstack/db/api.py b/refstack/db/api.py deleted file mode 100644 index f6ce4131..00000000 --- a/refstack/db/api.py +++ /dev/null @@ -1,302 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Defines interface for DB access. - -Functions in this module are imported into the refstack.db namespace. -Call these functions from refstack.db namespace, not the refstack.db.api -namespace. -""" -from oslo_config import cfg -from oslo_db import api as db_api - - -db_opts = [ - cfg.StrOpt('db_backend', - default='sqlalchemy', - help='The backend to use for database.'), - cfg.StrOpt('version_table', - default='alembic_version', - help='The alembic version table name to use within the ' + - 'database. To allow RefStack to upload and store ' + - 'the full set of subunit data, set this option to ' + - 'refstack_alembic_version.'), -] - -CONF = cfg.CONF -CONF.register_opts(db_opts) - -_BACKEND_MAPPING = {'sqlalchemy': 'refstack.db.sqlalchemy.api'} -IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING, - lazy=True) - -NotFound = IMPL.NotFound -Duplication = IMPL.Duplication - - -def store_test_results(results): - """Storing results into database. - - :param results: Dict describes test results. - """ - return IMPL.store_test_results(results) - - -def get_test_result(test_id, allowed_keys=None): - """Get test run information from the database. - - :param test_id: The ID of the test. - """ - return IMPL.get_test_result(test_id, allowed_keys=allowed_keys) - - -def delete_test_result(test_id): - """Delete test run information from the database. - - :param test_id: The ID of the test. - """ - return IMPL.delete_test_result(test_id) - - -def update_test_result(test_info): - """Update test from the given test_info dictionary. - - :param test_info: The test - """ - return IMPL.update_test_result(test_info) - - -def get_test_results(test_id): - """Get all passed tempest tests for a specified test run. - - :param test_id: The ID of the test. - """ - return IMPL.get_test_results(test_id) - - -def get_test_result_meta_key(test_id, key, default=None): - """Get metadata value related to specified test run. - - :param test_id: The ID of the test. - :param key: Metadata key - :param default: Default value - - """ - return IMPL.get_test_result_meta_key(test_id, key, default) - - -def save_test_result_meta_item(test_id, key, value): - """Store or update item value related to specified test run. - - :param test_id: The ID of the test. 
- :param key: Metadata key - - """ - return IMPL.save_test_result_meta_item(test_id, key, value) - - -def delete_test_result_meta_item(test_id, key): - """Delete metadata item related to specified test run. - - :param test_id: The ID of the test. - :param key: Metadata key - :param default: Default value - - :raise NotFound if default value is not set and no value found - """ - return IMPL.delete_test_result_meta_item(test_id, key) - - -def get_test_result_records(page_number, per_page, filters): - """Get page with applied filters for uploaded test records. - - :param page_number: The number of page. - :param per_page: The number of results for one page. - :param filters: (Dict) Filters that will be applied for records. - """ - return IMPL.get_test_result_records(page_number, per_page, filters) - - -def get_test_result_records_count(filters): - """Get total pages number with applied filters for uploaded test records. - - :param filters: (Dict) Filters that will be applied for records. - """ - return IMPL.get_test_result_records_count(filters) - - -def user_get(user_openid): - """Get user info. - - :param user_openid: User openid - """ - return IMPL.user_get(user_openid) - - -def user_save(user_info): - """Create user DB record if it exists, otherwise record will be updated. - - :param user_info: User record - """ - return IMPL.user_save(user_info) - - -def get_pubkey(key): - """Get pubkey info for a given key. - - :param key: public key - """ - return IMPL.get_pubkey(key) - - -def store_pubkey(pubkey_info): - """Store public key in to DB.""" - return IMPL.store_pubkey(pubkey_info) - - -def delete_pubkey(pubkey_id): - """Delete public key from DB.""" - return IMPL.delete_pubkey(pubkey_id) - - -def get_user_pubkeys(user_openid): - """Get public pubkeys for specified user.""" - return IMPL.get_user_pubkeys(user_openid) - - -def add_user_to_group(user_openid, group_id, created_by_user): - """Add specified user to specified group.""" - return IMPL.add_user_to_group(user_openid, group_id, created_by_user) - - -def remove_user_from_group(user_openid, group_id): - """Remove specified user from specified group.""" - return IMPL.remove_user_from_group(user_openid, group_id) - - -def add_organization(organization_info, creator): - """Add organization.""" - return IMPL.add_organization(organization_info, creator) - - -def update_organization(organization_info): - """Update organization.""" - return IMPL.update_organization(organization_info) - - -def get_organization(organization_id, allowed_keys=None): - """Get organization by id.""" - return IMPL.get_organization(organization_id, allowed_keys=allowed_keys) - - -def delete_organization(organization_id): - """delete organization by id.""" - return IMPL.delete_organization(organization_id) - - -def add_product(product_info, creator): - """Add product from product_info dicionary with creator.""" - return IMPL.add_product(product_info, creator) - - -def update_product(product_info): - """Update product from prodict_info dicionary.""" - return IMPL.update_product(product_info) - - -def get_product(id, allowed_keys=None): - """Get product by id.""" - return IMPL.get_product(id, allowed_keys=allowed_keys) - - -def delete_product(id): - """delete product by id.""" - return IMPL.delete_product(id) - - -def get_foundation_users(): - """Get users' openid-s that belong to group of foundation.""" - return IMPL.get_foundation_users() - - -def get_organization_users(organization_id): - """Get users with info that belong to group of organization.""" - return 
IMPL.get_organization_users(organization_id) - - -def get_organizations(allowed_keys=None): - """Get all organizations.""" - return IMPL.get_organizations(allowed_keys=allowed_keys) - - -def get_organizations_by_types(types, allowed_keys=None): - """Get organization by list of types.""" - return IMPL.get_organizations_by_types(types, allowed_keys=allowed_keys) - - -def get_organizations_by_user(user_openid, allowed_keys=None): - """Get organizations for specified user.""" - return IMPL.get_organizations_by_user(user_openid, - allowed_keys=allowed_keys) - - -def get_products(allowed_keys=None, filters=None): - """Get all products.""" - return IMPL.get_products(allowed_keys=allowed_keys, filters=filters) - - -def get_products_by_user(user_openid, allowed_keys=None, filters=None): - """Get all products that user can manage.""" - return IMPL.get_products_by_user(user_openid, allowed_keys=allowed_keys, - filters=filters) - - -def get_product_by_version(product_version_id, allowed_keys=None): - """Get product info from a product version ID.""" - return IMPL.get_product_by_version(product_version_id, - allowed_keys=allowed_keys) - - -def get_product_version(product_version_id, allowed_keys=None): - """Get details of a specific version given the id.""" - return IMPL.get_product_version(product_version_id, - allowed_keys=allowed_keys) - - -def get_product_version_by_cpid(cpid, allowed_keys=None): - """Get a product version given a cloud provider id.""" - return IMPL.get_product_version_by_cpid(cpid, allowed_keys=allowed_keys) - - -def get_product_versions(product_id, allowed_keys=None): - """Get all versions for a product.""" - return IMPL.get_product_versions(product_id, allowed_keys=allowed_keys) - - -def add_product_version(product_id, version, creator, cpid=None, - allowed_keys=None): - """Add a new product version.""" - return IMPL.add_product_version(product_id, version, creator, cpid, - allowed_keys=allowed_keys) - - -def update_product_version(product_version_info): - """Update product version from product_info_version dictionary.""" - return IMPL.update_product_version(product_version_info) - - -def delete_product_version(product_version_id): - """Delete a product version.""" - return IMPL.delete_product_version(product_version_id) diff --git a/refstack/db/migration.py b/refstack/db/migration.py deleted file mode 100644 index 53a4d652..00000000 --- a/refstack/db/migration.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
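Since refstack/db/api.py above only forwards each call into the configured backend (IMPL, sqlalchemy by default), callers were expected to go through the refstack.db namespace rather than refstack.db.api. A short illustrative sketch, with made-up field values and assuming the oslo.config database connection is already set up:

from refstack import db

# Store one test run and read it back; the field names follow
# store_test_results() and the TestResultValidator schema above,
# the values themselves are hypothetical.
test_id = db.store_test_results({
    'cpid': 'example-cpid',
    'duration_seconds': 42,
    'results': [{'name': 'tempest.api.compute.test_example'}],
})
record = db.get_test_result(test_id)
passed = db.get_test_results(test_id)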
- -"""Database setup and migration commands.""" - -from refstack.db import utils as db_utils - - -IMPL = db_utils.PluggableBackend( - 'db_backend', sqlalchemy='refstack.db.migrations.alembic.migration') - - -def version(): - """Display the current database version.""" - return IMPL.version() - - -def upgrade(version): - """Upgrade database to 'version' or the most recent version.""" - return IMPL.upgrade(version) - - -def downgrade(version): - """Downgrade database to 'version' or to initial state.""" - return IMPL.downgrade(version) - - -def stamp(version): - """Stamp database with 'version' or the most recent version.""" - return IMPL.stamp(version) - - -def revision(message, autogenerate): - """Generate new migration script.""" - return IMPL.revision(message, autogenerate) diff --git a/refstack/db/migrations/__init__.py b/refstack/db/migrations/__init__.py deleted file mode 100644 index 04a92d5b..00000000 --- a/refstack/db/migrations/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Migrations.""" diff --git a/refstack/db/migrations/alembic.ini b/refstack/db/migrations/alembic.ini deleted file mode 100644 index c0213ab5..00000000 --- a/refstack/db/migrations/alembic.ini +++ /dev/null @@ -1,49 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = %(here)s/alembic - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -#sqlalchemy.url = driver://user:pass@127.0.0.1/dbname - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/refstack/db/migrations/alembic/README b/refstack/db/migrations/alembic/README deleted file mode 100644 index 98e4f9c4..00000000 --- a/refstack/db/migrations/alembic/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/refstack/db/migrations/alembic/__init__.py b/refstack/db/migrations/alembic/__init__.py deleted file mode 100644 index 6384a678..00000000 --- a/refstack/db/migrations/alembic/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Alembic backend for migrations.""" diff --git a/refstack/db/migrations/alembic/env.py b/refstack/db/migrations/alembic/env.py deleted file mode 100755 index fce10e43..00000000 --- a/refstack/db/migrations/alembic/env.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -# -# Copyright (c) 2013 Piston Cloud Computing, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Alembic environment script.""" - -from alembic import context -from oslo_config import cfg - -from refstack.db.sqlalchemy import api as db_api -from refstack.db.sqlalchemy import models as db_models - -CONF = cfg.CONF - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - """ - engine = db_api.get_engine() - connection = engine.connect() - target_metadata = db_models.RefStackBase.metadata - context.configure(connection=connection, - target_metadata=target_metadata, - version_table=getattr(CONF, 'version_table')) - - try: - with context.begin_transaction(): - context.run_migrations() - finally: - connection.close() - - -run_migrations_online() diff --git a/refstack/db/migrations/alembic/migration.py b/refstack/db/migrations/alembic/migration.py deleted file mode 100644 index 298860ea..00000000 --- a/refstack/db/migrations/alembic/migration.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Implementation of Alembic commands.""" -import alembic -import alembic.migration as alembic_migration -from oslo_config import cfg -from refstack.db.migrations.alembic import utils -from refstack.db.sqlalchemy import api as db_api - -CONF = cfg.CONF - - -def version(): - """Current database version. 
- - :returns: Database version - :type: string - """ - engine = db_api.get_engine() - with engine.connect() as conn: - conf_table = getattr(CONF, 'version_table') - utils.recheck_alembic_table(conn) - context = alembic_migration.MigrationContext.configure( - conn, opts={'version_table': conf_table}) - return context.get_current_revision() - - -def upgrade(revision): - """Upgrade database. - - :param version: Desired database version - :type version: string - """ - return alembic.command.upgrade(utils.alembic_config(), revision or 'head') - - -def downgrade(revision): - """Downgrade database. - - :param version: Desired database version - :type version: string - """ - return alembic.command.downgrade(utils.alembic_config(), - revision or 'base') - - -def stamp(revision): - """Stamp database with provided revision. - - Don't run any migrations. - - :param revision: Should match one from repository or head - to stamp - database with most recent revision - :type revision: string - """ - return alembic.command.stamp(utils.alembic_config(), revision or 'head') - - -def revision(message=None, autogenerate=False): - """Create template for migration. - - :param message: Text that will be used for migration title - :type message: string - :param autogenerate: If True - generates diff based on current database - state - :type autogenerate: bool - """ - return alembic.command.revision(utils.alembic_config(), - message, autogenerate) diff --git a/refstack/db/migrations/alembic/script.py.mako b/refstack/db/migrations/alembic/script.py.mako deleted file mode 100755 index c37c68fa..00000000 --- a/refstack/db/migrations/alembic/script.py.mako +++ /dev/null @@ -1,25 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - """Upgrade DB.""" - ${upgrades if upgrades else "pass"} - - -def downgrade(): - """Downgrade DB.""" - ${downgrades if downgrades else "pass"} diff --git a/refstack/db/migrations/alembic/utils.py b/refstack/db/migrations/alembic/utils.py deleted file mode 100644 index 0c578f45..00000000 --- a/refstack/db/migrations/alembic/utils.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
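The Alembic command wrappers above are reached through the refstack.db.migration facade shown earlier in this diff; a minimal sketch of driving them, assuming the database options in CONF are already configured:

from refstack.db import migration

current = migration.version()  # current alembic revision, or None for a fresh DB
migration.upgrade(None)        # None falls through to 'head' (most recent revision)
migration.stamp(None)          # record 'head' without running any migrations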
-"""Utilities used in the implementation of Alembic commands.""" -import os - -from alembic import config as alembic_conf -import alembic.migration as alembic_migration -from alembic.operations import Operations -try: - # Python 3.10 and above - from collections.abc import Iterable -except ImportError: - # Python <= 3.9, this is deprecated since Python 3.3 and it's - # removed in Python 3.10 - from collections import Iterable -from oslo_config import cfg -from sqlalchemy import text - - -CONF = cfg.CONF - - -def alembic_config(): - """Initialize config objext from .ini file. - - :returns: config object. - :type: object - """ - path = os.path.join(os.path.dirname(__file__), os.pardir, 'alembic.ini') - config = alembic_conf.Config(path) - return config - - -def get_table_version(conn, version_table_name): - """Get table version. - - :param engine: Initialized alembic engine object. - :param version_table_name: Version table name to check. - :type engine: object - :type version_table_name: string - :returns: string - """ - if not version_table_name: - return None - context = alembic_migration.MigrationContext.configure( - conn, opts={'version_table': version_table_name}) - return context.get_current_revision() - - -def get_db_tables(conn): - """Get current and default table values from the db. - - :param engine: Initialized alembic engine object. - :type engine: object - :returns: tuple - """ - query = text("SELECT TABLE_NAME from information_schema.tables\ - WHERE TABLE_NAME\ - LIKE '%alembic_version%'\ - AND table_schema = 'refstack'") - context = alembic_migration.MigrationContext.configure(conn) - op = Operations(context) - connection = op.get_bind() - search = connection.execute(query) - result = search.fetchall() - if isinstance(result, Iterable): - result = [table[0] for table in result] - else: - result = None - # if there is more than one version table, modify the - # one that does not have the default name, because subunit2sql uses the - # default name. - if result: - current_name =\ - next((table for table in result if table != "alembic_version"), - result[0]) - current_version = get_table_version(conn, current_name) - default_name =\ - next((table for table in result - if table == "alembic_version"), None) - default_version = get_table_version(conn, default_name) - if len(result) > 1 and not current_version: - if not default_name: - # this is the case where there is more than one - # nonstandard-named alembic table, and no default - current_name = next((table for table in result - if table != current_name), - result[0]) - elif current_name: - # this is the case where the current-named table - # exists, but is empty - current_name = default_name - current_version = default_version - current_table = (current_name, current_version) - default_table = (default_name, default_version) - else: - default_table = (None, None) - current_table = default_table - return current_table, default_table - - -def recheck_alembic_table(conn): - """check and update alembic version table. - - Should check current alembic version table against conf and rename the - existing table if the two values don't match. 
- """ - conf_table = getattr(CONF, 'version_table') - conf_table_version = get_table_version(conn, conf_table) - current_table, default_table = get_db_tables(conn) - if current_table[0]: - if current_table[0] != conf_table: - context = alembic_migration.MigrationContext.configure(conn) - op = Operations(context) - if conf_table and not conf_table_version: - # make sure there is not present-but-empty table - # that will prevent us from renaming the current table - op.drop_table(conf_table) - op.rename_table(current_table[0], conf_table) diff --git a/refstack/db/migrations/alembic/versions/19fded785b8c_create_organization_table.py b/refstack/db/migrations/alembic/versions/19fded785b8c_create_organization_table.py deleted file mode 100644 index 9958aad7..00000000 --- a/refstack/db/migrations/alembic/versions/19fded785b8c_create_organization_table.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Create organization table. - -Revision ID: 19fded785b8c -Revises: 319ee8fe47c7 -Create Date: 2016-01-18 14:40:00 - -""" - -# revision identifiers, used by Alembic. -revision = '19fded785b8c' -down_revision = '319ee8fe47c7' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'organization', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('id', sa.String(36), nullable=False), - sa.Column('type', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=80), nullable=False), - sa.Column('description', sa.Text()), - sa.Column('group_id', sa.String(36), nullable=False), - sa.Column('created_by_user', sa.String(128), nullable=False), - sa.Column('properties', sa.Text()), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), - sa.ForeignKeyConstraint(['created_by_user'], ['user.openid'], ), - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('organization') diff --git a/refstack/db/migrations/alembic/versions/23843be3da52_add_product_version_id.py b/refstack/db/migrations/alembic/versions/23843be3da52_add_product_version_id.py deleted file mode 100644 index 9c3fc176..00000000 --- a/refstack/db/migrations/alembic/versions/23843be3da52_add_product_version_id.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add product_version_id column to test. - -Revision ID: 23843be3da52 -Revises: 35bf54e2c13c -Create Date: 2016-07-30 18:15:52.429610 -""" - -# revision identifiers, used by Alembic. -revision = '23843be3da52' -down_revision = '35bf54e2c13c' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.add_column('test', sa.Column('product_version_id', sa.String(36), - nullable=True)) - op.create_foreign_key('fk_test_prod_version_id', 'test', 'product_version', - ['product_version_id'], ['id']) - - -def downgrade(): - """Downgrade DB.""" - op.drop_constraint('fk_test_prod_version_id', 'test', type_="foreignkey") - op.drop_column('test', 'product_version_id') diff --git a/refstack/db/migrations/alembic/versions/2f178b0bf762_create_user_table.py b/refstack/db/migrations/alembic/versions/2f178b0bf762_create_user_table.py deleted file mode 100644 index c177aacb..00000000 --- a/refstack/db/migrations/alembic/versions/2f178b0bf762_create_user_table.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Create user table. 
- -Revision ID: 2f178b0bf762 -Revises: 42278d6179b9 -Create Date: 2015-05-12 12:15:43.810938 - -""" - -# revision identifiers, used by Alembic. -revision = '2f178b0bf762' -down_revision = '42278d6179b9' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'user', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('_id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('openid', sa.String(length=128), - nullable=False, unique=True), - sa.Column('email', sa.String(length=128)), - sa.Column('fullname', sa.String(length=128)), - sa.PrimaryKeyConstraint('_id'), - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('user') diff --git a/refstack/db/migrations/alembic/versions/319ee8fe47c7_create_group_table.py b/refstack/db/migrations/alembic/versions/319ee8fe47c7_create_group_table.py deleted file mode 100644 index 6ec1599b..00000000 --- a/refstack/db/migrations/alembic/versions/319ee8fe47c7_create_group_table.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Create group table and group-user links table. - -Revision ID: 319ee8fe47c7 -Revises: 428e5aef5534 -Create Date: 2016-01-15 16:34:00 - -""" - -# revision identifiers, used by Alembic. -revision = '319ee8fe47c7' -down_revision = '428e5aef5534' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'group', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('id', sa.String(36), nullable=False), - sa.Column('name', sa.String(length=80), nullable=False), - sa.Column('description', sa.Text()), - sa.PrimaryKeyConstraint('id'), - mysql_charset=MYSQL_CHARSET - ) - op.create_table( - 'user_to_group', - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('created_by_user', sa.String(length=128), nullable=False), - sa.Column('_id', sa.Integer(), nullable=False), - sa.Column('group_id', sa.String(36), nullable=False), - sa.Column('user_openid', sa.String(length=128), nullable=False), - sa.PrimaryKeyConstraint('_id'), - sa.ForeignKeyConstraint(['user_openid'], ['user.openid'], ), - sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('user_to_group') - op.drop_table('group') diff --git a/refstack/db/migrations/alembic/versions/35bf54e2c13c_add_product_version.py b/refstack/db/migrations/alembic/versions/35bf54e2c13c_add_product_version.py deleted file mode 100644 index 782071e5..00000000 --- a/refstack/db/migrations/alembic/versions/35bf54e2c13c_add_product_version.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Add Product version table. - -Also product_ref_id is removed from the product table. - -Revision ID: 35bf54e2c13c -Revises: 709452f38a5c -Create Date: 2016-07-30 17:59:57.912306 - -""" - -# revision identifiers, used by Alembic. 
-revision = '35bf54e2c13c' -down_revision = '709452f38a5c' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'product_version', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('created_by_user', sa.String(128), nullable=False), - sa.Column('id', sa.String(36), nullable=False), - sa.Column('product_id', sa.String(36), nullable=False), - sa.Column('version', sa.String(length=36), nullable=True), - sa.Column('cpid', sa.String(length=36)), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), - sa.ForeignKeyConstraint(['created_by_user'], ['user.openid'], ), - sa.UniqueConstraint('product_id', 'version', name='prod_ver_uc'), - mysql_charset=MYSQL_CHARSET - ) - op.drop_column('product', 'product_ref_id') - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('product_version') - op.add_column('product', - sa.Column('product_ref_id', sa.String(36), nullable=True)) diff --git a/refstack/db/migrations/alembic/versions/42278d6179b9_init.py b/refstack/db/migrations/alembic/versions/42278d6179b9_init.py deleted file mode 100644 index 5957e18c..00000000 --- a/refstack/db/migrations/alembic/versions/42278d6179b9_init.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Init. - -Revision ID: 42278d6179b9 -Revises: None -Create Date: 2015-01-09 15:00:11.385580 - -""" - -# revision identifiers, used by Alembic. -revision = '42278d6179b9' -down_revision = None -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'test', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('id', sa.String(length=36), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('cpid', sa.String(length=128), nullable=False), - sa.Column('duration_seconds', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id'), - mysql_charset=MYSQL_CHARSET, - ) - op.create_table( - 'meta', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('_id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('test_id', sa.String(length=36), nullable=False), - sa.Column('meta_key', sa.String(length=64), nullable=False), - sa.Column('value', sa.Text(), nullable=True), - sa.ForeignKeyConstraint(['test_id'], ['test.id'], ), - sa.PrimaryKeyConstraint('_id'), - sa.UniqueConstraint('test_id', 'meta_key'), - mysql_charset=MYSQL_CHARSET - ) - op.create_table( - 'results', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('_id', sa.Integer(), nullable=False), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('test_id', sa.String(length=36), nullable=False), - sa.Column('name', - sa.String(length=512, collation='latin1_swedish_ci'), - nullable=True), - sa.Column('uuid', sa.String(length=36), nullable=True), - sa.ForeignKeyConstraint(['test_id'], ['test.id'], ), - sa.PrimaryKeyConstraint('_id'), - sa.UniqueConstraint('test_id', 'name'), - # TODO(sslypushenko) - # Constraint should turned on after duplication test uuids issue - # will be fixed - # 
sa.UniqueConstraint('test_id', 'uuid') - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('results') - op.drop_table('meta') - op.drop_table('test') diff --git a/refstack/db/migrations/alembic/versions/428e5aef5534_associate_test_result.py b/refstack/db/migrations/alembic/versions/428e5aef5534_associate_test_result.py deleted file mode 100644 index 170ad0cb..00000000 --- a/refstack/db/migrations/alembic/versions/428e5aef5534_associate_test_result.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Associate test results to users. - -Revision ID: 428e5aef5534 -Revises: 534e20be9964 -Create Date: 2015-11-03 00:51:34.096598 - -""" - -# revision identifiers, used by Alembic. -revision = '428e5aef5534' -down_revision = '534e20be9964' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - conn = op.get_bind() - res = conn.execute("select openid,format,pubkey from pubkeys") - results = res.fetchall() - - # Get public key to user mappings. - pubkeys = {} - for result in results: - pubkeys[result[1] + " " + result[2]] = result[0] - - res = conn.execute("select test_id,value from meta where " - "meta_key='public_key'") - results = res.fetchall() - - for result in results: - test_id = result[0] - if result[1] in pubkeys: - openid = pubkeys[result[1]] - conn.execute(sa.text("update meta set meta_key='user', " - "value=:value where " - "test_id=:testid and meta_key='public_key'" - ), - value=openid, testid=test_id) - - -def downgrade(): - """Downgrade DB.""" - pass diff --git a/refstack/db/migrations/alembic/versions/434be17a6ec3_fix_openids_with_space.py b/refstack/db/migrations/alembic/versions/434be17a6ec3_fix_openids_with_space.py deleted file mode 100644 index 76cc793c..00000000 --- a/refstack/db/migrations/alembic/versions/434be17a6ec3_fix_openids_with_space.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Fix openids with spaces. - -A change in the openstackid naming made is so IDs with spaces -are trimmed, so %20 are no longer in the openid url. This migration -will replace any '%20' with a '.' in each openid. - -Revision ID: 434be17a6ec3 -Revises: 59df512e82f -Create Date: 2017-03-23 12:20:08.219294 - -""" - -# revision identifiers, used by Alembic. -revision = '434be17a6ec3' -down_revision = '59df512e82f' -MYSQL_CHARSET = 'utf8' - -from alembic import op - - -def upgrade(): - """Upgrade DB.""" - conn = op.get_bind() - # Need to disable FOREIGN_KEY_CHECKS as a lot of tables reference the - # openid in the user table. - conn.execute("SET FOREIGN_KEY_CHECKS=0") - res = conn.execute("select * from user where openid LIKE '%%\%%20%%'") - results = res.fetchall() - for user in results: - old_openid = user[5] - new_openid = user[5].replace('%20', '.') - - # Remove instances of the new openid so the old one can take - # its place. - query = "delete from user where openid=%%:user" - conn.execute(query, user=new_openid) - - # Update the openid. - query = ("update user set openid=%%:new where openid=%%:old") - conn.execute(query, new=new_openid, old=old_openid) - - # Update all usage of %20 in all openid references using MySQL Replace. 
- conn.execute("update meta set value = " - "REPLACE (value, '%%20', '.')") - conn.execute("update pubkeys set openid = " - "REPLACE (openid, '%%20', '.')") - conn.execute("update organization set created_by_user = " - "REPLACE (created_by_user, '%%20', '.')") - conn.execute("update product set created_by_user = " - "REPLACE (created_by_user, '%%20', '.')") - conn.execute("update product_version set created_by_user = " - "REPLACE (created_by_user, '%%20', '.')") - conn.execute("update user_to_group set created_by_user = " - "REPLACE (created_by_user, '%%20', '.')") - conn.execute("update user_to_group set user_openid = " - "REPLACE (user_openid, '%%20', '.')") - - conn.execute("SET FOREIGN_KEY_CHECKS=1") - - -def downgrade(): - """Downgrade DB.""" - pass diff --git a/refstack/db/migrations/alembic/versions/534e20be9964_create_pubkey_table.py b/refstack/db/migrations/alembic/versions/534e20be9964_create_pubkey_table.py deleted file mode 100644 index a6e5a12e..00000000 --- a/refstack/db/migrations/alembic/versions/534e20be9964_create_pubkey_table.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Create user metadata table. - -Revision ID: 534e20be9964 -Revises: 2f178b0bf762 -Create Date: 2015-07-03 13:26:29.138416 - -""" - -# revision identifiers, used by Alembic. -revision = '534e20be9964' -down_revision = '2f178b0bf762' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'pubkeys', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('id', sa.String(length=36), primary_key=True), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('openid', sa.String(length=128), - nullable=False, index=True), - sa.Column('format', sa.String(length=24), nullable=False), - sa.Column('pubkey', sa.Text(), nullable=False), - sa.Column('md5_hash', sa.String(length=32), - nullable=False, index=True), - sa.Column('comment', sa.String(length=128)), - sa.ForeignKeyConstraint(['openid'], ['user.openid'], ), - mysql_charset=MYSQL_CHARSET - ) - op.create_index('indx_meta_value', 'meta', ['value'], mysql_length=32) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('pubkeys') diff --git a/refstack/db/migrations/alembic/versions/59df512e82f_add_verification_status.py b/refstack/db/migrations/alembic/versions/59df512e82f_add_verification_status.py deleted file mode 100644 index 89021d84..00000000 --- a/refstack/db/migrations/alembic/versions/59df512e82f_add_verification_status.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add verification_status field to test. - -Revision ID: 59df512e82f -Revises: 23843be3da52 -Create Date: 2016-09-26 11:51:08.955006 - -""" - -# revision identifiers, used by Alembic. -revision = '59df512e82f' -down_revision = '23843be3da52' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.add_column('test', sa.Column('verification_status', - sa.Integer, - nullable=False, - default=0)) - - -def downgrade(): - """Downgrade DB.""" - op.drop_column('test', 'verification_status') diff --git a/refstack/db/migrations/alembic/versions/7092392cbb8e_create_product_table.py b/refstack/db/migrations/alembic/versions/7092392cbb8e_create_product_table.py deleted file mode 100644 index 32069caf..00000000 --- a/refstack/db/migrations/alembic/versions/7092392cbb8e_create_product_table.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Create product table. 
- -Revision ID: 7092392cbb8e -Revises: 19fded785b8c -Create Date: 2016-01-18 16:10:00 - -""" - -# revision identifiers, used by Alembic. -revision = '7092392cbb8e' -down_revision = '19fded785b8c' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.create_table( - 'product', - sa.Column('updated_at', sa.DateTime()), - sa.Column('deleted_at', sa.DateTime()), - sa.Column('deleted', sa.Integer, default=0), - sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('created_by_user', sa.String(128), nullable=False), - sa.Column('id', sa.String(36), nullable=False), - sa.Column('name', sa.String(length=80), nullable=False), - sa.Column('description', sa.Text()), - sa.Column('product_id', sa.String(36), nullable=False), - sa.Column('type', sa.Integer(), nullable=False), - sa.Column('product_type', sa.Integer(), nullable=False), - sa.Column('public', sa.Boolean(), nullable=False), - sa.Column('organization_id', sa.String(36), nullable=False), - sa.Column('properties', sa.Text()), - sa.PrimaryKeyConstraint('id'), - sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], ), - sa.ForeignKeyConstraint(['created_by_user'], ['user.openid'], ), - mysql_charset=MYSQL_CHARSET - ) - - -def downgrade(): - """Downgrade DB.""" - op.drop_table('product') diff --git a/refstack/db/migrations/alembic/versions/7093ca478d35_product_table_make_product_id_nullable.py b/refstack/db/migrations/alembic/versions/7093ca478d35_product_table_make_product_id_nullable.py deleted file mode 100644 index 4ad19912..00000000 --- a/refstack/db/migrations/alembic/versions/7093ca478d35_product_table_make_product_id_nullable.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Make product_id nullable in product table. - -Revision ID: 7093ca478d35 -Revises: 7092392cbb8e -Create Date: 2016-05-12 13:10:00 - -""" - -# revision identifiers, used by Alembic. -revision = '7093ca478d35' -down_revision = '7092392cbb8e' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.alter_column('product', 'product_id', nullable=True, - type_=sa.String(36)) - - -def downgrade(): - """Downgrade DB.""" - pass diff --git a/refstack/db/migrations/alembic/versions/709452f38a5c_product_table_rename_product_id.py b/refstack/db/migrations/alembic/versions/709452f38a5c_product_table_rename_product_id.py deleted file mode 100644 index 79fd31b1..00000000 --- a/refstack/db/migrations/alembic/versions/709452f38a5c_product_table_rename_product_id.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Rename product_id to product_ref_id. - -Revision ID: 709452f38a5c -Revises: 7093ca478d35 -Create Date: 2016-06-27 13:10:00 - -""" - -# revision identifiers, used by Alembic. -revision = '709452f38a5c' -down_revision = '7093ca478d35' -MYSQL_CHARSET = 'utf8' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(): - """Upgrade DB.""" - op.alter_column('product', 'product_id', new_column_name='product_ref_id', - type_=sa.String(36)) - - -def downgrade(): - """Downgrade DB.""" - op.alter_column('product', 'product_ref_id', new_column_name='product_id', - type_=sa.String(36)) diff --git a/refstack/db/sqlalchemy/__init__.py b/refstack/db/sqlalchemy/__init__.py deleted file mode 100644 index 70b963e7..00000000 --- a/refstack/db/sqlalchemy/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""SQLAlchemy backend.""" diff --git a/refstack/db/sqlalchemy/api.py b/refstack/db/sqlalchemy/api.py deleted file mode 100644 index ce3ea412..00000000 --- a/refstack/db/sqlalchemy/api.py +++ /dev/null @@ -1,764 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Implementation of SQLAlchemy backend.""" - -import base64 -import hashlib -import sys -import uuid - -from oslo_config import cfg -from oslo_db import options as db_options -from oslo_db.sqlalchemy import session as db_session -from oslo_log import log - - -from refstack.api import constants as api_const -from refstack.db.sqlalchemy import models - - -CONF = cfg.CONF - -_FACADE = None -LOG = log.getLogger(__name__) - -db_options.set_defaults(cfg.CONF) - - -class NotFound(Exception): - """Raise if item not found in db.""" - - pass - - -class Duplication(Exception): - """Raise if unique constraint violates.""" - - pass - - -def _create_facade_lazily(): - """Create DB facade lazily.""" - global _FACADE - if _FACADE is None: - _FACADE = db_session.EngineFacade.from_config(CONF) - return _FACADE - - -def get_engine(): - """Get DB engine.""" - facade = _create_facade_lazily() - return facade.get_engine() - - -def get_session(**kwargs): - """Get DB session.""" - facade = _create_facade_lazily() - return facade.get_session(**kwargs) - - -def get_backend(): - """The backend is this module itself.""" - return sys.modules[__name__] - - -def _to_dict(sqlalchemy_object, allowed_keys=None): - if isinstance(sqlalchemy_object, list): - return [_to_dict(obj, allowed_keys=allowed_keys) - for obj in sqlalchemy_object] - if (hasattr(sqlalchemy_object, 'keys') and - hasattr(sqlalchemy_object, 'index')): - return {key: getattr(sqlalchemy_object, key) - for key in sqlalchemy_object.keys()} - if hasattr(sqlalchemy_object, 'default_allowed_keys'): - items = sqlalchemy_object.iteritems() - if not allowed_keys: - allowed_keys = sqlalchemy_object.default_allowed_keys - if allowed_keys: - items = filter(lambda item: item[0] in allowed_keys, items) - result = {} - for key, value in items: - if key in sqlalchemy_object.metadata_keys: - result[key] = { - item.get(sqlalchemy_object.metadata_keys[key]['key']): - item.get(sqlalchemy_object.metadata_keys[key]['value']) - for item in value} - elif hasattr(value, 'default_allowed_keys'): - result[key] = _to_dict(value) - elif (isinstance(value, list) and value and - hasattr(value[0], 'default_allowed_keys')): 
- result[key] = [_to_dict(item) for item in value] - else: - result[key] = value - return result - if hasattr(sqlalchemy_object, 'all'): - return _to_dict(sqlalchemy_object.all()) - return sqlalchemy_object - - -def store_test_results(results): - """Store test results.""" - test = models.Test() - test_id = str(uuid.uuid4()) - test.id = test_id - test.cpid = results.get('cpid') - test.duration_seconds = results.get('duration_seconds') - test.product_version_id = results.get('product_version_id') - with get_session() as session: - for result in results.get('results', []): - test_result = models.TestResults() - test_result.test_id = test_id - test_result.name = result['name'] - test_result.uuid = result.get('uuid', None) - test.results.append(test_result) - for k, v in results.get('meta', {}).items(): - meta = models.TestMeta() - meta.meta_key, meta.value = k, v - test.meta.append(meta) - test.save(session) - session.commit() - return test_id - - -def get_test_result(test_id, allowed_keys=None): - """Get test info.""" - with get_session() as session: - test_info = session.query(models.Test). \ - filter_by(id=test_id). \ - first() - if not test_info: - raise NotFound('Test result %s not found' % test_id) - - test_result_dict = _to_dict(test_info, allowed_keys) - - return test_result_dict - - -def delete_test_result(test_id): - """Delete test information from the database.""" - with get_session() as session: - test = session.query(models.Test).filter_by(id=test_id).first() - if test: - session.query(models.TestMeta) \ - .filter_by(test_id=test_id).delete() - session.query(models.TestResults) \ - .filter_by(test_id=test_id).delete() - session.delete(test) - session.commit() - else: - raise NotFound('Test result %s not found' % test_id) - - -def update_test_result(test_info): - """Update test from the given test_info dictionary.""" - with get_session() as session: - _id = test_info.get('id') - test = session.query(models.Test).filter_by(id=_id).first() - if test is None: - session.close() - raise NotFound('Test result with id %s not found' % _id) - - keys = ['product_version_id', 'verification_status'] - for key in keys: - if key in test_info: - setattr(test, key, test_info[key]) - - test.save(session=session) - test_result_dict = _to_dict(test) - session.commit() - return test_result_dict - - -def get_test_result_meta_key(test_id, key, default=None): - """Get metadata value related to specified test run.""" - with get_session() as session: - meta_item = session.query(models.TestMeta). \ - filter_by(test_id=test_id). \ - filter_by(meta_key=key). \ - first() - value = meta_item.value if meta_item else default - return value - - -def save_test_result_meta_item(test_id, key, value): - """Store or update item value related to specified test run.""" - with get_session() as session: - meta_item = (session.query(models.TestMeta) - .filter_by(test_id=test_id) - .filter_by(meta_key=key).first() or models.TestMeta()) - meta_item.test_id = test_id - meta_item.meta_key = key - meta_item.value = value - meta_item.save(session) - session.commit() - - -def delete_test_result_meta_item(test_id, key): - """Delete metadata item related to specified test run.""" - with get_session() as session: - meta_item = session.query(models.TestMeta). \ - filter_by(test_id=test_id). \ - filter_by(meta_key=key). 
\ - first() - if meta_item: - session.delete(meta_item) - session.commit() - else: - raise NotFound('Metadata key %s ' - 'not found for test run %s' % (key, test_id)) - - -def get_test_results(test_id): - """Get test results.""" - with get_session() as session: - results = session.query(models.TestResults). \ - filter_by(test_id=test_id). \ - all() - test_results_list = [_to_dict(result) for result in results] - return test_results_list - - -def _apply_filters_for_query(query, filters): - """Apply filters for DB query.""" - start_date = filters.get(api_const.START_DATE) - if start_date: - query = query.filter(models.Test.created_at >= start_date) - - end_date = filters.get(api_const.END_DATE) - if end_date: - query = query.filter(models.Test.created_at <= end_date) - - cpid = filters.get(api_const.CPID) - if cpid: - query = query.filter(models.Test.cpid == cpid) - - verification_status = filters.get(api_const.VERIFICATION_STATUS) - if verification_status: - query = query.filter(models.Test.verification_status == - verification_status) - - if api_const.PRODUCT_ID in filters: - query = (query - .join(models.ProductVersion) - .filter(models.ProductVersion.product_id == - filters[api_const.PRODUCT_ID])) - - all_product_tests = filters.get(api_const.ALL_PRODUCT_TESTS) - signed = api_const.SIGNED in filters - # If we only want to get the user's test results. - if signed: - query = (query - .join(models.Test.meta) - .filter(models.TestMeta.meta_key == api_const.USER) - .filter(models.TestMeta.value == filters[api_const.OPENID]) - ) - elif not all_product_tests: - # Get all non-signed (aka anonymously uploaded) test results - # along with signed but shared test results. - signed_results = (query.session - .query(models.TestMeta.test_id) - .filter_by(meta_key=api_const.USER)) - shared_results = (query.session - .query(models.TestMeta.test_id) - .filter_by(meta_key=api_const.SHARED_TEST_RUN)) - query = (query.filter(models.Test.id.notin_(signed_results)) - .union(query.filter(models.Test.id.in_(shared_results)))) - - return query - - -def get_test_result_records(page, per_page, filters): - """Get page with list of test records.""" - with get_session() as session: - query = session.query(models.Test) - query = _apply_filters_for_query(query, filters) - results = query.order_by(models.Test.created_at.desc()). \ - offset(per_page * (page - 1)). \ - limit(per_page).all() - test_result_records_dict = _to_dict(results) - return test_result_records_dict - - -def get_test_result_records_count(filters): - """Get total test records count.""" - with get_session() as session: - query = session.query(models.Test.id) - records_count = _apply_filters_for_query(query, filters).count() - return records_count - - -def user_get(user_openid): - """Get user info by openid.""" - with get_session() as session: - user = session.query(models.User).filter_by(openid=user_openid).first() - if user is None: - raise NotFound('User with OpenID %s not found' % user_openid) - - return user - - -def user_save(user_info): - """Create user DB record if it exists, otherwise record will be updated.""" - try: - user = user_get(user_info['openid']) - except NotFound: - user = models.User() - - with get_session() as session: - user.update(user_info) - user.save(session=session) - session.commit() - return user - - -def get_pubkey(key): - """Get the pubkey info corresponding to the given public key. - - The md5 hash of the key is used for the query for quicker lookups. 
- """ - with get_session() as session: - md5_hash = hashlib.md5(base64.b64decode(key)).hexdigest() - pubkeys = (session.query(models.PubKey) - .filter_by(md5_hash=md5_hash).all()) - - if len(pubkeys) == 1: - return pubkeys[0] - elif len(pubkeys) > 1: - for pubkey in pubkeys: - if pubkey['pubkey'] == key: - return pubkey - return None - - -def store_pubkey(pubkey_info): - """Store public key in to DB.""" - pubkey = models.PubKey() - pubkey.openid = pubkey_info['openid'] - pubkey.format = pubkey_info['format'] - pubkey.pubkey = pubkey_info['pubkey'] - pubkey.md5_hash = hashlib.md5( - base64.b64decode( - pubkey_info['pubkey'] - ) - ).hexdigest() - pubkey.comment = pubkey_info['comment'] - - with get_session() as session: - pubkeys_collision = (session. - query(models.PubKey). - filter_by(md5_hash=pubkey.md5_hash). - filter_by(pubkey=pubkey.pubkey).all()) - if not pubkeys_collision: - pubkey.save(session) - session.commit() - else: - raise Duplication('Public key already exists.') - - return pubkey.id - - -def delete_pubkey(id): - """Delete public key from DB.""" - with get_session() as session: - key = session.query(models.PubKey).filter_by(id=id).first() - session.delete(key) - session.commit() - - -def get_user_pubkeys(user_openid): - """Get public pubkeys for specified user.""" - with get_session() as session: - pubkeys = (session.query(models.PubKey) - .filter_by(openid=user_openid).all()) - return _to_dict(pubkeys) - - -def add_user_to_group(user_openid, group_id, created_by_user): - """Add specified user to specified group.""" - item = models.UserToGroup() - with get_session() as session: - item.user_openid = user_openid - item.group_id = group_id - item.created_by_user = created_by_user - item.save(session=session) - session.commit() - - -def remove_user_from_group(user_openid, group_id): - """Remove specified user from specified group.""" - with get_session() as session: - (session.query(models.UserToGroup). - filter_by(user_openid=user_openid). - filter_by(group_id=group_id). - delete(synchronize_session=False)) - session.commit() - - -def add_organization(organization_info, creator): - """Add organization.""" - with get_session() as session: - group = models.Group() - group.name = 'Group for %s' % organization_info['name'] - group.save(session=session) - group_id = group.id - - item = models.UserToGroup() - item.user_openid = creator - item.group_id = group_id - item.created_by_user = creator - item.save(session=session) - - organization = models.Organization() - organization.type = organization_info.get( - 'type', api_const.PRIVATE_VENDOR) - organization.name = organization_info['name'] - organization.description = organization_info.get('description') - organization.group_id = group_id - organization.created_by_user = creator - organization.properties = organization_info.get('properties') - organization.save(session=session) - session.commit() - organization_dict = _to_dict(organization) - return organization_dict - - -def update_organization(organization_info): - """Update organization.""" - with get_session() as session: - _id = organization_info['id'] - organization = (session.query(models.Organization). 
- filter_by(id=_id).first()) - if organization is None: - raise NotFound('Organization with id %s not found' % _id) - - organization.type = organization_info.get( - 'type', organization.type) - organization.name = organization_info.get( - 'name', organization.name) - organization.description = organization_info.get( - 'description', organization.description) - organization.properties = organization_info.get( - 'properties', organization.properties) - organization.save(session=session) - organization_dict = _to_dict(organization) - session.commit() - return organization_dict - - -def get_organization(organization_id, allowed_keys=None): - """Get organization by id.""" - with get_session() as session: - organization = (session.query(models.Organization). - filter_by(id=organization_id).first()) - if organization is None: - raise NotFound(f'Organization with id {organization_id} not found') - return _to_dict(organization, allowed_keys=allowed_keys) - - -def delete_organization(organization_id): - """delete organization by id.""" - with get_session() as session: - product_ids = (session - .query(models.Product.id) - .filter_by(organization_id=organization_id)) - (session.query(models.ProductVersion). - filter(models.ProductVersion.product_id.in_(product_ids)). - delete(synchronize_session=False)) - (session.query(models.Product). - filter_by(organization_id=organization_id). - delete(synchronize_session=False)) - (session.query(models.Organization). - filter_by(id=organization_id). - delete(synchronize_session=False)) - - -def add_product(product_info, creator): - """Add product.""" - product = models.Product() - product.id = str(uuid.uuid4()) - product.type = product_info['type'] - product.product_type = product_info['product_type'] - product.product_ref_id = product_info.get('product_ref_id') - product.name = product_info['name'] - product.description = product_info.get('description') - product.organization_id = product_info['organization_id'] - product.created_by_user = creator - product.public = product_info.get('public', False) - product.properties = product_info.get('properties') - - with get_session() as session: - product.save(session=session) - product_version = models.ProductVersion() - product_version.created_by_user = creator - product_version.version = product_info.get('version') - product_version.product_id = product.id - product_version.save(session=session) - product_dict = _to_dict(product) - session.commit() - - return product_dict - - -def update_product(product_info): - """Update product by id.""" - with get_session() as session: - _id = product_info.get('id') - product = session.query(models.Product).filter_by(id=_id).first() - if product is None: - raise NotFound('Product with id %s not found' % _id) - - keys = ['name', 'description', 'product_ref_id', 'public', - 'properties'] - for key in keys: - if key in product_info: - setattr(product, key, product_info[key]) - - product.save(session=session) - product_dict = _to_dict(product) - session.commit() - return product_dict - - -def get_product(id, allowed_keys=None): - """Get product by id.""" - with get_session() as session: - product = session.query(models.Product).filter_by(id=id).first() - if product is None: - session.close() - raise NotFound('Product with id "%s" not found' % id) - return _to_dict(product, allowed_keys=allowed_keys) - - -def delete_product(id): - """delete product by id.""" - with get_session() as session: - (session.query(models.ProductVersion) - .filter_by(product_id=id) - 
.delete(synchronize_session=False)) - (session.query(models.Product).filter_by(id=id). - delete(synchronize_session=False)) - session.commit() - - -def get_foundation_users(): - """Get users' openid-s that belong to group of foundation.""" - with get_session() as session: - organization = ( - session.query(models.Organization.group_id) - .filter_by(type=api_const.FOUNDATION).first()) - if organization is None: - session.close() - LOG.warning('Foundation organization record not found in DB.') - return [] - group_id = organization.group_id - users = (session.query(models.UserToGroup.user_openid). - filter_by(group_id=group_id)) - return [user.user_openid for user in users] - - -def get_organization_users(organization_id): - """Get users that belong to group of organization.""" - with get_session() as session: - organization = (session.query(models.Organization.group_id) - .filter_by(id=organization_id).first()) - if organization is None: - raise NotFound('Organization with id %s is not found' - % organization_id) - group_id = organization.group_id - users = (session.query(models.UserToGroup, models.User) - .join(models.User, - models.User.openid == models.UserToGroup.user_openid) - .filter(models.UserToGroup.group_id == group_id)) - keys = ['openid', 'fullname', 'email'] - organization_users_dict = {item[1].openid: - _to_dict(item[1], allowed_keys=keys) - for item in users} - return organization_users_dict - - -def get_organizations(allowed_keys=None): - """Get all organizations.""" - with get_session() as session: - items = ( - session.query(models.Organization) - .order_by(models.Organization.created_at.desc()).all()) - return _to_dict(items, allowed_keys=allowed_keys) - - -def get_organizations_by_types(types, allowed_keys=None): - """Get organization by list of types.""" - with get_session() as session: - items = ( - session.query(models.Organization) - .filter(models.Organization.type.in_(types)) - .order_by(models.Organization.created_at.desc()).all()) - return _to_dict(items, allowed_keys=allowed_keys) - - -def get_organizations_by_user(user_openid, allowed_keys=None): - """Get organizations for specified user.""" - with get_session() as session: - items = ( - session - .query(models.Organization, models.Group, models.UserToGroup) - .join(models.Group, - models.Group.id == models.Organization.group_id) - .join(models.UserToGroup, - models.Group.id == models.UserToGroup.group_id) - .filter(models.UserToGroup.user_openid == user_openid) - .order_by(models.Organization.created_at.desc()).all()) - items = [item[0] for item in items] - organizations_dict = _to_dict(items, allowed_keys=allowed_keys) - return organizations_dict - - -def get_products(allowed_keys=None, filters=None): - """Get products based on passed in filters.""" - if filters is None: - filters = {} - expected_filters = ['public', 'organization_id'] - filter_args = {} - for key, value in filters.items(): - if key not in expected_filters: - raise Exception('Unknown filter key "%s"' % key) - filter_args[key] = value - - with get_session() as session: - query = session.query(models.Product) - if filter_args: - query = query.filter_by(**filter_args) - items = query.order_by(models.Product.created_at.desc()).all() - products_dict = _to_dict(items, allowed_keys=allowed_keys) - return products_dict - - -def get_products_by_user(user_openid, allowed_keys=None, filters=None): - """Get products that a user can manage.""" - if filters is None: - filters = {} - with get_session() as session: - query = ( - session.query(models.Product, 
models.Organization, models.Group, - models.UserToGroup) - .join(models.Organization, - models.Organization.id == models.Product.organization_id) - .join(models.Group, - models.Group.id == models.Organization.group_id) - .join(models.UserToGroup, - models.Group.id == models.UserToGroup.group_id) - .filter(models.UserToGroup.user_openid == user_openid)) - - expected_filters = ['organization_id'] - for key, value in filters.items(): - if key not in expected_filters: - raise Exception('Unknown filter key "%s"' % key) - query = query.filter(getattr(models.Product, key) == - filters[key]) - items = query.order_by(models.Organization.created_at.desc()).all() - items = [item[0] for item in items] - products_dict = _to_dict(items, allowed_keys=allowed_keys) - return products_dict - - -def get_product_by_version(product_version_id, allowed_keys=None): - """Get product info from a product version ID.""" - with get_session() as session: - product = (session.query(models.Product).join(models.ProductVersion) - .filter(models.ProductVersion.id == product_version_id) - .first()) - return _to_dict(product, allowed_keys=allowed_keys) - - -def get_product_version(product_version_id, allowed_keys=None): - """Get details of a specific version given the id.""" - with get_session() as session: - version = ( - session.query(models.ProductVersion) - .filter_by(id=product_version_id).first() - ) - if version is None: - session.close() - raise NotFound(f'Version with id "{product_version_id}" not found') - product_version_dict = _to_dict(version, allowed_keys=allowed_keys) - return product_version_dict - - -def get_product_version_by_cpid(cpid, allowed_keys=None): - """Get a product version given a cloud provider id.""" - with get_session() as session: - version = ( - session.query(models.ProductVersion) - .filter_by(cpid=cpid).all() - ) - product_version_dict = _to_dict(version, allowed_keys=allowed_keys) - return product_version_dict - - -def get_product_versions(product_id, allowed_keys=None): - """Get all versions for a product.""" - with get_session() as session: - version_info = ( - session.query(models.ProductVersion) - .filter_by(product_id=product_id).all() - ) - product_version_dict = _to_dict(version_info, - allowed_keys=allowed_keys) - return product_version_dict - - -def add_product_version(product_id, version, creator, cpid, allowed_keys=None): - """Add a new product version.""" - product_version = models.ProductVersion() - product_version.created_by_user = creator - product_version.version = version - product_version.product_id = product_id - product_version.cpid = cpid - with get_session() as session: - product_version.save(session=session) - product_version_dict = _to_dict(product_version, - allowed_keys=allowed_keys) - session.commit() - return product_version_dict - - -def update_product_version(product_version_info): - """Update product version from product_info_version dictionary.""" - with get_session() as session: - _id = product_version_info.get('id') - version = (session.query(models.ProductVersion) - .filter_by(id=_id) - .first()) - if version is None: - raise NotFound('Product version with id %s not found' % _id) - - # Only allow updating cpid. 
- keys = ['cpid'] - for key in keys: - if key in product_version_info: - setattr(version, key, product_version_info[key]) - - version.save(session=session) - product_version_dict = _to_dict(version) - session.commit() - return product_version_dict - - -def delete_product_version(product_version_id): - """Delete a product version.""" - with get_session() as session: - (session.query(models.ProductVersion).filter_by(id=product_version_id). - delete(synchronize_session=False)) - session.commit() diff --git a/refstack/db/sqlalchemy/models.py b/refstack/db/sqlalchemy/models.py deleted file mode 100644 index d032a017..00000000 --- a/refstack/db/sqlalchemy/models.py +++ /dev/null @@ -1,275 +0,0 @@ -# -# Copyright (c) 2013 Piston Cloud Computing, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""SQLAlchemy models for Refstack data.""" - -import uuid - -from oslo_db.sqlalchemy import models -import sqlalchemy as sa -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import orm - -BASE = declarative_base() - - -class RefStackBase(models.ModelBase, - models.TimestampMixin, - models.SoftDeleteMixin): - """Base class for RefStack Models.""" - - __table_args__ = {'mysql_engine': 'InnoDB'} - - @property - def metadata_keys(self): # pragma: no cover - """Model keys with metadata structure. 
Will be converted in dict.""" - return dict() - - @property - def default_allowed_keys(self): # pragma: no cover - """Default keys will be present in resulted dict.""" - return () - - metadata = None - - -class Test(BASE, RefStackBase): # pragma: no cover - """Test.""" - - __tablename__ = 'test' - - id = sa.Column(sa.String(36), primary_key=True) - cpid = sa.Column(sa.String(128), index=True, nullable=False) - duration_seconds = sa.Column(sa.Integer, nullable=False) - results = orm.relationship('TestResults', backref='test') - meta = orm.relationship('TestMeta', backref='test') - product_version_id = sa.Column(sa.String(36), - sa.ForeignKey('product_version.id'), - nullable=True, unique=False) - verification_status = sa.Column(sa.Integer, nullable=False, default=0) - product_version = orm.relationship('ProductVersion', backref='test') - - @property - def _extra_keys(self): - """Relation should be pointed directly.""" - return ['results', 'meta', 'product_version'] - - @property - def metadata_keys(self): - """Model keys with metadata structure.""" - return {'meta': {'key': 'meta_key', - 'value': 'value'}} - - @property - def default_allowed_keys(self): - """Default keys.""" - return ('id', 'created_at', 'duration_seconds', 'meta', - 'verification_status', 'product_version') - - -class TestResults(BASE, RefStackBase): # pragma: no cover - """Test results.""" - - __tablename__ = 'results' - __table_args__ = ( - sa.UniqueConstraint('test_id', 'name'), - # TODO(sslypushenko) - # Constraint should turned on after duplication test uuids issue - # will be fixed - # sa.UniqueConstraint('test_id', 'uuid'), - ) - _id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) - test_id = sa.Column(sa.String(36), sa.ForeignKey('test.id'), - index=True, nullable=False, unique=False) - name = sa.Column(sa.String(512, collation='latin1_swedish_ci'),) - uuid = sa.Column(sa.String(36)) - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'name', 'uuid' - - -class TestMeta(BASE, RefStackBase): # pragma: no cover - """Test metadata.""" - - __tablename__ = 'meta' - __table_args__ = ( - sa.UniqueConstraint('test_id', 'meta_key'), - ) - _id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) - test_id = sa.Column(sa.String(36), sa.ForeignKey('test.id'), - index=True, nullable=False, unique=False) - meta_key = sa.Column(sa.String(64), index=True, nullable=False) - value = sa.Column(sa.Text()) - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'meta_key', 'value' - - -class User(BASE, RefStackBase): # pragma: no cover - """User information.""" - - __tablename__ = 'user' - _id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) - openid = sa.Column(sa.String(128), nullable=False, unique=True, - index=True) - email = sa.Column(sa.String(128)) - fullname = sa.Column(sa.String(128)) - pubkeys = orm.relationship('PubKey', backref='user') - - @property - def _extra_keys(self): - """Relation should be pointed directly.""" - return ['pubkeys'] - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'openid', 'email', 'fullname', 'pubkeys' - - -class PubKey(BASE, RefStackBase): # pragma: no cover - """User public pubkeys.""" - - __tablename__ = 'pubkeys' - - id = sa.Column(sa.String(36), primary_key=True, - default=lambda: str(uuid.uuid4())) - openid = sa.Column(sa.String(128), sa.ForeignKey('user.openid'), - nullable=False, unique=True, index=True) - format = sa.Column(sa.String(24), nullable=False) - pubkey = 
sa.Column(sa.Text(), nullable=False) - comment = sa.Column(sa.String(128)) - md5_hash = sa.Column(sa.String(32), nullable=False, index=True) - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'id', 'openid', 'format', 'pubkey', 'comment' - - -class Group(BASE, RefStackBase): # pragma: no cover - """Group definition.""" - - __tablename__ = 'group' - - id = sa.Column(sa.String(36), primary_key=True, - default=lambda: str(uuid.uuid4())) - name = sa.Column(sa.String(80), nullable=False) - description = sa.Column(sa.Text()) - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'id', 'name', 'description' - - -class UserToGroup(BASE, RefStackBase): # pragma: no cover - """user-group as many-to-many.""" - - __tablename__ = 'user_to_group' - - created_by_user = sa.Column(sa.String(128), nullable=False) - _id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) - user_openid = sa.Column(sa.String(128), sa.ForeignKey('user.openid'), - nullable=False, index=True) - group_id = sa.Column(sa.String(36), sa.ForeignKey('group.id'), - nullable=False) - - @property - def default_allowed_keys(self): - """Default keys.""" - return 'user_openid', 'group_id' - - -class Organization(BASE, RefStackBase): # pragma: no cover - """Organization definition.""" - - __tablename__ = 'organization' - - id = sa.Column(sa.String(36), primary_key=True, - default=lambda: str(uuid.uuid4())) - type = sa.Column(sa.Integer, nullable=False) - name = sa.Column(sa.String(80), nullable=False) - description = sa.Column(sa.Text()) - group_id = sa.Column(sa.String(36), sa.ForeignKey('group.id'), - nullable=False) - created_by_user = sa.Column(sa.String(128), sa.ForeignKey('user.openid'), - nullable=False) - properties = sa.Column(sa.Text()) - - @property - def default_allowed_keys(self): - """Default keys.""" - return ('id', 'type', 'name', 'description', 'group_id', - 'created_by_user', 'properties', 'created_at', 'updated_at') - - -class Product(BASE, RefStackBase): # pragma: no cover - """Product definition.""" - - __tablename__ = 'product' - - id = sa.Column(sa.String(36), primary_key=True, - default=lambda: str(uuid.uuid4())) - name = sa.Column(sa.String(80), nullable=False) - description = sa.Column(sa.Text()) - organization_id = sa.Column(sa.String(36), - sa.ForeignKey('organization.id'), - nullable=False) - created_by_user = sa.Column(sa.String(128), sa.ForeignKey('user.openid'), - nullable=False) - public = sa.Column(sa.Boolean(), nullable=False) - properties = sa.Column(sa.Text()) - type = sa.Column(sa.Integer(), nullable=False) - product_type = sa.Column(sa.Integer(), nullable=False) - - @property - def default_allowed_keys(self): - """Default keys.""" - return ('id', 'name', 'organization_id', 'public') - - -class ProductVersion(BASE, RefStackBase): - """Product Version definition.""" - - __tablename__ = 'product_version' - __table_args__ = ( - sa.UniqueConstraint('product_id', 'version'), - ) - - id = sa.Column(sa.String(36), primary_key=True, - default=lambda: str(uuid.uuid4())) - product_id = sa.Column(sa.String(36), sa.ForeignKey('product.id'), - index=True, nullable=False, unique=False) - version = sa.Column(sa.String(length=36), nullable=True) - cpid = sa.Column(sa.String(36), nullable=True) - created_by_user = sa.Column(sa.String(128), sa.ForeignKey('user.openid'), - nullable=False) - product_info = orm.relationship('Product', backref='product_version') - - @property - def _extra_keys(self): - """Relation should be pointed directly.""" - return 
['product_info'] - - @property - def default_allowed_keys(self): - """Default keys.""" - return ('id', 'version', 'cpid', 'product_info') diff --git a/refstack/db/utils.py b/refstack/db/utils.py deleted file mode 100644 index aba1e01d..00000000 --- a/refstack/db/utils.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utilities for database.""" -from oslo_config import cfg -from oslo_log import log - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class PluggableBackend(object): - """A pluggable backend loaded lazily based on some value.""" - - def __init__(self, pivot, **backends): - """Init.""" - self.__backends = backends - self.__pivot = pivot - self.__backend = None - - def __get_backend(self): - """Get backend.""" - if not self.__backend: - backend_name = CONF[self.__pivot] - if backend_name not in self.__backends: # pragma: no cover - raise Exception('Invalid backend: %s' % backend_name) - - backend = self.__backends[backend_name] - if isinstance(backend, tuple): # pragma: no cover - name = backend[0] - fromlist = backend[1] - else: - name = backend - fromlist = backend - - self.__backend = __import__(name, None, None, fromlist) - LOG.debug('backend %s', self.__backend) - return self.__backend - - def __getattr__(self, key): - """Proxy interface to backend.""" - backend = self.__get_backend() - return getattr(backend, key) diff --git a/refstack/opts.py b/refstack/opts.py deleted file mode 100644 index d4527000..00000000 --- a/refstack/opts.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Function list_opts intended for oslo-config-generator. - -this tool used for generate config file with help info and default values -for options defined anywhere in application. -All new options must be imported here and must be returned from -list_opts function as list that contain tuple. -Use itertools.chain if config section contain more than one imported module -with options. For example: - -... -def list_opts(): - return [ - ('DEFAULT', refstack.db.api.db_opts), - ('api', - itertools.chain(refstack.api.first.module.opts, - refstack.api.second.modulei.opts,)), - ] -... -""" -import itertools - -import refstack.api.app -import refstack.api.controllers.auth -import refstack.api.controllers.v1 -import refstack.db.api - - -def list_opts(): - """List oslo config options. 
- - Keep a list in alphabetical order - """ - return [ - # - ('DEFAULT', itertools.chain(refstack.api.app.UI_OPTS, - refstack.db.api.db_opts)), - ('api', itertools.chain(refstack.api.app.API_OPTS, - refstack.api.controllers.CTRLS_OPTS)), - ('osid', refstack.api.controllers.auth.OPENID_OPTS), - ] diff --git a/refstack/tests/__init__.py b/refstack/tests/__init__.py deleted file mode 100644 index ab867d17..00000000 --- a/refstack/tests/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Refstack tests.""" diff --git a/refstack/tests/api/__init__.py b/refstack/tests/api/__init__.py deleted file mode 100644 index e6f3c1e2..00000000 --- a/refstack/tests/api/__init__.py +++ /dev/null @@ -1,221 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Base classes for API tests.""" -import os - -from oslo_config import fixture as config_fixture -from oslotest import base -import pecan.testing -from sqlalchemy import create_engine -from sqlalchemy.engine import reflection -from sqlalchemy.schema import ( - MetaData, - Table, - DropTable, - ForeignKeyConstraint, - DropConstraint, -) - -from refstack.db import migration - - -class FunctionalTest(base.BaseTestCase): - """Base class for functional test case. - - Used for functional tests where you need to test your. - literal application and its integration with the framework. - """ - - def setUp(self): - """Test setup.""" - super(FunctionalTest, self).setUp() - - self.connection = os.environ.get("REFSTACK_TEST_MYSQL_URL") - if self.connection is None: - raise ValueError("Database connection url was not found. 
" - "Environment variable REFSTACK_TEST_MYSQL_URL " - "is not defined") - - self.config = { - 'app': { - 'root': 'refstack.api.controllers.root.RootController', - 'modules': ['refstack.api'], - } - } - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - self.CONF.set_override('connection', - self.connection, - 'database') - - self.app = pecan.testing.load_test_app(self.config) - - self.drop_all_tables_and_constraints() - migration.upgrade('head') - - def tearDown(self): - """Test teardown.""" - super(FunctionalTest, self).tearDown() - pecan.set_config({}, overwrite=True) - self.app.reset() - - def drop_all_tables_and_constraints(self): - """Drop tables and cyclical constraints between tables.""" - engine = create_engine(self.connection) - conn = engine.connect() - trans = conn.begin() - - inspector = reflection.Inspector.from_engine(engine) - metadata = MetaData() - - tbs = [] - all_fks = [] - - try: - for table_name in inspector.get_table_names(): - fks = [] - for fk in inspector.get_foreign_keys(table_name): - if not fk['name']: - continue - fks.append( - ForeignKeyConstraint((), (), name=fk['name'])) - - t = Table(table_name, metadata, *fks) - tbs.append(t) - all_fks.extend(fks) - - for fkc in all_fks: - conn.execute(DropConstraint(fkc)) - - for table in tbs: - conn.execute(DropTable(table)) - - trans.commit() - trans.close() - conn.close() - except Exception: - trans.rollback() - conn.close() - raise - - def delete(self, url, headers=None, extra_environ=None, - status=None, expect_errors=False, **params): - """Send HTTP DELETE request. - - :param url: url path to target service - :param headers: a dictionary of extra headers to send - :param extra_environ: a dictionary of environmental variables that - should be added to the request - :param status: integer or string of the HTTP status code you expect - in response (if not 200 or 3xx). You can also use a - wildcard, like '3*' or '*' - :param expect_errors: boolean value, if this is False, then if - anything is written to environ wsgi.errors it - will be an error. If it is True, then - non-200/3xx responses are also okay - :param params: a query string, or a dictionary that will be encoded - into a query string. You may also include a URL query - string on the url - - """ - response = self.app.delete(url, - headers=headers, - extra_environ=extra_environ, - status=status, - expect_errors=expect_errors, - params=params) - - return response - - def get_json(self, url, headers=None, extra_environ=None, - status=None, expect_errors=False, **params): - """Send HTTP GET request. - - :param url: url path to target service - :param headers: a dictionary of extra headers to send - :param extra_environ: a dictionary of environmental variables that - should be added to the request - :param status: integer or string of the HTTP status code you expect - in response (if not 200 or 3xx). You can also use a - wildcard, like '3*' or '*' - :param expect_errors: boolean value, if this is False, then if - anything is written to environ wsgi.errors it - will be an error. If it is True, then - non-200/3xx responses are also okay - :param params: a query string, or a dictionary that will be encoded - into a query string. 
You may also include a URL query - string on the url - - """ - response = self.app.get(url, - headers=headers, - extra_environ=extra_environ, - status=status, - expect_errors=expect_errors, - params=params) - - if not expect_errors and response.content_type == 'application/json': - response = response.json - return response - - def post_json(self, url, headers=None, extra_environ=None, - status=None, expect_errors=False, - content_type='application/json', **params): - """Send HTTP POST request. - - :param url: url path to target service - :param headers: a dictionary of extra headers to send - :param extra_environ: a dictionary of environmental variables that - should be added to the request - :param status: integer or string of the HTTP status code you expect - in response (if not 200 or 3xx). You can also use a - wildcard, like '3*' or '*' - :param expect_errors: boolean value, if this is False, then if - anything is written to environ wsgi.errors it - will be an error. If it is True, then - non-200/3xx responses are also okay - :param params: a query string, or a dictionary that will be encoded - into a query string. You may also include a URL query - string on the url - - """ - response = self.app.post(url, - headers=headers, - extra_environ=extra_environ, - status=status, - expect_errors=expect_errors, - content_type=content_type, - **params) - - if not expect_errors and response.content_type == 'application/json': - response = response.json - return response - - def put_json(self, url, headers=None, extra_environ=None, - status=None, expect_errors=False, - content_type='application/json', **params): - """Send HTTP PUT request. Similar to :meth:`post_json`.""" - response = self.app.put(url, - headers=headers, - extra_environ=extra_environ, - status=status, - expect_errors=expect_errors, - content_type=content_type, - **params) - - if not expect_errors and response.content_type == 'application/json': - response = response.json - return response diff --git a/refstack/tests/api/test_guidelines.py b/refstack/tests/api/test_guidelines.py deleted file mode 100644 index 07d1ce53..00000000 --- a/refstack/tests/api/test_guidelines.py +++ /dev/null @@ -1,128 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import json - -import httmock - -from refstack.tests import api - - -class TestGuidelinesEndpoint(api.FunctionalTest): - """Test case for the 'guidelines' API endpoint.""" - - URL = '/v1/guidelines/' - - def test_get_guideline_list(self): - @httmock.all_requests - def github_api_mock(url, request): - headers = {'content-type': 'application/json'} - content = [{'name': '2015.03.json', - 'path': '2015.03.json', - 'type': 'file'}, - {'name': '2015.next.json', - 'path': '2015.next.json', - 'type': 'file'}, - {'name': '2015.03', - 'path': '2015.03', - 'file': '2015.03', - 'type': 'dir'}, - {'name': 'test.2018.02.json', - 'path': 'add-ons/test.2018.02.json', - 'type': 'file'}, - {'name': 'test.next.json', - 'path': 'add-ons/test.next.json', - 'type': 'file'}] - content = json.dumps(content) - return httmock.response(200, content, headers, None, 5, request) - - with httmock.HTTMock(github_api_mock): - actual_response = self.get_json(self.URL) - - expected_powered = [ - {'name': u'2015.03.json', - 'file': u'2015.03.json'}, - {'name': u'2015.next.json', - 'file': u'2015.next.json'} - ] - expected_test_addons = [ - {u'name': u'2018.02.json', - u'file': u'test.2018.02.json'}, - {u'name': u'next.json', - u'file': u'test.next.json'} - ] - self.assertIn(u'powered', actual_response.keys()) - self.assertIn(u'test', actual_response.keys()) - self.assertEqual(expected_test_addons, actual_response['test']) - self.assertEqual(expected_powered, actual_response['powered']) - - def test_get_guideline_file(self): - @httmock.all_requests - def github_mock(url, request): - content = {'foo': 'bar'} - return httmock.response(200, content, None, None, 5, request) - url = self.URL + "2015.03.json" - with httmock.HTTMock(github_mock): - actual_response = self.get_json(url) - - expected_response = {'foo': 'bar'} - self.assertEqual(expected_response, actual_response) - - def test_get_guideline_test_list(self): - @httmock.all_requests - def github_mock(url, request): - content = { - 'schema': '1.4', - 'platform': {'required': ['compute', 'object']}, - 'components': { - 'compute': { - 'required': ['cap-1'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - }, - 'object': { - 'required': ['cap-2'], - 'advisory': ['cap-3'], - 'deprecated': [], - 'removed': [] - } - }, - 'capabilities': { - 'cap-1': { - 'tests': { - 'test_1': {'idempotent_id': 'id-1234'}, - 'test_2': {'idempotent_id': 'id-5678', - 'aliases': ['test_2_1']}, - 'test_3': {'idempotent_id': 'id-1111', - 'flagged': {'reason': 'foo'}} - } - }, - 'cap-2': { - 'tests': { - 'test_4': {'idempotent_id': 'id-1233'} - } - } - } - } - return httmock.response(200, content, None, None, 5, request) - url = self.URL + "2016.03/tests" - with httmock.HTTMock(github_mock): - actual_response = self.get_json(url, expect_errors=True) - - expected_list = ['test_1[id-1234]', 'test_2[id-5678]', - 'test_2_1[id-5678]', 'test_3[id-1111]', - 'test_4[id-1233]'] - expected_response = '\n'.join(expected_list) - self.assertEqual(expected_response, actual_response.text) diff --git a/refstack/tests/api/test_products.py b/refstack/tests/api/test_products.py deleted file mode 100644 index 13d0c1dc..00000000 --- a/refstack/tests/api/test_products.py +++ /dev/null @@ -1,322 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -from unittest import mock -import uuid - -from oslo_config import fixture as config_fixture -import webtest.app - -from refstack.api import constants as api_const -from refstack import db -from refstack.tests import api - -FAKE_PRODUCT = { - 'name': 'product name', - 'description': 'product description', - 'product_type': api_const.CLOUD, -} - - -class TestProductsEndpoint(api.FunctionalTest): - """Test case for the 'products' API endpoint.""" - - URL = '/v1/products/' - - def setUp(self): - super(TestProductsEndpoint, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - self.user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(self.user_info) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post(self, mock_get_user): - """Test products endpoint with post request.""" - product = json.dumps(FAKE_PRODUCT) - actual_response = self.post_json(self.URL, params=product) - self.assertIn('id', actual_response) - try: - uuid.UUID(actual_response.get('id'), version=4) - except ValueError: - self.fail("actual_response doesn't contain new item id") - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post_with_empty_object(self, mock_get_user): - """Test products endpoint with empty product request.""" - results = json.dumps(dict()) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=results) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post_with_invalid_schema(self, mock_get_user): - """Test post request with invalid schema.""" - products = json.dumps({ - 'foo': 'bar', - }) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=products) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_vendor_was_created(self, mock_get_user): - """Test get_one request.""" - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - - get_response = self.get_json(self.URL + post_response.get('id')) - vendor_id = get_response.get('organization_id') - self.assertIsNotNone(vendor_id) - - # check vendor is present - get_response = self.get_json('/v1/vendors/' + vendor_id) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_using_default_vendor(self, mock_get_user): - """Test get_one request.""" - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - - get_response = self.get_json(self.URL + post_response.get('id')) - vendor_id = get_response.get('organization_id') - self.assertIsNotNone(vendor_id) - - # check vendor is present - get_response = self.get_json('/v1/vendors/' + vendor_id) - - # create one more product - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_get_by_org(self, mock_get_user): - """Test 
getting products of an organization.""" - org1 = db.add_organization({'name': 'test-vendor1'}, 'test-open-id') - org2 = db.add_organization({'name': 'test-vendor2'}, 'test-open-id') - prod_info = {'name': 'product1', - 'description': 'product description', - 'product_type': 1, 'type': 0, - 'organization_id': org1['id'], 'public': True} - prod1 = db.add_product(prod_info, 'test-open-id') - prod_info['name'] = 'product2' - prod_info['organization_id'] = org2['id'] - prod2 = db.add_product(prod_info, 'test-open-id') - get_response = self.get_json(self.URL + - '?organization_id=' + org1['id']) - self.assertEqual(1, len(get_response['products'])) - self.assertEqual(prod1['id'], get_response['products'][0]['id']) - - get_response = self.get_json(self.URL + - '?organization_id=' + org2['id']) - self.assertEqual(1, len(get_response['products'])) - self.assertEqual(prod2['id'], get_response['products'][0]['id']) - - # Test that non-admin can't view non-public products of an org. - db.update_product({'id': prod1['id'], 'public': False}) - mock_get_user.return_value = 'some-user' - get_response = self.get_json(self.URL + - '?organization_id=' + org1['id']) - self.assertFalse(get_response['products']) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_get_one(self, mock_get_user): - """Test get_one request.""" - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - - get_response = self.get_json(self.URL + post_response.get('id')) - # some of these fields are only exposed to the owner/foundation. - self.assertIn('created_by_user', get_response) - self.assertIn('properties', get_response) - self.assertIn('created_at', get_response) - self.assertIn('updated_at', get_response) - self.assertEqual(FAKE_PRODUCT['name'], - get_response['name']) - self.assertEqual(FAKE_PRODUCT['description'], - get_response['description']) - self.assertEqual(api_const.PUBLIC_CLOUD, - get_response['type']) - self.assertEqual(api_const.CLOUD, - get_response['product_type']) - - # reset auth and check return result for anonymous - mock_get_user.return_value = None - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + post_response.get('id')) - - mock_get_user.return_value = 'foo-open-id' - # Make product public. - product_info = {'id': post_response.get('id'), 'public': 1} - db.update_product(product_info) - - # Test when getting product info when not owner/foundation. 
- get_response = self.get_json(self.URL + post_response.get('id')) - self.assertNotIn('created_by_user', get_response) - self.assertNotIn('created_at', get_response) - self.assertNotIn('updated_at', get_response) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_delete(self, mock_get_user): - """Test delete request.""" - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - self.delete(self.URL + post_response.get('id')) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_update(self, mock_get_user): - """Test put(update) request.""" - product = json.dumps(FAKE_PRODUCT) - post_response = self.post_json(self.URL, params=product) - id = post_response.get('id') - - # check update of properties - props = {'properties': {'fake01': 'value01'}} - post_response = self.put_json(self.URL + id, - params=json.dumps(props)) - get_response = self.get_json(self.URL + id) - self.assertEqual(FAKE_PRODUCT['name'], - get_response['name']) - self.assertEqual(FAKE_PRODUCT['description'], - get_response['description']) - self.assertEqual(props['properties'], - json.loads(get_response['properties'])) - - # check second update of properties - props = {'properties': {'fake02': 'value03'}} - post_response = self.put_json(self.URL + id, - params=json.dumps(props)) - get_response = self.get_json(self.URL + id) - self.assertEqual(props['properties'], - json.loads(get_response['properties'])) - - def test_get_one_invalid_url(self): - """Test get request with invalid url.""" - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + 'fake_id') - - def test_get_with_empty_database(self): - """Test get(list) request with no items in DB.""" - results = self.get_json(self.URL) - self.assertEqual([], results['products']) - - -class TestProductVersionEndpoint(api.FunctionalTest): - """Test case for the 'products//version' API endpoint.""" - - def setUp(self): - super(TestProductVersionEndpoint, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - self.user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(self.user_info) - - patcher = mock.patch('refstack.api.utils.get_user_id') - self.addCleanup(patcher.stop) - self.mock_get_user = patcher.start() - self.mock_get_user.return_value = 'test-open-id' - - product = json.dumps(FAKE_PRODUCT) - response = self.post_json('/v1/products/', params=product) - self.product_id = response['id'] - self.URL = '/v1/products/' + self.product_id + '/versions/' - - def test_get(self): - """Test getting a list of versions.""" - response = self.get_json(self.URL) - # Product created without version specified. - self.assertIsNone(response[0]['version']) - - # Create a version - post_response = self.post_json(self.URL, - params=json.dumps({'version': '1.0'})) - - response = self.get_json(self.URL) - self.assertEqual(2, len(response)) - self.assertEqual(post_response['version'], response[1]['version']) - - def test_get_one(self): - """"Test get a specific version.""" - # Create a version - post_response = self.post_json(self.URL, - params=json.dumps({'version': '2.0'})) - version_id = post_response['id'] - - response = self.get_json(self.URL + version_id) - self.assertEqual(post_response['version'], response['version']) - - # Test nonexistent version. 
- self.assertRaises(webtest.app.AppError, self.get_json, - self.URL + 'sdsdsds') - - def test_post(self): - """Test creating a product version.""" - version = {'cpid': '123', 'version': '5.0'} - post_response = self.post_json(self.URL, params=json.dumps(version)) - - get_response = self.get_json(self.URL + post_response['id']) - self.assertEqual(version['cpid'], get_response['cpid']) - self.assertEqual(version['version'], get_response['version']) - self.assertEqual(self.product_id, get_response['product_id']) - self.assertIn('id', get_response) - - # Test 'version' not in response body. - response = self.post_json(self.URL, expect_errors=True, - params=json.dumps({'cpid': '123'})) - self.assertEqual(400, response.status_code) - - def test_put(self): - """Test updating a product version.""" - post_response = self.post_json(self.URL, - params=json.dumps({'version': '6.0'})) - version_id = post_response['id'] - - response = self.get_json(self.URL + version_id) - self.assertIsNone(response['cpid']) - - props = {'cpid': '1233'} - self.put_json(self.URL + version_id, params=json.dumps(props)) - - response = self.get_json(self.URL + version_id) - self.assertEqual('1233', response['cpid']) - - def test_delete(self): - """Test deleting a product version.""" - post_response = self.post_json(self.URL, - params=json.dumps({'version': '7.0'})) - version_id = post_response['id'] - self.delete(self.URL + version_id) - self.assertRaises(webtest.app.AppError, self.get_json, - self.URL + 'version_id') - - # Get the null version and ensure it can't be deleted. - versions = self.get_json(self.URL) - version_id = versions[0]['id'] - response = self.delete(self.URL + version_id, expect_errors=True) - self.assertEqual(400, response.status_code) diff --git a/refstack/tests/api/test_profile.py b/refstack/tests/api/test_profile.py deleted file mode 100644 index 1c05eb07..00000000 --- a/refstack/tests/api/test_profile.py +++ /dev/null @@ -1,89 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import binascii -import json -from unittest import mock - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import serialization -import webtest.app - -from refstack import db -from refstack.tests import api - - -class TestProfileEndpoint(api.FunctionalTest): - """Test case for the 'profile' API endpoint.""" - - URL = '/v1/profile/' - - def setUp(self): - super(TestProfileEndpoint, self).setUp() - self.user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(self.user_info) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_get(self, mock_get_user): - response = self.get_json(self.URL) - self.user_info['is_admin'] = False - self.assertEqual(self.user_info, response) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_pubkeys(self, mock_get_user): - """Test '/v1/profile/pubkeys' API endpoint.""" - url = self.URL + 'pubkeys' - key = rsa.generate_private_key( - public_exponent=65537, - key_size=2048, - backend=default_backend() - ) - sign = key.sign('signature'.encode('utf-8'), - padding.PKCS1v15(), - hashes.SHA256()) - pubkey = key.public_key().public_bytes( - serialization.Encoding.OpenSSH, - serialization.PublicFormat.OpenSSH - ).decode('utf-8') - body = {'raw_key': pubkey, - 'self_signature': binascii.b2a_hex(sign).decode('utf-8')} - json_params = json.dumps(body) - - # POST endpoint - pubkey_id = self.post_json(url, params=json_params) - - # GET endpoint - user_pubkeys = self.get_json(url) - self.assertEqual(1, len(user_pubkeys)) - self.assertEqual(pubkey.split()[1], user_pubkeys[0]['pubkey']) - self.assertEqual('ssh-rsa', user_pubkeys[0]['format']) - self.assertEqual(pubkey_id, user_pubkeys[0]['id']) - - delete_url = '{}/{}'.format(url, pubkey_id) - # DELETE endpoint - response = self.delete(delete_url) - self.assertEqual(204, response.status_code) - - user_pubkeys = self.get_json(url) - self.assertEqual(0, len(user_pubkeys)) - - # DELETE endpoint - nonexistent pubkey - self.assertRaises(webtest.app.AppError, self.delete, delete_url) diff --git a/refstack/tests/api/test_results.py b/refstack/tests/api/test_results.py deleted file mode 100644 index b09ffa23..00000000 --- a/refstack/tests/api/test_results.py +++ /dev/null @@ -1,508 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import binascii -import json -from unittest import mock -import uuid - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import serialization -from oslo_config import fixture as config_fixture -import webtest.app - -from refstack.api import constants as api_const -from refstack.api import validators -from refstack import db -from refstack.tests import api - - -FAKE_TESTS_RESULT = { - 'cpid': 'foo', - 'duration_seconds': 10, - 'results': [ - {'name': 'tempest.foo.bar'}, - {'name': 'tempest.buzz', - 'uid': '42'} - ] -} - -FAKE_JSON_WITH_EMPTY_RESULTS = { - 'cpid': 'foo', - 'duration_seconds': 20, - 'results': [ - ] -} - - -class TestResultsEndpoint(api.FunctionalTest): - """Test case for the 'results' API endpoint.""" - - URL = '/v1/results/' - - def setUp(self): - super(TestResultsEndpoint, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - def test_post(self): - """Test results endpoint with post request.""" - results = json.dumps(FAKE_TESTS_RESULT) - actual_response = self.post_json(self.URL, params=results) - self.assertIn('test_id', actual_response) - try: - uuid.UUID(actual_response.get('test_id'), version=4) - except ValueError: - self.fail("actual_response doesn't contain test_id") - - def test_post_with_empty_result(self): - """Test results endpoint with empty test results request.""" - results = json.dumps(FAKE_JSON_WITH_EMPTY_RESULTS) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=results) - - def test_post_with_invalid_schema(self): - """Test post request with invalid schema.""" - results = json.dumps({ - 'foo': 'bar', - 'duration_seconds': 999, - }) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=results) - - @mock.patch('refstack.api.utils.check_owner') - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_put(self, mock_user, mock_check_foundation, mock_check_owner): - """Test results endpoint with put request.""" - results = json.dumps(FAKE_TESTS_RESULT) - test_response = self.post_json(self.URL, params=results) - test_id = test_response.get('test_id') - url = self.URL + test_id - - user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(user_info) - - fake_product = { - 'name': 'product name', - 'description': 'product description', - 'product_type': api_const.CLOUD, - } - - # Create a product - product_response = self.post_json('/v1/products/', - params=json.dumps(fake_product)) - # Create a product version - version_url = '/v1/products/' + product_response['id'] + '/versions/' - version_response = self.post_json(version_url, - params=json.dumps({'version': '1'})) - - # Test Foundation admin can put. - mock_check_foundation.return_value = True - body = {'product_version_id': version_response['id']} - self.put_json(url, params=json.dumps(body)) - get_response = self.get_json(url) - self.assertEqual(version_response['id'], - get_response['product_version']['id']) - - # Test when product_version_id is None. 
- body = {'product_version_id': None} - self.put_json(url, params=json.dumps(body)) - get_response = self.get_json(url) - self.assertIsNone(get_response['product_version']) - - # Test when test verification preconditions are not met. - body = {'verification_status': api_const.TEST_VERIFIED} - put_response = self.put_json(url, expect_errors=True, - params=json.dumps(body)) - self.assertEqual(403, put_response.status_code) - - # Share the test run. - db.save_test_result_meta_item(test_id, api_const.SHARED_TEST_RUN, True) - put_response = self.put_json(url, expect_errors=True, - params=json.dumps(body)) - self.assertEqual(403, put_response.status_code) - - # Now associate guideline and target program. Now we should be - # able to mark a test verified. - db.save_test_result_meta_item(test_id, 'target', 'platform') - db.save_test_result_meta_item(test_id, 'guideline', '2016.01.json') - put_response = self.put_json(url, params=json.dumps(body)) - self.assertEqual(api_const.TEST_VERIFIED, - put_response['verification_status']) - - # Unshare the test, and check that we can mark it not verified. - db.delete_test_result_meta_item(test_id, api_const.SHARED_TEST_RUN) - body = {'verification_status': api_const.TEST_NOT_VERIFIED} - put_response = self.put_json(url, params=json.dumps(body)) - self.assertEqual(api_const.TEST_NOT_VERIFIED, - put_response['verification_status']) - - # Test when verification_status value is invalid. - body = {'verification_status': 111} - put_response = self.put_json(url, expect_errors=True, - params=json.dumps(body)) - self.assertEqual(400, put_response.status_code) - - # Check test owner can put. - mock_check_foundation.return_value = False - mock_check_owner.return_value = True - body = {'product_version_id': version_response['id']} - self.put_json(url, params=json.dumps(body)) - get_response = self.get_json(url) - self.assertEqual(version_response['id'], - get_response['product_version']['id']) - - # Test non-Foundation user can't change verification_status. - body = {'verification_status': 1} - put_response = self.put_json(url, expect_errors=True, - params=json.dumps(body)) - self.assertEqual(403, put_response.status_code) - - # Test unauthorized put. - mock_check_foundation.return_value = False - mock_check_owner.return_value = False - self.assertRaises(webtest.app.AppError, - self.put_json, - url, - params=json.dumps(body)) - - def test_get_one(self): - """Test get request.""" - results = json.dumps(FAKE_TESTS_RESULT) - post_response = self.post_json(self.URL, params=results) - get_response = self.get_json(self.URL + post_response.get('test_id')) - # CPID is only exposed to the owner. 
- self.assertNotIn('cpid', get_response) - self.assertEqual(FAKE_TESTS_RESULT['duration_seconds'], - get_response['duration_seconds']) - for test in FAKE_TESTS_RESULT['results']: - self.assertIn(test['name'], get_response['results']) - - def test_get_one_with_nonexistent_uuid(self): - """Test get request with nonexistent uuid.""" - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + str(uuid.uuid4())) - - def test_get_one_schema(self): - """Test get request for getting JSON schema.""" - validator = validators.TestResultValidator() - expected_schema = validator.schema - actual_schema = self.get_json(self.URL + 'schema') - self.assertEqual(actual_schema, expected_schema) - - def test_get_one_invalid_url(self): - """Test get request with invalid url.""" - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + 'fake_url') - - def test_get_pagination(self): - self.CONF.set_override('results_per_page', - 2, - 'api') - - responses = [] - for i in range(3): - fake_results = { - 'cpid': str(i), - 'duration_seconds': i, - 'results': [ - {'name': 'tempest.foo.bar'}, - {'name': 'tempest.buzz'} - ] - } - actual_response = self.post_json(self.URL, - params=json.dumps(fake_results)) - responses.append(actual_response) - - page_one = self.get_json(self.URL) - page_two = self.get_json('/v1/results?page=2') - - self.assertEqual(len(page_one['results']), 2) - self.assertEqual(len(page_two['results']), 1) - self.assertNotIn(page_two['results'][0], page_one) - - self.assertEqual(page_one['pagination']['current_page'], 1) - self.assertEqual(page_one['pagination']['total_pages'], 2) - - self.assertEqual(page_two['pagination']['current_page'], 2) - self.assertEqual(page_two['pagination']['total_pages'], 2) - - def test_get_with_not_existing_page(self): - self.assertRaises(webtest.app.AppError, - self.get_json, - '/v1/results?page=2') - - def test_get_with_empty_database(self): - results = self.get_json(self.URL) - self.assertEqual([], results['results']) - - def test_get_with_cpid_filter(self): - self.CONF.set_override('results_per_page', - 2, - 'api') - - responses = [] - for i in range(2): - fake_results = { - 'cpid': '12345', - 'duration_seconds': i, - 'results': [ - {'name': 'tempest.foo'}, - {'name': 'tempest.bar'} - ] - } - json_result = json.dumps(fake_results) - actual_response = self.post_json(self.URL, - params=json_result) - responses.append(actual_response) - - for i in range(3): - fake_results = { - 'cpid': '54321', - 'duration_seconds': i, - 'results': [ - {'name': 'tempest.foo'}, - {'name': 'tempest.bar'} - ] - } - - results = self.get_json('/v1/results?page=1&cpid=12345') - self.assertEqual(len(results), 2) - response_test_ids = [test['test_id'] for test in responses[0:2]] - for r in results['results']: - self.assertIn(r['id'], response_test_ids) - - def test_get_with_date_filters(self): - self.CONF.set_override('results_per_page', - 10, - 'api') - - responses = [] - for i in range(5): - fake_results = { - 'cpid': '12345', - 'duration_seconds': i, - 'results': [ - {'name': 'tempest.foo'}, - {'name': 'tempest.bar'} - ] - } - json_result = json.dumps(fake_results) - actual_response = self.post_json(self.URL, - params=json_result) - responses.append(actual_response) - - all_results = self.get_json(self.URL) - - slice_results = all_results['results'][1:4] - - url = '/v1/results?start_date=%(start)s&end_date=%(end)s' % { - 'start': slice_results[2]['created_at'], - 'end': slice_results[0]['created_at'] - } - - filtering_results = self.get_json(url) - for r in 
slice_results: - self.assertIn(r, filtering_results['results']) - - url = '/v1/results?end_date=1000-01-01 12:00:00' - filtering_results = self.get_json(url) - self.assertEqual([], filtering_results['results']) - - @mock.patch('refstack.api.utils.get_user_id') - def test_get_with_product_id(self, mock_get_user): - user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(user_info) - - mock_get_user.return_value = 'test-open-id' - - fake_product = { - 'name': 'product name', - 'description': 'product description', - 'product_type': api_const.CLOUD, - } - - product = json.dumps(fake_product) - response = self.post_json('/v1/products/', params=product) - product_id = response['id'] - - # Create a version. - version_url = '/v1/products/' + product_id + '/versions' - version = {'cpid': '123', 'version': '6.0'} - post_response = self.post_json(version_url, params=json.dumps(version)) - version_id = post_response['id'] - - # Create a test and associate it to the product version and user. - results = json.dumps(FAKE_TESTS_RESULT) - post_response = self.post_json('/v1/results', params=results) - test_id = post_response['test_id'] - test_info = {'id': test_id, 'product_version_id': version_id} - db.update_test_result(test_info) - db.save_test_result_meta_item(test_id, api_const.USER, 'test-open-id') - - url = self.URL + '?page=1&product_id=' + product_id - - # Test GET. - response = self.get_json(url) - self.assertEqual(1, len(response['results'])) - self.assertEqual(test_id, response['results'][0]['id']) - - # Test unauthorized. - mock_get_user.return_value = 'test-foo-id' - response = self.get_json(url, expect_errors=True) - self.assertEqual(403, response.status_code) - - # Make product public. - product_info = {'id': product_id, 'public': 1} - db.update_product(product_info) - - # Test result is not shared yet, so no tests should return. - response = self.get_json(url) - self.assertFalse(response['results']) - - # Share the test run. - db.save_test_result_meta_item(test_id, api_const.SHARED_TEST_RUN, 1) - response = self.get_json(url) - self.assertEqual(1, len(response['results'])) - self.assertEqual(test_id, response['results'][0]['id']) - - @mock.patch('refstack.api.utils.check_owner') - def test_delete(self, mock_check_owner): - results = json.dumps(FAKE_TESTS_RESULT) - test_response = self.post_json(self.URL, params=results) - test_id = test_response.get('test_id') - url = self.URL + test_id - - mock_check_owner.return_value = True - - # Test can't delete verified test run. - db.update_test_result({'id': test_id, 'verification_status': 1}) - resp = self.delete(url, expect_errors=True) - self.assertEqual(403, resp.status_code) - - # Test can delete verified test run. 
- db.update_test_result({'id': test_id, 'verification_status': 0}) - resp = self.delete(url, expect_errors=True) - self.assertEqual(204, resp.status_code) - - -class TestResultsEndpointNoAnonymous(api.FunctionalTest): - - URL = '/v1/results/' - - def _generate_keypair_(self): - return rsa.generate_private_key( - public_exponent=65537, - key_size=2048, - backend=default_backend() - ) - - def _sign_body_(self, keypair, body): - return keypair.sign(body, padding.PKCS1v15(), hashes.SHA256()) - - def _get_public_key_(self, keypair): - pubkey = keypair.public_key().public_bytes( - serialization.Encoding.OpenSSH, - serialization.PublicFormat.OpenSSH - ) - return pubkey - - def setUp(self): - super(TestResultsEndpointNoAnonymous, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - self.CONF.api.enable_anonymous_upload = False - - self.user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - - db.user_save(self.user_info) - - good_key = self._generate_keypair_() - self.body = json.dumps(FAKE_TESTS_RESULT).encode() - signature = self._sign_body_(good_key, self.body) - pubkey = self._get_public_key_(good_key) - x_signature = binascii.b2a_hex(signature) - - self.good_headers = { - 'X-Signature': x_signature, - 'X-Public-Key': pubkey - } - - self.pubkey_info = { - 'openid': 'test-open-id', - 'format': 'ssh-rsa', - 'pubkey': pubkey.split()[1], - 'comment': 'comment' - } - - db.store_pubkey(self.pubkey_info) - - bad_key = self._generate_keypair_() - bad_signature = self._sign_body_(bad_key, self.body) - bad_pubkey = self._get_public_key_(bad_key) - x_bad_signature = binascii.b2a_hex(bad_signature) - - self.bad_headers = { - 'X-Signature': x_bad_signature, - 'X-Public-Key': bad_pubkey - } - - def test_post_with_no_token(self): - """Test results endpoint with post request.""" - results = json.dumps(FAKE_TESTS_RESULT) - actual_response = self.post_json(self.URL, expect_errors=True, - params=results) - self.assertEqual(actual_response.status_code, 401) - - def test_post_with_valid_token(self): - """Test results endpoint with post request.""" - results = json.dumps(FAKE_TESTS_RESULT) - actual_response = self.post_json(self.URL, - headers=self.good_headers, - params=results) - self.assertIn('test_id', actual_response) - try: - uuid.UUID(actual_response.get('test_id'), version=4) - except ValueError: - self.fail("actual_response doesn't contain test_id") - - def test_post_with_invalid_token(self): - results = json.dumps(FAKE_TESTS_RESULT) - actual_response = self.post_json(self.URL, - headers=self.bad_headers, - expect_errors=True, - params=results) - self.assertEqual(actual_response.status_code, 401) diff --git a/refstack/tests/api/test_vendors.py b/refstack/tests/api/test_vendors.py deleted file mode 100644 index 6fb3de11..00000000 --- a/refstack/tests/api/test_vendors.py +++ /dev/null @@ -1,161 +0,0 @@ -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
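Before the vendor-endpoint tests begin below, one note on the signed-upload cases that closed out test_results.py above: TestResultsEndpointNoAnonymous signs the request body with an RSA key, hex-encodes the signature into the X-Signature header, and ships the OpenSSH-formatted key in X-Public-Key. A rough sketch of the matching verification step using the same cryptography primitives; this is an illustration of the scheme those tests exercise, not the actual refstack server code:

    import binascii

    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.hazmat.primitives.asymmetric import padding


    def signature_is_valid(body, x_signature, x_public_key):
        """Check an X-Signature header against an OpenSSH-formatted public key."""
        public_key = serialization.load_ssh_public_key(x_public_key,
                                                       default_backend())
        try:
            # Same parameters the tests use when signing: PKCS#1 v1.5 + SHA-256.
            public_key.verify(binascii.a2b_hex(x_signature), body,
                              padding.PKCS1v15(), hashes.SHA256())
            return True
        except InvalidSignature:
            return False

A request signed with a different key pair, as in self.bad_headers above, fails this check and is what drives the 401 assertions in test_post_with_invalid_token.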
- -import json -from unittest import mock -import uuid - -from oslo_config import fixture as config_fixture -import webtest.app - -from refstack.api import constants as api_const -from refstack import db -from refstack.tests import api - -FAKE_VENDOR = { - 'name': 'vendor name', - 'description': 'vendor description', -} - - -class TestVendorsEndpoint(api.FunctionalTest): - """Test case for the 'vendors' API endpoint.""" - - URL = '/v1/vendors/' - - def setUp(self): - super(TestVendorsEndpoint, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - self.user_info = { - 'openid': 'test-open-id', - 'email': 'foo@bar.com', - 'fullname': 'Foo Bar' - } - db.user_save(self.user_info) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post(self, mock_get_user): - """Test vendors endpoint with post request.""" - vendor = json.dumps(FAKE_VENDOR) - actual_response = self.post_json(self.URL, params=vendor) - self.assertIn('id', actual_response) - try: - uuid.UUID(actual_response.get('id'), version=4) - except ValueError: - self.fail("actual_response doesn't contain new item id") - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post_with_empty_object(self, mock_get_user): - """Test vendors endpoint with empty vendor request.""" - results = json.dumps(dict()) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=results) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_post_with_invalid_schema(self, mock_get_user): - """Test post request with invalid schema.""" - vendors = json.dumps({ - 'foo': 'bar', - }) - self.assertRaises(webtest.app.AppError, - self.post_json, - self.URL, - params=vendors) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_get_one(self, mock_get_user): - """Test get_one request.""" - vendor = json.dumps(FAKE_VENDOR) - post_response = self.post_json(self.URL, params=vendor) - - get_response = self.get_json(self.URL + post_response.get('id')) - # this fields are only exposed to the owner/foundation. 
- self.assertIn('group_id', get_response) - self.assertIn('created_by_user', get_response) - self.assertIn('properties', get_response) - self.assertIn('created_at', get_response) - self.assertIn('updated_at', get_response) - self.assertEqual(FAKE_VENDOR['name'], - get_response['name']) - self.assertEqual(FAKE_VENDOR['description'], - get_response['description']) - self.assertEqual(api_const.PRIVATE_VENDOR, - get_response['type']) - - # reset auth and check return result for anonymous - mock_get_user.return_value = None - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + post_response.get('id')) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_delete(self, mock_get_user): - """Test delete request.""" - vendor = json.dumps(FAKE_VENDOR) - post_response = self.post_json(self.URL, params=vendor) - self.delete(self.URL + post_response.get('id')) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_action(self, mock_get_user): - """Test action/register request.""" - vendor = json.dumps(FAKE_VENDOR) - post_response = self.post_json(self.URL, params=vendor) - vendor_id = post_response.get('id') - self.post_json(self.URL + vendor_id + '/action', - params=json.dumps({'register': None})) - get_response = self.get_json(self.URL + vendor_id) - self.assertEqual(api_const.PENDING_VENDOR, - get_response['type']) - - @mock.patch('refstack.api.utils.get_user_id', return_value='test-open-id') - def test_update(self, mock_get_user): - """Test put(update) request.""" - vendor = json.dumps(FAKE_VENDOR) - post_response = self.post_json(self.URL, params=vendor) - vendor_id = post_response.get('id') - - # check update of properties - props = {'properties': {'fake01': 'value01'}} - post_response = self.put_json(self.URL + vendor_id, - params=json.dumps(props)) - get_response = self.get_json(self.URL + vendor_id) - self.assertEqual(FAKE_VENDOR['name'], - get_response['name']) - self.assertEqual(FAKE_VENDOR['description'], - get_response['description']) - self.assertEqual(props['properties'], - json.loads(get_response['properties'])) - - # check second update of properties - props = {'properties': {'fake02': 'value03'}} - post_response = self.put_json(self.URL + vendor_id, - params=json.dumps(props)) - get_response = self.get_json(self.URL + vendor_id) - self.assertEqual(props['properties'], - json.loads(get_response['properties'])) - - def test_get_one_invalid_url(self): - """Test get request with invalid url.""" - self.assertRaises(webtest.app.AppError, - self.get_json, - self.URL + 'fake_url') - - def test_get_with_empty_database(self): - """Test get(list) request with no items in DB.""" - results = self.get_json(self.URL) - self.assertEqual([], results['vendors']) diff --git a/refstack/tests/unit/__init__.py b/refstack/tests/unit/__init__.py deleted file mode 100644 index 1c1938b8..00000000 --- a/refstack/tests/unit/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -"""Refstack unittests.""" - -from unittest import mock - -from oslotest import base - - -class RefstackBaseTestCase(base.BaseTestCase): - """Refstack test base class.""" - - def setup_mock(self, *args, **kwargs): - """Mock in test setup.""" - patcher = mock.patch(*args, **kwargs) - self.addCleanup(patcher.stop) - return patcher.start() diff --git a/refstack/tests/unit/test_api.py b/refstack/tests/unit/test_api.py deleted file mode 100644 index 525b06b3..00000000 --- a/refstack/tests/unit/test_api.py +++ /dev/null @@ -1,832 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Tests for API's controllers""" - -import json -from unittest import mock -from urllib import parse - -from oslo_config import fixture as config_fixture -import webob.exc - -from refstack.api import constants as const -from refstack.api.controllers import auth -from refstack.api.controllers import guidelines -from refstack.api.controllers import results -from refstack.api.controllers import user -from refstack.api.controllers import validation -from refstack.api.controllers import vendors -from refstack.api import exceptions as api_exc -from refstack.tests import unit as base - - -class BaseControllerTestCase(base.RefstackBaseTestCase): - - def setUp(self): - super(BaseControllerTestCase, self).setUp() - self.mock_request = self.setup_mock('pecan.request') - self.mock_response = self.setup_mock('pecan.response') - self.mock_abort = \ - self.setup_mock('pecan.abort', - side_effect=webob.exc.HTTPError) - self.mock_get_user_role = \ - self.setup_mock('refstack.api.utils.get_user_role') - self.mock_is_authenticated = \ - self.setup_mock('refstack.api.utils.is_authenticated', - return_value=True, spec=self.setUp) - - -class RootControllerTestCase(BaseControllerTestCase): - - @mock.patch('pecan.expose', return_value=lambda f: f) - def test_index(self, expose_mock): - config = config_fixture.Config() - CONF = self.useFixture(config).conf - CONF.set_override('app_dev_mode', True, 'api') - from refstack.api.controllers import root - controller = root.RootController() - result = controller.index() - self.assertEqual({}, result) - expose_mock.assert_called_with(generic=True, template='index.html') - - -class ResultsControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(ResultsControllerTestCase, self).setUp() - self.validator = mock.Mock() - results.ResultsController.__validator__ = \ - mock.Mock(exposed=False, return_value=self.validator) - self.controller = results.ResultsController() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - self.test_results_url = '/#/results/%s' - self.ui_url = 'host.org' - self.CONF.set_override('test_results_url', - self.test_results_url, - 'api') - self.CONF.set_override('ui_url', self.ui_url) - - @mock.patch('refstack.db.get_test_result') - 
@mock.patch('refstack.db.get_test_results') - def test_get(self, mock_get_test_results, mock_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_FOUNDATION - test_info = {'created_at': 'bar', - 'duration_seconds': 999, - 'meta': {'shared': 'true', 'user': 'fake-user'}} - mock_get_test_result.return_value = test_info - - mock_get_test_results.return_value = [{'name': 'test1'}, - {'name': 'test2'}] - - actual_result = self.controller.get_one('fake_arg') - # All meta should be exposed when user is a Foundation admin. - expected_result = { - 'created_at': 'bar', - 'duration_seconds': 999, - 'results': ['test1', 'test2'], - 'user_role': const.ROLE_FOUNDATION, - 'meta': {'shared': 'true', 'user': 'fake-user'} - } - - self.assertEqual(expected_result, actual_result) - mock_get_test_results.assert_called_once_with('fake_arg') - - # If not owner or Foundation admin, don't show all metadata. - self.mock_get_user_role.return_value = const.ROLE_USER - mock_get_test_result.return_value = test_info - mock_get_test_results.return_value = [{'name': 'test1'}, - {'name': 'test2'}] - actual_result = self.controller.get_one('fake_arg') - expected_result['meta'] = {'shared': 'true'} - expected_result['user_role'] = const.ROLE_USER - self.assertEqual(expected_result, actual_result) - - @mock.patch('refstack.db.get_test_result') - @mock.patch('refstack.db.get_test_results') - def test_get_for_owner(self, mock_get_test_results, mock_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_OWNER - test_info = {'cpid': 'foo', - 'created_at': 'bar', - 'duration_seconds': 999} - mock_get_test_result.return_value = test_info - - mock_get_test_results.return_value = [{'name': 'test1'}, - {'name': 'test2'}] - - actual_result = self.controller.get_one('fake_arg') - expected_result = { - 'cpid': 'foo', - 'created_at': 'bar', - 'duration_seconds': 999, - 'results': ['test1', 'test2'], - 'user_role': const.ROLE_OWNER - } - - self.assertEqual(actual_result, expected_result) - mock_get_test_results.assert_called_once_with('fake_arg') - mock_get_test_result.assert_called_once_with( - 'fake_arg', allowed_keys=['id', 'cpid', 'created_at', - 'duration_seconds', 'meta', - 'product_version', - 'verification_status'] - ) - - @mock.patch('refstack.db.store_test_results') - def test_post(self, mock_store_test_results): - self.mock_request.body = b'{"answer": 42}' - self.mock_request.headers = {} - mock_store_test_results.return_value = 'fake_test_id' - result = self.controller.post() - self.assertEqual( - result, - {'test_id': 'fake_test_id', - 'url': parse.urljoin(self.ui_url, - self.test_results_url) % 'fake_test_id'} - ) - self.assertEqual(self.mock_response.status, 201) - mock_store_test_results.assert_called_once_with({'answer': 42}) - - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.api.utils.check_user_is_product_admin') - @mock.patch('refstack.db.get_product_version_by_cpid') - @mock.patch('refstack.db.store_test_results') - @mock.patch('refstack.db.get_pubkey') - def test_post_with_sign(self, mock_get_pubkey, mock_store_test_results, - mock_get_version, mock_check, mock_foundation): - self.mock_request.body = b'{"answer": 42, "cpid": "123"}' - self.mock_request.headers = { - 'X-Signature': 'fake-sign', - 'X-Public-Key': 'ssh-rsa Zm9vIGJhcg==' - } - - mock_get_pubkey.return_value.openid = 'fake_openid' - mock_get_version.return_value = [{'id': 'ver1', - 'product_id': 'prod1'}] - mock_check.return_value = True - mock_foundation.return_value = False - 
mock_store_test_results.return_value = 'fake_test_id' - result = self.controller.post() - self.assertEqual(result, - {'test_id': 'fake_test_id', - 'url': self.test_results_url % 'fake_test_id'}) - self.assertEqual(self.mock_response.status, 201) - mock_check.assert_called_once_with('prod1', 'fake_openid') - mock_store_test_results.assert_called_once_with( - {'answer': 42, 'cpid': '123', 'product_version_id': 'ver1', - 'meta': {const.USER: 'fake_openid'}} - ) - - @mock.patch('refstack.db.get_test_result') - def test_get_item_failed(self, mock_get_test_result): - mock_get_test_result.return_value = None - self.assertRaises(webob.exc.HTTPError, - self.controller.get_one, - 'fake_id') - - @mock.patch('refstack.api.utils.parse_input_params') - def test_get_failed_in_parse_input_params(self, parse_inputs): - - parse_inputs.side_effect = api_exc.ParseInputsError() - self.assertRaises(api_exc.ParseInputsError, - self.controller.get) - - @mock.patch('refstack.db.get_test_result_records_count') - @mock.patch('refstack.api.utils.parse_input_params') - def test_get_failed_in_get_test_result_records_number(self, - parse_inputs, - db_get_count): - db_get_count.side_effect = api_exc.ParseInputsError() - self.assertRaises(api_exc.ParseInputsError, - self.controller.get) - - @mock.patch('refstack.db.get_test_result_records_count') - @mock.patch('refstack.api.utils.parse_input_params') - @mock.patch('refstack.api.utils.get_page_number') - def test_get_failed_in_get_page_number(self, - get_page, - parse_input, - db_get_count): - - get_page.side_effect = api_exc.ParseInputsError() - self.assertRaises(api_exc.ParseInputsError, - self.controller.get) - - @mock.patch('refstack.db.get_test_result_records') - @mock.patch('refstack.db.get_test_result_records_count') - @mock.patch('refstack.api.utils.parse_input_params') - @mock.patch('refstack.api.utils.get_page_number') - def test_get_failed_in_get_test_result_records(self, - get_page, - parce_input, - db_get_count, - db_get_test_result): - - get_page.return_value = (mock.Mock(), mock.Mock()) - db_get_test_result.side_effect = Exception() - self.assertRaises(webob.exc.HTTPError, - self.controller.get) - - @mock.patch('refstack.api.utils.check_owner') - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.db.get_test_result_records') - @mock.patch('refstack.db.get_test_result_records_count') - @mock.patch('refstack.api.utils.get_page_number') - @mock.patch('refstack.api.utils.parse_input_params') - def test_get_success(self, - parse_input, - get_page, - get_test_result_count, - db_get_test_result, - check_foundation, - check_owner): - - expected_input_params = [ - const.START_DATE, - const.END_DATE, - const.CPID, - const.SIGNED, - const.VERIFICATION_STATUS, - const.PRODUCT_ID - ] - page_number = 1 - total_pages_number = 10 - per_page = 5 - records_count = 50 - get_test_result_count.return_value = records_count - get_page.return_value = (page_number, total_pages_number) - check_foundation.return_value = False - check_owner.return_value = True - self.CONF.set_override('results_per_page', - per_page, - 'api') - - record = {'id': 111, 'created_at': '12345', 'cpid': '54321'} - expected_record = record.copy() - expected_record['url'] = self.test_results_url % record['id'] - - db_get_test_result.return_value = [record] - expected_result = { - 'results': [expected_record], - 'pagination': { - 'current_page': page_number, - 'total_pages': total_pages_number - } - } - - actual_result = self.controller.get() - 
self.assertEqual(expected_result, actual_result) - - parse_input.assert_called_once_with(expected_input_params) - - filters = parse_input.return_value - get_test_result_count.assert_called_once_with(filters) - get_page.assert_called_once_with(records_count) - - db_get_test_result.assert_called_once_with( - page_number, per_page, filters) - - @mock.patch('refstack.db.get_test_result') - @mock.patch('refstack.db.delete_test_result') - def test_delete(self, mock_db_delete, mock_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_OWNER - - self.controller.delete('test_id') - self.assertEqual(204, self.mock_response.status) - - # Verified test deletion attempt should raise error. - mock_get_test_result.return_value = {'verification_status': - const.TEST_VERIFIED} - self.assertRaises(webob.exc.HTTPError, - self.controller.delete, 'test_id') - - self.mock_get_user_role.return_value = const.ROLE_USER - self.assertRaises(webob.exc.HTTPError, - self.controller.delete, 'test_id') - - -class GuidelinesControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(GuidelinesControllerTestCase, self).setUp() - self.controller = guidelines.GuidelinesController() - self.mock_abort.side_effect = None - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_list') - def test_get_guidelines(self, mock_list): - """Test when getting a list of all guideline files.""" - mock_list.return_value = ['2015.03.json'] - result = self.controller.get() - self.assertEqual(['2015.03.json'], result) - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_list') - def test_get_guidelines_error(self, mock_list): - """Test when there is a problem getting the guideline list and - nothing is returned. - """ - mock_list.return_value = None - self.controller.get() - self.mock_abort.assert_called_with(500, mock.ANY) - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_contents') - def test_get_guideline_file(self, mock_get_contents): - """Test when getting a specific guideline file""" - mock_get_contents.return_value = {'foo': 'bar'} - result = self.controller.get_one('2015.03') - self.assertEqual({'foo': 'bar'}, result) - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_contents') - def test_get_guideline_file_error(self, mock_get_contents): - """Test when there is a problem getting the guideline file contents.""" - mock_get_contents.return_value = None - self.controller.get_one('2010.03') - self.mock_abort.assert_called_with(500, mock.ANY) - - -class GuidelinesTestsControllerTestCase(BaseControllerTestCase): - - FAKE_GUIDELINES = { - 'schema': '1.4', - 'platform': {'required': ['compute', 'object']}, - 'components': { - 'compute': { - 'required': ['cap-1'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - }, - 'object': { - 'required': ['cap-2'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - } - }, - 'capabilities': { - 'cap-1': { - 'tests': { - 'test_1': {'idempotent_id': 'id-1234'}, - 'test_2': {'idempotent_id': 'id-5678', - 'aliases': ['test_2_1']}, - 'test_3': {'idempotent_id': 'id-1111', - 'flagged': {'reason': 'foo'}} - } - }, - 'cap-2': { - 'tests': { - 'test_4': {'idempotent_id': 'id-1233'} - } - } - } - } - - def setUp(self): - super(GuidelinesTestsControllerTestCase, self).setUp() - self.controller = guidelines.TestsController() - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_contents') - @mock.patch('pecan.request') - def test_get_guideline_tests(self, mock_request, mock_get_contents): - """Test getting the 
test list string of a guideline.""" - mock_get_contents.return_value = self.FAKE_GUIDELINES - mock_request.GET = {} - test_list_str = self.controller.get('2016,01') - expected_list = ['test_1[id-1234]', 'test_2[id-5678]', - 'test_2_1[id-5678]', 'test_3[id-1111]', - 'test_4[id-1233]'] - expected_result = '\n'.join(expected_list) - self.assertEqual(expected_result, test_list_str) - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_contents') - def test_get_guideline_tests_fail(self, mock_get_contents): - """Test when the JSON content of a guideline can't be retrieved.""" - mock_get_contents.return_value = None - result_str = self.controller.get('2016.02') - self.assertIn('Error getting JSON', result_str) - - @mock.patch('refstack.api.guidelines.Guidelines.get_guideline_contents') - @mock.patch('pecan.request') - def test_get_guideline_tests_invalid_target(self, mock_request, - mock_get_contents): - """Test when the target is invalid.""" - mock_get_contents.return_value = self.FAKE_GUIDELINES - mock_request.GET = {'target': 'foo'} - result_str = self.controller.get('2016.02') - self.assertIn('Invalid target', result_str) - - -class BaseRestControllerWithValidationTestCase(BaseControllerTestCase): - - def setUp(self): - super(BaseRestControllerWithValidationTestCase, self).setUp() - self.validator = mock.Mock() - validation.BaseRestControllerWithValidation.__validator__ = \ - mock.Mock(exposed=False, return_value=self.validator) - self.controller = validation.BaseRestControllerWithValidation() - - @mock.patch('pecan.response') - @mock.patch('pecan.request') - def test_post(self, mock_request, mock_response): - mock_request.body = b'[42]' - self.controller.store_item = mock.Mock(return_value='fake_id') - - result = self.controller.post() - - self.assertEqual(result, 'fake_id') - self.assertEqual(mock_response.status, 201) - self.controller.store_item.assert_called_once_with([42]) - - def test_get_one_return_schema(self): - self.validator.assert_id = mock.Mock(return_value=False) - self.validator.schema = 'fake_schema' - result = self.controller.schema() - self.assertEqual(result, 'fake_schema') - - -class ProfileControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(ProfileControllerTestCase, self).setUp() - self.controller = user.ProfileController() - - @mock.patch('refstack.db.get_foundation_users', - return_value=['foo@bar.org']) - @mock.patch('refstack.db.user_get', - return_value=mock.Mock(openid='foo@bar.org', - email='foo@bar.org', - fullname='Dobby')) - @mock.patch('refstack.api.utils.get_user_session', - return_value={const.USER_OPENID: 'foo@bar.org'}) - def test_get(self, mock_get_user_session, mock_user_get, - mock_get_foundation_users): - actual_result = self.controller.get() - self.assertEqual({'openid': 'foo@bar.org', - 'email': 'foo@bar.org', - 'fullname': 'Dobby', - 'is_admin': True}, actual_result) - - -class AuthControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(AuthControllerTestCase, self).setUp() - self.controller = auth.AuthController() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - self.CONF.set_override('app_dev_mode', True, 'api') - self.CONF.set_override('ui_url', 'http://127.0.0.1') - self.CONF.set_override('openid_logout_endpoint', 'http://some-url', - 'osid') - - @mock.patch('refstack.api.utils.get_user_session') - @mock.patch('pecan.redirect', side_effect=webob.exc.HTTPRedirection) - def test_signed_signin(self, mock_redirect, mock_get_user_session): - 
mock_session = mock.MagicMock(**{const.USER_OPENID: 'foo@bar.org'}) - mock_get_user_session.return_value = mock_session - self.assertRaises(webob.exc.HTTPRedirection, self.controller.signin) - mock_redirect.assert_called_with('http://127.0.0.1') - - @mock.patch('refstack.api.utils.get_user_session') - @mock.patch('pecan.redirect', side_effect=webob.exc.HTTPRedirection) - def test_unsigned_signin(self, mock_redirect, mock_get_user_session): - self.mock_is_authenticated.return_value = False - mock_session = mock.MagicMock(**{const.USER_OPENID: 'foo@bar.org'}) - mock_get_user_session.return_value = mock_session - self.assertRaises(webob.exc.HTTPRedirection, self.controller.signin) - self.assertIn(self.CONF.osid.openstack_openid_endpoint, - mock_redirect.call_args[1]['location']) - - @mock.patch('socket.gethostbyname', return_value='1.1.1.1') - @mock.patch('refstack.api.utils.get_user_session') - @mock.patch('pecan.redirect', side_effect=webob.exc.HTTPRedirection) - def test_signin_return_failed(self, mock_redirect, - mock_get_user_session, - mock_socket): - mock_session = mock.MagicMock(**{const.USER_OPENID: 'foo@bar.org', - const.CSRF_TOKEN: '42'}) - mock_get_user_session.return_value = mock_session - self.mock_request.remote_addr = '1.1.1.2' - - self.mock_request.GET = { - const.OPENID_ERROR: 'foo is not bar!!!' - } - self.mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.assertRaises(webob.exc.HTTPRedirection, - self.controller.signin_return) - mock_redirect.assert_called_once_with( - 'http://127.0.0.1/#/auth_failure?message=foo+is+not+bar%21%21%21') - self.assertNotIn(const.CSRF_TOKEN, - self.mock_request.environ['beaker.session']) - - mock_redirect.reset_mock() - self.mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.mock_request.GET = { - const.OPENID_MODE: 'cancel' - } - self.assertRaises(webob.exc.HTTPRedirection, - self.controller.signin_return) - mock_redirect.assert_called_once_with( - 'http://127.0.0.1/#/auth_failure?message=Authentication+canceled.') - self.assertNotIn(const.CSRF_TOKEN, - self.mock_request.environ['beaker.session']) - - mock_redirect.reset_mock() - self.mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.mock_request.GET = {} - self.assertRaises(webob.exc.HTTPRedirection, - self.controller.signin_return) - mock_redirect.assert_called_once_with( - 'http://127.0.0.1/#/auth_failure' - '?message=Authentication+failed.+Please+try+again.') - self.assertNotIn(const.CSRF_TOKEN, - self.mock_request.environ['beaker.session']) - - mock_redirect.reset_mock() - self.mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.mock_request.GET = {const.CSRF_TOKEN: '24'} - self.mock_request.remote_addr = '1.1.1.1' - self.assertRaises(webob.exc.HTTPRedirection, - self.controller.signin_return) - mock_redirect.assert_called_once_with( - 'http://127.0.0.1/#/auth_failure' - '?message=Authentication+failed.+Please+try+again.') - self.assertNotIn(const.CSRF_TOKEN, - self.mock_request.environ['beaker.session']) - - @mock.patch('refstack.api.utils.verify_openid_request', return_value=True) - @mock.patch('refstack.db.user_save') - @mock.patch('refstack.api.utils.get_user_session') - @mock.patch('pecan.redirect', side_effect=webob.exc.HTTPRedirection) - def test_signin_return_success(self, mock_redirect, mock_get_user_session, - mock_user, mock_verify): - mock_session = mock.MagicMock(**{const.USER_OPENID: 'foo@bar.org', - const.CSRF_TOKEN: 42}) - mock_session.get = mock.Mock(return_value=42) - 
mock_get_user_session.return_value = mock_session - - self.mock_request.GET = { - const.OPENID_CLAIMED_ID: 'foo@bar.org', - const.OPENID_NS_SREG_EMAIL: 'foo@bar.org', - const.OPENID_NS_SREG_FULLNAME: 'foo', - const.CSRF_TOKEN: 42 - } - self.mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.assertRaises(webob.exc.HTTPRedirection, - self.controller.signin_return) - - @mock.patch('pecan.request') - @mock.patch('pecan.redirect', side_effect=webob.exc.HTTPRedirection) - def test_signout(self, mock_redirect, mock_request): - mock_request.environ['beaker.session'] = { - const.CSRF_TOKEN: 42 - } - self.assertRaises(webob.exc.HTTPRedirection, self.controller.signout) - mock_redirect.assert_called_with('http://127.0.0.1/#/logout?' - 'openid_logout=http%3A%2F%2Fsome-url') - self.assertNotIn(const.CSRF_TOKEN, - mock_request.environ['beaker.session']) - - -class MetadataControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(MetadataControllerTestCase, self).setUp() - self.controller = results.MetadataController() - - @mock.patch('refstack.db.get_test_result') - def test_get(self, mock_db_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_USER - mock_db_get_test_result.return_value = {'meta': {'shared': 'true', - 'user': 'fake-user'}} - # Only the key 'shared' should be allowed through. - self.assertEqual({'shared': 'true'}, self.controller.get('test_id')) - mock_db_get_test_result.assert_called_once_with('test_id') - - # Test that the result owner can see all metadata keys. - self.mock_get_user_role.return_value = const.ROLE_OWNER - self.assertEqual({'shared': 'true', 'user': 'fake-user'}, - self.controller.get('test_id')) - - # Test that a Foundation admin can see all metadata keys. - self.mock_get_user_role.return_value = const.ROLE_FOUNDATION - self.assertEqual({'shared': 'true', 'user': 'fake-user'}, - self.controller.get('test_id')) - - @mock.patch('refstack.db.get_test_result_meta_key') - def test_get_one(self, mock_db_get_test_result_meta_key): - self.mock_get_user_role.return_value = const.ROLE_USER - - # Test when key is not an allowed key. - self.assertRaises(webob.exc.HTTPError, - self.controller.get_one, 'test_id', 'answer') - - # Test when key is an allowed key. - mock_db_get_test_result_meta_key.return_value = 42 - self.assertEqual(42, self.controller.get_one('test_id', 'shared')) - mock_db_get_test_result_meta_key.assert_called_once_with( - 'test_id', 'shared') - - # Test when the user owns the test result. - self.mock_get_user_role.return_value = const.ROLE_OWNER - self.assertEqual(42, self.controller.get_one('test_id', 'user')) - - # Test when the user is a Foundation admin. - self.mock_get_user_role.return_value = const.ROLE_FOUNDATION - self.assertEqual(42, self.controller.get_one('test_id', 'user')) - - @mock.patch('refstack.db.get_test_result') - @mock.patch('refstack.db.save_test_result_meta_item') - def test_post(self, mock_save_test_result_meta_item, mock_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_OWNER - mock_get_test_result.return_value = { - 'verification_status': const.TEST_NOT_VERIFIED - } - - # Test trying to post a valid key. - self.controller.post('test_id', 'shared') - self.assertEqual(201, self.mock_response.status) - mock_save_test_result_meta_item.assert_called_once_with( - 'test_id', 'shared', self.mock_request.body) - - # Test trying to post an invalid key. 
- self.assertRaises(webob.exc.HTTPError, - self.controller.post, 'test_id', 'user') - - # Test when not an owner of the result. - self.mock_get_user_role.return_value = const.ROLE_USER - self.mock_abort.side_effect = webob.exc.HTTPError() - self.assertRaises(webob.exc.HTTPError, - self.controller.post, 'test_id', 'shared') - - @mock.patch('refstack.db.get_test_result') - @mock.patch('refstack.db.delete_test_result_meta_item') - def test_delete(self, mock_delete_test_result_meta_item, - mock_get_test_result): - self.mock_get_user_role.return_value = const.ROLE_OWNER - mock_get_test_result.return_value = { - 'verification_status': const.TEST_NOT_VERIFIED - } - self.controller.delete('test_id', 'shared') - self.assertEqual(204, self.mock_response.status) - mock_delete_test_result_meta_item.assert_called_once_with( - 'test_id', 'shared') - - # The key 'user' is not a valid key that can be deleted. - self.assertRaises(webob.exc.HTTPError, - self.controller.delete, 'test_id', 'user') - - self.mock_get_user_role.return_value = const.ROLE_USER - self.mock_abort.side_effect = webob.exc.HTTPError() - self.assertRaises(webob.exc.HTTPError, - self.controller.delete, 'test_id', 'answer') - - -class PublicKeysControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(PublicKeysControllerTestCase, self).setUp() - self.controller = user.PublicKeysController() - - @mock.patch('refstack.api.utils.get_user_public_keys') - def test_get(self, mock_get_user_public_keys): - mock_get_user_public_keys.return_value = 42 - self.assertEqual(42, self.controller.get()) - mock_get_user_public_keys.assert_called_once_with() - - @mock.patch('refstack.api.utils.get_user_id') - @mock.patch('refstack.db.store_pubkey') - def test_post(self, mock_store_pubkey, mock_get_user_id): - self.controller.validator.validate = mock.Mock() - mock_get_user_id.return_value = 'fake_id' - mock_store_pubkey.return_value = 42 - raw_key = 'fake key Don\'t_Panic.' 
- fake_pubkey = { - 'format': 'fake', - 'pubkey': 'key', - 'comment': 'Don\'t_Panic.', - 'openid': 'fake_id' - } - self.mock_request.body = json.dumps( - {'raw_key': raw_key} - ).encode('utf-8') - self.controller.post() - self.assertEqual(201, self.mock_response.status) - mock_store_pubkey.assert_called_once_with(fake_pubkey) - mock_store_pubkey.reset_mock() - - raw_key = 'fake key' - fake_pubkey = { - 'format': 'fake', - 'pubkey': 'key', - 'comment': '', - 'openid': 'fake_id' - } - self.mock_request.body = json.dumps( - {'raw_key': raw_key} - ).encode('utf-8') - self.controller.post() - mock_store_pubkey.assert_called_once_with(fake_pubkey) - - @mock.patch('refstack.db.delete_pubkey') - @mock.patch('refstack.api.utils.get_user_public_keys') - def test_delete(self, mock_get_user_public_keys, mock_delete_pubkey): - mock_get_user_public_keys.return_value = ({'id': 'key_id'},) - self.controller.delete('key_id') - self.assertEqual(204, self.mock_response.status) - mock_delete_pubkey.assert_called_once_with('key_id') - - self.assertRaises(webob.exc.HTTPError, - self.controller.delete, 'other_key_id') - - -class VendorUsersControllerTestCase(BaseControllerTestCase): - - def setUp(self): - super(VendorUsersControllerTestCase, self).setUp() - self.controller = vendors.UsersController() - - @mock.patch('refstack.db.get_organization_users') - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.api.utils.check_user_is_vendor_admin') - def test_get(self, mock_vendor, mock_foundation, mock_db_get_org_users): - mock_vendor.return_value = True - mock_foundation.return_value = False - mock_db_get_org_users.return_value = { - 'foobar': { - 'openid': 'foobar', - 'fullname': 'Foo Bar', - 'email': 'foo@bar.com' - } - } - expected = [{'openid': 'foobar', - 'fullname': 'Foo Bar', - 'email': 'foo@bar.com'}] - self.assertEqual(expected, self.controller.get('some-org')) - - mock_vendor.return_value = False - self.assertIsNone(self.controller.get('some-org')) - - mock_foundation.return_value = True - self.assertEqual(expected, self.controller.get('some-org')) - - @mock.patch('refstack.db.add_user_to_group') - @mock.patch('refstack.db.get_organization') - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.api.utils.check_user_is_vendor_admin') - @mock.patch('refstack.api.utils.get_user_id') - def test_put(self, mock_get_user, mock_vendor, mock_foundation, - mock_db_org, mock_add): - # This is 'foo' in Base64 - encoded_openid = 'Zm9v' - mock_vendor.return_value = True - mock_foundation.return_value = False - mock_db_org.return_value = {'group_id': 'abc'} - mock_get_user.return_value = 'fake-id' - - self.controller.put('fake-vendor', encoded_openid) - mock_add.assert_called_once_with(b'foo', 'abc', 'fake-id') - - mock_vendor.return_value = False - self.assertRaises(webob.exc.HTTPError, - self.controller.put, 'fake-vendor', encoded_openid) - - @mock.patch('refstack.db.remove_user_from_group') - @mock.patch('refstack.db.get_organization') - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('refstack.api.utils.check_user_is_vendor_admin') - def test_delete(self, mock_vendor, mock_foundation, mock_db_org, - mock_remove): - # This is 'foo' in Base64 - encoded_openid = 'Zm9v' - mock_vendor.return_value = True - mock_foundation.return_value = False - mock_db_org.return_value = {'group_id': 'abc'} - self.controller.delete('fake-vendor', encoded_openid) - mock_remove.assert_called_with(b'foo', 'abc') - - 
mock_vendor.return_value = False - self.assertRaises(webob.exc.HTTPError, self.controller.delete, - 'fake-vendor', encoded_openid) diff --git a/refstack/tests/unit/test_api_utils.py b/refstack/tests/unit/test_api_utils.py deleted file mode 100644 index cfffc374..00000000 --- a/refstack/tests/unit/test_api_utils.py +++ /dev/null @@ -1,578 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Tests for API's utils""" -import time -from unittest import mock -from urllib import parse - -import jwt -from oslo_config import fixture as config_fixture -from oslo_utils import timeutils -from oslotest import base -from pecan import rest -from webob import exc - -from refstack.api import constants as const -from refstack.api import exceptions as api_exc -from refstack.api import utils as api_utils -from refstack import db - -PRIV_KEY = '''-----BEGIN PRIVATE KEY----- -MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEA2tgf+sqQ/aI7Cytr -cpQYzbpOk1xy9GQP+kFN8ewIJgSLKX9bJf+7YqRuK8vsdtmPWVaLZtKTpPnXL0lM -jMotYwIDAQABAkA1eKtPruEAZ/w/PWuygkcRNV1vmh4oYq6Yug4ed0qCZxPxkBNx -0nnK9LeiWDnSCQ/Fi46y7XS6BLsbZ2wqGarJAiEA+r6oaDqFoScgl7KyQfkIY7ph -bnlIxVm4HWCLwEH4020CIQDfbk76sO8NuUbSaU6tIAoF9jmtaSW7kMr8/7M+SISy -DwIhAKsUaLzsqP4iPyehoeRHcMTyhsWkdNVJ+Mf6dn+Pw6ElAiEAnHFgW6gHulRA -gpO5wv7sBcCiIgm9odeASiXAG5wrTYECIHKU0v03nQlGOL2HUognsEw/nihi/667 -pcPXhEWd4qmC ------END PRIVATE KEY-----''' - -PUB_KEY = ('AAAAB3NzaC1yc2EAAAADAQABAAAAQQDa2B/6ypD9ojsLK2tylBjNuk6TXH' - 'L0ZA/6QU3x7AgmBIspf1sl/7tipG4ry+x22Y9ZVotm0pOk+dcvSUyMyi1j') - - -class APIUtilsTestCase(base.BaseTestCase): - - def setUp(self): - super(APIUtilsTestCase, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - @mock.patch('pecan.request') - def test_get_input_params_from_request_all_results(self, mock_request): - received_params = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:45', - const.CPID: '12345', - } - - expected_params = [ - const.START_DATE, - const.END_DATE, - const.CPID - ] - - mock_request.GET = received_params - - result = api_utils._get_input_params_from_request(expected_params) - - self.assertEqual(result, received_params) - - @mock.patch('pecan.request') - def test_get_input_params_from_request_partial_results(self, - mock_request): - received_params = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:45', - const.CPID: '12345', - } - - expected_params = [ - const.START_DATE, - const.END_DATE, - ] - - expected_results = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:45', - } - - mock_request.GET = received_params - - result = api_utils._get_input_params_from_request(expected_params) - - self.assertEqual(result, expected_results) - - @mock.patch('oslo_utils.timeutils.parse_strtime') - @mock.patch.object(api_utils, '_get_input_params_from_request') - def test_parse_input_params_failed_in_parse_time(self, mock_get_input, - 
mock_strtime): - fmt = '%Y-%m-%d %H:%M:%S' - self.CONF.set_override('input_date_format', - fmt, - 'api') - raw_filters = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:45', - const.CPID: '12345', - } - - expected_params = mock.Mock() - mock_get_input.return_value = raw_filters - mock_strtime.side_effect = ValueError() - self.assertRaises(api_exc.ParseInputsError, - api_utils.parse_input_params, - expected_params) - - @mock.patch.object(api_utils, '_get_input_params_from_request') - def test_parse_input_params_failed_in_compare_date(self, mock_get_input): - fmt = '%Y-%m-%d %H:%M:%S' - self.CONF.set_override('input_date_format', - fmt, - 'api') - raw_filters = { - const.START_DATE: '2015-03-26 15:04:50', - const.END_DATE: '2015-03-26 15:04:40', - const.CPID: '12345', - } - - expected_params = mock.Mock() - mock_get_input.return_value = raw_filters - self.assertRaises(api_exc.ParseInputsError, - api_utils.parse_input_params, - expected_params) - - @mock.patch.object(api_utils, '_get_input_params_from_request') - @mock.patch.object(api_utils, 'is_authenticated', return_value=False) - def test_parse_input_params_failed_in_auth(self, mock_is_authenticated, - mock_get_input): - fmt = '%Y-%m-%d %H:%M:%S' - self.CONF.set_override('input_date_format', - fmt, - 'api') - raw_filters = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:50', - const.CPID: '12345', - const.SIGNED: True - } - expected_params = mock.Mock() - mock_get_input.return_value = raw_filters - self.assertRaises(api_exc.ParseInputsError, - api_utils.parse_input_params, expected_params) - - @mock.patch.object(api_utils, '_get_input_params_from_request') - @mock.patch.object(api_utils, 'is_authenticated', return_value=True) - @mock.patch.object(api_utils, 'get_user_id', return_value='fake_id') - def test_parse_input_params_success(self, - mock_get_user_id, - mock_is_authenticated, - mock_get_input): - fmt = '%Y-%m-%d %H:%M:%S' - self.CONF.set_override('input_date_format', - fmt, - 'api') - raw_filters = { - const.START_DATE: '2015-03-26 15:04:40', - const.END_DATE: '2015-03-26 15:04:50', - const.CPID: '12345', - const.SIGNED: True - } - - expected_params = mock.Mock() - mock_get_input.return_value = raw_filters - - parsed_start_date = timeutils.parse_strtime( - raw_filters[const.START_DATE], - fmt - ) - - parsed_end_date = timeutils.parse_strtime( - raw_filters[const.END_DATE], - fmt - ) - - expected_result = { - const.START_DATE: parsed_start_date, - const.END_DATE: parsed_end_date, - const.CPID: '12345', - const.SIGNED: True, - const.OPENID: 'fake_id', - } - - result = api_utils.parse_input_params(expected_params) - self.assertEqual(expected_result, result) - - mock_get_input.assert_called_once_with(expected_params) - - def test_str_to_bool(self): - self.assertTrue(api_utils.str_to_bool('True')) - self.assertTrue(api_utils.str_to_bool('1')) - self.assertTrue(api_utils.str_to_bool('YES')) - self.assertFalse(api_utils.str_to_bool('False')) - self.assertFalse(api_utils.str_to_bool('no')) - - def test_calculate_pages_number_full_pages(self): - # expected pages number: 20/10 = 2 - page_number = api_utils._calculate_pages_number(10, 20) - self.assertEqual(page_number, 2) - - def test_calculate_pages_number_half_page(self): - # expected pages number: 25/10 - # => quotient == 2 and remainder == 5 - # => total number of pages == 3 - page_number = api_utils._calculate_pages_number(10, 25) - self.assertEqual(page_number, 3) - - @mock.patch('pecan.request') - def 
test_get_page_number_page_number_is_none(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: None - } - - page_number, total_pages = api_utils.get_page_number(total_records) - - self.assertEqual(page_number, 1) - self.assertEqual(total_pages, total_records / per_page) - - @mock.patch('pecan.request') - def test_get_page_number_page_number_not_int(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: 'abc' - } - - self.assertRaises(api_exc.ParseInputsError, - api_utils.get_page_number, - total_records) - - @mock.patch('pecan.request') - def test_get_page_number_page_number_is_one(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: '1' - } - - page_number, total_pages = api_utils.get_page_number(total_records) - - self.assertEqual(page_number, 1) - self.assertEqual(total_pages, total_records / per_page) - - @mock.patch('pecan.request') - def test_get_page_number_page_number_less_zero(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: '-1' - } - - self.assertRaises(api_exc.ParseInputsError, - api_utils.get_page_number, - total_records) - - @mock.patch('pecan.request') - def test_get_page_number_page_number_more_than_total(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: '100' - } - - self.assertRaises(api_exc.ParseInputsError, - api_utils.get_page_number, - total_records) - - @mock.patch('pecan.request') - def test_get_page_number_success(self, mock_request): - per_page = 20 - total_records = 100 - self.CONF.set_override('results_per_page', - per_page, - 'api') - mock_request.GET = { - const.PAGE: '2' - } - - page_number, total_pages = api_utils.get_page_number(total_records) - - self.assertEqual(page_number, 2) - self.assertEqual(total_pages, total_records / per_page) - - def test_set_query_params(self): - url = 'http://e.io/path#fragment' - new_url = api_utils.set_query_params(url, {'foo': 'bar', '?': 42}) - self.assertEqual(parse.parse_qs(parse.urlparse(new_url)[4]), - {'foo': ['bar'], '?': ['42']}) - - def test_get_token(self): - token = api_utils.get_token(42) - self.assertRegex(token, "[a-z]{42}") - - @mock.patch.object(api_utils, 'get_user_session') - def test_delete_params_from_user_session(self, mock_get_user_session): - mock_session = mock.MagicMock(**{'foo': 'bar', 'answer': 42}) - mock_get_user_session.return_value = mock_session - api_utils.delete_params_from_user_session(('foo', 'answer')) - self.assertNotIn('foo', mock_session.__dir__) - self.assertNotIn('answer', mock_session.__dir__) - mock_session.save.called_once_with() - - @mock.patch('pecan.request') - def test_get_user_session(self, mock_request): - mock_request.environ = {'beaker.session': 42} - session = api_utils.get_user_session() - self.assertEqual(42, session) - - @mock.patch.object(api_utils, 'get_user_session') - @mock.patch.object(api_utils, 'db') - @mock.patch('pecan.request') - def test_is_authenticated(self, mock_request, - mock_db, mock_get_user_session): - mock_request.headers = {} - mock_session = {const.USER_OPENID: 'foo@bar.com'} - 
mock_get_user_session.return_value = mock_session - mock_get_user = mock_db.user_get - mock_get_user.return_value = 'FAKE_USER' - self.assertTrue(api_utils.is_authenticated()) - mock_db.user_get.assert_called_once_with('foo@bar.com') - - mock_request.environ = { - const.JWT_TOKEN_ENV: {const.USER_OPENID: 'foo@bar.com'}} - mock_get_user_session.return_value = {} - mock_get_user.reset_mock() - mock_get_user.return_value = 'FAKE_USER' - self.assertTrue(api_utils.is_authenticated()) - mock_get_user.assert_called_once_with('foo@bar.com') - - mock_db.NotFound = db.NotFound - mock_get_user.side_effect = mock_db.NotFound('User') - self.assertFalse(api_utils.is_authenticated()) - - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('pecan.abort', side_effect=exc.HTTPError) - @mock.patch('refstack.db.get_test_result_meta_key') - @mock.patch('refstack.db.get_test_result') - @mock.patch.object(api_utils, 'is_authenticated') - @mock.patch.object(api_utils, 'get_user_id') - def test_check_get_user_role(self, mock_get_user_id, - mock_is_authenticated, - mock_get_test_result, - mock_get_test_result_meta_key, - mock_pecan_abort, - mock_check_foundation): - # Check user level - mock_check_foundation.return_value = False - mock_get_test_result_meta_key.return_value = None - mock_get_test_result.return_value = {} - self.assertEqual(const.ROLE_USER, api_utils.get_user_role('fake_test')) - api_utils.enforce_permissions('fake_test', const.ROLE_USER) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_OWNER) - - mock_get_test_result_meta_key.side_effect = { - ('fake_test', const.USER): 'fake_openid', - ('fake_test', const.SHARED_TEST_RUN): 'true', - }.get - self.assertEqual(const.ROLE_USER, api_utils.get_user_role('fake_test')) - api_utils.enforce_permissions('fake_test', const.ROLE_USER) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_OWNER) - - mock_is_authenticated.return_value = True - mock_get_user_id.return_value = 'fake_openid' - mock_get_test_result_meta_key.side_effect = { - ('fake_test', const.USER): 'fake_openid', - ('fake_test', const.SHARED_TEST_RUN): 'true', - }.get - self.assertEqual(const.ROLE_USER, api_utils.get_user_role('fake_test')) - api_utils.enforce_permissions('fake_test', const.ROLE_USER) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_OWNER) - - # Check owner level - mock_is_authenticated.return_value = True - mock_get_user_id.return_value = 'fake_openid' - mock_get_test_result_meta_key.side_effect = lambda *args: { - ('fake_test', const.USER): 'fake_openid', - ('fake_test', const.SHARED_TEST_RUN): None, - }.get(args) - self.assertEqual(const.ROLE_OWNER, - api_utils.get_user_role('fake_test')) - api_utils.enforce_permissions('fake_test', const.ROLE_USER) - api_utils.enforce_permissions('fake_test', const.ROLE_OWNER) - - # Check negative cases - mock_is_authenticated.return_value = False - mock_get_test_result_meta_key.side_effect = lambda *args: { - ('fake_test', const.USER): 'fake_openid', - ('fake_test', const.SHARED_TEST_RUN): None, - }.get(args) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_USER) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_OWNER) - - mock_is_authenticated.return_value = True - mock_get_user_id.return_value = 'fake_openid' - mock_get_test_result_meta_key.side_effect = lambda *args: { - ('fake_test', const.USER): 
'some_other_user', - ('fake_test', const.SHARED_TEST_RUN): None, - }.get(args) - self.assertIsNone(api_utils.get_user_role('fake_test')) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_USER) - self.assertRaises(exc.HTTPError, api_utils.enforce_permissions, - 'fake_test', const.ROLE_OWNER) - - @mock.patch('refstack.api.utils.check_user_is_foundation_admin') - @mock.patch('pecan.abort', side_effect=exc.HTTPError) - @mock.patch('refstack.db.get_test_result_meta_key') - @mock.patch('refstack.db.get_test_result') - @mock.patch.object(api_utils, 'is_authenticated') - @mock.patch.object(api_utils, 'get_user_id') - def test_check_permissions(self, mock_get_user_id, - mock_is_authenticated, - mock_get_test_result, - mock_get_test_result_meta_key, - mock_pecan_abort, - mock_foundation_check): - - @api_utils.check_permissions(level=const.ROLE_USER) - class ControllerWithPermissions(rest.RestController): - - def get(self, test_id): - return test_id - - @api_utils.check_permissions(level=const.ROLE_OWNER) - def delete(self, test_id): - return test_id - - @api_utils.check_permissions(level='fake_role') - def post(self, test_id): - return test_id - - fake_controller = ControllerWithPermissions() - - public_test = 'fake_public_test' - private_test = 'fake_test' - - mock_get_user_id.return_value = 'fake_openid' - mock_get_test_result.return_value = {} - mock_get_test_result_meta_key.side_effect = lambda *args: { - (public_test, const.USER): None, - (private_test, const.USER): 'fake_openid', - (private_test, const.SHARED_TEST_RUN): None, - }.get(args) - - mock_is_authenticated.return_value = True - mock_foundation_check.return_value = False - self.assertEqual(public_test, fake_controller.get(public_test)) - self.assertRaises(exc.HTTPError, fake_controller.delete, public_test) - self.assertEqual(private_test, fake_controller.get(private_test)) - self.assertEqual(private_test, fake_controller.delete(private_test)) - - mock_is_authenticated.return_value = False - self.assertEqual(public_test, fake_controller.get(public_test)) - self.assertRaises(exc.HTTPError, fake_controller.delete, public_test) - self.assertRaises(exc.HTTPError, fake_controller.get, private_test) - self.assertRaises(exc.HTTPError, fake_controller.delete, private_test) - - self.assertRaises(ValueError, fake_controller.post, public_test) - - @mock.patch('requests.post') - @mock.patch('pecan.abort') - def test_verify_openid_request(self, mock_abort, mock_post): - mock_response = mock.Mock() - mock_response.content = ('is_valid:true\n' - 'ns:http://specs.openid.net/auth/2.0\n') - mock_response.status_code = 200 - mock_post.return_value = mock_response - mock_request = mock.Mock() - mock_request.params = { - const.OPENID_NS_SREG_EMAIL: 'foo@bar.org', - const.OPENID_NS_SREG_FULLNAME: 'foo' - } - self.assertTrue(api_utils.verify_openid_request(mock_request)) - - mock_response.content = ('is_valid:false\n' - 'ns:http://specs.openid.net/auth/2.0\n') - api_utils.verify_openid_request(mock_request) - mock_abort.assert_called_once_with( - 401, 'Authentication is failed. Try again.' - ) - - mock_abort.reset_mock() - mock_response.content = ('is_valid:true\n' - 'ns:http://specs.openid.net/auth/2.0\n') - mock_request.params = { - const.OPENID_NS_SREG_EMAIL: 'foo@bar.org', - } - api_utils.verify_openid_request(mock_request) - mock_abort.assert_called_once_with( - 401, 'Authentication is failed. ' - 'Please permit access to your name.' 
- ) - - @mock.patch('refstack.db.get_organization_users') - @mock.patch.object(api_utils, 'get_user_id', return_value='fake_id') - def test_check_user_is_vendor_admin(self, mock_user, mock_db): - mock_user.return_value = 'some-user' - mock_db.return_value = ['some-user', 'another-user'] - result = api_utils.check_user_is_vendor_admin('some-vendor') - self.assertTrue(result) - - mock_db.return_value = ['another-user'] - result = api_utils.check_user_is_vendor_admin('some-vendor') - self.assertFalse(result) - - @mock.patch('refstack.db.get_user_pubkeys') - def test_encode_token(self, mock_pubkey): - mock_request = mock.MagicMock() - mock_request.headers = {} - self.assertIsNone(api_utils.decode_token(mock_request)) - - fake_token = jwt.encode({'foo': 'bar'}, key=PRIV_KEY, - algorithm='RS256') - auth_str = 'Bearer %s' % fake_token - mock_request.headers = {const.JWT_TOKEN_HEADER: auth_str} - self.assertRaises(api_exc.ValidationError, api_utils.decode_token, - mock_request) - - fake_token = jwt.encode({const.USER_OPENID: 'oid'}, key=PRIV_KEY, - algorithm='RS256') - auth_str = 'Bearer %s' % fake_token - mock_request.headers = {const.JWT_TOKEN_HEADER: auth_str} - mock_pubkey.return_value = [{'format': 'ssh-rsa', - 'pubkey': 'fakepubkey'}] - self.assertRaises(api_exc.ValidationError, api_utils.decode_token, - mock_request) - - mock_pubkey.return_value = [{'format': 'ssh-rsa', - 'pubkey': PUB_KEY}] - self.assertRaises(api_exc.ValidationError, api_utils.decode_token, - mock_request) - - fake_token = jwt.encode({const.USER_OPENID: 'oid', - 'exp': int(time.time()) + 3600}, - key=PRIV_KEY, - algorithm='RS256') - auth_str = 'Bearer %s' % fake_token - mock_request.headers = {const.JWT_TOKEN_HEADER: auth_str} - mock_pubkey.return_value = [{'format': 'ssh-rsa', - 'pubkey': PUB_KEY}] - self.assertEqual('oid', - api_utils.decode_token( - mock_request)[const.USER_OPENID]) diff --git a/refstack/tests/unit/test_app.py b/refstack/tests/unit/test_app.py deleted file mode 100644 index 0a5015b2..00000000 --- a/refstack/tests/unit/test_app.py +++ /dev/null @@ -1,242 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Tests for API's utility""" - -import json -from unittest import mock - -from oslo_config import fixture as config_fixture -from oslotest import base -import pecan -import webob - -from refstack.api import app -from refstack.api import exceptions as api_exc - - -def get_response_kwargs(response_mock): - _, kwargs = response_mock.call_args - if kwargs['body']: - kwargs['body'] = json.loads(kwargs.get('body', '')) - return kwargs - - -class JSONErrorHookTestCase(base.BaseTestCase): - - def setUp(self): - super(JSONErrorHookTestCase, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - def _on_error(self, response, exc, expected_status_code, expected_body): - response.return_value = 'fake_value' - hook = app.JSONErrorHook() - result = hook.on_error(mock.Mock(), exc) - self.assertEqual(result, 'fake_value') - self.assertEqual( - dict(body=expected_body, - status=expected_status_code, - charset='UTF-8', - content_type='application/json'), - get_response_kwargs(response) - ) - - @mock.patch.object(webob, 'Response') - def test_on_error_with_webob_instance(self, response): - self.CONF.set_override('app_dev_mode', False, 'api') - exc = mock.Mock(spec=webob.exc.HTTPError, - status=418, status_int=418, - title='fake_title', - detail='fake_detail') - - self._on_error( - response, exc, expected_status_code=exc.status, - expected_body={'code': exc.status_int, - 'title': exc.title, - 'detail': exc.detail} - ) - - @mock.patch.object(webob, 'Response') - def test_on_error_with_validation_error(self, response): - self.CONF.set_override('app_dev_mode', False, 'api') - exc = mock.MagicMock(spec=api_exc.ValidationError, - title='No No No!') - exc.args = ('No No No!',) - self._on_error( - response, exc, expected_status_code=400, - expected_body={'code': 400, 'title': exc.title} - ) - - self.CONF.set_override('app_dev_mode', True, 'api') - self._on_error( - response, exc, expected_status_code=400, - expected_body={'code': 400, 'title': exc.title, - 'detail': str(exc)} - ) - - @mock.patch.object(webob, 'Response') - def test_on_http_redirection(self, response): - self.CONF.set_override('app_dev_mode', False, 'api') - - exc = mock.Mock(spec=webob.exc.HTTPRedirection) - hook = app.JSONErrorHook() - result = hook.on_error(mock.Mock(), exc) - self.assertIsNone(result) - - @mock.patch.object(webob, 'Response') - def test_on_error_with_other_exceptions(self, response): - self.CONF.set_override('app_dev_mode', False, 'api') - exc = mock.Mock(status=500) - - self._on_error( - response, exc, expected_status_code=500, - expected_body={'code': 500, 'title': 'Internal Server Error'} - ) - - self.CONF.set_override('app_dev_mode', True, 'api') - self._on_error( - response, exc, expected_status_code=500, - expected_body={'code': 500, 'title': 'Internal Server Error', - 'detail': str(exc)} - ) - - -class CORSHookTestCase(base.BaseTestCase): - """ - Tests for the CORS hook used by the application. 
- """ - - def setUp(self): - super(CORSHookTestCase, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - def test_allowed_origin(self): - """Test when the origin is in the list of allowed origins.""" - self.CONF.set_override('allowed_cors_origins', 'test.com', 'api') - hook = app.CORSHook() - request = pecan.core.Request({}) - request.headers = {'Origin': 'test.com'} - state = pecan.core.RoutingState(request, pecan.core.Response(), None) - hook.after(state) - - self.assertIn('Access-Control-Allow-Origin', state.response.headers) - allow_origin = state.response.headers['Access-Control-Allow-Origin'] - self.assertEqual('test.com', allow_origin) - - self.assertIn('Access-Control-Allow-Methods', state.response.headers) - allow_methods = state.response.headers['Access-Control-Allow-Methods'] - self.assertEqual('GET, OPTIONS, PUT, POST', allow_methods) - - self.assertIn('Access-Control-Allow-Headers', state.response.headers) - allow_headers = state.response.headers['Access-Control-Allow-Headers'] - self.assertEqual('origin, authorization, accept, content-type', - allow_headers) - - def test_unallowed_origin(self): - """Test when the origin is not in the list of allowed origins.""" - hook = app.CORSHook() - request_headers = {'Origin': 'test.com'} - request = pecan.core.Request({}) - request.headers = request_headers - state = pecan.core.RoutingState(request, pecan.core.Response(), None) - hook.after(state) - self.assertNotIn('Access-Control-Allow-Origin', state.response.headers) - self.assertNotIn('Access-Control-Allow-Methods', - state.response.headers) - self.assertNotIn('Access-Control-Allow-Headers', - state.response.headers) - - def test_no_origin_header(self): - """Test when there is no 'Origin' header in the request, in which case, - the request is not cross-origin and doesn't need the CORS headers. 
- """ - - hook = app.CORSHook() - request = pecan.core.Request({}) - state = pecan.core.RoutingState(request, pecan.core.Response(), None) - hook.after(state) - self.assertNotIn('Access-Control-Allow-Origin', state.response.headers) - self.assertNotIn('Access-Control-Allow-Methods', - state.response.headers) - self.assertNotIn('Access-Control-Allow-Headers', - state.response.headers) - - -class SetupAppTestCase(base.BaseTestCase): - - def setUp(self): - super(SetupAppTestCase, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - @mock.patch('pecan.hooks') - @mock.patch.object(app, 'JSONErrorHook') - @mock.patch.object(app, 'CORSHook') - @mock.patch.object(app, 'JWTAuthHook') - @mock.patch('os.path.join') - @mock.patch('pecan.make_app') - @mock.patch('refstack.api.app.SessionMiddleware') - @mock.patch('refstack.api.utils.get_token', return_value='42') - def test_setup_app(self, get_token, session_middleware, make_app, os_join, - auth_hook, json_error_hook, cors_hook, pecan_hooks): - - self.CONF.set_override('app_dev_mode', - True, - 'api') - self.CONF.set_override('template_path', - 'fake_template_path', - 'api') - self.CONF.set_override('static_root', - 'fake_static_root', - 'api') - self.CONF.set_override('connection', - 'fake_connection', - 'database') - - os_join.return_value = 'fake_project_root' - - json_error_hook.return_value = 'json_error_hook' - cors_hook.return_value = 'cors_hook' - auth_hook.return_value = 'jwt_auth_hook' - pecan_hooks.RequestViewerHook.return_value = 'request_viewer_hook' - pecan_config = mock.Mock() - pecan_config.app = {'root': 'fake_pecan_config'} - make_app.return_value = 'fake_app' - session_middleware.return_value = 'fake_app_with_middleware' - - result = app.setup_app(pecan_config) - - self.assertEqual(result, 'fake_app_with_middleware') - - app_conf = dict(pecan_config.app) - make_app.assert_called_once_with( - app_conf.pop('root'), - debug=True, - static_root='fake_static_root', - template_path='fake_template_path', - hooks=['jwt_auth_hook', 'cors_hook', 'json_error_hook', - 'request_viewer_hook'] - ) - session_middleware.assert_called_once_with( - 'fake_app', - {'session.key': 'refstack', - 'session.type': 'ext:database', - 'session.url': 'fake_connection', - 'session.timeout': 604800, - 'session.validate_key': get_token.return_value, - 'session.sa.pool_recycle': 600} - ) diff --git a/refstack/tests/unit/test_db.py b/refstack/tests/unit/test_db.py deleted file mode 100644 index 08904fc8..00000000 --- a/refstack/tests/unit/test_db.py +++ /dev/null @@ -1,873 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Tests for database.""" - -import base64 -import hashlib -from unittest import mock - -from oslo_config import fixture as config_fixture -from oslotest import base -import sqlalchemy.orm - -from refstack.api import constants as api_const -from refstack import db -from refstack.db.sqlalchemy import api -from refstack.db.sqlalchemy import models - - -class DBAPITestCase(base.BaseTestCase): - """Test case for database API.""" - - @mock.patch.object(api, 'store_test_results') - def test_store_test_results(self, mock_store_test_results): - db.store_test_results('fake_results') - mock_store_test_results.assert_called_once_with('fake_results') - - @mock.patch.object(api, 'get_test_result') - def test_get_test_result(self, mock_get_test_result): - db.get_test_result(12345) - mock_get_test_result.assert_called_once_with(12345, allowed_keys=None) - - @mock.patch.object(api, 'get_test_results') - def test_get_test_results(self, mock_get_test_results): - db.get_test_results(12345) - mock_get_test_results.assert_called_once_with(12345) - - @mock.patch.object(api, 'get_test_result_records') - def test_get_test_result_records(self, mock_db): - filters = mock.Mock() - db.get_test_result_records(1, 2, filters) - mock_db.assert_called_once_with(1, 2, filters) - - @mock.patch.object(api, 'get_test_result_records_count') - def test_get_test_result_records_count(self, mock_db): - filters = mock.Mock() - db.get_test_result_records_count(filters) - mock_db.assert_called_once_with(filters) - - @mock.patch.object(api, 'user_get') - def test_user_get(self, mock_db): - user_openid = 'user@example.com' - db.user_get(user_openid) - mock_db.assert_called_once_with(user_openid) - - @mock.patch.object(api, 'user_save') - def test_user_save(self, mock_db): - user_info = 'user@example.com' - db.user_save(user_info) - mock_db.assert_called_once_with(user_info) - - -class DBHelpersTestCase(base.BaseTestCase): - """Test case for database backend helpers.""" - - @mock.patch.object(api, '_create_facade_lazily') - def test_get_engine(self, mock_create_facade): - facade = mock_create_facade.return_value - facade.get_engine = mock.Mock(return_value='fake_engine') - - result = api.get_engine() - mock_create_facade.assert_called_once_with() - facade.get_engine.assert_called_once_with() - self.assertEqual(result, 'fake_engine') - - @mock.patch.object(api, '_create_facade_lazily') - def test_get_session(self, mock_create_facade): - facade = mock_create_facade.return_value - facade.get_session = mock.Mock(return_value='fake_session') - - fake_kwargs = {'foo': 'bar'} - result = api.get_session(**fake_kwargs) - - mock_create_facade.assert_called_once_with() - facade.get_session.assert_called_once_with(**fake_kwargs) - self.assertEqual(result, 'fake_session') - - @mock.patch('oslo_db.sqlalchemy.session.EngineFacade.from_config') - def test_create_facade_lazily(self, session): - session.return_value = 'fake_session' - result = api._create_facade_lazily() - self.assertEqual(result, 'fake_session') - - -class DBBackendTestCase(base.BaseTestCase): - """Test case for database backend.""" - - def setUp(self): - super(DBBackendTestCase, self).setUp() - self.config_fixture = config_fixture.Config() - self.CONF = self.useFixture(self.config_fixture).conf - - def test_to_dict(self): - fake_query_result = mock.Mock() - fake_query_result.keys.return_value = ('fake_id',) - fake_query_result.index = 1 - fake_query_result.fake_id = 12345 - self.assertEqual({'fake_id': 12345}, api._to_dict(fake_query_result)) - - fake_query_result_list = 
[fake_query_result] - self.assertEqual([{'fake_id': 12345}], - api._to_dict(fake_query_result_list)) - - fake_query = mock.Mock(spec=sqlalchemy.orm.Query) - fake_query.all.return_value = fake_query_result - self.assertEqual({'fake_id': 12345}, api._to_dict(fake_query)) - - fake_model = mock.Mock(spec=models.RefStackBase) - fake_model.default_allowed_keys = ('fake_id', 'meta', - 'child', 'childs') - fake_child = mock.Mock(spec=models.RefStackBase) - fake_child.iteritems.return_value = {'child_id': 42}.items() - fake_child.default_allowed_keys = ('child_id',) - fake_child.metadata_keys = {} - actuall_dict = {'fake_id': 12345, - 'meta': [{'meta_key': 'answer', - 'value': 42}], - 'child': fake_child, - 'childs': [fake_child]} - fake_model.iteritems.return_value = actuall_dict.items() - fake_model.metadata_keys = {'meta': {'key': 'meta_key', - 'value': 'value'}} - - self.assertEqual({'fake_id': 12345, - 'meta': {'answer': 42}, - 'child': {'child_id': 42}, - 'childs': [{'child_id': 42}]}, - api._to_dict(fake_model)) - - fake_model = mock.Mock(spec=models.RefStackBase) - fake_model.default_allowed_keys = ('meta', 'beta') - fake_model.metadata_keys = {} - fake_model.iteritems.return_value = {'meta': 1, 'beta': 2}.items() - self.assertEqual([{'meta': 1}], - api._to_dict([fake_model], allowed_keys=('meta'))) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.TestResults') - @mock.patch('refstack.db.sqlalchemy.models.Test') - @mock.patch('refstack.db.sqlalchemy.models.TestMeta') - @mock.patch('uuid.uuid4') - def test_store_test_results(self, mock_uuid, mock_test_meta, mock_test, - mock_test_result, mock_get_session): - fake_tests_result = { - 'cpid': 'foo', - 'duration_seconds': 10, - 'results': [ - {'name': 'tempest.some.test'}, - {'name': 'tempest.test', 'uid': '12345678'} - ], - 'meta': {'answer': 42} - } - _id = 12345 - - mock_uuid.return_value = _id - test = mock_test.return_value - test.save = mock.Mock() - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - test_result = mock_test_result.return_value - test_result.save = mock.Mock() - - test_id = api.store_test_results(fake_tests_result) - - mock_test.assert_called_once_with() - mock_get_session.assert_called_once_with() - test.save.assert_called_once_with(session) - - self.assertEqual(test_id, str(_id)) - self.assertEqual(test.cpid, fake_tests_result['cpid']) - self.assertEqual(test.duration_seconds, - fake_tests_result['duration_seconds']) - self.assertEqual(mock_test_result.call_count, - len(fake_tests_result['results'])) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Test') - @mock.patch.object(api, '_to_dict', side_effect=lambda x, *args: x) - def test_get_test_result(self, mock_to_dict, mock_test, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - session.query = mock.Mock() - query = session.query.return_value - query.filter_by = mock.Mock() - filter_by = query.filter_by.return_value - mock_result = 'fake_test_info' - filter_by.first = mock.Mock(return_value=mock_result) - test_id = 'fake_id' - actual_result = api.get_test_result(test_id) - - mock_get_session.assert_called_once_with() - session.query.assert_called_once_with(mock_test) - query.filter_by.assert_called_once_with(id=test_id) - filter_by.first.assert_called_once_with() - self.assertEqual(mock_result, actual_result) - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = 
session - session.query = mock.Mock() - query = session.query.return_value - query.filter_by.return_value.first.return_value = None - self.assertRaises(api.NotFound, api.get_test_result, 'fake_id') - - @mock.patch('refstack.db.sqlalchemy.api.models') - @mock.patch.object(api, 'get_session') - def test_delete_test_result(self, mock_get_session, mock_models): - test_query = mock.Mock() - test_meta_query = mock.Mock() - test_results_query = mock.Mock() - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - session.query = mock.Mock(side_effect={ - mock_models.Test: test_query, - mock_models.TestMeta: test_meta_query, - mock_models.TestResults: test_results_query - }.get) - - db.delete_test_result('fake_id') - - test_query.filter_by.return_value.first\ - .assert_called_once_with() - test_meta_query.filter_by.return_value.delete\ - .assert_called_once_with() - test_results_query.filter_by.return_value.delete\ - .assert_called_once_with() - session.delete.assert_called_once_with( - test_query.filter_by.return_value.first.return_value) - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - session.query.return_value\ - .filter_by.return_value\ - .first.return_value = None - - self.assertRaises(api.NotFound, db.delete_test_result, 'fake_id') - - @mock.patch.object(api, 'get_session') - @mock.patch.object(api, '_to_dict', side_effect=lambda x: x) - def test_update_test_result(self, mock_to_dict, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - mock_test = mock.Mock() - session.query.return_value.filter_by.return_value\ - .first.return_value = mock_test - - test_info = {'product_version_id': '123'} - api.update_test_result(test_info) - - mock_get_session.assert_called_once_with() - mock_test.save.assert_called_once_with(session=session) - - @mock.patch('refstack.db.sqlalchemy.api.models') - @mock.patch.object(api, 'get_session') - def test_get_test_result_meta_key(self, mock_get_session, mock_models): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = mock.Mock(value=42) - self.assertEqual( - 42, db.get_test_result_meta_key('fake_id', 'fake_key')) - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = None - self.assertEqual(24, db.get_test_result_meta_key( - 'fake_id', 'fake_key', 24)) - - @mock.patch('refstack.db.sqlalchemy.api.models') - @mock.patch.object(api, 'get_session') - def test_save_test_result_meta_item(self, mock_get_session, mock_models): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - mock_meta_item = mock.Mock() - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = mock_meta_item - db.save_test_result_meta_item('fake_id', 'fake_key', 42) - self.assertEqual('fake_id', mock_meta_item.test_id) - self.assertEqual('fake_key', mock_meta_item.meta_key) - self.assertEqual(42, mock_meta_item.value) - mock_meta_item.save.assert_called_once_with(session) - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = None - mock_meta_item = 
mock.Mock() - mock_models.TestMeta.return_value = mock_meta_item - db.save_test_result_meta_item('fake_id', 'fake_key', 42) - self.assertEqual('fake_id', mock_meta_item.test_id) - self.assertEqual('fake_key', mock_meta_item.meta_key) - self.assertEqual(42, mock_meta_item.value) - - @mock.patch('refstack.db.sqlalchemy.api.models') - @mock.patch.object(api, 'get_session') - def test_delete_test_result_meta_item(self, mock_get_session, mock_models): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - mock_meta_item = mock.Mock() - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = mock_meta_item - db.delete_test_result_meta_item('fake_id', 'fake_key') - session.delete.assert_called_once_with(mock_meta_item) - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .first.return_value = None - self.assertRaises(db.NotFound, - db.delete_test_result_meta_item, - 'fake_id', 'fake_key') - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.TestResults') - def test_get_test_results(self, mock_test_result, mock_get_session): - mock_test_result.name = mock.Mock() - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - session.query = mock.Mock() - query = session.query.return_value - query.filter_by = mock.Mock() - filter_by = query.filter_by.return_value - mock_result = 'fake_test_results' - expected_result = ['fake_test_results'] - filter_by.all = mock.Mock(return_value=[mock_result]) - - test_id = 'fake_id' - actual_result = api.get_test_results(test_id) - - mock_get_session.assert_called_once_with() - session.query.assert_called_once_with(mock_test_result) - query.filter_by.assert_called_once_with(test_id=test_id) - filter_by.all.assert_called_once_with() - self.assertEqual(expected_result, actual_result) - - @mock.patch('refstack.db.sqlalchemy.models.Test') - @mock.patch('refstack.db.sqlalchemy.models.TestMeta') - def test_apply_filters_for_query_unsigned(self, mock_meta, - mock_test): - query = mock.Mock() - mock_test.created_at = str() - mock_meta.test_id = str() - - filters = { - api_const.START_DATE: 'fake1', - api_const.END_DATE: 'fake2', - api_const.CPID: 'fake3' - } - - unsigned_query = (query - .filter.return_value - .filter.return_value - .filter.return_value) - - unsigned_query.session.query.return_value.filter_by.side_effect = ( - 'signed_results_query', 'shared_results_query' - ) - - result = api._apply_filters_for_query(query, filters) - - query.filter.assert_called_once_with(mock_test.created_at >= - filters[api_const.START_DATE]) - - query = query.filter.return_value - query.filter.assert_called_once_with(mock_test.created_at <= - filters[api_const.END_DATE]) - - query = query.filter.return_value - query.filter.assert_called_once_with(mock_test.cpid == - filters[api_const.CPID]) - - unsigned_query.session.query.assert_has_calls(( - mock.call(mock_meta.test_id), - mock.call().filter_by(meta_key='user'), - mock.call(mock_meta.test_id), - mock.call().filter_by(meta_key='shared'), - )) - unsigned_query.filter.assert_has_calls(( - mock.call(mock_test.id.notin_.return_value), - mock.call(mock_test.id.in_.return_value), - mock.call().union(unsigned_query.filter.return_value) - )) - filtered_query = unsigned_query.filter.return_value.union.return_value - - self.assertEqual(result, filtered_query) - - 
@mock.patch('refstack.db.sqlalchemy.models.Test') - @mock.patch('refstack.db.sqlalchemy.models.TestMeta') - def test_apply_filters_for_query_signed(self, mock_meta, - mock_test): - query = mock.Mock() - mock_test.created_at = str() - mock_meta.test_id = str() - mock_meta.meta_key = 'user' - mock_meta.value = 'test-openid' - - filters = { - api_const.START_DATE: 'fake1', - api_const.END_DATE: 'fake2', - api_const.CPID: 'fake3', - api_const.USER_PUBKEYS: ['fake_pk'], - api_const.SIGNED: 'true', - api_const.OPENID: 'test-openid' - } - - signed_query = (query - .filter.return_value - .filter.return_value - .filter.return_value) - - result = api._apply_filters_for_query(query, filters) - - signed_query.join.assert_called_once_with(mock_test.meta) - signed_query = signed_query.join.return_value - signed_query.filter.assert_called_once_with( - mock_meta.meta_key == api_const.USER - ) - signed_query = signed_query.filter.return_value - signed_query.filter.assert_called_once_with( - mock_meta.value == filters[api_const.OPENID] - ) - filtered_query = signed_query.filter.return_value - self.assertEqual(result, filtered_query) - - @mock.patch.object(api, '_apply_filters_for_query') - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Test') - def test_get_test_result_records(self, mock_model, - mock_get_session, - mock_apply): - - per_page = 9000 - filters = { - api_const.START_DATE: 'fake1', - api_const.END_DATE: 'fake2', - api_const.CPID: 'fake3' - } - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - first_query = session.query.return_value - second_query = mock_apply.return_value - ordered_query = second_query.order_by.return_value - query_with_offset = ordered_query.offset.return_value - query_with_offset.limit.return_value.all.return_value = 'fake_uploads' - - result = api.get_test_result_records(2, per_page, filters) - - mock_get_session.assert_called_once_with() - session.query.assert_called_once_with(mock_model) - mock_apply.assert_called_once_with(first_query, filters) - second_query.order_by.\ - assert_called_once_with(mock_model.created_at.desc()) - - self.assertEqual(result, 'fake_uploads') - ordered_query.offset.assert_called_once_with(per_page) - query_with_offset.limit.assert_called_once_with(per_page) - - @mock.patch.object(api, '_apply_filters_for_query') - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Test') - def test_get_test_result_records_count(self, mock_model, - mock_get_session, - mock_apply): - - filters = mock.Mock() - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - apply_result = mock_apply.return_value - apply_result.count.return_value = 999 - - result = api.get_test_result_records_count(filters) - self.assertEqual(result, 999) - - session.query.assert_called_once_with(mock_model.id) - mock_apply.assert_called_once_with(query, filters) - apply_result.count.assert_called_once_with() - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.User') - def test_user_get(self, mock_model, mock_get_session): - user_openid = 'user@example.com' - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - user = filtered.first.return_value - - result = api.user_get(user_openid) - self.assertEqual(result, user) - - 
session.query.assert_called_once_with(mock_model) - query.filter_by.assert_called_once_with(openid=user_openid) - filtered.first.assert_called_once_with() - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.User') - def test_user_get_none(self, mock_model, mock_get_session): - user_openid = 'user@example.com' - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - filtered.first.return_value = None - self.assertRaises(api.NotFound, api.user_get, user_openid) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.User') - @mock.patch.object(api, 'user_get', side_effect=api.NotFound('User')) - def test_user_update_or_create(self, mock_get_user, mock_model, - mock_get_session): - user_info = {'openid': 'user@example.com'} - - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - user = mock_model.return_value - result = api.user_save(user_info) - self.assertEqual(result, user) - - mock_model.assert_called_once_with() - mock_get_session.assert_called_once_with() - user.save.assert_called_once_with(session=session) - user.update.assert_called_once_with(user_info) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.PubKey') - def test_get_pubkey(self, mock_model, mock_get_session): - key = 'AAAAB3Nz' - khash = hashlib.md5(base64.b64decode(key.encode('ascii'))).hexdigest() - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - - # Test no key match. - filtered.all.return_value = [] - result = api.get_pubkey(key) - self.assertIsNone(result) - - session.query.assert_called_once_with(mock_model) - query.filter_by.assert_called_once_with(md5_hash=khash) - filtered.all.assert_called_once_with() - - # Test only one key match. - filtered.all.return_value = [{'pubkey': key, 'md5_hash': khash}] - result = api.get_pubkey(key) - self.assertEqual({'pubkey': key, 'md5_hash': khash}, result) - - # Test multiple keys with same md5 hash. 
- filtered.all.return_value = [{'pubkey': 'key2', 'md5_hash': khash}, - {'pubkey': key, 'md5_hash': khash}] - result = api.get_pubkey(key) - self.assertEqual({'pubkey': key, 'md5_hash': khash}, result) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - def test_store_pubkey(self, mock_models, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - pubkey_info = { - 'openid': 'fake_id', - 'format': 'ssh-rsa', - 'pubkey': 'cHV0aW4gaHVpbG8=', - 'comment': 'comment' - } - mock_pubkey = mock.Mock() - mock_pubkey.id = 42 - mock_models.PubKey.return_value = mock_pubkey - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .all.return_value = None - self.assertEqual(42, db.store_pubkey(pubkey_info)) - self.assertEqual('fake_id', mock_pubkey.openid) - self.assertEqual('ssh-rsa', mock_pubkey.format) - self.assertEqual('cHV0aW4gaHVpbG8=', mock_pubkey.pubkey) - self.assertEqual( - hashlib.md5( - base64.b64decode('cHV0aW4gaHVpbG8='.encode('ascii')) - ).hexdigest(), - '3b30cd2bdac1eeb7e92dfc983bf5f943' - ) - mock_pubkey.save.assert_called_once_with(session) - session.query.return_value\ - .filter_by.return_value\ - .filter_by.return_value\ - .all.return_value = mock_pubkey - self.assertRaises(db.Duplication, - db.store_pubkey, pubkey_info) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - def test_delete_pubkey(self, mock_models, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - - db.delete_pubkey('key_id') - key = session\ - .query.return_value\ - .filter_by.return_value\ - .first.return_value - session.query.assert_called_once_with(mock_models.PubKey) - session.query.return_value.filter_by.assert_called_once_with( - id='key_id') - session.delete.assert_called_once_with(key) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - @mock.patch.object(api, '_to_dict', side_effect=lambda x: x) - def test_get_user_pubkeys(self, mock_to_dict, mock_models, - mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - actual_keys = db.get_user_pubkeys('user_id') - keys = session \ - .query.return_value \ - .filter_by.return_value \ - .all.return_value - session.query.assert_called_once_with(mock_models.PubKey) - session.query.return_value.filter_by.assert_called_once_with( - openid='user_id') - self.assertEqual(keys, actual_keys) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.UserToGroup') - def test_add_user_to_group(self, mock_model, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - api.add_user_to_group('user-123', 'GUID', 'user-321') - - mock_model.assert_called_once_with() - mock_get_session.assert_called_once_with() - mock_model.return_value.save.assert_called_once_with(session=session) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - def test_remove_user_from_group(self, mock_models, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - db.remove_user_from_group('user-123', 'GUID') - - session.query.assert_called_once_with(mock_models.UserToGroup) - session.query.return_value.filter_by.assert_has_calls(( - mock.call(user_openid='user-123'), - mock.call().filter_by(group_id='GUID'), 
- mock.call().filter_by().delete(synchronize_session=False))) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Organization') - @mock.patch('refstack.db.sqlalchemy.models.Group') - @mock.patch('refstack.db.sqlalchemy.models.UserToGroup') - @mock.patch.object(api, '_to_dict', side_effect=lambda x: x) - def test_organization_add(self, mock_to_dict, mock_model_user_to_group, - mock_model_group, mock_model_organization, - mock_get_session): - - organization_info = {'name': 'a', 'description': 'b', 'type': 1} - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - organization = mock_model_organization.return_value - result = api.add_organization(organization_info, 'user-123') - self.assertEqual(result, organization) - - group = mock_model_group.return_value - self.assertIsNotNone(group.id) - self.assertIsNotNone(organization.id) - self.assertIsNotNone(organization.group_id) - - mock_model_organization.assert_called_once_with() - mock_model_group.assert_called_once_with() - mock_model_user_to_group.assert_called_once_with() - mock_get_session.assert_called_once_with() - organization.save.assert_called_once_with(session=session) - group.save.assert_called_once_with(session=session) - user_to_group = mock_model_user_to_group.return_value - user_to_group.save.assert_called_once_with(session=session) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Product') - @mock.patch('refstack.db.sqlalchemy.models.ProductVersion') - @mock.patch.object(api, '_to_dict', side_effect=lambda x: x) - def test_product_add(self, mock_to_dict, mock_version, - mock_product, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - version = mock_version.return_value - product = mock_product.return_value - product_info = {'product_ref_id': 'hash_or_guid', 'name': 'a', - 'organization_id': 'GUID0', 'type': 0, - 'product_type': 0} - result = api.add_product(product_info, 'user-123') - self.assertEqual(result, product) - - self.assertIsNotNone(product.id) - self.assertIsNotNone(version.id) - self.assertIsNotNone(version.product_id) - self.assertIsNone(version.version) - - mock_get_session.assert_called_once_with() - product.save.assert_called_once_with(session=session) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Product') - def test_incomplete_product_add(self, mock_product, mock_get_session): - product_info = {} - self.assertRaises(KeyError, api.add_product, product_info, 'u') - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Product.save') - def test_product_update(self, mock_product_save, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - product = models.Product() - product.id = '123' - filtered.first.return_value = product - - product_info = {'product_ref_id': '098', 'name': 'a', - 'description': 'b', 'creator_openid': 'abc', - 'organization_id': '1', 'type': 0, 'product_type': 0, - 'id': '123'} - api.update_product(product_info) - - self.assertEqual('098', product.product_ref_id) - self.assertIsNone(product.created_by_user) - self.assertIsNone(product.organization_id) - self.assertIsNone(product.type) - self.assertIsNone(product.product_type) - - mock_get_session.assert_called_once_with() - 
mock_product_save.assert_called_once_with(session=session) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Organization') - @mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x) - def test_organization_get(self, mock_to_dict, mock_model, - mock_get_session): - organization_id = 12345 - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - organization = filtered.first.return_value - - result = api.get_organization(organization_id) - self.assertEqual(result, organization) - - session.query.assert_called_once_with(mock_model) - query.filter_by.assert_called_once_with(id=organization_id) - filtered.first.assert_called_once_with() - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Product') - @mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x) - def test_product_get(self, mock_to_dict, mock_model, mock_get_session): - _id = 12345 - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - product = filtered.first.return_value - - result = api.get_product(_id) - self.assertEqual(result, product) - - session.query.assert_called_once_with(mock_model) - query.filter_by.assert_called_once_with(id=_id) - filtered.first.assert_called_once_with() - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - def test_product_delete(self, mock_models, mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - db.delete_product('product_id') - - session.query.return_value.filter_by.assert_has_calls(( - mock.call(product_id='product_id'), - mock.call().delete(synchronize_session=False))) - session.query.return_value.filter_by.assert_has_calls(( - mock.call(id='product_id'), - mock.call().delete(synchronize_session=False))) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.api.models') - def test_get_organization_users(self, mock_models, mock_get_session): - organization_id = 12345 - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - filtered = query.filter_by.return_value - filtered.first.return_value.group_id = 'foo' - - join = query.join.return_value - - fake_user = models.User() - fake_user.openid = 'foobar' - fake_user.fullname = 'Foo Bar' - fake_user.email = 'foo@bar.com' - join.filter.return_value = [(mock.Mock(), fake_user)] - - result = api.get_organization_users(organization_id) - expected = {'foobar': {'openid': 'foobar', - 'fullname': 'Foo Bar', - 'email': 'foo@bar.com'}} - self.assertEqual(expected, result) - - session.query.assert_any_call(mock_models.Organization.group_id) - query.filter_by.assert_called_once_with(id=organization_id) - session.query.assert_any_call(mock_models.UserToGroup, - mock_models.User) - - @mock.patch.object(api, 'get_session') - @mock.patch('refstack.db.sqlalchemy.models.Organization') - @mock.patch.object(api, '_to_dict', side_effect=lambda x, allowed_keys: x) - def test_organizations_get(self, mock_to_dict, mock_model, - mock_get_session): - session = mock.Mock() - mock_get_session.return_value.__enter__.return_value = session - query = session.query.return_value - ordered = query.order_by.return_value - organizations = 
ordered.all.return_value - - result = api.get_organizations() - self.assertEqual(organizations, result) - - session.query.assert_called_once_with(mock_model) - query.order_by.assert_called_once_with(mock_model.created_at.desc()) - ordered.all.assert_called_once_with() diff --git a/refstack/tests/unit/test_guidelines.py b/refstack/tests/unit/test_guidelines.py deleted file mode 100644 index a0b792e7..00000000 --- a/refstack/tests/unit/test_guidelines.py +++ /dev/null @@ -1,296 +0,0 @@ -# Copyright (c) 2016 IBM, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -from unittest import mock - -import httmock -from oslotest import base -import requests - -from refstack.api import guidelines - - -class GuidelinesTestCase(base.BaseTestCase): - - def setUp(self): - super(GuidelinesTestCase, self).setUp() - self.guidelines = guidelines.Guidelines() - - def test_guidelines_list(self): - @httmock.all_requests - def github_api_mock(url, request): - headers = {'content-type': 'application/json'} - content = [{'name': '2015.03.json', - 'path': '2015.03.json', - 'type': 'file'}, - {'name': '2015.next.json', - 'path': '2015.next.json', - 'type': 'file'}, - {'name': '2015.03', - 'path': '2015.03', - 'type': 'dir'}, - {'name': 'test.2018.02.json', - 'path': 'add-ons/test.2018.02.json', - 'type': 'file'}, - {'name': 'test.next.json', - 'path': 'add-ons/test.next.json', - 'type': 'file'}] - content = json.dumps(content) - return httmock.response(200, content, headers, None, 5, request) - with httmock.HTTMock(github_api_mock): - result = self.guidelines.get_guideline_list() - expected_keys = ['powered', u'test'] - expected_powered = [ - {'name': u'2015.03.json', - 'file': u'2015.03.json'}, - {'name': u'2015.next.json', - 'file': u'2015.next.json'} - ] - expected_test_addons = [ - {'name': u'2018.02.json', - 'file': u'test.2018.02.json'}, - {'name': u'next.json', - 'file': u'test.next.json'} - ] - - self.assertIn('powered', expected_keys) - self.assertIn(u'test', expected_keys) - self.assertEqual(expected_powered, - result['powered']) - self.assertEqual(expected_test_addons, - result[u'test']) - - def test_get_guidelines_list_error_code(self): - """Test when the HTTP status code isn't a 200 OK.""" - @httmock.all_requests - def github_api_mock(url, request): - content = {'title': 'Not Found'} - return httmock.response(404, content, None, None, 5, request) - - with httmock.HTTMock(github_api_mock): - result = self.guidelines.get_guideline_list() - self.assertEqual(result, {'powered': []}) - - @mock.patch('requests.get') - def test_get_guidelines_exception(self, mock_requests_get): - """Test when the GET request raises an exception.""" - mock_requests_get.side_effect = requests.exceptions.RequestException() - result = self.guidelines.get_guideline_list() - self.assertEqual(result, {'powered': []}) - - def test_get_capability_file(self): - """Test when getting a specific guideline file.""" - @httmock.all_requests - def github_mock(url, request): - content = {'foo': 'bar'} - 
return httmock.response(200, content, None, None, 5, request) - - with httmock.HTTMock(github_mock): - gl_file_name = 'dns.2018.02.json' - result = self.guidelines.get_guideline_contents(gl_file_name) - self.assertEqual({'foo': 'bar'}, result) - - def test_get_capability_file_error_code(self): - """Test when the HTTP status code isn't a 200 OK.""" - @httmock.all_requests - def github_api_mock(url, request): - content = {'title': 'Not Found'} - return httmock.response(404, content, None, None, 5, request) - - with httmock.HTTMock(github_api_mock): - result = self.guidelines.get_guideline_contents('2010.03.json') - self.assertIsNone(result) - - @mock.patch('requests.get') - def test_get_capability_file_exception(self, mock_requests_get): - """Test when the GET request raises an exception.""" - mock_requests_get.side_effect = requests.exceptions.RequestException() - result = self.guidelines.get_guideline_contents('2010.03.json') - self.assertIsNone(result) - - def test_get_target_capabilities(self): - """Test getting relevant capabilities.""" - - # Schema version 2.0 - json = { - 'metadata': { - 'id': '2017.08', - 'schema': '2.0', - 'scoring': {}, - 'os_trademark_approval': { - 'target_approval': '2017.08', - 'replaces': '2017.01', - 'releases': ['newton', 'ocata', 'pike'], - 'status': 'approved' - } - }, - 'platforms': { - 'OpenStack Powered Platform': { - 'description': 'foo platform', - 'components': [ - {'name': 'os_powered_compute'}, - {'name': 'os_powered_storage'} - ] - }, - 'OpenStack Powered Storage': { - 'description': 'foo storage', - 'components': [ - {'name': 'os_powered_storage'} - ] - }, - }, - 'components': { - 'os_powered_compute': { - 'capabilities': { - 'required': ['cap_id_1'], - 'advisory': ['cap_id_2'], - 'deprecated': ['cap_id_3'], - 'removed': [] - } - }, - 'os_powered_storage': { - 'capabilities': { - 'required': ['cap_id_5'], - 'advisory': ['cap_id_6'], - 'deprecated': [], - 'removed': [] - } - } - } - } - - caps = self.guidelines.get_target_capabilities(json) - expected = sorted(['cap_id_1', 'cap_id_2', 'cap_id_3', - 'cap_id_5', 'cap_id_6']) - self.assertEqual(expected, sorted(caps)) - - caps = self.guidelines.get_target_capabilities(json, - types=['required'], - target='object') - expected = ['cap_id_5'] - self.assertEqual(expected, caps) - - # Schema version 1.4 - json = { - 'platform': {'required': ['compute', 'object']}, - 'schema': '1.4', - 'components': { - 'compute': { - 'required': ['cap_id_1'], - 'advisory': [], - 'deprecated': [], - 'removed': [] - }, - 'object': { - 'required': ['cap_id_2'], - 'advisory': ['cap_id_3'], - 'deprecated': [], - 'removed': [] - } - } - } - - # Test platform capabilities - caps = self.guidelines.get_target_capabilities(json) - expected = sorted(['cap_id_1', 'cap_id_2', 'cap_id_3']) - self.assertEqual(expected, sorted(caps)) - - caps = self.guidelines.get_target_capabilities(json, - types=['required'], - target='object') - expected = ['cap_id_2'] - self.assertEqual(expected, caps) - - def test_get_test_list(self): - """Test when getting the guideline test list.""" - - # Schema version 2.0 - json = { - 'metadata': { - 'schema': '2.0', - }, - 'capabilities': { - 'cap-1': { - 'tests': { - 'test_1': {'idempotent_id': 'id-1234'}, - 'test_2': {'idempotent_id': 'id-5678', - 'aliases': ['test_2_1']}, - 'test_3': {'idempotent_id': 'id-1111', - 'flagged': {'reason': 'foo'}} - } - }, - 'cap-2': { - 'tests': { - 'test_4': {'idempotent_id': 'id-1233'} - } - } - } - } - - tests = self.guidelines.get_test_list(json, ['cap-1']) - expected = 
['test_1[id-1234]', 'test_2[id-5678]', - 'test_2_1[id-5678]', 'test_3[id-1111]'] - self.assertEqual(expected, tests) - - tests = self.guidelines.get_test_list(json, ['cap-1'], - alias=False, show_flagged=False) - expected = ['test_1[id-1234]', 'test_2[id-5678]'] - self.assertEqual(expected, tests) - - # Schema version 1.4 - json = { - 'schema': '1.4', - 'capabilities': { - 'cap-1': { - 'tests': { - 'test_1': {'idempotent_id': 'id-1234'}, - 'test_2': {'idempotent_id': 'id-5678', - 'aliases': ['test_2_1']}, - 'test_3': {'idempotent_id': 'id-1111', - 'flagged': {'reason': 'foo'}} - } - }, - 'cap-2': { - 'tests': { - 'test_4': {'idempotent_id': 'id-1233'} - } - } - } - } - tests = self.guidelines.get_test_list(json, ['cap-1']) - expected = ['test_1[id-1234]', 'test_2[id-5678]', - 'test_2_1[id-5678]', 'test_3[id-1111]'] - self.assertEqual(expected, tests) - - tests = self.guidelines.get_test_list(json, ['cap-1'], - alias=False, show_flagged=False) - expected = ['test_1[id-1234]', 'test_2[id-5678]'] - self.assertEqual(expected, tests) - - # Schema version 1.2 - json = { - 'schema': '1.2', - 'capabilities': { - 'cap-1': { - 'tests': ['test_1', 'test_2'] - }, - 'cap-2': { - 'tests': ['test_3'] - } - } - } - tests = self.guidelines.get_test_list(json, ['cap-2']) - self.assertEqual(['test_3'], tests) diff --git a/refstack/tests/unit/test_migration.py b/refstack/tests/unit/test_migration.py deleted file mode 100644 index 998a7477..00000000 --- a/refstack/tests/unit/test_migration.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Tests for refstack's migrations.""" - -from unittest import mock - -import alembic -from oslotest import base - -from refstack.db import migration -from refstack.db.migrations.alembic import utils - - -class AlembicConfigTestCase(base.BaseTestCase): - - @mock.patch('alembic.config.Config') - @mock.patch('os.path.join') - def test_alembic_config(self, os_join, alembic_config): - os_join.return_value = 'fake_path' - alembic_config.return_value = 'fake_config' - result = utils.alembic_config() - self.assertEqual(result, 'fake_config') - alembic_config.assert_called_once_with('fake_path') - - -class MigrationTestCase(base.BaseTestCase): - """Test case for alembic's migrations API.""" - - def setUp(self): - super(MigrationTestCase, self).setUp() - self.config_patcher = mock.patch( - 'refstack.db.migrations.alembic.utils.alembic_config') - self.config = self.config_patcher.start() - self.config.return_value = 'fake_config' - self.addCleanup(self.config_patcher.stop) - - @mock.patch.object(alembic.migration.MigrationContext, 'configure', - mock.Mock()) - def test_version(self): - context = mock.Mock() - context.get_current_revision = mock.Mock() - alembic.migration.MigrationContext.configure.return_value = context - with mock.patch('refstack.db.sqlalchemy.api.get_engine') as get_engine: - engine = mock.Mock() - engine.connect = mock.MagicMock() - get_engine.return_value = engine - migration.version() - context.get_current_revision.assert_called_with() - engine.connect.assert_called_once_with() - - @mock.patch('alembic.command.upgrade') - def test_upgrade(self, upgrade): - migration.upgrade('some_revision') - upgrade.assert_called_once_with('fake_config', 'some_revision') - - @mock.patch('alembic.command.upgrade') - def test_upgrade_without_revision(self, upgrade): - migration.upgrade(None) - upgrade.assert_called_once_with('fake_config', 'head') - - @mock.patch('alembic.command.downgrade') - def test_downgrade(self, downgrade): - migration.downgrade('some_revision') - downgrade.assert_called_once_with('fake_config', 'some_revision') - - @mock.patch('alembic.command.downgrade') - def test_downgrade_without_revision(self, downgrade): - migration.downgrade(None) - downgrade.assert_called_once_with('fake_config', 'base') - - @mock.patch('alembic.command.stamp') - def test_stamp(self, stamp): - migration.stamp('some_revision') - stamp.assert_called_once_with('fake_config', 'some_revision') - - @mock.patch('alembic.command.stamp') - def test_stamp_without_revision(self, stamp): - migration.stamp(None) - stamp.assert_called_once_with('fake_config', 'head') - - @mock.patch('alembic.command.revision') - def test_revision(self, revision): - migration.revision('some_message', True) - revision.assert_called_once_with('fake_config', 'some_message', True) diff --git a/refstack/tests/unit/test_validators.py b/refstack/tests/unit/test_validators.py deleted file mode 100644 index 70cfbc1a..00000000 --- a/refstack/tests/unit/test_validators.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright (c) 2015 Mirantis, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Tests for validators.""" -import binascii -import json -from unittest import mock - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.asymmetric import padding -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives import serialization - -import jsonschema -from oslotest import base - -from refstack.api import exceptions as api_exc -from refstack.api import validators - - -class ValidatorsTestCase(base.BaseTestCase): - """Test case for validator's helpers.""" - - def test_str_validation_error(self): - err = api_exc.ValidationError( - 'Something went wrong!', - AttributeError("'NoneType' object has no attribute 'a'") - ) - self.assertEqual(err.title, 'Something went wrong!') - self.assertEqual("%s(%s: %s)" % ( - 'Something went wrong!', - 'AttributeError', - "'NoneType' object has no attribute 'a'" - ), str(err)) - err = api_exc.ValidationError( - 'Something went wrong again!' - ) - self.assertEqual('Something went wrong again!', str(err)) - - def test_is_uuid(self): - self.assertTrue(validators.is_uuid('12345678123456781234567812345678')) - - def test_is_uuid_fail(self): - self.assertFalse(validators.is_uuid('some_string')) - - def test_checker_uuid(self): - value = validators.checker_uuid('12345678123456781234567812345678') - self.assertTrue(value) - - def test_checker_uuid_fail(self): - self.assertFalse(validators.checker_uuid('some_string')) - - -class TestResultValidatorTestCase(base.BaseTestCase): - """Test case for TestResultValidator.""" - - FAKE_JSON = { - 'cpid': 'foo', - 'duration_seconds': 10, - 'results': [ - {'name': 'tempest.some.test'}, - {'name': 'tempest.test', 'uid': '12345678'} - ] - } - - FAKE_JSON_WITH_EMPTY_RESULTS = { - 'cpid': 'foo', - 'duration_seconds': 20, - 'results': [ - ] - } - - def setUp(self): - super(TestResultValidatorTestCase, self).setUp() - self.validator = validators.TestResultValidator() - - def test_assert_id(self): - value = self.validator.assert_id('12345678123456781234567812345678') - self.assertTrue(value) - - def test_assert_id_fail(self): - self.assertFalse(self.validator.assert_id('some_string')) - - def test_validation(self): - with mock.patch('jsonschema.validate') as mock_validate: - request = mock.Mock() - request.body = json.dumps(self.FAKE_JSON).encode('utf-8') - request.headers = {} - self.validator.validate(request) - mock_validate.assert_called_once_with(self.FAKE_JSON, - self.validator.schema) - - def test_validation_with_signature(self): - request = mock.Mock() - request.body = json.dumps(self.FAKE_JSON).encode('utf-8') - - key = rsa.generate_private_key( - public_exponent=65537, - key_size=2048, - backend=default_backend() - ) - sign = key.sign(request.body, padding.PKCS1v15(), hashes.SHA256()) - pubkey = key.public_key().public_bytes( - serialization.Encoding.OpenSSH, - serialization.PublicFormat.OpenSSH - ) - request.headers = { - 'X-Signature': binascii.b2a_hex(sign), - 'X-Public-Key': pubkey - } - self.validator.validate(request) - - def test_validation_fail_no_json(self): - wrong_request = mock.Mock() - wrong_request.body = b'foo' - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - wrong_request) - try: - self.validator.validate(wrong_request) - except api_exc.ValidationError as e: - self.assertIsInstance(e.exc, ValueError) - - def test_validation_fail(self): - 
wrong_request = mock.Mock() - wrong_request.body = json.dumps({ - 'foo': 'bar' - }).encode('utf-8') - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - wrong_request) - try: - self.validator.validate(wrong_request) - except api_exc.ValidationError as e: - self.assertIsInstance(e.exc, jsonschema.ValidationError) - - def test_validation_fail_with_empty_result(self): - wrong_request = mock.Mock() - wrong_request.body = json.dumps( - self.FAKE_JSON_WITH_EMPTY_RESULTS - ).encode('utf-8') - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - wrong_request) - - @mock.patch('jsonschema.validate') - def test_validation_with_broken_signature(self, mock_validate): - - request = mock.Mock() - request.body = json.dumps(self.FAKE_JSON).encode('utf-8') - key = rsa.generate_private_key( - public_exponent=65537, - key_size=2048, - backend=default_backend() - ) - pubkey = key.public_key().public_bytes( - serialization.Encoding.OpenSSH, - serialization.PublicFormat.OpenSSH - ) - request.headers = { - 'X-Signature': binascii.b2a_hex(b'fake_sign'), - 'X-Public-Key': pubkey - } - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - request) - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, - 'Signature verification failed') - - request.headers = { - 'X-Signature': 'z-z-z-z!!!', - 'X-Public-Key': pubkey - } - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, 'Malformed signature') - - request.headers = { - 'X-Signature': binascii.b2a_hex(b'fake_sign'), - 'X-Public-Key': b'H--0' - } - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertIsInstance(e.exc, ValueError) - - -class PubkeyValidatorTestCase(base.BaseTestCase): - """Test case for TestResultValidator.""" - - FAKE_JSON = { - 'raw_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC4GAwIjFN6mkN09Vfc8h' - 'VCnbztex/kjVdPlGraBLR+M9VoehOMJgLawpn2f+rM7NjDDgIwvj0kHVMZ' - 'cBk5MZ1eQg3ACtP2EBw0SLLZ9uMSuHoDTf8oHVgNlNrHL3sc/QYJYfSqRh' - 'FS2JvIVNnC2iG8jwnxUBI9rBspYU8AkrrczQ== Don\'t_Panic.', - 'self_signature': '9d6c4c74b4ec47bb4db8f288a502d2d2f686e7228d387377b8' - 'c89ee67345ad04f8e518e0a627afe07217defbbd8acdd6dd88' - '74104e631731a1fb4dab1a34e06a0680f11337d1fae0b7a9ad' - '5942e0aacd2245c4cf7a78a96c4800eb4f6d8c363822aaaf43' - 'aa3a648ddee84f3ea0b91e2e977ca19df72ad80226c12b1221' - 'c2fb61' - } - - def setUp(self): - super(PubkeyValidatorTestCase, self).setUp() - self.validator = validators.PubkeyValidator() - - def test_validation(self): - request = mock.Mock() - request.body = json.dumps(self.FAKE_JSON).encode('utf-8') - self.validator.validate(request) - - def test_validation_fail_no_json(self): - wrong_request = mock.Mock() - wrong_request.body = b'foo' - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - wrong_request) - try: - self.validator.validate(wrong_request) - except api_exc.ValidationError as e: - self.assertIsInstance(e.exc, ValueError) - - def test_validation_fail(self): - wrong_request = mock.Mock() - wrong_request.body = json.dumps({ - 'foo': 'bar' - }).encode('utf-8') - self.assertRaises(api_exc.ValidationError, - self.validator.validate, - wrong_request) - try: - self.validator.validate(wrong_request) - except api_exc.ValidationError as e: - self.assertIsInstance(e.exc, jsonschema.ValidationError) - - @mock.patch('jsonschema.validate') - def test_validation_with_broken_signature(self, mock_validate): - body = 
self.FAKE_JSON.copy() - body['self_signature'] = 'deadbeef' - - request = mock.Mock() - request.body = json.dumps(body).encode('utf-8') - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, - 'Signature verification failed') - - body = { - 'raw_key': 'fake key comment', - 'self_signature': 'deadbeef' - } - request = mock.Mock() - request.body = json.dumps(body).encode('utf-8') - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, - 'Public key has unsupported format') - - body = { - 'raw_key': 'ssh-rsa key comment', - 'self_signature': 'deadbeef?' - } - request = mock.Mock() - request.body = json.dumps(body).encode('utf-8') - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, - 'Malformed signature') - - body = { - 'raw_key': 'ssh-rsa key comment', - 'self_signature': 'deadbeef' - } - request = mock.Mock() - request.body = json.dumps(body).encode('utf-8') - try: - self.validator.validate(request) - except api_exc.ValidationError as e: - self.assertEqual(e.title, - 'Malformed public key') diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d49dc26b..00000000 --- a/requirements.txt +++ /dev/null @@ -1,17 +0,0 @@ -SQLAlchemy>=0.8.3 -alembic>=1.8.0 -beaker -beautifulsoup4 -cryptography>=3.0 # BSD/Apache-2.0 -docutils>=0.11 -oslo.config>=1.6.0 # Apache-2.0 -oslo.db>=1.4.1 # Apache-2.0 -oslo.log>=3.11.0 -oslo.utils>=3.16.0 # Apache-2.0 -pecan>=0.8.2 -requests>=2.2.0,!=2.4.0 -requests-cache>=0.4.9,<0.6.0 -jsonschema>=4.7.0 -PyJWT>=2.0.0 # MIT -WebOb>=1.7.1 # MIT -PyMySQL>=0.6.2,!=0.6.4 diff --git a/run-in-docker b/run-in-docker deleted file mode 100755 index 8bb154df..00000000 --- a/run-in-docker +++ /dev/null @@ -1,153 +0,0 @@ -#!/bin/bash -TAG=$(BRANCH=$(git status -bs| grep "##" | awk '{print $2}'); echo ${BRANCH##*/}) -IMAGE="refstack:${TAG}" -CONTAINER="refstack_${TAG}" -PROJ_DIR=$(git rev-parse --show-toplevel) - -function usage () { -set +x -echo "Usage: $0 [OPTIONS] [COMMAND]" -echo "Build '${IMAGE}' image if it is does not exist." -echo "Run '${CONTAINER}' container and execute COMMAND in it." -echo "Default COMMAND is 'api-up'" -echo "If container '${CONTAINER}' exists (running or stopped) it will be reused." -echo "If you want to get access to your local RefStack not only from localhost, " -echo "please specify public RefStack host:port in env[REFSTACK_HOST]." -echo "You can customize RefStack API config by editing docker/refstack.conf.tmpl." -echo "It is bash template. You can use \${SOME_ENV_VARIABLE} in it." -echo "Default is 127.0.0.1:443" -echo "" -echo " -r Force delete '${CONTAINER}' container and run it again." -echo " Main usecase for it - updating config from templates" -echo " -b Force delete '${IMAGE}' image and build it again" -echo " Main usecase for it - force build new python/js env" -echo " -i Run container with isolated MySQL data." -echo " By default MySQL data stores in refstack_data_DATA-BASE-REVISON container" -echo " It reuses if such container exists. 
If you want to drop DB data, just execute" -echo " sudo docker rm refstack_data_DATA-BASE-REVISON" -echo " -d Turn on debug information" -echo " -h Print this usage message" -echo "" -echo "" -echo "Using examples:" -echo "" -echo "Run RefStack API:" -echo "$ ./run-in-docker" -echo "" -echo "Run RefStack API by hands:" -echo "$ ./run-in-docker bash" -echo "$ activate" -echo "$ pecan serve refstack/api/config.py" -echo "" -echo "Open shell in container:" -echo "$ ./run-in-docker bash" -echo "" -echo "Open mysql console in container:" -echo "$ ./run-in-docker bash" -echo "$ mysql" -} - -build_image () { -sudo docker rm -f ${CONTAINER} -PREV_ID=$(sudo docker images refstack | grep ${TAG} | awk '{print $3}') -echo "Try to build ${IMAGE} image" -sudo docker build -t ${IMAGE} -f ${PROJ_DIR}/docker/Dockerfile ${PROJ_DIR} || exit $? -NEW_ID=$(sudo docker images refstack | grep ${TAG} | awk '{print $3}') -if [[ ${PREV_ID} ]] && [[ ! ${PREV_ID} == ${NEW_ID} ]]; then - sudo docker rmi -f ${PREV_ID} && echo "Previous image removed" -fi -} - -wait_ready() { -while true; do - echo "Wait while container is not ready" - sudo docker exec ${CONTAINER} [ ! -e /tmp/is-not-ready ] && \ - echo "Container ${CONTAINER} is running!" && break - sleep 1 -done -} - -run_container (){ -echo "Stop all other refstack containers" -for id in $(sudo docker ps -q); do - NAME=$(sudo docker inspect --format='{{.Name}}' $id) - if [[ ${NAME} == /refstack_* ]] && [[ ! ${NAME} == "/${CONTAINER}" ]]; then - echo "Stopped container ${NAME}" && sudo docker stop $id - fi -done -if [[ $(sudo docker ps -a | grep "${CONTAINER}") ]]; then - echo "Container ${CONTAINER} exists it is reused" - sudo docker start ${CONTAINER} - wait_ready -else - echo "Try to run container ${CONTAINER}" - sudo docker run -d \ - -e REFSTACK_HOST=${REFSTACK_HOST:-127.0.0.1} \ - -e DEBUG_MODE=${DEBUG_MODE} \ - -v ${PROJ_DIR}:/refstack:ro -p 443:443 --name ${CONTAINER} \ - ${IMAGE} start.sh -s - wait_ready - if [[ ! ${ISOLATED_DB} ]]; then - DB_VERSION=$(sudo docker exec -it ${CONTAINER} api-db-version) - DB_CONTAINER=refstack_data_${DB_VERSION::-1} - sudo docker rm -f ${CONTAINER} - if [[ ! $(sudo docker ps -a | grep "${DB_CONTAINER}") ]]; then - sudo docker run -v /home/dev/mysql --name ${DB_CONTAINER} ubuntu /bin/true - echo "Container with mysql data ${DB_CONTAINER} created" - sudo docker run -d \ - -e REFSTACK_HOST=${REFSTACK_HOST:-127.0.0.1} \ - -e DEBUG_MODE=${DEBUG_MODE} \ - -v ${PROJ_DIR}:/refstack:ro --volumes-from ${DB_CONTAINER} -p 443:443 \ - --name ${CONTAINER} ${IMAGE} - wait_ready - sudo docker exec ${CONTAINER} api-init-db - echo "DB init done" - else - sudo docker run -d \ - -e REFSTACK_HOST=${REFSTACK_HOST:-127.0.0.1} \ - -e DEBUG_MODE=${DEBUG_MODE} \ - -v ${PROJ_DIR}:/refstack:ro --volumes-from ${DB_CONTAINER} -p 443:443 \ - --name ${CONTAINER} ${IMAGE} - echo "Container with mysql data ${DB_CONTAINER} attached to ${CONTAINER}" - wait_ready - fi - - - fi -fi -} - -COMMAND="" -while [[ $1 ]] -do - case "$1" in - -h) usage - exit 0;; - -r) echo "Try to remove old ${CONTAINER} container" - sudo docker rm -f ${CONTAINER} - shift;; - -i) echo "Run container with isolated MySQL data." - echo "By default MySQL data stores in refstack_data_[DATA-BASE-REVISON] container" - echo "It reuses if such container exists. 
If you want to drop DB data, just execute" - echo "sudo docker rm ${DB_CONTAINER}" - ISOLATED_DB=true - shift;; - -b) FORCE_BUILD=true - shift;; - -d) DEBUG_MODE=true - shift;; - *) COMMAND="${COMMAND} $1" - shift;; - esac -done - -[[ ${DEBUG_MODE} ]] && set -x - -#Build proper image if it does not exist of force rebuild fired -if [[ ${FORCE_BUILD} ]] || [[ ! $(sudo docker images refstack | grep ${TAG}) ]]; then - build_image -fi -#Run or start(if it exists) proper container -[[ ! $(sudo docker ps | grep ${CONTAINER}) ]] && run_container - -sudo docker exec -it ${CONTAINER} ${COMMAND:-api-up} diff --git a/setup-mysql-tests.sh b/setup-mysql-tests.sh deleted file mode 100755 index 136285a2..00000000 --- a/setup-mysql-tests.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/bin/bash -x - -wait_for_line () { - while read line - do - echo "$line" | grep -q "$1" && break - done < "$2" - # Read the fifo for ever otherwise process would block - cat "$2" >/dev/null & -} - -# insert sbin into path if it exists and isnt already there -echo $PATH | grep -q "/usr/sbin" - -if [ $? -ne 0 ] && [ -d "/usr/sbin" ]; then - echo "SBIN NOT IN PATH" - export PATH="$PATH:/usr/sbin" - echo "$PATH" -fi - -# If test DB url is provided, run tests with it -if [[ "$REFSTACK_TEST_MYSQL_URL" ]] -then - $* - exit $? -fi - -# Else setup mysql base for tests. -# Start MySQL process for tests -MYSQL_DATA=`mktemp -d /tmp/refstack-mysql-XXXXX` -ls -lshd ${MYSQL_DATA} -mkfifo ${MYSQL_DATA}/out -# On systems like Fedora here's where mysqld can be found -PATH=$PATH:/usr/libexec -MYSQL_SOCKET="/var/run/mysqld/mysqld.sock" -sudo chown -R mysql:mysql ${MYSQL_DATA} -mysqld --initialize-insecure --basedir=${MYSQL_DATA} --datadir=${MYSQL_DATA}/data --pid-file=${MYSQL_DATA}/mysql.pid --socket=${MYSQL_SOCKET}/ --skip-networking --skip-grant-tables &> ${MYSQL_DATA}/out & -# Wait for MySQL to start listening to connections -wait_for_line "mysqld: ready for connections." ${MYSQL_DATA}/out -sudo mysql -S ${MYSQL_SOCKET} -e 'set @@global.show_compatibility_56=ON;' > /dev/null 2>&1 -sudo mysql -S ${MYSQL_SOCKET} -e 'CREATE DATABASE test;' -sudo mysql -S ${MYSQL_SOCKET} -e "CREATE USER 'refstack'@'localhost' IDENTIFIED BY 'ref_pass';" -sudo mysql -S ${MYSQL_SOCKET} -e "GRANT ALL PRIVILEGES ON test . * TO 'refstack'@'localhost';" -sudo mysql -S ${MYSQL_SOCKET} -e "FLUSH PRIVILEGES;" -export REFSTACK_TEST_MYSQL_URL="mysql+pymysql://refstack:ref_pass@localhost/test?unix_socket=${MYSQL_SOCKET}&charset=utf8" - -# Yield execution to venv command -$* - -# Cleanup after tests -ret=$? 
-kill $(jobs -p) -rm -rf "${MYSQL_DATA}" -exit $ret diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e14d7070..00000000 --- a/setup.cfg +++ /dev/null @@ -1,45 +0,0 @@ -[metadata] -name = refstack -summary = OpenStack interop testing -description_file = - README.rst -author = OpenStack -author_email = openstack-discuss@lists.openstack.org -home_page = https://refstack.openstack.org -python_requires = >=3.8 -classifier = - Environment :: OpenStack - Intended Audience :: Developers - Intended Audience :: Information Technology - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: Implementation :: CPython - -[files] -packages = - refstack - -scripts = - bin/refstack-manage - bin/refstack-api - -[global] -setup-hooks = - pbr.hooks.setup_hook - -[entry_points] -oslo.config.opts = - refstack = refstack.opts:list_opts - -[build_sphinx] -all_files = 1 -build-dir = doc/build -source-dir = doc/source - diff --git a/setup.py b/setup.py deleted file mode 100644 index 3a372abf..00000000 --- a/setup.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2014 Piston Cloud Computing, inc. all rights reserved -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import setuptools - -setuptools.setup( - setup_requires=['pbr'], - pbr=True) diff --git a/specs/README.rst b/specs/README.rst deleted file mode 100644 index 46e947db..00000000 --- a/specs/README.rst +++ /dev/null @@ -1,54 +0,0 @@ -======================= -Refstack Specifications -======================= - -This folder is used to hold design specifications for additions -to the RefStack project. Reviews of the specs are done in gerrit, using a -similar workflow to how we review and merge changes to the code itself. - -The layout of this folder is as follows:: - - specs// - specs//approved - specs//implemented - -The lifecycle of a specification --------------------------------- - -Specifications are proposed by adding an .rst file to the -``specs//approved`` directory and posting it for review. You can -find an example specification in ``/specs/template.rst``. - -Once a specification has been fully implemented, meaning a patch has landed, -it will be moved to the ``implemented`` directory and the corresponding -blueprint will be marked as complete. - -`Specifications are only approved for a single release`. If a specification -was previously approved but not implemented (or not completely implemented), -then the specification needs to be re-proposed by copying (not move) it to -the right directory for the current release. - -Previously approved specifications ----------------------------------- - -The RefStack specs directory was re-structured during the Mitaka cycle. 
- -Therefore, the specs approved and implemented prior to the Mitaka cycle will be -saved in the ``specs/prior/`` directories. - -Others ------- - -Please note, Launchpad blueprints are still used for tracking the status of the -blueprints. For more information, see:: - - https://wiki.openstack.org/wiki/Blueprints - https://blueprints.launchpad.net/refstack - -For more information about working with gerrit, see:: - - http://docs.openstack.org/infra/manual/developers.html#development-workflow - -To validate that the specification is syntactically correct (i.e. get more -confidence in the Jenkins result), please execute the following command:: - - $ tox diff --git a/specs/mitaka/implemented/product-registration-api.rst b/specs/mitaka/implemented/product-registration-api.rst deleted file mode 100755 index 62fdfbf4..00000000 --- a/specs/mitaka/implemented/product-registration-api.rst +++ /dev/null @@ -1,426 +0,0 @@ -============================ -Product Registration API -============================ - -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/vendor-result-validation - -Requirement document: https://goo.gl/bvo4FG - -Data model document: https://goo.gl/zWYnoq - -Based on the blueprint and requirement documents listed above, this spec -defines the REST APIs needed to support the product registration process. - - -Problem description -=================== - -As RefStack implements the vendor and product registration process, additional -REST APIs are needed for management of the newly added entities. This spec -will focus on the product management APIs. - - -Proposed change -=============== - -Add new REST APIs to the RefStack v1 API to support the following: - -* Create a product - - Any RefStack authenticated user can create a product. - -* Delete a product - - Foundation admins or admins in this vendor can delete the product records. - -* Update a product record - - Foundation admins or admins in this vendor can make updates to the product - records. - -* List products - - All RefStack users can list (view) publicly available product records with - limited details. Foundation admins and vendor admins can retrieve full - detail information of the products. - - -Alternatives ------------- - -Direct access to the database to retrieve test records. Open to suggestions. - -Data model impact ----------------- - -None - -REST API impact --------------- - -The following REST APIs will be added to RefStack. - -**List products** - -* Description: - - This API will be used to list the products in RefStack. By default, the - response will include all product records that the user has privilege to - retrieve. The result list will be sorted by name in ascending alphabetical - order. At the time of this writing, the number of products that will be - registered in RefStack is expected to be small. Therefore, no - result-limiting features such as pagination or filtering are implemented. - More sophisticated filtering, sorting and pagination features may be added in - the future. - - **Note:** A "list products with detail" REST API will also be added later. - Foundation and vendor admins can use this API to obtain additional private - product information such as product record created date, created by user, - etc.
- -* Method type: GET - -* URI: v1/products/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - -* Request parameters: N/A - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - This response may include all publicly shared and private product records - that the requester has privilege to retrieve. - - .. parsed-literal:: - { - "products": [ - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Product ABC", - "description" : "My description", - "product_id" : "7e0072fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 1, - "public" : true, - "type" : 0, - "can_manage" : false, - "organization_id" : "69346866-307f-4052-ba31-ff6270635e19" - }, - { - "id" : "78346866-307f-4052-ba31-ff6270635e19", - "name" : "Product EFG", - "description" : "My description", - "product_id" : "8c9u72fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 0, - "public" : true, - "type" : 1, - "can_manage" : false, - "organization_id" : "87346866-307f-4052-ba31-ff6270635e19" - }, - { - "id" : "12346866-307f-4052-ba31-ff6270635e19", - "name" : "Product HIJ", - "description" : "My description", - "product_id" : "987672fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 2, - "public" : true, - "type" : 0, - "can_manage" : false, - "organization_id" : "77346866-307f-4052-ba31-ff6270635e19" - }, - ...... - ] - } - - -**Show product details** - -* Description: This API will be used to retrieve the detail information of a - particular product. -* Method type: GET -* URI: v1/products/{id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | ID to retrieve data. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - The response data will be filtered depending on whether the requester is a - foundation admin or an admin user of the vendor which owns the product. - - * Response for non-foundation or none-vendor admins: - - .. parsed-literal:: - { - { - "id" : "12346866-307f-4052-ba31-ff6270635e19", - "name" : "Product HIG", - "description" : "My description", - "product_id" : "987672fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 2, - "public" : true, - "type" : 0, - "can_manage" : false, - "organization_id" : "77346866-307f-4052-ba31-ff6270635e19" - } - } - - * Response for foundation or vendor admin users: - - .. parsed-literal:: - { - { - "id" : "12346866-307f-4052-ba31-ff6270635e19", - "name" : "Product HIG", - "description" : "My description" - "product_id" : "987672fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 2, - "public" : true, - "properties" : "some text" - "created_at": "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at": "2016-02-02 08:42:25", - "type" : 0, - "can_manage" : true, - "organization_id" : "77346866-307f-4052-ba31-ff6270635e19" - } - } - -**Create product** - -* Description: - - This API will be used to create a product in RefStack. Any RefStack - authenticated user can create a product. Per current RefStack design, a - product must be owned by a vendor. 
Therefore, if a vendor owner is not - specified at the time when the product is created, a default private vendor - will be created with the requester being assigned as the newly created - vendor's admin user. By default, a product will be created as private. - -* Method type: POST - -* URI: v1/products/ - -* Normal Response Codes: - - * Created (201) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: N/A - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "name" : "ABC", - "description" : "My description", - "product_type" : 2, - "organization_id" : "95346866-307f-4052-ba31-ff6270635e14", - "required": ["name", "product_type"] - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - "id" : "345676866-307f-4052-ba31-ff6270635f20" - } - -**Update product** - -* Description: - - This API will be used to update the fields of a product in RefStack. Only - foundation admins or admin users of this vendor can perform update on a - product record. - -* Method type: PUT - -* URI: v1/products/{id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | ID for update. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - { - "name" : "Product EFG", - "description" : "My description", - "product_id" : "987672fb-a3e9-4901-82cd-9a3a911507d8", - "public" : true, - "properties" : "some text", - "required": [] - } - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Product EFG", - "description" : "My description", - "product_id" : "987672fb-a3e9-4901-82cd-9a3a911507d8", - "product_type" : 2, - "public" : true, - "properties" : "some text", - "created_at": "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at": "2016-02-02 08:42:25", - "type" : 0, - "can_manage" : true, - "organization_id" : "77346866-307f-4052-ba31-ff6270635e19" - } - } - - -**Delete product** - -* Description: - - This API will be used to delete a product in RefStack. Foundation admins and - admin users of this vendor can delete a product. - -* Method type: DELETE - -* URI: v1/products/{id} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | ID to be removed. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. 
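As a rough illustration of the product APIs defined above, the sketch below creates a product and then reads it back with the ``requests`` library (already listed in the deleted requirements.txt). The base URL, the authenticated session handling and the field values are assumptions for the example, not part of the API definition::

    import requests

    BASE = 'https://refstack.example.org/v1'   # assumed deployment URL
    session = requests.Session()               # assumed to carry an authenticated session cookie

    # Create a product (POST v1/products); 'name' and 'product_type' are required.
    body = {'name': 'ABC', 'description': 'My description', 'product_type': 2}
    product_id = session.post(BASE + '/products', json=body).json()['id']

    # Retrieve the product details (GET v1/products/{id}).
    details = session.get(BASE + '/products/' + product_id).json()
    print(details['name'], details['public'])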
- -Other end user impact --------------------- - -None - -Performance Impact ------------------ - -None - -Other deployer impact --------------------- - -None - -Developer impact ---------------- - -None - -Implementation -============== - -Assignee(s) ----------- - -Primary assignee: - Andrey Pavlov - -Other contributors: - TBD - -Work Items ---------- - -* Create the REST APIs. - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/mitaka/implemented/rsa-key-existence-check.rst b/specs/mitaka/implemented/rsa-key-existence-check.rst deleted file mode 100755 index e6102265..00000000 --- a/specs/mitaka/implemented/rsa-key-existence-check.rst +++ /dev/null @@ -1,142 +0,0 @@ -============================================== -RSA Key Existence Check for Signed Data Upload -============================================== - -Launchpad blueprint: - - -RefStack recently added features to enable the uploading of data with a key. -Currently, RefStack accepts the uploaded data regardless of whether -the public keys exist in RefStack or not. This document describes the -validation process update needed to ensure that RefStack only accepts -data with keys that were previously imported into RefStack. - - -Problem description -=================== - -Currently, the RefStack API server would accept the uploaded data regardless -of whether the keys exist in RefStack or not. More importantly, those keys are -used to associate the test data with the users. And, there is no enforcement -that the keys used for data uploading must exist in RefStack. In addition, -for security reasons, keys are expected to be updated from time to time. -As a consequence of the non-existing or updated keys, some data will be -inaccessible. - - -Proposed change -=============== - -* RefStack API servers will check whether the key used to upload data exists in - the 'pubkeys' table and reject the data if it does not. Note that this method - of checking is possible because RefStack currently enforces a policy such - that there are no duplicate public keys in the database. This implies that - no two users can have the same public key uploaded, key-pairs cannot be - shared, and if a user creates a new openstackid account, he/she would have to - use a different key or delete the public key from his/her old account. - -* RefStack then associates the data with the user ID of the key owner by adding, - in the "meta" table, a "meta_key" named "user" with value being the "openid" - from the "user" table. - - -Alternatives ------------- - -Alternatively, if RefStack wants to allow for key sharing among users in the -future, an additional user identifier parameter such as user email is needed, -besides the key, for data uploading. In this case, RefStack will check for -the existence of the key in the user's profile. - -As for orphan data management, RefStack may want to implement a limited -lifetime policy for data without an owner associated with them. - -Open to other suggestions. - - -Data model impact ------------------ - -None. - -There is no data model change needed.
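A minimal sketch of the check proposed above follows; the helper names (``get_pubkey``, ``store_meta_item``) are hypothetical and only illustrate the flow: look the uploaded key up in the 'pubkeys' table, reject the upload if it is absent, and otherwise record the owner's openid under a "user" key in the 'meta' table::

    def validate_signed_upload(raw_public_key, test_id, db):
        # Reject data signed with a key that was never imported into RefStack.
        pubkey = db.get_pubkey(raw_public_key)        # hypothetical lookup in the 'pubkeys' table
        if pubkey is None:
            raise ValueError('Public key is not registered in RefStack')

        # Associate the result with the key owner via the 'meta' table.
        db.store_meta_item(test_id, 'user', pubkey.openid)   # hypothetical helper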
- - -REST API impact --------------- - -None - - -Security impact --------------- - -None - -Notifications impact -------------------- - -None - -Other end user impact --------------------- - -None - -Performance Impact ------------------ - -None - -Other deployer impact --------------------- - -None - - -Developer impact ---------------- - -None - - -Implementation -============== - -Assignee(s) ----------- - -Primary assignee: - TBD - -Other contributors: - TBD - -Work Items ---------- - -* The RefStack API server will need to validate the existence of the key in RefStack - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/mitaka/implemented/use-url-as-cpid.rst b/specs/mitaka/implemented/use-url-as-cpid.rst deleted file mode 100755 index 94ce8825..00000000 --- a/specs/mitaka/implemented/use-url-as-cpid.rst +++ /dev/null @@ -1,149 +0,0 @@ -============================================== -Use Cloud URL as the Cloud Provider ID (CPID) -============================================== - -Launchpad blueprint: - -This spec proposes that RefStack add a method that uses the cloud -access URL as the base for generating the CPID. - - -Problem description -=================== - -As defined in the "Test Submission API should use Target Identity -Endpoint UUID" specification (refstack-org-api-cloud-uuid.rst), RefStack -currently uses the cloud's Identity (Keystone) UUID as the CPID. - -For the Keystone V2 API, this ID can be the ID of any one of the -three access endpoints, namely the admin, public or private endpoints. However, -for the Keystone V3 API, this ID is the ID of the Keystone service. Furthermore, -when testing a distro product, the Identity ID will be different every time -a cloud is stood up, regardless of whether this cloud is built by the -same person, with exactly the same OpenStack code and configuration. In such -circumstances, multiple CPIDs could represent a single cloud. - -We have also encountered some cases where the cloud's Keystone does not even -return the identity service ID in the tokens it returns. In addition, there -is a recent request for RefStack to support uploading test results that were -not collected using refstack-client. This type of data, in subunit format, -won't have a CPID created at testing time. RefStack should provide a method -to generate the CPID without the need to re-connect to the cloud. - - -Proposed change -=============== - -In addition to the current practice of using the different types of Identity -IDs for the CPID, RefStack should add support for generating the -CPID based on the cloud URL. This will also be used as the failover method -for the CPID. - - -Alternatives ------------- - -For consistency, RefStack should consider using only the cloud access URL -to generate the UUID for the CPID. As a consequence, RefStack would no longer have to -keep track of and adjust to changes in the Keystone client and API for retrieving -the CPID. - -Open to other suggestions. - - -Data model impact ------------------ - -None. - -There is no data model change needed. - - -REST API impact --------------- - -None - - -Security impact --------------- - -None - -Notifications impact -------------------- - -None - -Other end user impact --------------------- - -With this failover addition, refstack-client should never again fail due to -a CPID retrieval error. This also allows RefStack to provide users with an -option to upload data in subunit format.
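One possible way to derive a CPID from the cloud URL, shown only to illustrate the proposal (the normalization rules and the choice of hash are assumptions, not part of this spec), is to hash the Keystone endpoint::

    import hashlib

    def cpid_from_url(auth_url):
        # Normalize the endpoint so trivial differences do not change the CPID.
        url = auth_url.strip().rstrip('/').lower()
        # A stable hex digest of the URL serves as the failover CPID.
        return hashlib.sha256(url.encode('utf-8')).hexdigest()

    print(cpid_from_url('https://keystone.example.com:5000/v3/'))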
- - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -There is possibility of CPIDs being the same for two different clouds. -This can happen primarily in the private address space, where people may -have use the same IP address such as 192.168.*.* (or whatever commonly used -default addresses) for keystone address. Since this likely won't be the case -with actual production clouds and it is a last resort, we are okay with this -possibility. - -Furthermore, RefStack is no longer completely dependent on whether or not -the cloud's Keystone even returns the Identity service ID in the tokens it -returns. - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - TBD - -Other contributors: - TBD - -Work Items ----------- - -* Develop code to generate CPID based on access URL - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/mitaka/implemented/user-group.rst b/specs/mitaka/implemented/user-group.rst deleted file mode 100755 index 10ca031f..00000000 --- a/specs/mitaka/implemented/user-group.rst +++ /dev/null @@ -1,192 +0,0 @@ -================================= -User Group Support in RefStack -================================= - -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/vendor-result-validation - -Requirement document: https://goo.gl/bvo4FG - -Data model document: https://goo.gl/zWYnoq - -This spec proposes RefStack to add user group support. - - -Problem description -=================== - -As RefStack implements the vendor/product entities, RefStack needs to allow -management and visibility of these entities to a group of users not just the -user who creates the entities. - - -Proposed change -=============== - -Add the following tables to the RefStack database: - -* A table named "group". -* A table named "user_to_group". - -Details about these tables are described in the "Data model impact" section. - -Add methods to support: - -* Add a user to a group by inserting a record into the "user_to_group" table. -* Remove a user from a group - -**Note:** - -* Only an interop user or a user in this group can perform the action of adding a user to a group. -* Only an interop user, a user in this group, or the user himself/herself can remove a user from the group. -* In the current implementation, all users in a group are admin users with CRUD privilege. - - -Alternatives ------------- - -Alternatively, a 'role' column can be added to the user_to_group table to support -having users with different roles in a group. The various 'roles' can be -defined in a policy file. - -Open to other suggestions. - -Data model impact ------------------ -The following tables will be added to the RefStack database. 
- -* "group" table - - +------------------------+-------------+----------+ - | Column | Type | | - +========================+=============+==========+ - | created_at | datetime | | - +------------------------+-------------+----------+ - | updated_at | datetime | | - +------------------------+-------------+----------+ - | deleted_at | datetime | | - +------------------------+-------------+----------+ - | deleted | int(11) | | - +------------------------+-------------+----------+ - | id | varchar(36) | PK | - +------------------------+-------------+----------+ - | name | varchar(80) | | - +------------------------+-------------+----------+ - | description | text | | - +------------------------+-------------+----------+ - - **Note:** - - The values in the "id" column are GUIDs generated with UUID4. - -* "user_to_group" table - - +------------------------+-------------+----------+ - | Column | Type | | - +========================+=============+==========+ - | created_at | datetime | | - +------------------------+-------------+----------+ - | updated_at | datetime | | - +------------------------+-------------+----------+ - | deleted_at | datetime | | - +------------------------+-------------+----------+ - | deleted | int(11) | | - +------------------------+-------------+----------+ - | created_by_user | varchar(128)| | - +------------------------+-------------+----------+ - | _id | int(11) | PK | - +------------------------+-------------+----------+ - | group_id | varchar(36) | FK | - +------------------------+-------------+----------+ - | user_openid | varchar(128)| FK | - +------------------------+-------------+----------+ - - **Note:** - - Since more than one users (an interop user or a user in this group) can add - a user to a group, the created_by_user field was added for auditing purpose. - - -REST API impact ---------------- - -None. - -No REST API will be implemented in the initial phase because a group will only -be created implicitly when an organization is created. No "group management" -features will be exposed to the end users. - -Security impact ---------------- - -Previously private entities such as test results can only be viewed/managed by -the owner user. The group implementation allows a group of users to -view/manage those entities. - -Notifications impact --------------------- - -None, for the initial implementation. In the future, RefStack may want to -notify the related parties (users or organizations) whenever a user is added to -or removed from a group. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Andrey Pavlov - -Other contributors: - TBD - -Work Items ----------- - -* Create database tables. -* Create the specified private methods. 
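A rough SQLAlchemy sketch of the two tables described in the Data model impact section above is given below (assuming SQLAlchemy 1.4+). Column names and sizes follow the tables; the declarative base and the soft-delete handling used by RefStack itself are assumptions here, not the project's actual models::

    import sqlalchemy as sa
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()   # RefStack's own model base class is assumed

    class Group(Base):
        __tablename__ = 'group'
        id = sa.Column(sa.String(36), primary_key=True)   # UUID4 GUID
        name = sa.Column(sa.String(80))
        description = sa.Column(sa.Text)
        created_at = sa.Column(sa.DateTime)
        updated_at = sa.Column(sa.DateTime)
        deleted_at = sa.Column(sa.DateTime)
        deleted = sa.Column(sa.Integer)

    class UserToGroup(Base):
        __tablename__ = 'user_to_group'
        _id = sa.Column(sa.Integer, primary_key=True)
        group_id = sa.Column(sa.String(36), sa.ForeignKey('group.id'))
        user_openid = sa.Column(sa.String(128))       # FK to the user table in the real schema
        created_by_user = sa.Column(sa.String(128))   # audit: who added the membership
        created_at = sa.Column(sa.DateTime)
        updated_at = sa.Column(sa.DateTime)
        deleted_at = sa.Column(sa.DateTime)
        deleted = sa.Column(sa.Integer)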
- - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/mitaka/implemented/vendor-registration-data-model.rst b/specs/mitaka/implemented/vendor-registration-data-model.rst deleted file mode 100755 index 485c3df4..00000000 --- a/specs/mitaka/implemented/vendor-registration-data-model.rst +++ /dev/null @@ -1,237 +0,0 @@ -================================================= -Database Tables for Vendor Registration Support -================================================= - -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/vendor-result-validation - -Requirement document: https://goo.gl/bvo4FG - -Data model document: https://goo.gl/zWYnoq - -Based on the blueprint and requirement documents listed above, this spec is the -first of a series of specifications that will be defined for RefStack to -implement the vendor registration process. This spec will mainly focus on -the data model aspect of the vendor registration implementation. - - -Problem description -=================== - -As RefStack implements the vendor/product registration process, additional -database tables are needed to store the newly added entities such as vendor, -cloud provider, etc. Based on the object model described in the requirement -document, this spec defines the tables and the basic methods/functions needed -to manage them. - - -Proposed change -=============== - -The following tables will be added to the RefStack database: - -* A table named "organization" - - The organization table will store the data representing entities such as - Software Vendors, Cloud Operators, OpenStack Foundation, etc. The various - types of entities stored in this table will be differentiated by the - values stored in the "type" column. These values are pre-defined in a set - of constants (enum) with descriptive names. For example: 1 = foundation, - 2 = official_vendor, 3 = private_vendor, etc. There will be only one - organization with the type of "foundation" in a RefStack instance. This - organization will be created by the RefStack admin. - -* A table named "product" - - This table will contain the product information. Each product must be owned - by a vendor. A "product_type" column will be used to identify the different - types of products. The types of products are pre-defined constants (enum) - with descriptive names as defined in the OpenStack Marketplace - ( http://www.openstack.org/marketplace/). For example: 1 = distro, - 2 = public_cloud, 3 = hosted_private_cloud, etc. - -Details about these tables are described in the "Data model impact" section. - -The following methods will be added: - -* Methods to add/remove a vendor and its associated attributes. -* Methods to add/remove a product and its associated attributes. - -Alternatives ------------- - -Auditability is not included in the current implementation. RefStack should -require at least some logging/auditing capability. While RefStack can add richer -auditability features overtime incrementally, at the minimum an updated_by_user -column should be added to the tables to log the last update activity made on an -organization or product entity. - -Open to other suggestions. - -Data model impact ------------------ -The following tables will be added to the RefStack database. 
- -* "organization" table - - +------------------------+-------------+----------+ - | Column | Type | | - +========================+=============+==========+ - | created_at | datetime | | - +------------------------+-------------+----------+ - | deleted_at | datetime | | - +------------------------+-------------+----------+ - | deleted | int(11) | | - +------------------------+-------------+----------+ - | updated_at | datetime | | - +------------------------+-------------+----------+ - | created_by_user | varchar(128)| FK | - +------------------------+-------------+----------+ - | id | varchar(36) | PK | - +------------------------+-------------+----------+ - | name | varchar(80) | | - +------------------------+-------------+----------+ - | description | text | | - +------------------------+-------------+----------+ - | type | int(11) | | - +------------------------+-------------+----------+ - | group_id | varchar(36) | FK | - +------------------------+-------------+----------+ - | properties | text | | - +------------------------+-------------+----------+ - - -* "product" table - - +------------------------+-------------+----------+ - | Column | Type | | - +========================+=============+==========+ - | created_at | datetime | | - +------------------------+-------------+----------+ - | deleted_at | datetime | | - +------------------------+-------------+----------+ - | deleted | int(11) | | - +------------------------+-------------+----------+ - | updated_at | datetime | | - +------------------------+-------------+----------+ - | created_by_user | varchar(128)| FK | - +------------------------+-------------+----------+ - | id | varchar(36) | PK | - +------------------------+-------------+----------+ - | name | varchar(80) | | - +------------------------+-------------+----------+ - | description | text | | - +------------------------+-------------+----------+ - | product_id | varchar(36) | | - +------------------------+-------------+----------+ - | type | int(11) | | - +------------------------+-------------+----------+ - | product_type | int(11) | | - +------------------------+-------------+----------+ - | public | tinyint(1) | | - +------------------------+-------------+----------+ - | organization_id | varchar(36) | FK | - +------------------------+-------------+----------+ - | properties | text | | - +------------------------+-------------+----------+ - - - **Notes:** - - The value of the product_id field is used for storing a secondary ID to - provide additional information about the cloud, such as a hash of the cloud - access URL. product_id can be initialized at product creation time or later. - - The values in the "public" column are boolean numbers indicating whether the - products are privately or publicly visible. - - Ideally, the "deleted" column should be of type tinyint(1) (which is a - boolean in SQLAlchemy). Int(11) is used here for being consistent with Oslo. - - The product_type column will store the pre-defined constants (enum) with - descriptive names as defined in the OpenStack Marketplace - ( http://www.openstack.org/marketplace/). For example: 1 = distro, - 2 = public_cloud, 3 = hosted_private_cloud, etc. - - The values in the "type" column are used by RefStack to identity the type of - the vendor object. - - -REST API impact ---------------- - -None at the database level. - - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None, for the initial implementation. 
In the future, RefStack may want to notify the related parties -(users or organizations) when updates are made to these tables. - - -Other end user impact --------------------- - -None - -Performance Impact ------------------ - -None - -Other deployer impact --------------------- - -None - -Developer impact ---------------- - -None - -Implementation -============== - -Assignee(s) ----------- - -Primary assignee: - Andrey Pavlov - -Other contributors: - TBD - -Work Items ---------- - -* Create the tables. -* Create the defined methods. - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/mitaka/implemented/vendor-user-management-api.rst b/specs/mitaka/implemented/vendor-user-management-api.rst deleted file mode 100755 index 85c42ea9..00000000 --- a/specs/mitaka/implemented/vendor-user-management-api.rst +++ /dev/null @@ -1,260 +0,0 @@ -========================== -Vendor User Management API -========================== -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/vendor-result-validation - -Requirement document: https://goo.gl/bvo4FG - -Data model document: https://goo.gl/zWYnoq - -Based on the blueprint and requirement documents listed above, this -specification is among a group of specifications that are defined for RefStack -to implement the vendor registration process. - - -Problem description -=================== - -RefStack needs to allow management of the vendor entity by a group of users, not -just the users who create the vendors. This specification defines the REST APIs -needed to manage the users in a vendor. - - -Proposed change -=============== - -Add new REST APIs to support the following: - -* List users in vendor - - Only foundation admins or admins in this vendor can request a list of - the users belonging to this vendor. - -* Add user to vendor - - Only foundation admins or admins in this vendor can add a user to a vendor. - -* Remove user from vendor - - Only foundation admins or admins in this vendor can remove a user from a - vendor. In addition, a user can remove himself/herself from a vendor. - - -Alternatives ------------- - -Since RefStack currently does not expose the "group" entity to the end users, -user management REST APIs are provided at the vendor level. In the future, if -RefStack decides to support "group" management, then the APIs defined in this -specification can be updated by replacing the "vendor" entity with the "group" -entity. - - -Data model impact ------------------ - -None - -REST API impact --------------- - -The following REST APIs will be added to RefStack. - -**List users in vendor** - -* Description: - - This API will be used by the OpenStack Foundation and vendor - admins to list the users of a vendor. Note: currently the number of users - in a vendor is expected to be small, so no filter option will be implemented. - However, in the future, as the number of users increases, RefStack may want - to add filter options (such as filtering by name) to limit the amount of - returned data.
- -* Method type: GET - -* URI: v1/vendors/{vendor_id}/users - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID to retrieve user list. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - "users": [ - { - "fullname" : "John Doe", - "email" : "john_doe@compay1", - "openid" : "https://openstackid.org/john.doe" - }, - { - "fullname" : "Jane Roe", - "email" : "jane_roe@compay2", - "openid" : "https://openstackid.org/jane.roe" - }, - ...... - ] - } - - -**Add user to vendor** - -* Description: - - This API will be used by the OpenStack Foundation and vendor - admins to add a user to a vendor. - -* Method type: PUT - -* URI: v1/vendors/{vendor_id}/users/{encoded_openid} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID to add user to. | - +---------------+-------+--------------+-----------------------------------+ - | encoded_openid| URI | xsd:string | Base64 encoded user's OpenStack | - | | | | OpenID | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - - -**Remove user from vendor** - -* Description: - - This API will be used by the OpenStack Foundation and vendor - admins to remove a user from a vendor. - -* Method type: DELETE - -* URI: v1/vendors/{vendor_id}/users/{encoded_openid} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID to remove user from. | - +---------------+-------+--------------+-----------------------------------+ - | encoded_openid| URI | xsd:string | Base64 encoded user's OpenStack | - | | | | OpenID | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Andrey Pavlov - -Other contributors: - TBD - -Work Items ----------- - -* Create the REST APIs. 
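The add and remove operations above identify the user by a base64 encoding of the OpenStack OpenID. A brief illustration follows; the base URL, the authenticated admin session and the use of URL-safe base64 are assumptions for the example::

    import base64
    import requests

    BASE = 'https://refstack.example.org/v1'   # assumed deployment URL
    session = requests.Session()               # assumed authenticated admin session

    openid = 'https://openstackid.org/john.doe'
    encoded = base64.urlsafe_b64encode(openid.encode('utf-8')).decode('ascii')
    vendor_id = '95346866-307f-4052-ba31-ff6270635e14'

    # Add the user to the vendor, then remove them again.
    session.put('%s/vendors/%s/users/%s' % (BASE, vendor_id, encoded))
    session.delete('%s/vendors/%s/users/%s' % (BASE, vendor_id, encoded))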
- - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/newton/implemented/defcore-guideline-api.rst b/specs/newton/implemented/defcore-guideline-api.rst deleted file mode 100755 index 77c45ac6..00000000 --- a/specs/newton/implemented/defcore-guideline-api.rst +++ /dev/null @@ -1,281 +0,0 @@ -====================== -Defcore Guideline API -====================== - -This spec defines the REST APIs needed to support the retrieval of DefCore -Guideline files and the test lists included in these files. - - -Problem description -=================== - -The DefCore Guideline files contain many different types of test lists such as -required test lists, advisory test lists, etc. Very often, these lists are used -by RefStack users to test only the tests that they are interested in. -For each Guideline file, there is a corresponding directory which holds files -that contain the required or flagged tests (see example [1]). Since the test -lists in these files can change from time to time due to test name changes [2] -or addition of flagged tests [3], it is useful for RefStack to provide -REST APIs so that users can dynamically retrieve the test lists with the latest -updates in the Guideline files as needed. - -[1] https://opendev.org/openinfra/interop/src/branch/master/2016.01/ -[2] https://review.opendev.org/290689/ -[3] https://review.opendev.org/215263/ - -Proposed change -=============== - -RefStack to provide the REST APIs as described in the "REST API Impact" section -to retrieve the Guideline files and the test lists included in these files. - - -Alternatives ------------- - -Users to continue using the required and flagged test list files in the DefCore -repository. - -Data model impact ------------------ - -None - -REST API impact ---------------- - -The following REST APIs will be added to RefStack. - -**List DefCore Guideline files** - -* Description: - - List the names of the DefCore Guideline files - -* Method type: GET - -* URI: v1/guidelines/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - -* Request parameters: - - N/A - -* JSON schema definition for the body data: - - N/A - -* Schema definition for the response data: - - .. parsed-literal:: - [ - "2015.03.json", - "2015.04.json", - ... - ] - - -**Show Guideline file details** - -* Description: - - This API will be used to retrieve the content of a - DefCore Guideline file. - -* Method type: GET - -* URI: v1/guidelines/{name} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | name | URI | xsd:string | The name of the Guideline file | - | | | | such as "2015.04". | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - N/A - -* JSON schema definition for the response data: - - See DefCore Guideline JSON schema - https://opendev.org/openinfra/interop/src/master/doc/source/schema - - -**List tests** - -* Description: - - This API will be used to list the tests included in the Guideline. 
By default, - this API will return all the tests included in the required, advisory, - deprecated and removed categories. - -* Method type: GET - -* URI: v1/guidelines/{name}/tests - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | type | query | xsd:string | Type can be a comma separated list| - | (optional) | | | of required, advisory, deprecated | - | | | | and removed. Default is to | - | | | | retrieve test list of all types. | - +---------------+-------+--------------+-----------------------------------+ - | alias | query | xsd:string | Set alias=true (default) to | - | (optional) | | | include alias test names in the | - | | | | response test list. | - | | | | alias=false will exclude the alias| - | | | | test names. | - +---------------+-------+--------------+-----------------------------------+ - | flag | query | xsd:string | Set flag=true (default) to include| - | (optional) | | | flagged test names in the | - | | | | response test list. | - | | | | flag=false will not include | - | | | | flagged tests. | - +---------------+-------+--------------+-----------------------------------+ - | target | query | xsd:string | Use this parameter to retrieve the| - | (optional) | | | test lists for a target program. | - | | | | Current valid values include the | - | | | | following: | - | | | | | - | | | | - platform (default) | - | | | | - compute | - | | | | - object-storage | - +---------------+-------+--------------+-----------------------------------+ - - **Note** - - More information about OpenStack Target Programs can be found at - http://www.openstack.org/brand/interop/ . - - **Examples** - - * Get the required test list including alias and flagged tests. - - `v1/guidelines/2016.01/tests?type=required` - - * Get the required test list including alias but excluding flagged tests. - - `v1/guidelines/2016.01/tests?type=required&flag=false` - - * Get the required and advisory tests for the OpenStack Powered Compute - program, including alias but excluding flagged tests - - `v1/guidelines/2016.01/tests?type=required,advisory&flag=false&target=compute` - - -* JSON schema definition for the body data: - - N/A - -* Schema definition for the response data: - - The response is a straight list of tests so that users can immediately use the file - as-is for testing with refstack-client. - - .. parsed-literal:: - tempest.api.compute.images.test_list_images.ListImagesTestJSON.test_get_image[id-490d0898-e12a-463f-aef0-c50156b9f789] - tempest.api.compute.images.test_list_images.ListImagesTestJSON.test_list_images[id-fd51b7f4-d4a3-4331-9885-866658112a6f] - .... - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Paul Van Eck - -Other contributors: - TBD - -Work Items ----------- - -* Create the REST APIs. 
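To show how the query parameters above combine in practice, here is a minimal client-side sketch that downloads a test list and stores it in a file suitable for passing to refstack-client. The base URL and output file name are illustrative assumptions, not part of this spec.

.. code-block:: python

    import requests

    REFSTACK_API = "https://refstack.openstack.org/api/v1"  # assumed base URL


    def fetch_test_list(guideline="2016.01", types="required",
                        flag="false", target="compute"):
        """Return the plain-text test list for one guideline and target program."""
        params = {"type": types, "flag": flag, "target": target}
        resp = requests.get(f"{REFSTACK_API}/guidelines/{guideline}/tests",
                            params=params)
        resp.raise_for_status()
        return resp.text  # newline-separated test IDs, per the response schema


    if __name__ == "__main__":
        # Save the list so it can be fed to refstack-client as a test list file.
        with open("compute-required-tests.txt", "w") as f:
            f.write(fetch_test_list())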
- - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/newton/implemented/product-version-datamodel-api.rst b/specs/newton/implemented/product-version-datamodel-api.rst deleted file mode 100755 index c9e6e51d..00000000 --- a/specs/newton/implemented/product-version-datamodel-api.rst +++ /dev/null @@ -1,484 +0,0 @@ -==================================== -Product Version Data Model and API -==================================== - -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/marketplace-product-model - -Based on the requirements listed in the blueprint, this spec defines the -database and API updates needed to support the product model used by the -OpenStack Marketplace. - - -Problem description -=================== - -In RefStack, a product is an entry saved in the "product" table. Currently, -RefStack does not support the model where a product may have one to many -product versions. RefStack needs to update its database and object models to -meet the product model described in the blueprint. - -RefStack interprets OpenStack Marketplace product definition and requirement -as following: - -* Model: - - * Each product will have one to many versions. - -* Vocabulary: - - * product: a product is an entry on the marketplace website. For example: - "IBM Spectrum Scale for Object Storage" [1] is a product. - - * version: version is unique name/number denoting a particular state of a - product. A product may have one to many versions. For example: - "IBM Spectrum Scale for Object Storage" has release 4.1.1, 4.2.0, 4.2.1, - etc. While a public cloud may have only one version. In this case, the - version name can be null. Note that the term version and release are used - interchangeably. - -* Use cases: - - * User can create a new product after log into to - https://refstack.openstack.org/#/. - * User can adds new versions to an existing product. - -[1] https://www.openstack.org/marketplace/distros/distribution/ibm/ibm-spectrum-scale-for-object-storage - - -Proposed change -=============== - - -* Add a new table named "product_version" and methods to access it. - - This table will store the version information of a product. Detailed - information about this table is described in the "Data model impact" - section. - -* Add new REST APIs ( get/create/update/delete ) to operate on the product - version resource. - - -**Note** - -Currently, RefStack has not implemented "user role" to differentiate the roles -of users in a "user group". As such, at this time, all users in a "user group" -would be admin users. - -Alternatives ------------- - -There is no appropriate alternative found to model the "1 to N" relationship -between product and its versions. - -There is suggestion that this can be achieved by simply adding a "version" -column to the "product" table. This is the most simple implementation with -minimum changes. Unfortunately, it does not support the required "1 to N" -relationship because an entry with user input product information will be -created each time. This is regardless of whether the user wants to create a -new product or a new version for an existing product. - -With this approach, each product is an entry in the "product" table with -columns: product_id, name, version (and many other columns that are not -relevant to this discussion). While "product_id" is created uniquely by -the system, "name" and "version" are user input fields. 
A row with two users -input fields are created each time for a new product or a new version for an -existing product. - - -.. |reg| unicode:: U+00AE .. REGISTERED SIGN - -In the following examples, Are "ABC OpenStack\ |reg|." and "ABC OpenStack" one or two -different products? - -* It could be one product because the users had made a mistake when creating a - new version for the existing "ABC OpenStack\ |reg|." product. -* It could also be two products, since the 2 names are not the same. - -Such kind of data integrity and consistency issues should be avoid whenever -possible with appropriate database design and/or business layer code. - -========== =================== ======= -product_id Name Version -========== =================== ======= -11111 ABC OpenStack |reg| v6.0 -22222 ABC OpenStack v7.0 -33333 ABC OpenStack |reg| v8.0 -========== =================== ======= - -Data model impact ------------------ - -* Add a product_version table - - +------------------------+-------------+----------+ - | Column | Type | | - +========================+=============+==========+ - | created_at | datetime | | - +------------------------+-------------+----------+ - | deleted_at | datetime | | - +------------------------+-------------+----------+ - | deleted | int(11) | | - +------------------------+-------------+----------+ - | updated_at | datetime | | - +------------------------+-------------+----------+ - | created_by_user | varchar(128)| FK | - +------------------------+-------------+----------+ - | id | varchar(36) | PK | - +------------------------+-------------+----------+ - | version | varchar(30) | | - +------------------------+-------------+----------+ - | product_id | varchar(36) | FK | - +------------------------+-------------+----------+ - | cpid | varchar(36) | | - +------------------------+-------------+----------+ - - - ** Note ** - - * The version field can be blank. This is to support the case where public - cloud may have no version. - * The combination of the version and product_id fields must be unique. - This can be achieved by implementing a compound unique key of - (product_id, version) as UniqueConstraint to provide some level of - duplication protection. - * cpid is the ID of a cloud which is deployed using this product version. - cpid can be blank. - - -REST API impact ---------------- - -The following REST APIs will be added to RefStack. - -**List product versions** - -* Description: - - This API will be used to list all the versions of a product. - -* Method type: GET - -* URI: v1/products/{product_id}/versions - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | product_id | URI | csapi:UUID | ID of a product. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - This response may include versions of all publicly shared and private - products that the requester has privilege to retrieve. Access checking for - version is always done at the product level. - - .. 
parsed-literal:: - { - "versions": [ - { - "id" : "85346866-307f-4052-ba31-ff6270635e14", - "version" : "v1", - "product_id" : "7e0072fb-a3e9-4901-82cd-9a3a911507d8", - "cpid" : "" - }, - { - "id" : "36846866-307f-4052-ba31-ff6270635e19", - "version" : "", - "product_id" : "9u9c72fb-a3e9-4901-82cd-9a3a911507d8", - "cpid" : "69346866-307f-4052-ba31-ff6270635e19" - }, - ...... - ] - } - - -**Show product version details** - -* Description: This API will be used to retrieve the detailed information of a - product version. -* Method type: GET -* URI: v1/products/{product_id}/versions/{version_id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | product_id | URI | csapi:UUID | ID of a product. | - +---------------+-------+--------------+-----------------------------------+ - | version_id | URI | csapi:UUID | ID of a product version. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - The response data will be filtered depending on whether the requester is an - interop admin or an admin user of the vendor which owns the product. - - * Response for non-foundation or none-vendor admins: - - .. parsed-literal:: - { - { - "id" : "85346866-307f-4052-ba31-ff6270635e14", - "version" : "v1", - "product_id" : "7e0072fb-a3e9-4901-82cd-9a3a911507d8", - "cpid" : "" - } - } - - * Response for foundation or vendor admin users: - - .. parsed-literal:: - { - { - "id" : "85346866-307f-4052-ba31-ff6270635e14", - "version" : "v1", - "product_id" : "7e0072fb-a3e9-4901-82cd-9a3a911507d8", - "cpid" : "" - "created_at": "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at": "2016-02-02 08:42:25", - } - } - -**Create product version** - -* Description: - - This API will be used to create a product version. Only interop or vendor - admins of the product can create a product version. - -* Method type: POST - -* URI: v1/products/{product_id}/versions - -* Normal Response Codes: - - * Created (201) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | product_id | URI | csapi:UUID | ID of a product. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "version" : "", - "cpid" : "69346866-307f-4052-ba31-ff6270635e19", - "required": ["version"] - } - - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - "id" : "345676866-307f-4052-ba31-ff6270635f20" - } - -**Update product version** - -* Description: - - This API will be used to update the fields of a product version in RefStack - Only interop admins or admin users of the product vendor can perform update - on a product version record. 
- -* Method type: PUT - -* URI: v1/products/{product_id}/versions/{version_id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | product_id | URI | csapi:UUID | ID of a product. | - +---------------+-------+--------------+-----------------------------------+ - | version_id | URI | csapi:UUID | ID of a product version. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - { - "version" : "", - "cpid" : "69346866-307f-4052-ba31-ff6270635e19", - "required": [] - } - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - { - "id" : "85346866-307f-4052-ba31-ff6270635e14", - "version" : "v1", - "product_id" : "7e0072fb-a3e9-4901-82cd-9a3a911507d8", - "cpid" : "69346866-307f-4052-ba31-ff6270635e19" - "created_at": "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at": "2016-02-02 08:42:25", - } - } - - -**Delete a product version** - -* Description: - - This API will be used to delete a product version in RefStack. Interop admins - and admin users of the product vendor can delete a product version. - -* Method type: DELETE - -* URI: v1/products/{product_id}/versions/{version_id} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | product_id | URI | csapi:UUID | ID of a product. | - +---------------+-------+--------------+-----------------------------------+ - | version_id | URI | csapi:UUID | ID of a product version. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Paul Van Eck - Andrey Pavlov - -Other contributors: - TBD - -Work Items ----------- - -* Create the product version table. -* Create the newly APIs. -* Update RefStack UI to include product version information. - - -Dependencies -============ - -None - - -Testing -======= - -* Add unit tests to verify newly developed code. 
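As a companion to the data model and work items above, the following is a minimal SQLAlchemy sketch of the ``product_version`` table with the compound ``(product_id, version)`` unique key described in the "Data model impact" section. The declarative base, constraint name, and foreign-key wiring are illustrative assumptions rather than RefStack's actual model code.

.. code-block:: python

    import uuid

    from sqlalchemy import Column, DateTime, Integer, String, UniqueConstraint
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()


    class ProductVersion(Base):
        """Sketch of the product_version table described in this spec."""

        __tablename__ = 'product_version'
        # (product_id, version) must be unique, providing duplication protection.
        __table_args__ = (UniqueConstraint('product_id', 'version',
                                           name='uq_product_version'),)

        id = Column(String(36), primary_key=True,
                    default=lambda: str(uuid.uuid4()))
        created_at = Column(DateTime)
        updated_at = Column(DateTime)
        deleted_at = Column(DateTime)
        deleted = Column(Integer, default=0)
        created_by_user = Column(String(128))        # FK to the user table
        product_id = Column(String(36))              # FK to the product table
        version = Column(String(30), nullable=True)  # blank for versionless clouds
        cpid = Column(String(36), nullable=True)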
- - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/newton/implemented/vendor-registration-api.rst b/specs/newton/implemented/vendor-registration-api.rst deleted file mode 100755 index 71d519bf..00000000 --- a/specs/newton/implemented/vendor-registration-api.rst +++ /dev/null @@ -1,571 +0,0 @@ -======================== -Vendor Registration API -======================== - -Launchpad blueprint: https://blueprints.launchpad.net/refstack/+spec/vendor-result-validation - -Requirement document: https://goo.gl/bvo4FG - -Data model document: https://goo.gl/zWYnoq - -Based on the blueprint and requirement documents listed above, this spec -defines the REST APIs needed to support the vendor registration process. - - -Problem description -=================== - -As RefStack implements the vendor registration process, additional REST APIs -are needed for management of the newly added entities such as product and -vendor. Two categories of REST APIs will be created, one for vendor management -(CRUD) and the other for product management. This spec will focus on the vendor -management API. - -The new APIs will be added to RefStack v1 API. - - -Proposed change -=============== - -Add new REST APIs to support the following: - -* Create a vendor - - Any RefStack authenticated user can create a vendor. By default, the vendor - is of type "private" vendor when it is created. - -* Delete a vendor - - Only foundation admins can delete official vendors. In addition, vendor - admin users can delete own private/pending vendor. - -* Update a vendor record - - Foundation admins or admins in this vendor can make update to the vendor - records. - -* List vendor - - All RefStack users can list (view) official vendor records with limited - details. Foundation admins can retrieve full detail records of all vendors. - - -Alternatives ------------- - -Direct access to the database to retrieve test records. Open to suggestions. - -Data model impact ------------------ - -None - -REST API impact ---------------- - -The following REST APIs will be added to RefStack. - -**List vendors** - -* Description: - - This API will be used to list the vendors in RefStack. By default, the - response will include all vendor records that the user has privilege to - retrieve. This list will be sorted by names in alphabetical ascending - order. At the time of this writing, the number of vendor will be registered - in RefStack is expected to be small. Therefore, no result-limiting features - such as pagination or filtering is implemented. More sophisticated filter, - sorting and pagination features may be added in the future. - - **Note:** A "list vendors with detail" REST API will also be added later. - Foundation and vendor admins can use this API to obtain more detail private - vendor information such as vendor record created date, created user, etc. - -* Method type: GET - -* URI: v1/vendors/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - -* Request parameters: N/A - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - This response may include all official (public) vendors, the private and - pending vendors that the requester has privilege to retrieve. - - .. 
parsed-literal:: - { - "vendors": [ - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Vendor ABC", - "description" : "My description", - "type" : 3 - }, - { - "id" : "78346866-307f-4052-ba31-ff6270635e19", - "name" : "Vendor EFG", - "description" : "My description", - "type" : 1 - }, - ...... - ] - } - - **Note:** The values of the "type" filed are a set of pre-defined constants - (enum) depicting the type of vendors. The constant definition can be found - in https://opendev.org/openinfra/refstack/src/master/refstack/api/constants.py . - -**Show vendor details** - -* Description: This API will be used to retrieve the detail information of a - particular vendor. -* Method type: GET -* URI: v1/vendors/{vendor_id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID to retrieve data. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - The response data will be filtered depending on whether the requester is a - foundation admin or an admin user of this vendor. - - * Response for non-foundation or vendor admins: - - .. parsed-literal:: - { - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Vendor ABC", - "description" : "My description", - "type" : 3 - } - } - - * Response for foundation or vendor admin users: - - .. parsed-literal:: - { - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Vendor ABC", - "description" : "My description", - "type" : 3, - "created_at": "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at": "2016-02-02 08:42:25", - "properties" : "some text" - } - } - -**Create vendor** - -* Description: - - This API will be used to create a vendor in RefStack. By default the vendor - will be created as a private vendor. - -* Method type: POST - -* URI: v1/vendors/ - -* Normal Response Codes: - - * Created (201) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - -* Request parameters: N/A - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "name" : "ABC", - "description" : "My description", - "required": ["name"] - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - "id" : "95346866-307f-4052-ba31-ff6270635e14" - } - -**Update vendor** - -* Description: - - This API will be used to update the fields of a vendor in RefStack. Only - foundation admins or admin users of this vendor can perform update on a - vendor record. - -* Method type: PUT - -* URI: v1/vendors/{vendor_id} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID for update. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. 
parsed-literal:: - { - { - "name" : "Vendor ABC", - "description" : "My description", - "properties" : "some text", - "required": [] - } - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - { - "id" : "95346866-307f-4052-ba31-ff6270635e14", - "name" : "Vendor ABC", - "description" : "My description", - "type" : 3, - "created_at" : "2016-02-01 08:42:25", - "created_by_user": "john@abc.com", - "updated_at" : "2016-02-02 08:42:25", - "properties" : "some text" - } - } - - -**Vendor action API** - - The action API is used to perform an action on the vendor object. The action - is defined in the request body. - - -**Register as an official vendor** - -* Description: - - This API will be used by the vendor admins to register a private vendor for - foundation approval to become an official vendor. - -* Method type: POST - -* URI: v1/vendors/{vendor_id}/action - -* Normal Response Codes: - - * OK (202) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID for update. | - +---------------+-------+--------------+-----------------------------------+ - | register | plain | xsd:string | Action to request registering a | - | | | | private vendor to become an | - | | | | official vendor. vendor "type" | - | | | | will change from "private" to | - | | | | "pending" | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "register" : null - } - -* JSON schema definition for the response data: N/A - - -**Cancel submitted register request** - -* Description: - - This API will be used by the vendor admins to cancel previously submitted register - requests. - -* Method type: POST - -* URI: v1/vendors/{vendor_id}/action - -* Normal Response Codes: - - * OK (202) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID for update. | - +---------------+-------+--------------+-----------------------------------+ - | cancel | plain | xsd:string | Action to request canceling a | - | | | | previously submitted register | - | | | | request. | - | | | | Vendor "type" will change from | - | | | | "pending" to "private". | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "cancel" : null - } - -* JSON schema definition for the response data: N/A - - -**Approve to become an official vendor** - -* Description: - - This API will be used by the foundation admins to apporove a vendor of type - "pending" to become an official vendor. 
- -* Method type: POST - -* URI: v1/vendors/{vendor_id}/action - -* Normal Response Codes: - - * OK (202) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID for update. | - +---------------+-------+--------------+-----------------------------------+ - | approve | plain | xsd:string | Action to approve a vendor of type| - | | | | "pending" to "official" | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "approve" : null - } - -* JSON schema definition for the response data: N/A - -**Deny to become an official vendor** - -* Description: - - This API will be used by the foundation admins to deny a vendor of type - "pending" to become an official vendor. - -* Method type: POST - -* URI: v1/vendors/{vendor_id}/action - -* Normal Response Codes: - - * OK (202) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID for update. | - +---------------+-------+--------------+-----------------------------------+ - | deny | plain | xsd:string | Action to deny a vendor of type | - | | | | "pending" to "official". Vendor | - | | | | type will change from "pending" to| - | | | | "private". | - +---------------+-------+--------------+-----------------------------------+ - | reason | plain | xsd:string | Reason for denial. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - "deny" : null - "reason" : "My reason for denial" - } - -* JSON schema definition for the response data: N/A - - -**Delete vendor** - -* Description: - - This API will be used to delete a vendor in RefStack. Only foundation admins - can delete an official (public) vendor. Foundation admins and admin users of - this vendor can delete a private or pending vendor. - -* Method type: DELETE - -* URI: v1/vendors/{vendor_id} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | vendor_id | URI | csapi:UUID | Vendor ID to be removed. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. 
- -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Andrey Pavlov - -Other contributors: - TBD - -Work Items ----------- - -* Create the REST APIs. - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/ocata/implemented/associate-test-result-to-product.rst b/specs/ocata/implemented/associate-test-result-to-product.rst deleted file mode 100755 index 211a59fd..00000000 --- a/specs/ocata/implemented/associate-test-result-to-product.rst +++ /dev/null @@ -1,353 +0,0 @@ -============================================= -Test Results to Vendor Products Association -============================================= - -Launchpad blueprint: - -* https://blueprints.launchpad.net/refstack/+spec/result-listing-page -* https://blueprints.launchpad.net/refstack/+spec/marketplace-product-model - -This specification defines the processes and mechanisms to associate test -results to vendor products. - -Problem description -=================== - -So far, community test results are shared to the public anonymously, with no -linkage to users, vendors or products. DefCore, OpenStack Foundation (OSF) -and community user feedback revealed strong interest in enabling RefStack to -show the results with linkage to vendors, products and test statuses. This is -especially important for those test results that were used for official -OpenStack Powered Logo applications. - -With the recent implementation of vendor and product registration process, it -is now possible to associate a particular test result record to a vendor's -product of a particular version. - -**Note** - -Test results are not associated directly to vendors. Test results are -associated to products which belong to vendors. A product may have many -versions. - -RefStack interprets OSF/DefCore requirements as following: - -* Vocabulary: - - * product version: a product version is a version of a product. - * product vendor: a product vendor is a vendor who owns the product. - -* Model: - - * A product version can be used to deploy many clouds. - * A product version can have many tests. - * Test results will be associated to product versions (not products). - -* Use cases: - - * Only the user who uploads the result and is also an admin of the product - vendor or an interop admin can associate a test result to a product - version. - * Once a test is associated to a product version, only the admins of the - product vendor or interop admins can perform CRUD operations on the test - result. - - -Proposed change -=============== - -* Add a new columns to the "test" table in the database: - - * **product_version_id**: this field stores the product version ID that the - test run is associated to. - -* Associate a test result to a product - - * The association must be initiated by a user who creates the test result - record and is an admin of the vendor which owns the product. The - association is done by updating the product_version_id field of the test - result with the ID of the product version from the "version" table. - - * Once a test result is associated to a product version, the test result can - not be deleted until it is disassociate from a product. 
- - * Once a test result is associated to a product version, only interop - (i.e. RefStack site) admins or vendor admins can manage the test result - (e.g. making decision of sharing the test result record). - - * A product version can't be deleted if there are tests associated to it. - - **Note** - - Currently, RefStack has not implemented "user role" to differentiate the - roles of users in a "user group". As such, at this time, all users in a - "user group" are admin users. - -Alternatives ------------- - -An alternative method to associate a test record to a product is by matching -values of the "cpid" field (in the "version" table) and the "cpid" field (in -the "test" table). The major concern and shortcomings of this option are as -follows: - -* The "cpid" field is a user input parameter, therefore it is not guarantee to - be unique. - -* The cpid fields may be identical for test results run by different users - against the same public cloud instance. - -* A product may have many cloud instances which are identified by different - cpids. - - -Data model impact ------------------ - -Add a new column named "product_version_id" to the existing "test" table. -This field can be null. - -+------------------------+-------------+----------+ -| Column | Type | | -+========================+=============+==========+ -| product_version_id | varchar(36) | FK | -+------------------------+-------------+----------+ - -Note: The user input product_version_id must exist in the "version" table. - -REST API impact ---------------- - -The following REST APIs will be modified. - -**Update result** - -* Description: - - This API will be used to make update to a test entry of the "test" table. - To begin with, only the owner user who uploaded the test result, can make - update to the product_version_id filed. Once a test is associated to a - product version, only interop or vendor admins can make updates to a test - result. - -* Method type: PUT - -* URI: v1/results/{result_id}/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Forbidden (403) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | result_id | URI | csapi:UUID | Test result ID for marking. | - +---------------+-------+--------------+-----------------------------------+ - - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - { - "verification_status" : 1, - "product_version_id": "85346866-307f-4052-ba31-ff6270635e14", - "required": [] - } - } - - **Note** - - * Although the verification_status column is listed here for completeness of - the API body schema, this field can only be updated by interop admin as - described in spec https://review.opendev.org/#/c/343954/ . - * Update request including the "verification_status" field will return - "Forbidden(403)" if the requester is not an interop admin. 
- -* JSON schema definition for the response data: N/A - -**List results** - -* Description: (No update) - -* Method type: GET (No update) - -* URI: v1/results/ (No update) - -* Normal Response Codes: (No update) - -* Error Response Codes: (No update) - -* Request parameters: (No update) - - Add the following parameter to the existing ones: - - +---------------------+-------+-------------+---------------------------------+ - | Parameter | Style | Type | Description | - +=====================+=======+=============+=================================+ - | product_version_id | query | xsd:string | Only return the test records | - | (optional) | | | belonging to this | - | | | | product_version_id. | - +---------------------+-------+-------------+---------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - Update to add product_version_id to the response body. - - .. parsed-literal:: - { - pagination: { - current_page: 6, - total_pages: 37 - }, - results: [ - { - url: "https://refstack.openstack.org/#/results/7943e04a-2b95-453c-b627-8a24b2c6faa0", - created_at: "2016-07-25 02:24:34", - meta: { }, - id: "7943e04a-2b95-453c-b627-8a24b2c6faa0", - duration_seconds: 0, - verification_status : 0, - product_version_id: "" - }, - { - url: "https://refstack.openstack.org/#/results/91ae10c5-ecf5-4823-81d4-09836dc212cf", - created_at: "2016-07-13 18:37:53", - meta: { - shared: ""true"", - target: "compute", - guideline: "2016.01.json" - }, - id: "91ae10c5-ecf5-4823-81d4-09836dc212cf", - duration_seconds: 6037, - verification_status : 1, - product_version_id: "68668534-307f-4052-ba31-ff6270635e14" - }, - ........ - ] - } - -**Show result details** - -* Description: (No update) - -* Method type: GET (No update) - -* URI: v1/results/{result_id} (No update) - -* Normal Response Codes: (No update) - -* Error Response Codes: (No update) - -* Request parameters: (No update) - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - Update to add product_version_id to the response body. - - .. parsed-literal:: - { - user_role: "user", - created_at: "2016-07-13 18:37:53", - meta: { - shared: ""true"", - target: "compute", - guideline: "2016.01.json" - }, - id: "91ae10c5-ecf5-4823-81d4-09836dc212cf", - duration_seconds: 6037, - verification_status : 1, - product_version_id; "68668534-307f-4052-ba31-ff6270635e14", - results: [ - "tempest.api.compute.certificates.test_certificates.CertificatesV2TestJSON.test_create_root_certificate", - "tempest.api.compute.certificates.test_certificates.CertificatesV2TestJSON.test_get_root_certificate", - ...... - ] - } - - -Security impact ---------------- - -None. - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Paul Van Eck - Andrey Pavlov - -Other contributors: - TBD - -Work Items ----------- - -* Add the defined additional field to the "test" table. -* Develop business and UI code to enable association of a test result to a - product. - - -Dependencies -============ - -None - -Testing -======= - -* Add unit tests to test the newly added code. 
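To make the association flow above concrete, here is a minimal sketch of how a result owner (or interop admin) might set ``product_version_id`` through the update API. The base URL and session handling are assumptions for illustration; they are not defined by this spec.

.. code-block:: python

    import requests

    REFSTACK_API = "https://refstack.openstack.org/api/v1"  # assumed base URL


    def associate_result(session, result_id, product_version_id):
        """Link an uploaded test result to a product version via the update API."""
        body = {"product_version_id": product_version_id}
        resp = session.put(f"{REFSTACK_API}/results/{result_id}", json=body)
        # Anyone other than the result owner, a vendor admin of the product,
        # or an interop admin should receive a 403 (Forbidden) here.
        resp.raise_for_status()


    if __name__ == "__main__":
        s = requests.Session()  # authentication setup omitted
        associate_result(s, "91ae10c5-ecf5-4823-81d4-09836dc212cf",
                         "68668534-307f-4052-ba31-ff6270635e14")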
- - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/ocata/implemented/mark-test-result-as-verified.rst b/specs/ocata/implemented/mark-test-result-as-verified.rst deleted file mode 100755 index 7a488427..00000000 --- a/specs/ocata/implemented/mark-test-result-as-verified.rst +++ /dev/null @@ -1,314 +0,0 @@ -==================================== -Mark Test Result as Verified -==================================== - -Launchpad blueprint: - -* https://blueprints.launchpad.net/refstack/+spec/certification-test-record - -This specification defines the processes and mechanisms to mark a test result -as "verified for OpenStack Powered Logo application". - - -Problem description -=================== - -Currently, there is no mechanism to identify test results that are used by -the OpenStack Foundation (OSF) for OpenStack Powered Logo application. At the -present, the test result URL links are sent to the OSF by the vendors per the -instructions described in the "How to Run the Tests" section of the -http://www.openstack.org/brand/interop/ site. - -RefStack should provide a mean for OSF admins to mark a test result as -"verified for OpenStack Powered Logo application". - -RefStack interprets OSF/DefCore requirements as following: - -* The marked test results should be easily identified. -* The marked test results can not be deleted or updated. -* Only OSF admins can mark/umark a test. - -Proposed change -=============== - -* Add a new field named "verification_status" to the "test" table. Detailed - information about this field can be found in the "Data model impact" section. - -* Marking a test result record - - * Only interop admins can make updates to the verification_status field. For - the https://refstack.openstack.org/#/ website, interop admins will be - someone from the OSF. - - * Only a test result which has been shared and associated to a Guideline and - Target Program can be marked as verified. - - * Once a test result is marked as verified, only interop admins can unmark - the test. - - * Test results that are marked as verified cannot be deleted or updated. - - * A new API will be added to manage the verification_status field. Detailed - information about the API can be found in the "REST API impact" section. - -Alternatives ------------- - -Alternatively, we can use the existing test "meta" table to mark a test result. -This can be done by adding a key-value pair to the "meta" table, with key name -as "verification_status". Following are the reasons why this implementation is -not chosen. - -* Filtering/searching for marked data may not be as efficient. -* Marked tests can not be identified from the "test" table. - - -Data model impact ------------------ - -Add a new column named "verification_status" to the existing "test" table. - -+------------------------+-------------+----------+ -| Column | Type | | -+========================+=============+==========+ -| verification_status | int(11) | | -+------------------------+-------------+----------+ - -The verification_status column will store the pre-defined constants (enum) with -descriptive names as follows: - -* 0 = not verified -* 1 = verified - - -REST API impact ---------------- - -The "Update result" REST API will be added to RefStack. The "List results" -and "Show result details" REST APIs will be modified. - -**Update result** - -* Description: - - This API will be used to make updates to a test entry of the "test" table. 
- Although the test owner, interop or vendor admins should be able to make - updates to a rest record, extra checking must be implemented to ensure that - only interop admins can mark or unmark a test. - - -* Method type: PUT - -* URI: v1/results/{result_id}/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not found (404) - * Forbidden (403) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | result_id | URI | csapi:UUID | Test result ID for marking. | - +---------------+-------+--------------+-----------------------------------+ - - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - { - "verification_status" : 1, - "required": [] - } - } - - **Note** - - * The verification_status column will store a set of pre-defined constants - (enum) with 0 = not verified, 1 = verified, etc. - * Request to update the "verification_status" field will return - "Forbidden(403)" if the requester is not an interop admin. - -* JSON schema definition for the response data: N/A - -**List results** - -* Description: (No update) - -* Method type: GET (No update) - -* URI: v1/results/ (No update) - -* Normal Response Codes: (No update) - -* Error Response Codes: (No update) - -* Request parameters: - - Add the following parameter to the existing ones: - - +---------------------+-------+----------+---------------------------------+ - | Parameter | Style | Type | Description | - +=====================+=======+==========+=================================+ - | verification_status | query | xsd:int | Pre-defined constants. | - | (optional) | | | Not verified = 0 | - | | | | Verified = 1 | - +---------------------+-------+----------+---------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - Update to add verification_status to the response body. - - .. parsed-literal:: - { - pagination: { - current_page: 6, - total_pages: 37 - }, - results: [ - { - url: "https://refstack.openstack.org/#/results/7943e04a-2b95-453c-b627-8a24b2c6faa0", - created_at: "2016-07-25 02:24:34", - meta: { }, - id: "7943e04a-2b95-453c-b627-8a24b2c6faa0", - duration_seconds: 0, - verification_status : 0 - }, - { - url: "https://refstack.openstack.org/#/results/91ae10c5-ecf5-4823-81d4-09836dc212cf", - created_at: "2016-07-13 18:37:53", - meta: { - shared: ""true"", - target: "compute", - guideline: "2016.01.json" - }, - id: "91ae10c5-ecf5-4823-81d4-09836dc212cf", - duration_seconds: 6037, - verification_status : 1 - }, - ........ - ] - } - -**Show result details** - -* Description: (No update) - -* Method type: GET (No update) - -* URI: v1/results/{result_id} (No update) - -* Normal Response Codes: (No update) - -* Error Response Codes: (No update) - -* Request parameters: (No update) - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - Update to add verification_status to the response body. - - .. 
parsed-literal:: - { - user_role: "user", - created_at: "2016-07-13 18:37:53", - meta: { - shared: ""true"", - target: "compute", - guideline: "2016.01.json" - }, - id: "91ae10c5-ecf5-4823-81d4-09836dc212cf", - duration_seconds: 6037, - verification_status : 1, - results: [ - "tempest.api.compute.certificates.test_certificates.CertificatesV2TestJSON.test_create_root_certificate", - "tempest.api.compute.certificates.test_certificates.CertificatesV2TestJSON.test_get_root_certificate", - ...... - ] - } - -Security impact --------------- - -None. - -Notifications impact -------------------- - -None. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------ - -None - -Other deployer impact --------------------- - -None - -Developer impact ---------------- - -None - -Implementation ============== - -Assignee(s) ----------- - -Primary assignee: - Paul Van Eck - Andrey Pavlov - -Other contributors: - - -Work Items ---------- - -* Add the defined additional field to the "test" table. -* Develop business and UI code to enable marking a test result. - - -Dependencies ============ - -None - - -Testing ======= - -* Add unit tests to test the newly added code. - - -Documentation Impact ==================== - -None - - -References ========== - -None diff --git a/specs/pike/approved/add-refstack-docs.rst b/specs/pike/approved/add-refstack-docs.rst deleted file mode 100644 index f92482c2..00000000 --- a/specs/pike/approved/add-refstack-docs.rst +++ /dev/null @@ -1,129 +0,0 @@ -====================================================== -Displaying RefStack Documentation Directly on Website -====================================================== - -Launchpad blueprint: - -* https://blueprints.launchpad.net/refstack/+spec/user-documentation - -This specification defines the changes to the "About" page of the RefStack -website that are necessary in order to allow RefStack documentation to be -displayed natively on the RefStack site. - -Problem description =================== - -To make RefStack information more accessible to users, RefStack documentation -should be displayed in a format more closely matching that of the rest of -the RefStack site. Currently, documentation is maintained as RST files in the -‘doc’ folder of the RefStack repository, but with this change, users will also -be able to view them as HTML files via the RefStack site. - - -Proposed change =============== - -As mentioned above, it would be ideal to be able to access RefStack -documentation in HTML format. The current plan is to use docutils in -combination with sphinx in order to create HTML templates which will then -be integrated into the existing RefStack website. - -Another goal of this documentation update will be to separate a duplicate set -of docs intended for users from the rest of the docs, in order to ensure that -they will be more easily accessed by end users. These docs will be displayed -on the RefStack website. A second set of docs, the RefStack Project docs, -will be hosted at the OpenStack docs website. These will be the same docs -that are published in the RefStack repo in RST format.
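As a rough sketch of the rendering step, the snippet below shows one way docutils alone could turn an RST source file into an HTML body fragment for embedding in the RefStack "About" page. The function name and file path are hypothetical, and the real implementation may instead rely on a full Sphinx build as noted below.

.. code-block:: python

    from docutils.core import publish_parts


    def rst_to_html_fragment(rst_path):
        """Render an RST file to an HTML body fragment for embedding in a page."""
        with open(rst_path) as f:
            source = f.read()
        parts = publish_parts(source=source, writer_name='html')
        # 'html_body' holds only the rendered body, which can be dropped into
        # an existing RefStack page template.
        return parts['html_body']


    if __name__ == "__main__":
        # Hypothetical path; the actual doc layout may differ.
        print(rst_to_html_fragment("doc/source/uploading_results.rst"))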
- - -Possible libraries to use: - -sphinx - -docutils - -Alternatives ------------- - -Data model impact ------------------ - -None - -REST API impact ---------------- - -None - -Security impact ---------------- - -None - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Paul Van Eck - -Other contributors: - Luz Cazares - -Work Items ----------- - -None - -Dependencies -============ - -None - -Testing -======= - -None - -Documentation Impact -==================== - -User specific documents will now be available on the RefStack website in -simple HTML format. It will be listed under the "About" section on the main -menu bar. This will be a change from the current state in that users will now -be able to view documentation concerning running tests and uploading results -in a format which is similar to the rest of the RefStack website. - -RefStack documentation will now also be available on the main OpenStack docs -site. These docs will use the same source as those hosted on the RefStack site. - -References -========== - diff --git a/specs/pike/approved/upload-subunit-tests.rst b/specs/pike/approved/upload-subunit-tests.rst deleted file mode 100644 index 84b71175..00000000 --- a/specs/pike/approved/upload-subunit-tests.rst +++ /dev/null @@ -1,315 +0,0 @@ -============================================= -Upload Subunit Data From Test Results -============================================= - -Launchpad blueprint: - -* https://blueprints.launchpad.net/refstack/+spec/subunit-data-upload - -This specification describes an expansion of the RefStack API's -functionality to allow for upload of the subunit data which corresponds -to a given set of test results. - -Problem description -=================== - -Currently, all test results uploaded to the RefStack website consists -of a json file containing only the portion of the RefStack run pertaining -to the passed tests. This limitation dates back to the the start of the -RefStack project. At that time, Defcore (which is now known as interop-WG) -was very concerned about the possibility that private data may be included -in the subunit upload file. Defcore was concerned that vendors might, for -that reason, be hesitant to upload data into RefStack for fear of -unintentionally revealing vendor-specific data such as reasons for test -failures. For this reason, Defcore agreed unanimously that RefStack should -care only about passing tests, and not failed or skipped ones. - -The risk, with this resolution, however, is that not including a full set of -results means that it would be fairly simple to falsify those results in -order to make an OpenStack instance appear to be more interoperable than -it actually is. This too, was discussed at the time, and Defcore eventually -arrived at the conclusion that, in the end, it would be better to accept -vendor results in good faith, but to always leave the door open for users -and Foundation staff to verify those results independently. This decision -did not, however, account for the possibility that vendors seeking support -during the process of verifying the interoperability of their product may -need a way to securely share subunit data for review by Foundation staff. 
- -Proposed change =============== - -In order to move towards having a more reliable and verifiable collection -of RefStack results, we intend to add functionality to the RefStack -toolkit that will allow for the upload of the subunit data related to a given -set of test results. This data would be private, only accessible to the party -uploading it, and to foundation staff, to be used for result integrity -verification as well as debugging assistance. Upload of subunit data will not, -for the time being, be required. - -After discussing a number of data storage methods at the 7/18/17 RefStack -meeting[3], we settled upon saving the newly usable subunit data in the -current database. With a small adjustment to our alembic settings (which -will keep the version tables from colliding), this could be done using the -existing subunit2sql toolkit[4]. In order to apply the table name change, -we will build in a series of functions that check refstack.conf and rename -the existing alembic version table if needed. This added functionality, -when merged and functional, will make RefStack one of only two OpenStack -projects (according to the oslo.config docs[7]) that are currently capable of -modifying configuration at runtime without a service restart. The usage of -subunit2sql will do a lot of the heavy lifting for us, as far as data import -goes, as well as keeping the storage method of test data consistent across -the board. - -For the time being, we plan to link the subunit data to the -corresponding test results via a key-value pair in the metadata table that -is an existing part of the RefStack database. - -Toolset to use: - -subunit2sql - -Alternatives ------------- - -Though we did eventually decide upon storing the new data in the existing -RefStack database, a few alternate options were discussed during the 7/18/17 -RefStack meeting[3]. The alternate options discussed were: -* Save subunit files as-is in a file system. This has the benefit of being the - least processing-intensive option for saving the data, as it would literally - just save the output into a file. It may, however, make subunit data upload - a bit less elegant, as well as being a deviation from the way test run data - is managed throughout the rest of RefStack. -* Save subunit data in the RefStack database and tables by building in the - functionality required to save and manage it. Like the option listed above, - this option keeps test run data stored consistently across refstack, which - would make the changes to the API more consistent as well. It would also - avoid the overhead that would result from using a separate database, as well - as any redundancies that resulted from using a second, separate database. - However, any redundancy would be fairly minor due to the extremely limited - scope of the data we are currently storing from each test run, and this would - leave more of the implementation up to us, which, because of how well - subunit2sql's schema fulfills the needs of this change, may be wholly - unnecessary. -* Save subunit data in a separate database created by subunit2sql. This has the - benefit of having all of the functionality we need without forcing us to - reinvent the wheel, but it also carries with it the overhead of having to use - a second database.
This option doesn't make much sense, however, given that - we can actually use subunit2sql's toolkit in the current refstack database, - as long as we can configure the database to use an extra (differently named) - alembic version table for refstack's core db. - -Data model impact ------------------ - -We may be able to use the tables created by subunit2sql within the RefStack -database. These tables (for reference) are mapped out below::: - - -------------------------------------- - | tests | - -------------------------------------- - | id | String(256) | - | test_id | String(256) | - | run_count | Integer | - | failure | Integer | - | run_time | Float | - -------------------------------------- - - ---------------------------------------- - | runs | - ---------------------------------------- - | id | BigInteger | - | skips | Integer | - | fails | Integer | - | passes | Integer | - | run_time | Float | - | artifacts | Text | - | run_at | DateTime | - ---------------------------------------- - - --------------------------------------------------- - | test_runs | - --------------------------------------------------- - | id | BigInteger | - | test_id | BigInteger | - | run_id | BigInteger | - | status | String(256) | - | start_time | DateTime | - | start_time_microseconds | Integer | - | stop_time | DateTime | - | stop_time_microseconds | Integer | - | test | Test | - | run | Run | - --------------------------------------------------- - - ------------------------------------------- - | run_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | run_id | BigInteger | - | run | Run | - ------------------------------------------- - - ------------------------------------------- - | test_run_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | test_run_id | BigInteger | - | test_run | TestRun | - ------------------------------------------- - - ------------------------------------------- - | test_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | test_id | BigInteger | - | test | Test | - ------------------------------------------- - - ------------------------------------------- - | attachments | - ------------------------------------------- - | id | BigInteger | - | test_run_id | BigInteger | - | label | String(255) | - | attachment | LargeBinary | - | test_run | TestRun | - ------------------------------------------- - -more details about this data model can be found in the source docs for -subunit2sql[5] - -If we end up being unable to integrate the two databases into one at this time, -we plan to use the metadata table which already exists in the RefStack internal -db to store a key pair that links the existing test data to the newly added -subunit data. - -REST API impact ---------------- - -We will need to implement a new REST API for the upload of subunit data -from the client, and then use subunit2sql to process and save the data -into the database. - - -Security impact ---------------- -It has been suggested that uploading the subunit data for tests may expose -private data. However, it was determined in the 6/27/2017 RefStack meeting[1] -that if any such data is revealed through this upload, it would be due to a -leak in tempest's logging procedures, not the upload of this new type of data. 
-
-This was also discussed at the 6/28/17 Interop-wg meeting[2]. It was at this
-meeting that it was confirmed that we would implement this change using an
-opt-in flag, so that those who are still concerned about the security of
-uploading the results do not, by default, have to upload their data. It was
-also determined that, because this design reflects a fairly significant
-reversal of a past decision, the community should be properly notified. This
-decision also resulted in the following action plan:
-
-1. Write an email to distribute to the mailing list.
-2. Send out the official decision after the email is distributed.
-3. Change the official interop docs to reflect this change.
-
-Another concern was that a database injection attack may be possible if an
-attacker were to upload maliciously crafted subunit data. This threat also
-does not appear to pose much danger, as the vast majority of the data
-written to the database is written only after the subunit data is processed,
-meaning that there are very few places in which raw strings are written into
-the db. We still need to look into whether our SQL layer does enough input
-sanitization for our needs.
-
-Notifications impact
---------------------
-
-None
-
-Other end user impact
----------------------
-
-None
-
-Performance impact
-------------------
-
-None
-
-Other deployer impact
----------------------
-
-We will also need to adjust refstack-client to be able to consume the new API
-feature while uploading subunit data.
-
-One of the most user-visible parts of this change would be the creation of a
-flag option which enables the upload of the subunit data to the RefStack site;
-the existing upload procedure would be modified so that it can carry out this
-additional data upload.
-
-We would also need to add a second flag to the database sync functionality in
-order to allow for the alternate naming of the alembic version table, which
-enables us to use both subunit2sql and refstack tables and functionality
-within the same database.
-
-Developer impact
-----------------
-
-None
-
-Implementation
-==============
-
-Assignee(s)
------------
-
-Primary assignee:
-  Megan Guiney
-
-Other contributors:
-  Paul Van Eck (subunit data upload UI in refstack-client)
-
-Work Items
-----------
-
-* Add a CONF option to allow for the usage of nonstandard alembic
-  version table names.
-* Add a utility that allows for the runtime checking and alteration
-  of alembic version table names.
-* Create an API on the server side to accept the subunit data.
-* On the server side, use subunit2sql to process the subunit data.
-* Link subunit data to the existing set of RefStack results.
-* Create a UI to upload subunit data (completed as of 1/20/2016[6],
-  though it may require an update).
-* Create a UI to display subunit data. There may already be one, but
-  we need to make sure such a utility exists. We also need to decide
-  whether the results should be viewable via the RefStack website.
-
-Dependencies
-============
-
-Testing
-=======
-
-Documentation Impact
-====================
-
-We will need to update the docs to reflect the additions to the API, the
-database, and to refstack-client as well.
-
-References
-==========
-
-[1] http://eavesdrop.openstack.org/meetings/refstack/2017/refstack.
-    2017-06-27-19.00.log.html
-[2] http://eavesdrop.openstack.org/meetings/interopwg/2017/interopwg.
-    2017-06-28-16.00.log.html
-[3] http://eavesdrop.openstack.org/meetings/refstack/2017/refstack.
- 2017-07-18-19.00.log.html -[4] https://opendev.org/opendev/subunit2sql -[5] https://docs.openstack.org/subunit2sql/latest/data_model.html -[6] https://review.opendev.org/265394/ -[7] https://docs.openstack.org/oslo.config/latest/configuration/ - mutable.html diff --git a/specs/prior/approved/ability_to_upload_a_complete_tempest_config.rst b/specs/prior/approved/ability_to_upload_a_complete_tempest_config.rst deleted file mode 100755 index e7fc26d7..00000000 --- a/specs/prior/approved/ability_to_upload_a_complete_tempest_config.rst +++ /dev/null @@ -1,68 +0,0 @@ -=========================================== -Ability to upload a complete tempest config -=========================================== - -storyboard: https://storyboard.openstack.org/#!/story/105 - -**Problem description** - -It makes sense that if an admin already has a working and tested tempest config, -they should be able to use it with the refstack tester. - -**Proposed change** - -Allowing the user to use a custom tempest config would require changes to the -tester cli as well as the web interface. We can safely break the code commits -for this into two tasks. - -* The CLI would require an extra argument for a path to a config file. As well -as some logic that bypassed the internal config generation. - -**Alternatives** - -None off hand. - -**Data model impact** - -None. - -**REST API impact** - -None - -**Performance Impact** - -This should speed up the tester because now it will not have to -generate/discover config or prepare the cloud to match config options. - -**Developer impact** - -n/a - -**Implementation:** - -**Assignee(s)** - -Primary assignee: - dlenwell - -**Work Items** - -* Implement CLI code - -**Dependencies** - -N/A - -**Testing** - -N/A - -**Documentation Impact** - -Cli changes should be noted in the --help output as well as written into any documentation for the tester. - -**References** - -N/A - diff --git a/specs/prior/approved/refstack-org-defcore-reports.rst b/specs/prior/approved/refstack-org-defcore-reports.rst deleted file mode 100644 index 05975278..00000000 --- a/specs/prior/approved/refstack-org-defcore-reports.rst +++ /dev/null @@ -1,116 +0,0 @@ -Required reports and outputs for defcore -========================================== -Blueprint: https://blueprints.launchpad.net/refstack/+spec/define-tools-for-defcore -Storyboard: https://storyboard.openstack.org/#!/story/109 - -Refstack data is used by the DefCore committee to identify capabilities and -tests to include in the OpenStack core. To facilitate this process, refstack -should produce and present / display this information to the committee -in a meaningful way. - -We anticipate that these reports will also be interesting to the -broader OpenStack community to select popular capabilities; consequently, -the results should not be focused strictly on DefCore as the consumer. - -A detailed description of the problem: - -The DefCore report needs to show both core and non-core tests. While the underlying data -is the same (% of population reporting pass), we must clearly show a difference between -core and non-core tests. We must _not_ make non-core and non-pass results appear negative. -It is very important that data be presented without adding negative bias. - -Since the target is interoperability, the community will expect the reports to highlight -core tests that are below a specific threshold (assumed 50% for now) as at risk and -non-core tests that are above a specific threshold (assumed 80% for now) as likely candidates -for core tests. 
The report should highlight both types of tests clearly - -Note: The reports must work well for the color blind reader. - -Design Approach: - -The report will be grouped into capabilities with the % pass for each test within the -capability shown as a pie chart. By stacking the pies close together for each capability, -the report becomes an effective "stacked bar" without losing the per test detail. - -Core tests should have their passing rate shown in green. If the rate is <50% then a thicker -red border should be added. - -Non-Core tests should have their passing rate shown in black. If the rate is >80% then a thicker -green border should be added. - -.. image:: https://wiki.openstack.org/w/images/f/f2/Refstack_mock_defcore.png - :width: 700px - :alt: DefCore Report Sample - -The intention of this design is to make it very easy to find outliers in the overall result. -A quick scan of the results should easily find tests or groups of tests that have high or low -compliance in the community. The report is designed to focus on pass rates rather than -on fail/skip rates. - -The data driving the UI should also be available as JSON data using an API on the same URL path. -It may be possible to implement the UI client side using the API without server rendering. -This implementation choice is left to the developer. - -Alternatives - -It would be possible to supply only JSON data via the API; however, this will not -help the community evaluate interoperability and is less desirable. - -Data model impact - -Likely none; however, depending on the complexity of the queries, -it may be necessary to create intermediate tables to collect the results. - -If new models are needed, this spec should be updated with the design. -At this time, we assume that the collection does not require an -intermediate model. - -Specification for the method - -These are read-only reports and should use GETs. - -The URL path should match the other UI paths with then following pattern: - -HTML response: GET /[refstack base]/defcore/[release] -JSON response: GET /[refstack base]/defcore/[release].json - -Security impact - -Since these these reports are based on public data and interoperability is a community -need, the reports should be non-restricted in the UI. They should not require any login -to view. - -Notifications impact: - -None. - -Other end user impact: - -Added navigation on UI. - -Developer impact - -None. - -Assignee(s) - -TBD - -Work Items - -* Spec * Mock -* CSS & HTML Frame -* Data Collection -* Connect Data into UI Page - -Dependencies - -Sparklines JS libraries: http://omnipotent.net/jquery.sparkline/#s-about - -Documentation Impact - -Need to document screen and meaning of fields. - -References - -http://wiki.openstack.org/wiki/Governance/DefCoreCommittee \ No newline at end of file diff --git a/specs/prior/approved/refstack-org-gearman-tester.rst b/specs/prior/approved/refstack-org-gearman-tester.rst deleted file mode 100644 index 122eadfc..00000000 --- a/specs/prior/approved/refstack-org-gearman-tester.rst +++ /dev/null @@ -1,185 +0,0 @@ -Gearman based test queue for refstack.org -========================================== - -launchpad blueprint: -https://blueprints.launchpad.net/refstack/+spec/refstack-org-gearman-tester - -Set up gearman worker/client for triggering official test runs from refstack.org - -* build gearman client / job monitor - -* stand alone worker script that does not require that refstack is installed. 
- -* Test status reporting API call - -* package installer for test runner with dependency and version coverage. - - -Problem description -=================== - -In an effort to make this hostable long term and scalable, we need a way to manage a queue of tests that run on a distributed infrastructure. For that I like gearman. - -This covers the Public cloud vendor official testing use case. - -.. image:: https://wiki.openstack.org/w/images/1/16/Refstack-publiccloud-usecase.png - :width: 700px - :alt: Public Cloud official test channel use case - - -Proposed change -=============== - -Generalized gearman flow. - -(#) execute_test.py is already installed on gearman worker node. -(#) The run_gearman method in tempest_tester.py will use gearman client to send over a payload. -(#) The payload will have the necessary information to construct the arguments to execute_test.py -(#) Gearman worker receives the payload. -(#) Validates the payload Sets up local virtual env and installs the correct version of tempest within it. -(#) The worker then kicks off execute_test with -callback ``refstack server`` ``test_id`` --conf_json ``from payload`` --tempest_home ``tempest install dir`` -(#) With the current execute_test.py code, it will interact with the refstack server to get more information about the cloud being tested to construct the tempest.config, and get the testcases to be run (optional), then execute the tempest test from the tempest_home. At the end, it will automatically send back the results to Refstack server. - -Note: with this design, gearman worker will have network access to the Refstack server and will need access to the cloud being tested. - -This spec covers the following deliverables; - - * gearman client side code. (https://review.opendev.org/84270/) - * gearman worker code (wip) - * Parts of this are already stubbed out in the code. specifically the "run_gearman" method. - * Test status reporting API call - * This feature will overlap with the following blue print: https://blueprints.launchpad.net/refstack/+spec/update-test-status - * Installer with dependency coverage for the worker to improve speed of deployment of new workers. - * In this instance tempest would be installed in a virtual env before every test. So that the exact version of tempest that is needed for this specific test is installed in a way that is easy to clean up afterwards for the next test that will run on that worker node. - - -Alternatives ------------- - -There are a lot of other job queue type things .. I happen to love gearman and the infra team has a gearman based system in place already.. they know how to troubleshoot it and tweak it for performance. - -Data model impact ------------------ - -This uses the current models without any changes. - -REST API impact ---------------- - -**update-test-status** - This is a basic method for remote testers to report status to the gui/api - - * Method type: POST - - * if result is accepted responds with 202 - - * Expected error http response code(s) - - * 400 bad request.. parameter was missing? - - * 405 not authorized, this method should only allow failure reports from known testing hosts - - * URL: /update-test-status/ - - * Parameters - - * payload - the payload object that was passed into the worker to begin with - - * on docker tests I think we should still post back from data if we can.. - - * test_id - the test id - -Security impact ---------------- - -* Does this change touch sensitive data such as tokens, keys, or user data? 
**NO** - -* Does this change alter the API in a way that may impact security, such as - a new way to access sensitive information or a new way to login? **NO** - -* Does this change involve cryptography or hashing? **NO** - -* Does this change require the use of sudo or any elevated privileges? **NO** - -* Does this change involve using or parsing user-provided data? This could - be directly at the API level or indirectly such as changes to a cache layer. **YES** - -* Can this change enable a resource exhaustion attack, such as allowing a - single API interaction to consume significant server resources? Some examples - of this include launching subprocesses for each connection, or entity - expansion attacks in XML. **NO** (thats why we use gearman) - -Notifications impact --------------------- - -The gearman client should be able to feed back its status updates to the 'TestStatus' model through the update-test-status method. - -Other end user impact ---------------------- - -Aside from the API, are there other ways a user will interact with this feature? - -Users will be able to trigger, cancel, and, receive status updates. - -Performance Impact ------------------- - -The idea behind using gearman for this is that we can scale the worker pool in and out -depending on demand. So there is no real need to worry about performance impacts. - - -Other deployer impact ---------------------- - -* using the gearman testing option will require two settings in `refstack.cfg` GEARMAN_SERVER and GEARMAN_PORT will need to be set with the location and port of the gearmand server. - -* This change will require being enabled in the same file with the TEST_METHOD value set to "gearman". - -Developer impact ----------------- - -TDB - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - dlenwell - -Other contributors: - rockyg (documentation) * these documents are ripe with raw material for docs :) - -Work Items ----------- - -* gearman client side code. (https://review.opendev.org/84270/) - * starts/stops/handle the gearman job queue -* gearman worker code (wip) -* report failure api call -* package installer for test runner with dependency coverage. - -Dependencies -============ - -extends openstack-infra/gear - https://opendev.org/opendev/gear - -will also require a running gearmand service someplace accessible to both worker and client. - -Testing -======= - -TBD - -Documentation Impact -==================== - -This should already be included in the high level architecture documentation for refstack. - -References -========== - -* http://gearman.org diff --git a/specs/prior/approved/refstack-org-result-visualization.rst b/specs/prior/approved/refstack-org-result-visualization.rst deleted file mode 100644 index 85987b36..00000000 --- a/specs/prior/approved/refstack-org-result-visualization.rst +++ /dev/null @@ -1,167 +0,0 @@ -Intuitive visualization for test comparison -========================================== - -Blueprint: https://blueprints.launchpad.net/refstack/+spec/results-visualization -Storyboard: https://storyboard.openstack.org/#!/story/111 - -Result comparison is the very essence of refstack. This spec lays out the -basic design objectives, comparison matrix and wire-frames for the initial -visualization between cloud test runs. The display of this information -must make it simple for users to map their cloud interoperability to other -clouds. - - Note: Positive results only for refstack public site. - We will still handle negative/skip use cases. 
-
-Problem description
--------------------
-
-Refstack collects substantial amounts of detailed raw data in the form
-of passed test results. Individually, these tests provide little insight
-about cloud interoperability; consequently, refstack must provide a way to
-group results (likely by capability) and contrast them with other posted
-test results.
-
-Specific Items that Need Visualization
-
-Comparison of results against:
-
- * Core Test List ("am I core?")
- * Universe of Tests ("close to 100%?")
- * Other's runs ("do I interoperate?")
- * Previous runs ("did I improve?")
-
-To make interoperability easier to determine, results should follow the
-capability groups rather than individual tests. Users should be able to
-drill down into a capability to look at the test detail.
-
-Note about Capabilities versus Tests: In DefCore, capabilities are
-tracked as the definition of "core." Each capability has a defined
-set of tests. For a core capability to be considered as "passed,"
-all of the tests in that capability must pass. Since we do not
-track "failed" as a state, a non-passing test simply makes the whole
-capability not passing.
-
-General Visualization: Tristate
--------------------------------
-
-The general approach is to focus on _deltas_ from other sets rather
-than showing the actual results. This means that visualizations
-will be more about what's different or changed. The preferred tool
-will be the "tristate" graph: http://omnipotent.net/jquery.sparkline/#s-about.
-
-For consistency, users should expect that:
-
- * +1 = good, match
- * 0 = ok, no match (run is advantaged over reference/core)
- * -1 = bad, missing functionality
-
-.. image:: https://wiki.openstack.org/w/images/1/19/Refstack_mock_tristate.png
-   :width: 700px
-   :alt: Tristate Graph Sample
-
-There are two consistent but slightly different ways that we will use
-tristate:
-
-1) Comparing to core tests, with the goal of showing compliance:
-
-   * +1 = passed a core test or capability
-   * 0 = passed a non-core test or capability
-   * -1 = did not pass a core test or capability (the same as "not-reported")
-
-2) Comparing to other test runs, with the goal of showing interoperability:
-
-   * +1 = passed in both samples
-   * 0 = passed in subject but not in reference (subject is advantaged)
-   * -1 = not passed in subject but passed in reference (subject is
-     disadvantaged)
-
-An example rendering would look like this:
-
-.. image:: https://wiki.openstack.org/w/images/5/5e/Refstack_mock_comparison.png
-   :width: 700px
-   :alt: Comparison Mock Up
-
-Important Design Note: All tristate graphs must use the same ordered
-capability/test list to ensure that results are easy to compare visually.
-The purpose of the tristate is to help quickly find outliers, not to perform
-detailed comparisons. Drill downs will be used to resolve specific
-differences.
-
-Detailed Visualization: Drill Down
-----------------------------------
-
-We will expand the capabilities-level tristate in the detailed visualization
-but still retain the tristate meanings for specific tests. In the drill down,
-the user will see the original tristate graph above a table with the
-capabilities list (order preserved) as rows. Each row contains the following
-columns:
-
-* the name of the capability
-* a tristate visualizing the individual test results using the same +1/0/-1
-  semantics
-* a simple list of the -1 tests
-
-Usability Note: The name of the test/capability should be included as a hover.
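To make the tristate semantics above concrete, the sketch below (a Python
illustration only; the helper name and inputs are assumptions, not part of the
refstack codebase) shows how a +1/0/-1 series could be derived from two sets
of passed tests::

    def tristate_vs_reference(subject_passed, reference_passed, ordered_tests):
        """Return a +1/0/-1 series comparing a subject run to a reference run.

        Follows the interoperability semantics above: +1 passed in both,
        0 passed only in the subject, -1 not passed in the subject.
        """
        series = []
        for test in ordered_tests:
            if test in subject_passed and test in reference_passed:
                series.append(1)
            elif test in subject_passed:
                series.append(0)
            else:
                # Covers tests passed only in the reference; tests passed in
                # neither run are not distinguished by the spec, so they are
                # also recorded as -1 here.
                series.append(-1)
        return series

    # Example:
    #   tristate_vs_reference({'t1', 't2'}, {'t1', 't3'}, ['t1', 't2', 't3'])
    # returns [1, 0, -1].

The core-compliance variant (1) would be analogous, with the reference set
replaced by the core test list.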
- -Alternatives ----------------------------- - -There are several other approaches to visualize this information including shaded table -cells and spider charts. This would be acceptable alternatives; however, the tristate -chart is compact, very simple to use and highly intuitive for comparing result sets. - -It would be possible to use tristate shapes (circle, open circle, square) to reflect the same -data on tables. - -Data model impact - -Likely none; however, depending on the complexity of the queries, -it may be necessary to create intermediate tables to to summarize -capabilities from test results per run to improve performance. - -If new models are needed, this spec should be updated with the design. -At this time, we assume that the collection does not require an -intermediate model. - -Specification for the method - -These are read-only reports and should use GETs. - -The URL path should match the other UI paths with then following pattern: - -Compare against previous results: -HTML response: GET /[refstack base]/compare/[release]/[cloud id] - -Compare against other clouds: -HTML response: GET /[refstack base]/compare/[release]/[cloud id]?to=[other 1]|[other 2] - -JSON response same as HTML but with .json - -Security impact - -None. These are open reports. - -Notifications impact - -None. - -Other end user impact - -None. - -Developer impact - -None. - -Assignee(s) - -TBD - -Work Items - -* Spec & Mock -* CSS & HTML Frame -* Data Collection -* Connect Data into UI Page - -Dependencies - -Sparklines JS libraries: http://omnipotent.net/jquery.sparkline/#s-about - -Documentation Impact - -Need to document screen and drill down expectation. - -References - -http://wiki.openstack.org/wiki/Governance/DefCoreCommittee \ No newline at end of file diff --git a/specs/prior/approved/refstack-org-tcup-base.rst b/specs/prior/approved/refstack-org-tcup-base.rst deleted file mode 100644 index 13c40c7e..00000000 --- a/specs/prior/approved/refstack-org-tcup-base.rst +++ /dev/null @@ -1,163 +0,0 @@ -Base TCUP environment for refstack.org -========================================== -https://blueprints.launchpad.net/refstack/+spec/standalone-tcup-driver - -TCUP (Tempest in a Container, Upload from Probe) is a self-contained, universal Tempest environment that can be widely used by the community with minimal effort AND minimal support effort by the Refstack team. - -Problem description -=================== - -For DefCore and the core definition, we need to collect lots and lots of test runs against deployed OpenStack clouds. Many of these clouds are behind firewalls and not accessible by 3rd parties. So we need to make it super easy to make running Tempest and result uploads as accessible as possible. - -Community access is the goal of TCUP. While the original and primary intent of Tempest was to test OpenStack code, having a large body of tests creates unique opportunities for us. DefCore uses the tests as a way to define core capabilities. - -Installing and configuring Tempest presents a challenge for many in the community. TCUP's job is to reduce that complexity to the smallest possible set. - -Who are "Users" below? The user in this context is the TCUP user, not user inferred from the OpenStack API credentials. 
- -Requirements: - -* It should not matter which Linux distro they are using -* Users should not have to figure out which Refstack and Tempest code to check out (beyond the single tcup.py file) -* Users should not have to deal with packages or pips (beyond Docker and the minimal tcup requirements) -* Users should not have to determine where to upload their results (but could override) -* Users identities must be hidden unless they agree/ask to have them published. There is a risk that their OpenStack credentials may be revealed in log messages - this should be addressed. -* When the test is complete, the test system dissolves - -Anti-Requirements: - -* Users should not need to checkout or clone any code -* Users should not have to edit configuration files - -.. image:: https://wiki.openstack.org/w/images/f/f4/Tcup_flow.png - :width: 700px - :alt: TCUP workflow - -Proposed change -=============== - -TCUP should be designed in as simple a way as possible. - -Running TCUP should only require Docker (.9+), a single tcup.py file with minimal dependencies, working OpenStack cloud credentials and an Internet connection for install and results upload. The cloud being tested does _not_ have to be public. TCUP will work as long as the user and the TCUP install has network access to the cloud being tested. - -Environment variables from the host (OS_*) will be passed into the container. The container should not start unless critical OS_ variables are in place. The specific OS_ items are limited: - - * OS_AUTH_URL - * OS_TENANT_NAME - * OS_USERNAME - * OS_PASSWORD - -There should be a `--debug` mode to allow for user testing and debug. The debug flag should NOT start tests automatically and should map the user pwd into /dev in the TCUP container. - -There must be both a way to use local code (refstack) to run TCUP and also a simple file download approach. These methods should be functionally identical. - -While there may be broad uses of TCUP for test automation, it is not desirable to overload them at the expense of manual usability. TCUP should be kept very simple for users in this pass. - -By default, TCUP will upload results to the Refstack site (this is a requirement above); however, we anticipate other use cases. For users who do not want to upload their results, they can change their API target parameters. This will allow users to instead upload their results to an internal Refstack site or simply save the results to their local drive. - -Alternatives ------------- - -* THESE ARE INCLUDED FOR COMPLETENESS, NOT IMPLEMENTATION * - -It would be possible to create a single-use VM for this testing. That would require much more download time, build effort and maintenance. - -An additional method is to package execute_test on its own allowing it to install on fedora and ubuntu. It already has tempest havana stable as a dependency. It can be installed and the rc file can be sourced and it can be kicked off. No container would be needed and you can log into any cloud instance on any cloud provider that has network reach to the cloud you want to test. Start an ephemeral vm and log into it and run two commands. - -Yet another approach is to assume tempest havana is already installed. Users can invokes execute_test directly without using docker or any container. This omits the "minimal setup" TCUP approach. - -It would be possible to setup a cloud-based process to run Tempest (this is a Refstack use case); however, this would not reach private clouds. It also does not give the user control of the data. 
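As a rough illustration of the credential handling described in the proposed
change above (a sketch only; the function name is hypothetical and the
prompting behaviour follows the security section's suggestion rather than any
existing tcup.py code)::

    import getpass
    import os

    # The OS_* variables the container should refuse to start without.
    REQUIRED_VARS = ('OS_AUTH_URL', 'OS_TENANT_NAME', 'OS_USERNAME',
                     'OS_PASSWORD')

    def collect_credentials(environ=os.environ):
        """Gather the OS_* variables to pass into the TCUP container."""
        creds = {name: environ.get(name) for name in REQUIRED_VARS}
        if not creds['OS_PASSWORD']:
            # Prompt rather than fail; the password is never written to disk.
            creds['OS_PASSWORD'] = getpass.getpass('OS_PASSWORD: ')
        missing = sorted(name for name, value in creds.items() if not value)
        if missing:
            raise SystemExit('TCUP cannot start; missing %s'
                             % ', '.join(missing))
        return creds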
- -Data model impact ------------------ - -None. - -REST API impact ---------------- - -None; however, TCUP will rely on a stable upload REST API. - - -Security impact ---------------- - -User passwords are passed into the container and should be redacted from log entries or error messages. - -We should prompt the user (from the tcup.py) code to enter a password if none is provided in the environment. - -Passwords must not be stored by TCUP! - - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -TCUP is designed as a stand-alone capability. It should not have interactions with other parts of the system except via the API as noted above. - -Performance Impact ------------------- - -None. - -Other deployer impact ---------------------- - -The community version of TCUP does NOT have to be coupled to other test running models. - -It is _not_ desirable to complicate TCUP to serve other uses. - -Developer impact ----------------- - -None. TCUP should use the standard API. - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - robhirschfeld - -Other contributors: - praveen (test) - alexhirschfeld (dev & test) - dlenwell (review) - rockyg (documentation) * these documents are ripe with raw material for docs :) - -Work Items ----------- - -* build TCUP docker container (via Dockerfile) -* build tcup.py to build and launch docker -* document run process -* update configuration generator to use environment variables -* integrate execute_test scripts into TCUP -* integrate default upload target into TCUP - -Dependencies -============ - -* execute_test scripts must support environment variables -* upload API must function correctly - -Testing -======= - -Manual environment testing by Refstack and community. - -Documentation Impact -==================== - -TCUP needs detailed community facing documentation and video tours. - -References -========== - -* http://docker.io diff --git a/specs/prior/implemented/api-v1.md b/specs/prior/implemented/api-v1.md deleted file mode 100755 index 86be7b90..00000000 --- a/specs/prior/implemented/api-v1.md +++ /dev/null @@ -1,159 +0,0 @@ -Refstack API v1 -=============== -This document is to serve as the complete spec for refstack's v1 api. - -###Problem description# - -As our requirements grow, so must the api. To maintain backwards compatibility on a live running copy of the api we have to version it to insure that older software can still have basic api functionality. - -This api will be implemented at api.refstack.org. Current api functions that exist in web.py will be deprecated as software that relies on them are updated to use the new api. - -###Proposed change# - -This isn't so much a change as a definition of the api that has evolved so far. This way we have a v1 spec and can use it to plan for v2 features and changes. - -###Alternatives# - -N/A - -###REST API impact# - -All urls will be prefaced by */v1/* indicating version one of the api is in use. - - -####results# - -**description:** Receive tempest test result from a remote test runner. this function expects json formated pass results. - -**url:** post /v1/results - -**parameters:** - -str:data - a string input containing json as shown in lower example. - -**post example:** - - { - 'cpid': '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12', - 'duration_seconds': 23445234, - 'results': [ - {'name':'fully.qualified.test.path', - 'uid':'test uuid'}, /* if test has uid. 
uid has a priority */ - {'name: 'another.test.path.without.uid'}] /* only passed tests */ - } - -**normal response:** http:201 - the status has been saved - - { - 'test_run_id': '7fd4e1c67a2d28fced849ee1bb76e7391b93eb12', - } - -**failed response:** http:404 - the job_id does not exist - - { - 'message': 'the job_id does not exist.' - } - -**failed response:** http:400 - the job_id already has results - - { - 'message': 'the job_id already has results' - } - ----- -**description:** Get the results of a test run in JSON format. - -**url:** get /v1/results/{test_run_id} - -**normal response:** http:200 - OK - - { - 'created_at': '2015-01-16 10:10:10', - 'duration_seconds': 25, - 'cpid': '6b678f2c7fa94c7e942728b300451b56', - 'results': [ - 'tempest.api.test.id', - 'tempest.api.another.test.id' - ] - } - ----- - -**Data model impact** - -* add int field called duration_seconds to test model -* add varchar field cpid to test model -* change name of subunit field in test model to results and possibly increase length -* change name of TestStatus to Events -* add int field event_type to Events model - -**Security impact** - -* Does this change touch sensitive data such as tokens, keys, or user data? **no** - -* Does this change alter the API in a way that may impact security, such as a new way to access sensitive information or a new way to login? **yes** - - _we have not implemented a security model around reporting events. some discussion will need to take place to decide if this is acceptable._ - -* Does this change involve cryptography or hashing? **no** - -* Does this change require the use of sudo or any elevated privileges? **no** - -* Does this change involve using or parsing user-provided data? This could - be directly at the API level or indirectly such as changes to a cache layer. **yes** - - _we will be parsing results and json data being posted to perform different actions and will therefore be vulnerable. in order to protect ourselves we will have to take care with the parsing code to ensure things are valid and not open to injection attacks_ - -* Can this change enable a resource exhaustion attack, such as allowing a single API interaction to consume significant server resources? Some examples of this include launching subprocesses for each connection, or entity expansion attacks in XML. **no** - - -**Notifications impact** - -N/A - -**Other end user impact** - -Moving forward any changes to the testing client will need to be in sync with the api version. - -**Performance Impact** - -N/A - -**Developer impact** - -This will effect a lot of areas of existing code.. this is why the second work item below is "update exising api calls and web views so that they are compatible with the new scema" and "modify testing client to use the new api calls". - -**Implementation:** - -The api will be implemented as a flask application in the api.py file. - -**Assignee(s)** - -Primary assignee: - dlenwell - -**Work Items** - -* update models to reflect above mentioned schema changes. -* update exising api calls and web views so that they are compatible with the new scema. -* replace current api.py with a new api that meets this spec. -* create validation decorator -* stand up api.refstack.org with new api. -* write api unit tests. -* modify testing client to use the new api calls. - -**Dependencies** - -N/A - -**Testing** - -* we will require api unit tests for each call and expected response. 
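For illustration, a minimal client-side sketch of the POST call above (Python
with the requests library; the host name is a placeholder and this snippet is
not part of the planned testing client):

    import requests

    payload = {
        'cpid': '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12',
        'duration_seconds': 23445234,
        'results': [
            {'name': 'fully.qualified.test.path', 'uid': 'test uuid'},
            {'name': 'another.test.path.without.uid'},
        ],
    }

    # POST /v1/results; a 201 response carries the new test_run_id.
    response = requests.post('https://api.refstack.example/v1/results',
                             json=payload)
    if response.status_code == 201:
        print(response.json()['test_run_id'])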
- -**Documentation Impact** - -* all api functions will have sphinx compatible doc tags reflecting actual usage. - -**References** - -N/A diff --git a/specs/prior/implemented/coretest-testid.rst b/specs/prior/implemented/coretest-testid.rst deleted file mode 100755 index 0d452a38..00000000 --- a/specs/prior/implemented/coretest-testid.rst +++ /dev/null @@ -1,142 +0,0 @@ - -================================================= -Use fully qualified test id in the coretests file -================================================= - -Launchpad blueprint: - - -This document describes the format for the test ids that will be used in the -DefCore coretests.json file. - - -Problem description -=================== - -The coretests.json file includes a list of tests that are defined as "core" by -the DefCore committee. Currently, the coretests.json file (in the -defcore/havana directory) uses the method names defined in the Tempest Python -test classes as the test names. While these method names are unique in Havana, -it is not the case in Icehouse where some of the method names are being used by -multiple test classes of different OpenStack components. - - -Proposed change -=============== - -The proposal is to adopt the test id as used by the subunit package to identify -each individual test. The test id is a fully qualified name which includes the -fully qualified class name of the Python test class and the method name. Using -this test id format will also help the performance of processing subunit test -results against the core tests list for compliance checking. - -The following is an example which shows how the test_get_default_quotas test is -currently defined in the coretests.json file versus the proposed test id format. - -* Current definition - - .. parsed-literal:: - "test_access_public_container_object_without_using_creds":\ - { "file": "test_object_services.py" } - -* Proposed test id format - - .. parsed-literal:: - "tempest.api.object_storage.test_object_services.PublicObjectTest.\ - test_access_public_container_object_without_using_creds" - - -Alternatives ------------- - -Open to suggestions on better ways to uniquely identify each test case with run -time processing performance in mind. - - -Data model impact ------------------ - -None - - -REST API impact ---------------- - -None - - -Security impact ---------------- - -None - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -Using the test id will help the performance of run time result processing. - - -Other deployer impact ---------------------- - -None - - -Developer impact ----------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Catherine Diep - -Other contributors: - Rob Hirschfeld - -Work Items ----------- - -* Catherine to create the corresponding test id from the tests listed in the - coretests.json file. 
-* Rob Hirschfeld to review and validate the result test id list - - -Dependencies -============ - -None - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -None diff --git a/specs/prior/implemented/identify-code-to-deprecate.rst b/specs/prior/implemented/identify-code-to-deprecate.rst deleted file mode 100755 index 89cf23a0..00000000 --- a/specs/prior/implemented/identify-code-to-deprecate.rst +++ /dev/null @@ -1,180 +0,0 @@ - -========================== -Identify code to deprecate -========================== - -Storyboard: https://storyboard.openstack.org/#!/story/110 - -This spec identifies code that should be deprecated. - -Problem description -=================== - -At the Juno summit, we have learned of many other groups working on -things that overlap with refstack. In addition, there are new -requirements and changes in refstack that may also result in the -refactoring of refstack code. This spec identifies code that should -either be deprecated or moved into other projects. - -In addition to that. We've decided to simplify the design of refstack by -removing both the "official" and local testing tracks from refstack. - -Instead we will allow vendors/operators to run tests on their own and -simply submit the results using the refstack-client. - - -Proposed change -=============== - -* Remove templated config config creation: - - Until auto configuration exists in tempest. refstack has decided to - remove support for in app generated config. Instead we will allow - operators to use their own known config file with the refstack-client. - - This change would require: - - * Removing the tempest.conf file generation code in refstack-client. - Add option in refstack-client to take a tempest.conf file as input - parameter. It is the caller's responsibility to construct the - tempest.conf file and pass it to refstack-client. - - * Several functions in web.py will be removed. - - * get_testcases - * get_miniconf - * get_script - - When/if a common tool to generate tempest.conf becomes available we - may revisit reintroducing this feature. - - There is currently a blueprint under review - (https://review.opendev.org/#/c/94473/) to build a tool that will - generate proper tempest.conf using as few input arguments as possible. - -* Remove code to trigger test from web ui functionality. - - Because we've removed the use cases for both local and official web - driven test runs. We can remove all the excess web ui for creating new - clouds and starting and monitoring test runs. - -* tools/tempest_tester.py - - This code being replaced by the new refstack-client. - -* tools/tempest_subunit_test_result.py - - refstack is no longer parsing any raw subunit. The refstack-client - only uploads an array of passing tests. - - We originally planned to contribute this code to the tempest project. - However the tempest ptl is not interested in it because they'd prefer - folks just use the built in subunit parsers. - - We have additionally thought of moving this code into refstack-client. - I am not sure that is the correct path. However I am open to - discussion. - -* tools/execute_test - - Being replaced by the new refstack-client. - - Also need to remove the following files: - - 1: refstack/tools/: - docker_buildfile.py, - docker_buildfile.template - 2: refstack/templates/ - show_report.html, - show_status.html, - tempest.conf, - test_cloud.html - -Data model impact ------------------ - -None, The database will remain as it is. 
- - -REST API impact ---------------- - -Many functions which are currently included in the web.py file will be -removed once the new v1 api lands. - - -Security impact ---------------- - -None - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - - -Implementation -============== - -Details in the Work Items section. - -Assignee(s) ------------ - -Primary assignee: - Catherine Diep - -Other contributors: - IBM team member - -Work Items ----------- - -Use the "Proposed Change" section to populate work items. - - -Dependencies -============ - -refstack-client and v1 api landing. - - -Testing -======= - -None - - -Documentation Impact -==================== - -None - - -References -========== - -* Tempest blueprint for tempest.conf creation - - https://review.opendev.org/#/c/94473/ diff --git a/specs/prior/implemented/refstack-org-api-cloud-uuid.rst b/specs/prior/implemented/refstack-org-api-cloud-uuid.rst deleted file mode 100644 index 8d52903e..00000000 --- a/specs/prior/implemented/refstack-org-api-cloud-uuid.rst +++ /dev/null @@ -1,145 +0,0 @@ -========================================== -Test Submission API should use Target Identity Endpoint UUID -========================================== - -story: "Use keystone uuid with cloud id" -ref: https://storyboard.openstack.org/#!/story/135 - - -In order to ensure that multiple test runs are attributed to the same cloud, -test runner needs to use a consistent, discoverable and unique identifier -for each cloud test. This allows multiple users to correlate results from -the same cloud. - -Problem description -=================== - -Refstack is designed to have minimal user security and configuration overhead; -consequently, there are no mechanisms in the short term to ensure that a user's -test results are authorized (see note). To create valid results, refstack needs a way to -know when multiple runs are against the same targets so that comparisons are valid. - - > Note: In the future, Refstack will include user authentication. At that point - it will be possible to associate uploaded data to users and vendors in an - authoritative way. - -To solve this problem, refstack needs a unique handle for each cloud tested -that is unique and also discoverable to the test runner. - -Some requirements: - -* No round trips to refstack before a test is submitted (do not pre-create cloud) -* Minimal trust of users (do not require user credentials for uploads) -* Users should not be expected to remember cloud IDs -* Multiple users of same cloud should be tracked together - -Proposed change -=============== - -When test runner submits results, it should submit with the Identity Endpoint -UUID (aka Keystone end point under serviceCatalog/service["identity"]/endpoint[?id]). - -The refstack API should accept EITHER the user's created refstack cloud ID or the -discovered Endpoint UUID. If the refstack cloud ID is passed and no cloud -exists then refstack should create a new refstack cloud. - -Alternatives ------------- - -Refstack could use a different endpoint for the ID - -Refstack could stop using its own cloud ID and only use endpoint IDs - -Possible addition: we may want to also track the cloud endpoint URL. This -could be a possible added field for the JSON upload. 
While this will -help identify clouds, it could also reveal more information than the -user wants disclosed. We should only implement this with user permission. - -Data model impact ------------------ - -We have to add the endpoint ID as a field into the Cloud model. - -REST API impact ---------------- - -The Test Upload API needs to be modified to accept either the Test ID or the -endpoint UUID. If the endpoint UUID is not in the URL then it should be included -in the JSON payload. - -Security impact ---------------- - -Improvement: this helps reduce the need of passing refstack authentication to ensure -that cloud results are linked to individual clouds or users. - - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -Users will not have to pre-create clouds before using -refstack. - -Users will have to be able to assign clouds to endpoint UUIDs. - -Performance Impact ------------------- - -Should improve performance because round trips on result -uploads are avoided. - -Other deployer impact ---------------------- - -None. - -Developer impact ----------------- - -Should simplify developer work. - -Implementation -============== - -Assignee(s) ------------ - -TBD - -Work Items ----------- - -* determine forumula for endpoint UUID (if any needed) -* get and add cloud epid to results upload -* add cloud epid to model -* update api to response correctly for epid lookup - -Dependencies -============ - -API v1 spec. - -Testing -======= - -We need to validate the endpoint IDs correctly resolve to clouds. - -Documentation Impact -==================== - -We should explain how clouds are identified in the documentation so that -users will understand the impact of re-installing and how to keep results -together even if the cloud changes. - -We will also have to explain how to associate results to a user managed -cloud. - -References -========== - -https://storyboard.openstack.org/#!/story/135 diff --git a/specs/prior/implemented/refstack-org-test-result-json-schema.rst b/specs/prior/implemented/refstack-org-test-result-json-schema.rst deleted file mode 100644 index 48743f41..00000000 --- a/specs/prior/implemented/refstack-org-test-result-json-schema.rst +++ /dev/null @@ -1,176 +0,0 @@ -========================================== -Example Spec - The title of your blueprint -========================================== - -https://blueprints.launchpad.net/refstack/+spec/test-result-json-schema - -Refstack accepts connections from the refstac-client to post test results -for storage and scoring. Test results are in json and follow a simple -format. However there is no defined schema for the test results, allowing -for a greater possibility of invalid or malicious data to be injected -into the refstack database. This spec proposes the creation of a json -schema that can be used by client developers to generate their own valid -test results. The spec will also be used by the server to validate -incoming test results. - -Problem description -=================== - -* Refstack accepts test results in json format. However, there is no - explicit definition of the structure of this json data. - -* Data sent to the Refstack server is not validated against any schema, - increasing the possibility of invalid or malicious data. - -* Clients developers do not have a specific schema to develop against. - - -Proposed change -=============== - -This change proposes the following additions: - -* A json schema file to capture the format of the test results. 
- -* Validation of incoming results against the json schema in Refstack. - -Alternatives ------------- - -Another alternative is manually writing code to validate test results. -This schema does not preclue that alternative, but by creating and -validating against a schema we take advantage of existing libraries -and reduce the possibility of introducing unintended parsing errors. - -Data model impact ------------------ - -This proposed change has two impacts on the data model: - -* It introduces a schema that must be maintained and versioned - against the current data model. - -* It adds an additional level of validation to the 'create' operation - of the CRUD model. Since running a test is a long operation this - addition level of complexity will not have an impact on application - performance. - -* No database migrations will be necessary. - -* A static page to serve the schema will be necessary. - -REST API impact ---------------- - -The v1 api must be modified to validate test results. The modification -will result in an addition to behavior on the API with no changes -to the user-facing endpoint. - -**description:** Receive tempest test result from a remote test runner. -This function expects json formated pass results. -Update to the api described at: -https://opendev.org/openinfra/refstack/src/branch/master/specs/prior/implemented/api-v1.md - -**url:** post /v1/results - -**failed response:** http:400 - Malformed data. - - { - 'message': 'Malformed json data, see /v1/results/schema' - } - -**url:** get /v1/results/schema - -**valid response:** http:200 schema.json file - -No invalid responses. No accepted parameters. - -Security impact ---------------- - -This change is intended to improve the security of the application -by introducing data validation. No data-changing apis are -introduced. - -Notifications impact --------------------- - -No known notification impacts. - -Other end user impact ---------------------- - -End users may experience client failures if their client does not produce -valid json. - -Performance Impact ------------------- - -This change introduces a validation step to the POST process. The additional -time used to validate the data is very small compared to the time taken -to generate data through cloud testing. The timing impact is negligible -in the context of the upload use case. - -Other deployer impact ---------------------- - -This change will take immediate effect after being merged. - -Developer impact ----------------- - -No additional developer impact. - -Implementation -============== - -This change will be implemented as a validation function in the API POST -pipeline. It will essentially be middleware that takes the input data, -validates the results, and sends back a positive or a negative result. -If the result is negative, the 400 response will be returned. -If the result is positive, the data processing will continue as normal. - -Assignee(s) ------------ - -Primary assignee: - hogepodge - -Work Items ----------- - -* Write json schema -* Write json schema GET endpoint -* Implement validation on results POST endpoint. -* Implement unit tests for validation and endpoint. - -Dependencies -============ - -No additional dependencies will be added. - -Testing -======= - -To the TestRefStackApi class the following tests will be added: -* test_results_valid_data -* test_results_invalid_data - -These results will confirm both positive (200) and negative (400) results. 
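A rough sketch of the kind of validator these tests would exercise (the schema
fragment and names below are illustrative assumptions, not the final schema,
which will be served at /v1/results/schema)::

    import jsonschema

    # Illustrative fragment only; the real schema is maintained separately.
    RESULTS_SCHEMA = {
        'type': 'object',
        'properties': {
            'cpid': {'type': 'string'},
            'duration_seconds': {'type': 'integer'},
            'results': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {'name': {'type': 'string'},
                                   'uid': {'type': 'string'}},
                    'required': ['name'],
                },
            },
        },
        'required': ['cpid', 'duration_seconds', 'results'],
    }

    def validate_results(data):
        """Return True if the posted body matches the schema."""
        try:
            jsonschema.validate(data, RESULTS_SCHEMA)
            return True
        except jsonschema.ValidationError:
            return False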
- -To the unit tests the validator function will be tested: -* test_valid_data -* test_invalid_data -* test_empty_data - -These results will provide three modes of schema validation. - -Documentation Impact -==================== - -Documentation will be updated to link to current schema. - -References -========== - -No additional references. diff --git a/specs/prior/implemented/seperate_refstack_tester_from_refstack.rst b/specs/prior/implemented/seperate_refstack_tester_from_refstack.rst deleted file mode 100755 index 3712f765..00000000 --- a/specs/prior/implemented/seperate_refstack_tester_from_refstack.rst +++ /dev/null @@ -1,94 +0,0 @@ -====================================== -separate refstack tester from refstack -====================================== - -**Problem description** - -The refstack tester needs to be easily installable on its own without checking -out the refstack code. - -**Proposed change** - -This would require taking the code that lives in refstack/tools/tester and moving -it into its own repository. probably in openstack-infra. - -**Alternatives** - -we could leave the code were it is and force anything that wants to install it to -checkout the whole of refstack to do so. - -**Data model impact** - -none. - -**REST API impact** - -none. - -**Security impact** - -* Does this change touch sensitive data such as tokens, keys, or user data? **no** - -* Does this change alter the API in a way that may impact security, such as - a new way to access sensitive information or a new way to login? **no** - -* Does this change involve cryptography or hashing? **no** - -* Does this change require the use of sudo or any elevated privileges? **no** - -* Does this change involve using or parsing user-provided data? This could - be directly at the API level or indirectly such as changes to a cache layer. **no** - -* Can this change enable a resource exhaustion attack, such as allowing a - single API interaction to consume significant server resources? Some examples - of this include launching subprocesses for each connection, or entity - expansion attacks in XML. **no** - -**Notifications impact** - -none. - -**Other end user impact** - -The tester would need to remain compatible with the v1 api spec. - -**Performance Impact** - -none/ - -**Developer impact** - -When finished tcup would need to have the tester as a dependency. - -**Implementation:** - -**Assignee(s)** - -Primary assignee: - dlenwell - -**Work Items** - -* put code from refstack/tools/tester in external github repo (i.e. github.com/dlenwell/refstack-tester) -* add basic unit and node test to new project to insure it works in ci -* create project in openstack-infra/config for project with the above repo set as its upstream. -* insure that project has the enable storyboard flag. -* add refstack-tester to requirements.txt in refstack (will still be needed by tcup) -* deprecate code in refstack/tools/tester - -**Dependencies** - -none. - -**Testing** - -The new project will require a base set of tests so that ci works properly. - -**Documentation Impact** - -Since we have not written docs for the api.. let this document serve as the -starting place for that. 
- -**References** - -N/A diff --git a/specs/prior/implemented/simplify-uploads-by-only-sending-pass-results.rst b/specs/prior/implemented/simplify-uploads-by-only-sending-pass-results.rst deleted file mode 100755 index 2e1f64d1..00000000 --- a/specs/prior/implemented/simplify-uploads-by-only-sending-pass-results.rst +++ /dev/null @@ -1,129 +0,0 @@ -============================================= -Simplify Uploads by only sending Pass results -============================================= - -Blueprint: https://blueprints.launchpad.net/refstack/+spec/pass-only-uploads -Storyboard: https://storyboard.openstack.org/#!/story/108 - -As part of helping the community accept the publication of results, refstack -test uploads should default to only PASS results. We are NOT uploading skips -or errors. This aligns with the interop objective because we want have a -positive assertion. - -Problem description -=================== - -Because fail results often include sensitive information it has been a pain -point for some of our early adopters to upload results to a foundation -controlled database. Since refstack really only cares about the things that -pass, we'll just parse the results and leave the fails out. - -Proposed change -=============== - -This would involve using the subunit parsing code inside the tester. We would -run the parser on the subunit before uploading the results to the api. - -We'll want to have a non-default option to send all data. Because I am sure -that some folks will want to use refstack internally for real debugging and -test regression. - -If a user tries to submit results to the public api server (Regardless of what -the non-defaulting option is set to.). It will either produce an error (which I -don't think is as useful) or it will just scrub the results anyways. - -Alternatives ------------- - -We could take the full upload and do the processing server side to accomplish -the DefCore objective; however, that does not avoid the data leak issue. - -Data model impact ------------------ - -None - -REST API impact ---------------- - -None - -Security impact ---------------- - -This change addresses security concerns. as far as I can tell it will not -create any new ones. - -Notifications impact --------------------- - -It should mention that results are being scrubbed in the on screen log messages. -so that it's clear on every action. - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -This will slow down the tester but not by a lot. However it will also speed up -the upload of the results since they will be trimmed. - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -This could potentially be a maintenance problem moving forward as we move the -subunit parsing utils into tempest then move the tester into its own repo. - -It will require that someone stays on top of things during those changes to avoid -duplication of code. - - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - dlenwell - -Other contributors: - catherine wrote the parsing utils. So some support might be needed. - -Work Items ----------- - -The tester already has a stubbed out function for this code. Just needs -to be filled in. - -Dependencies -============ - -None - -Testing -======= - -None - -Documentation Impact -==================== - -It should be written down that we aren't uploading fails. So its known to -operators and they don't have to be concerned about security leaks. 
- -But outside of that I don't see the need for documentation. - - -References -========== - -N/A diff --git a/specs/prior/implemented/test-record-api.rst b/specs/prior/implemented/test-record-api.rst deleted file mode 100755 index f3508970..00000000 --- a/specs/prior/implemented/test-record-api.rst +++ /dev/null @@ -1,198 +0,0 @@ -.. - This work is licensed under a Creative Commons Attribution 3.0 Unported - License. - - http://creativecommons.org/licenses/by/3.0/legalcode - -================================================= -Add Test Records Retrieval API to Refstack v1 API -================================================= - -StoryBoard https://storyboard.openstack.org/#!/story/2000212 - -This story proposes to add a new REST API that returns the test records -of the test results that have been uploaded to the Refstack server. - - -Problem description -=================== - -Currently, there is no API provided to retrieve the test records that the -users have uploaded to the Refstack server. The test records do not -contain the actual results of the tests. A test record only includes -test_id (result_id), date on which test was uploaded to refstack and -"cloud provider ID" (cpid) where the test results were collected. - -The use cases for adding this API are derived from the mockup phase for -Refstack user interface experience. - -Use Case 1: A UI dashboard that shows all test records that have been -uploaded to the Refstack server in the last number of days. - -Use Case 2: A UI dashboard that shows the latest test records that have been -uploaded to the Refstack server by all cloud providers identified by -"cloud provider ID" (cpid). - -Use Case 3: A UI that shows all test records uploaded by a cloud provider. - -Proposed change -=============== - -Add an API to Refstack v1 API to retrieve test records stored in the -"tests" table of the Refstack database. This API will need to support the -following optional filtering parameters: - -* Ability to specify the number of returned records. -* Ability to only retrieve test records in a specified date range. -* Ability to retrieve test records for a specific cloud provider identified - by its "Cloud Provider ID" (cpid). - -Note: Currently, there is no user or vendor identity associated to the test -results. The only association is with cpid. As the user and vendor -information become available, this API needs to be updated to provide -filtering based on user and vendor identity. - -Alternatives ------------- - -Direct access to the database to retrieve test records. Open to suggestions. - - -Data model impact ------------------ - -None - -REST API impact ---------------- - -Add a new API with the following specification. - -* Description: This API will be used to retrieve the test run records that - were uploaded to the Refstack database. -* Method type: GET -* URI: v1/results/ - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * BadRequest (400) - * Unauthorized (401) - * Not found (404) - -* Parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | start_date | query | xsd:date | Only retrieve data uploaded from | - | (optional) | | | this date. 
ISO 8601 date format | - | | | | YYYY-MM-DD | - +---------------+-------+--------------+-----------------------------------+ - | end_date | query | xsd:date | Only retrieve data uploaded up to | - | (optional) | | | this date. ISO 8601 date format | - | | | | YYYY-MM-DD | - +---------------+-------+--------------+-----------------------------------+ - | cpid | query | xsd:string | Only return the test records | - | (optional) | | | belonging to this cpid. | - +---------------+-------+--------------+-----------------------------------+ - | page | query | xsd:int | Page number to retrieve result | - | (optional) | | | records. Default is page=1 which | - | | | | contains the latest number of | - | | | | result records uploaded. The | - | | | | number of records per page is | - | | | | configurable via the refstack.conf| - | | | | file. | - +---------------+-------+--------------+-----------------------------------+ - - - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - {'test_id' : '95346866-307f-4052-ba31-ff6270635e14', - 'created_at': '2015-02-02 23:42:25', - 'cpid' : '043ef631f4204935a59c9ba573f0e111' - }, - {'test_id' : '95346866-307f-4052-ba31-ff6270635e15', - 'created_at': '2015-03-02 23:42:25', - 'cpid' : '043ef631f4204935a59c9ba573f0e122' - } - ...... - } - -Security impact ---------------- - -None - -Notifications impact --------------------- - -None - -Other end user impact ---------------------- - -None - -Performance Impact ------------------- - -None - -Other deployer impact ---------------------- - -None - -Developer impact ----------------- - -None - - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: Vladislav Kuzmin - -Other contributors: Catherine Diep - -Work Items ----------- - -* Create new API functions in the various layers. - - -Dependencies -============ - -None - - -Testing -======= - -Require API unit tests for each call and expected response. - - -Documentation Impact -==================== - -All API functions will have sphinx compatible doc tags reflecting actual usage. - - -References -========== - -None diff --git a/specs/queens/approved/subunit-data-api.rst b/specs/queens/approved/subunit-data-api.rst deleted file mode 100644 index f1dd509b..00000000 --- a/specs/queens/approved/subunit-data-api.rst +++ /dev/null @@ -1,497 +0,0 @@ -============================================= -Subunit Data Management API -============================================= - -Launchpad blueprint: - -* https://blueprints.launchpad.net/refstack/+spec/subunit-data-api - -This specification describes an expansion of the refstack api which, when -complete, will allow for the upload and management of subunit data files -to a RefStack server. - -Problem description -=================== - -The current RefStack API allows for the upload, management, and verification -of test results by server administrators. These capabilities, though -sufficient for the current scope of RefStack, will need an API expansion in -order to allow for similar data management of subunit data results. This -expansion will enable those organizations looking to achieve a greater degree -of interoperability to securely share the details of test runs with the -Foundation so as to get assistance with getting their OpenStack instance to -successfully meet interop standards. 
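(For reference, and as a companion to the record-retrieval API specified in the previous
spec, here is a minimal client-side sketch of that call. The host name is a placeholder,
requests is only one possible client, and the exact response envelope may differ from the
abbreviated schema shown there.)::

    # Illustrative only: list uploaded test records using the filters from the
    # parameter table in the test-record spec.
    import requests

    BASE = 'https://refstack.example.com/v1'
    params = {
        'start_date': '2015-02-01',   # ISO 8601 lower bound (optional)
        'end_date': '2015-03-31',     # ISO 8601 upper bound (optional)
        'cpid': '043ef631f4204935a59c9ba573f0e111',   # optional provider filter
        'page': 1,                    # newest records first; page size is server config
    }

    response = requests.get(BASE + '/results', params=params)
    response.raise_for_status()
    # Each record carries test_id, created_at and cpid, as sketched in the
    # response schema of that spec.
    print(response.json())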
- - -Proposed change -=============== - -**Add new API functionality to the RefStack v1 API** - - * Upload new subunit data- nonadmin capability - - * Link new subunit data to a corresponding existing test result- - nonadmin capability - - * Delete subunit data- admin/owner capability - - * Show subunit data for a given test result- admin/owner capability - - - -Note that, amongst the additions to the table that stores test results, -there is no added field intended for the storage of a subunit result id. -This is because, as per the spec defining the changes needed to upload and -utilize subunit data, the current plan is to link the two entries via the -metadata table. - -Alternatives ------------- - -* If subunit2sql takes too long to perform the aforementioned operations, - using asynchronous processing and upload may prove to be a better option. - For now though, it appears as though synchronous operations will be possible -* Possibly require subunit data to be converted into json before being passed - in for upload - -Data model impact ------------------- - -This API will interface with subunit2sql, which will add several tables into -the RefStack database. Though these have been laid out already in the general -subunit data import spec, for the sake of thoroughness, here they -are again::: - - -------------------------------------- - | tests | - -------------------------------------- - | id | String(256) | - | test_id | String(256) | - | run_count | Integer | - | failure | Integer | - | run_time | Float | - -------------------------------------- - - ---------------------------------------- - | runs | - ---------------------------------------- - | id | BigInteger | - | skips | Integer | - | fails | Integer | - | passes | Integer | - | run_time | Float | - | artifacts | Text | - | run_at | DateTime | - ---------------------------------------- - - --------------------------------------------------- - | test_runs | - --------------------------------------------------- - | id | BigInteger | - | test_id | BigInteger | - | run_id | BigInteger | - | status | String(256) | - | start_time | DateTime | - | start_time_microseconds | Integer | - | stop_time | DateTime | - | stop_time_microseconds | Integer | - | test | Test | - | run | Run | - --------------------------------------------------- - - ------------------------------------------- - | run_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | run_id | BigInteger | - | run | Run | - ------------------------------------------- - - ------------------------------------------- - | test_run_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | test_run_id | BigInteger | - | test_run | TestRun | - ------------------------------------------- - - ------------------------------------------- - | test_metadata | - ------------------------------------------- - | id | BigInteger | - | key | String(255) | - | value | String(255) | - | test_id | BigInteger | - | test | Test | - ------------------------------------------- - - ------------------------------------------- - | attachments | - ------------------------------------------- - | id | BigInteger | - | test_run_id | BigInteger | - | label | String(255) | - | attachment | LargeBinary | - | test_run | TestRun | - ------------------------------------------- - - -REST API impact ---------------- - -The current plan, as briefly outlined above, is 
to make the following -additions to the current API: - -**Upload subunit data** - -* Description: - - This capability will be used to upload the subunit data of a test result - that is not already in the database. It will do so in a few steps. First, - it will take the subunit file open it, and convert it to v2 stream format - (refstack-client outputs a subunit v1 file). Then, it will check to make - sure the data is not already stored in the database, and if there is no - record matching the data stored in the passed-in file, the api should then - use subunit2sql to insert the subunit data into the appropriate fields, as - well as inserting using the parsed data to insert a new entry into the - refstack "runs" table using the existing refstack api utilities. This may - seem a bit complicated for an upload function, but the goal in doing this - all in one fell swoop is to ensure that no subunit data is ever uploaded - that is not connected to some test result. Uploading subunit data will not - require admin privileges. - -* Method Type: POST - -* URI: v1/subunit/ - -* Normal Response Codes: - - * Created (201) - -* Error Response Codes: - - * Bad Request (400) - * Not found (404) - -* Request parameters: N/A - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - { - 'subunit_data': - } - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - 'subunit-uuid': 'subunit2sql-defined run id', - 'result-id': 'result id' - } - - -**Link subunit data to a corresponding existing test result** - -* Description: - - This will allow for the linking of a new, unadded set of subunit data - to data a test result already existing in the database. It will do - so by converting the contents of the given file to a subunit v2 stream, - then using the stream to generate a corresponding test result, - and then comparing that to the passed in test result. If the - generated result and the stored result correspond to one another, - it should insert the subunit data into the database and link the two - entries via a key value pair in RefStack's meta table. The two keys I - plan to use are the subunit data's uuid and the test result's id. - Because the validity of the link is easily verifiable, this action will - not be one that requires admin privileges. - -* Method Type: PUT - -* URI: v1/subunit - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Not Found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | result_id | URI | csapi:UUID | test result ID to link to | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: - - .. parsed-literal:: - { - 'subunit data': - } - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - 'uuid': 'subunit2sql-defined run id', - 'id': 'refstack test result id' - } - -**Delete subunit data entry** - -* Description - - This utility will be used to delete subunit data from the RefStack - database. Foundation and vendor admins, along with entry owners will - be able to delete subunit data entry. 
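(As a rough illustration of the two non-admin calls above, not a definitive client: the
host, token file, subunit file and result id are placeholders, and how the raw subunit
stream is encoded into the JSON body is left open by this spec.)::

    # Illustrative only: upload new subunit data, then link another stream to
    # an already-uploaded test result.
    import requests

    BASE = 'https://refstack.example.com/v1'
    with open('token') as f:
        headers = {'Authorization': 'Bearer ' + f.read().strip()}
    with open('results.subunit', 'rb') as f:
        # A text-safe representation of the v2 stream is assumed here; the
        # spec does not pin down the encoding of the JSON body.
        body = {'subunit_data': f.read().decode('utf-8', 'replace')}

    # POST v1/subunit: store the stream and create the matching test result.
    created = requests.post(BASE + '/subunit', json=body, headers=headers)
    created.raise_for_status()
    print(created.json())    # expected keys per the spec: subunit-uuid, result-id

    # PUT v1/subunit: attach subunit data to an existing result. The spec lists
    # result_id as a URI parameter; a query string is assumed in this sketch.
    linked = requests.put(BASE + '/subunit',
                          params={'result_id': '95346866-307f-4052-ba31-ff6270635e14'},
                          json=body, headers=headers)
    linked.raise_for_status()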
- -* Method type: DELETE - -* URI: v1/subunit/{id} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Forbidden (403) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | ID to be removed. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - -**Show subunit data** - -* Description - - This utility will be used to list the subunit data that has been - uploaded into the RefStack database. This action will be available - to vendor and Foundation admins only. A specific subunit data entry - can be selected and viewed using the result_id parameter. It will do - so in two steps. First, it will take the given test result id, and - reference refstack's meta table to find the corresponding subunit - uuid. Then, it will use that uuid to GET the subunit data from the - v1/subunit/{uuid} endpoint. - -* Method type: GET - -* URI: v1/subunit/{uuid} - -* Normal Response Codes: - - * OK (200) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Forbidden (403) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | test result id to search for. | - +---------------+-------+--------------+-----------------------------------+ - - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: - - .. parsed-literal:: - { - 'subunit-data:': { - 'run_at': 2017-08-16 18:34:58.367221Z - 'uuid': '4d7950cb-586e-407e-9acf-5b169825af98', - 'skips': 0, - 'fails': 1, - 'passes': 1, - 'run_time': 2060.7 - 'artifacts': 'http://example-logs.log', - } - 'tests': [ - { - 'id': '1 - 'test_id': 'tempest.api.network.test_security_groups.SecGroupTest.test_create_security_group_rule_with_icmp_type_code' - 'run_count': 1 - 'success': 1 - 'failure': 1 - 'run_time': 5.60538 - }, - { - 'test_id': ' tempest.api.compute.keypairs.test_keypairs_negative.KeyPairsNegativeTestJSON.test_create_keypair_with_empty_public_key', - 'run_count': 1, - 'success': 0, - 'failure': 1, - 'run_time': 0.10919, - }, - ] - 'test_runs': [ - { - 'test_id': 1, - 'run_id': 1, - 'status': 'success', - 'start_time': 2017-08-16 07:21:56, - 'stop_time': 2017-08-16 07:22:02, - 'start_time_microsecond': 929341, - 'stop_time_microsecond': 534721, - }, - { - 'test_id': 2, - 'run_id': 2, - 'status': 'fail', - 'start_time': 2017-08-16 07:13:34, - 'stop_time': 2017-08-16 07:13:35, - 'start_time_microsecond': 693353, - 'stop_time_microsecond': 726471, - }, - ] - 'attachments': [ - { - 'test_run_id': 1, - 'label': '' - 'attachment': '' - } - ] - } - - -**Delete Test Result** - -* Description - - This modification to the v1/results/ endpoint's delete function will - ensure that, when a test result is deleted, the corresponding subunit - data is too. This is neccessary largely because, in our data model, - subunit data should always be linked to an associated test result. 
- -* Method type: DELETE - -* URI: v1/result/{id} - -* Normal Response Codes: - - * No content (204) - -* Error Response Codes: - - * Bad Request (400) - * Unauthorized (401) - * Forbidden (403) - * Not found (404) - -* Request parameters: - - +---------------+-------+--------------+-----------------------------------+ - | Parameter | Style | Type | Description | - +===============+=======+==============+===================================+ - | id | URI | csapi:UUID | ID to be removed. | - +---------------+-------+--------------+-----------------------------------+ - -* JSON schema definition for the body data: N/A - -* JSON schema definition for the response data: N/A - -Security impact ---------------- - -There has been some concern over the sharing of subunit data via the RefStack -API, and though they are largely based on a misinformation, this is part of -why so few of the API additions are nonadmin. For more details about this -discussion, please refer to the generalized spec for the upload and usage of -subunit tests. - -Notifications impact --------------------- - -None. - -Other end user impact ---------------------- - -None. - -Performance impact ------------------- - -None. - -Other deployer impact ---------------------- - -None. - -Implementation -============== - -Assignee(s) ------------ - -Primary assignee: - Megan Guiney - -Other contributors: - TBD - -Work Items ----------- - -* Discuss, amend, and merge this spec -* Run subunit2sql performance tests -* add field to "test" table -* add subunit api functionity -* add subunit-adjacent test result api functionality - - -Dependencies -============ - -* subunit2sql and its dependencies will need to be installed - during refstack server setup. As a result, puppet-refstack may - need some adjustments. - - -Testing -======= - -* Add unit tests to verify the proper functionality of the new API - additions. - - -Documentation Impact -==================== - -* Add documentation to detail the usage and functionality of the - new API additions. - - -References -========== -[1] https://opendev.org/openinfra/refstack/src/branch/master/specs/pike/approved/upload-subunit-tests.rst diff --git a/specs/template.rst b/specs/template.rst deleted file mode 100644 index 341de1db..00000000 --- a/specs/template.rst +++ /dev/null @@ -1,306 +0,0 @@ -========================================== -Example Spec - The title of your blueprint -========================================== - -Include the URL of your launchpad blueprint: - -https://blueprints.launchpad.net/nova/+spec/example - -Introduction paragraph -- why are we doing anything? A single paragraph of -prose that operators can understand. - -Some notes about using this template: - -* Your spec should be in ReSTructured text, like this template. - -* Please wrap text at 80 columns. - -* The filename in the git repository should match the launchpad URL, for - example a URL of: https://blueprints.launchpad.net/nova/+spec/awesome-thing - should be named awesome-thing.rst - -* Please do not delete any of the sections in this template. If you have - nothing to say for a whole section, just write: None - -* For help with syntax, see http://sphinx-doc.org/rest.html - -* To test out your formatting, build the docs using tox, or see: - http://rst.ninjs.org - - -Problem description -=================== - -A detailed description of the problem: - -* For a new feature this might be use cases. 
Ensure you are clear about the - actors in each use case: End User vs Deployer - -* For a major reworking of something existing it would describe the - problems in that feature that are being addressed. - - -Proposed change -=============== - -Here is where you cover the change you propose to make in detail. How do you -propose to solve this problem? - -If this is one part of a larger effort make it clear where this piece ends. In -other words, what's the scope of this effort? - -Alternatives ------------- - -What other ways could we do this thing? Why aren't we using those? This doesn't -have to be a full literature review, but it should demonstrate that thought has -been put into why the proposed solution is an appropriate one. - -Data model impact ------------------ - -Changes which require modifications to the data model often have a wider impact -on the system. The community often has strong opinions on how the data model -should be evolved, from both a functional and performance perspective. It is -therefore important to capture and gain agreement as early as possible on any -proposed changes to the data model. - -Questions which need to be addressed by this section include: - -* What new data objects and/or database schema changes is this going to require? - -* What database migrations will accompany this change. - -* How will the initial set of new data objects be generated, for example if you - need to take into account existing instances, or modify other existing data - describe how that will work. - -REST API impact ---------------- - -Each API method which is either added or changed should have the following - -* Specification for the method - - * A description of what the method does suitable for use in - user documentation - - * Method type (POST/PUT/GET/DELETE) - - * Normal http response code(s) - - * Expected error http response code(s) - - * A description for each possible error code should be included - describing semantic errors which can cause it such as - inconsistent parameters supplied to the method, or when an - instance is not in an appropriate state for the request to - succeed. Errors caused by syntactic problems covered by the JSON - schema definition do not need to be included. - - * URL for the resource - - * Parameters which can be passed via the url - - * JSON schema definition for the body data if allowed - - * JSON schema definition for the response data if any - -* Example use case including typical API samples for both data supplied - by the caller and the response - -* Discuss any policy changes, and discuss what things a deployer needs to - think about when defining their policy. - -Example JSON schema definitions can be found in the Nova tree -https://opendev.org/openstack/nova/src/branch/master/nova/api/openstack/compute/schemas/ - -Note that the schema should be defined as restrictively as -possible. Parameters which are required should be marked as such and -only under exceptional circumstances should additional parameters -which are not defined in the schema be permitted (eg -additionaProperties should be False). - -Reuse of existing predefined parameter types such as regexps for -passwords and user defined names is highly encouraged. - -Security impact ---------------- - -Describe any potential security impact on the system. Some of the items to -consider include: - -* Does this change touch sensitive data such as tokens, keys, or user data? 
- -* Does this change alter the API in a way that may impact security, such as - a new way to access sensitive information or a new way to login? - -* Does this change involve cryptography or hashing? - -* Does this change require the use of sudo or any elevated privileges? - -* Does this change involve using or parsing user-provided data? This could - be directly at the API level or indirectly such as changes to a cache layer. - -* Can this change enable a resource exhaustion attack, such as allowing a - single API interaction to consume significant server resources? Some examples - of this include launching subprocesses for each connection, or entity - expansion attacks in XML. - -For more detailed guidance, please see the OpenStack Security Guidelines as -a reference (https://wiki.openstack.org/wiki/Security/Guidelines). These -guidelines are a work in progress and are designed to help you identify -security best practices. For further information, feel free to reach out -to the OpenStack Security Group at openstack-security@lists.openstack.org. - -Notifications impact --------------------- - -Please specify any changes to notifications. Be that an extra notification, -changes to an existing notification, or removing a notification. - -Other end user impact ---------------------- - -Aside from the API, are there other ways a user will interact with this feature? - -* Does this change have an impact on python-novaclient? What does the user - interface there look like? - -Performance Impact ------------------- - -Describe any potential performance impact on the system, for example -how often will new code be called, and is there a major change to the calling -pattern of existing code. - -Examples of things to consider here include: - -* A periodic task might look like a small addition but if it calls conductor or - another service the load is multiplied by the number of nodes in the system. - -* Scheduler filters get called once per host for every instance being created, so - any latency they introduce is linear with the size of the system. - -* A small change in a utility function or a commonly used decorator can have a - large impacts on performance. - -* Calls which result in a database queries (whether direct or via conductor) can - have a profound impact on performance when called in critical sections of the - code. - -* Will the change include any locking, and if so what considerations are there on - holding the lock? - -Other deployer impact ---------------------- - -Discuss things that will affect how you deploy and configure OpenStack -that have not already been mentioned, such as: - -* What config options are being added? Should they be more generic than - proposed (for example a flag that other hypervisor drivers might want to - implement as well)? Are the default values ones which will work well in - real deployments? - -* Is this a change that takes immediate effect after its merged, or is it - something that has to be explicitly enabled? - -* If this change is a new binary, how would it be deployed? - -* Please state anything that those doing continuous deployment, or those - upgrading from the previous release, need to be aware of. Also describe - any plans to deprecate configuration values or features. For example, if we - change the directory name that instances are stored in, how do we handle - instance directories created before the change landed? Do we move them? Do - we have a special case in the code? 
Do we assume that the operator will - recreate all the instances in their cloud? - -Developer impact ----------------- - -Discuss things that will affect other developers working on OpenStack, -such as: - -* If the blueprint proposes a change to the driver API, discussion of how - other hypervisors would implement the feature is required. - - -Implementation -============== - -Assignee(s) ------------ - -Who is leading the writing of the code? Or is this a blueprint where you're -throwing it out there to see who picks it up? - -If more than one person is working on the implementation, please designate the -primary author and contact. - -Primary assignee: - - -Other contributors: - - -Work Items ----------- - -Work items or tasks -- break the feature up into the things that need to be -done to implement it. Those parts might end up being done by different people, -but we're mostly trying to understand the timeline for implementation. - - -Dependencies -============ - -* Include specific references to specs and/or blueprints in nova, or in other - projects, that this one either depends on or is related to. - -* If this requires functionality of another project that is not currently used - by Nova (such as the glance v2 API when we previously only required v1), - document that fact. - -* Does this feature require any new library dependencies or code otherwise not - included in OpenStack? Or does it depend on a specific version of library? - - -Testing -======= - -Please discuss how the change will be tested. We especially want to know what -tempest tests will be added. It is assumed that unit test coverage will be -added so that doesn't need to be mentioned explicitly, but discussion of why -you think unit tests are sufficient and we don't need to add more tempest -tests would need to be included. - -Is this untestable in gate given current limitations (specific hardware / -software configurations available)? If so, are there mitigation plans (3rd -party testing, gate enhancements, etc). - - -Documentation Impact -==================== - -What is the impact on the docs team of this change? Some changes might require -donating resources to the docs team to have the documentation updated. Don't -repeat details discussed above, but please reference them here. - - -References -========== - -Please add any useful references here. You are not required to have any -reference. Moreover, this specification should still make sense when your -references are unavailable. Examples of what you could include are: - -* Links to mailing list or IRC discussions - -* Links to notes from a summit session - -* Links to relevant research, if appropriate - -* Related specifications as appropriate (e.g. 
if it's an EC2 thing, link the EC2 docs) - -* Anything else you feel it is worthwhile to refer to diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 7195029a..00000000 --- a/test-requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -coverage>=3.6 -hacking>=3.0.1,<3.1.0;python_version>='3.5' # Apache-2.0 -pycodestyle>=2.0.0,<2.6.0 # MIT -flake8-import-order==0.11 # LGPLv3 - -docutils>=0.11 # OSI-Approved Open Source, Public Domain -httmock>=1.2.4 -oslotest>=1.2.0 # Apache-2.0 -python-subunit>=0.0.18 -stestr>=1.1.0 # Apache-2.0 -testtools>=0.9.34 -PyMySQL>=0.6.2,!=0.6.4 -WebTest>=3.0.0 diff --git a/tools/convert-docs.py b/tools/convert-docs.py deleted file mode 100755 index 3678be66..00000000 --- a/tools/convert-docs.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2017 IBM, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Convert RST files to basic HTML. The primary use case is to provide a way -to display RefStack documentation on the RefStack website. -""" - -import argparse -import glob -import os - -from bs4 import BeautifulSoup -from docutils.core import publish_file - - -def extract_body(html): - """Extract the content of the body tags of an HTML string.""" - soup = BeautifulSoup(html, "html.parser") - return ''.join(['%s' % str(a) for a in soup.body.contents]) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description='Convert RST files to basic HTML template files.' - ) - parser.add_argument('files', - metavar='file', - nargs='+', - help='RST file(s) to be converted to HTML templates.') - parser.add_argument('-o', '--output_dir', - required=False, - help='The directory where template files should be ' - 'output to. Defaults to the current directory.') - args = parser.parse_args() - - if args.output_dir: - output_dir = args.output_dir - # If the output directory doesn't exist, create it. - if not os.path.exists(output_dir): - try: - os.makedirs(output_dir) - except OSError: - if not os.path.isdir(output_dir): - raise - else: - output_dir = os.getcwd() - - for path in args.files: - for file in glob.glob(path): - base_file = os.path.splitext(os.path.basename(file))[0] - - # Calling publish_file will also print to stdout. Destination path - # is set to /dev/null to suppress this. - html = publish_file(source_path=file, - destination_path='/dev/null', - writer_name='html',) - body = extract_body(html) - - output_file = os.path.join(output_dir, base_file + ".html") - with open(output_file, "w") as template_file: - template_file.write(body) diff --git a/tools/cover.sh b/tools/cover.sh deleted file mode 100755 index 6ef501a1..00000000 --- a/tools/cover.sh +++ /dev/null @@ -1,87 +0,0 @@ -#!/bin/bash -# -# Copyright 2015: Mirantis Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -ALLOWED_EXTRA_MISSING=30 - -show_diff () { - head -1 $1 - diff -U 0 $1 $2 | sed 1,2d -} - -# Stash uncommited changes, checkout master and save coverage report -uncommited=$(git status --porcelain | grep -v "^??") -[[ -n $uncommited ]] && git stash > /dev/null -git checkout HEAD^ - -baseline_report=$(mktemp -t refstack_coverageXXXXXXX) -coverage erase -find . -type f -name "*.pyc" -delete -stestr --test-path ./refstack/tests/unit run -coverage combine -coverage html -d cover -coverage xml -o cover/coverage.xml -coverage report > $baseline_report -baseline_missing=$(awk 'END { print $3 }' $baseline_report) - -# Checkout back and unstash uncommited changes (if any) -git checkout - -[[ -n $uncommited ]] && git stash pop > /dev/null - -# Generate and save coverage report -current_report=$(mktemp -t refstack_coverageXXXXXXX) -coverage erase -find . -type f -name "*.pyc" -delete -stestr --test-path ./refstack/tests/unit run -coverage combine -coverage html -d cover -coverage xml -o cover/coverage.xml -coverage report > $current_report -current_missing=$(awk 'END { print $3 }' $current_report) - -baseline_percentage=$(awk 'END { print $4 }' $baseline_report) -current_percentage=$(awk 'END { print $4 }' $current_report) -# Show coverage details -allowed_missing=$((baseline_missing+ALLOWED_EXTRA_MISSING)) - -echo "Baseline report: $(cat ${baseline_report})" -echo "Proposed change report: $(cat ${current_report})" -echo "" -echo "" -echo "Allowed to introduce missing lines : ${ALLOWED_EXTRA_MISSING}" -echo "Missing lines in master : ${baseline_missing}" -echo "Missing lines in proposed change : ${current_missing}" -echo "Current percentage : ${baseline_percentage}" -echo "Proposed change percentage : ${current_percentage}" - -if [[ $allowed_missing -gt $current_missing ]]; -then - if [[ $baseline_missing -lt $current_missing ]]; - then - show_diff $baseline_report $current_report - echo "I believe you can cover all your code with 100% coverage!" - else - echo "Thank you! You are awesome! Keep writing unit tests! :)" - fi - exit_code=0 -else - show_diff $baseline_report $current_report - echo "Please write more unit tests, we should keep our test coverage :( " - exit_code=1 -fi - -rm $baseline_report $current_report -exit $exit_code diff --git a/tools/install-js-tools.sh b/tools/install-js-tools.sh deleted file mode 100755 index cdfa1967..00000000 --- a/tools/install-js-tools.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash -# Copyright 2017 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-if [ $EUID -ne 0 ] ; then - SUDO='sudo -E' -fi - -if type apt-get; then - # Install https transport - otherwise apt-get HANGS on https urls - # Install curl so the curl commands work - # Install gnupg2 so that the apt-key add works - $SUDO apt-get update - $SUDO apt-get install -y apt-transport-https curl gnupg2 - # Install recent NodeJS repo - curl -sS https://deb.nodesource.com/gpgkey/nodesource.gpg.key | $SUDO apt-key add - - echo "deb https://deb.nodesource.com/node_10.x bionic main" | $SUDO tee /etc/apt/sources.list.d/nodesource.list - # Install yarn repo - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | $SUDO apt-key add - - echo "deb https://dl.yarnpkg.com/debian/ stable main" | $SUDO tee /etc/apt/sources.list.d/yarn.list - $SUDO apt-get update - DEBIAN_FRONTEND=noninteractive \ - $SUDO apt-get -q --option "Dpkg::Options::=--force-confold" --assume-yes \ - install nodejs yarn -elif type yum; then - $SUDO curl https://dl.yarnpkg.com/rpm/yarn.repo -o /etc/yum.repos.d/yarn.repo - $SUDO $(dirname $0)/install-js-repos-rpm.sh - $SUDO yum -y install nodejs yarn -elif type zypper; then - $SUDO zypper install -y nodejs10 npm10 - $SUDO npm install yarn -elif type brew; then - brew install nodejs yarn -else - echo "Unsupported platform" -fi diff --git a/tools/test-setup.sh b/tools/test-setup.sh deleted file mode 100755 index 6bc69d2e..00000000 --- a/tools/test-setup.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -xe - -# This script will be run by OpenStack CI before unit tests are run, -# it sets up the test system as needed. -# Developers should setup their test systems in a similar way. - -# This setup needs to be run as a user that can run sudo. -sudo apt-get install xdg-utils -sudo apt-get install dbus-user-session && systemctl --user start dbus.service -# The root password for the MySQL database; pass it in via -# MYSQL_ROOT_PW. -DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave} - -# This user and its password are used by the tests, if you change it, -# your tests might fail. -DB_USER=openstack_citest -DB_PW=openstack_citest - -sudo -H mysqladmin -u root password $DB_ROOT_PW - -# It's best practice to remove anonymous users from the database. If -# a anonymous user exists, then it matches first for connections and -# other connections from that host will not work. -sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e " - DELETE FROM mysql.user WHERE User=''; - FLUSH PRIVILEGES; - CREATE USER '$DB_USER'@'%' IDENTIFIED BY '$DB_PW'; - GRANT ALL PRIVILEGES ON *.* TO '$DB_USER'@'%' WITH GRANT OPTION;" - -# Now create our database. -mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e " - SET default_storage_engine=MYISAM; - DROP DATABASE IF EXISTS openstack_citest; - CREATE DATABASE openstack_citest CHARACTER SET utf8;" - -# Same for PostgreSQL -# The root password for the PostgreSQL database; pass it in via -# POSTGRES_ROOT_PW. 
-DB_ROOT_PW=${POSTGRES_ROOT_PW:-insecure_slave} - -# Setup user -root_roles=$(sudo -H -u postgres psql -t -c " - SELECT 'HERE' from pg_roles where rolname='$DB_USER'") -if [[ ${root_roles} == *HERE ]];then - sudo -H -u postgres psql -c "ALTER ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'" -else - sudo -H -u postgres psql -c "CREATE ROLE $DB_USER WITH SUPERUSER LOGIN PASSWORD '$DB_PW'" -fi - -# Store password for tests -cat << EOF > $HOME/.pgpass -*:*:*:$DB_USER:$DB_PW -EOF -chmod 0600 $HOME/.pgpass - -# Now create our database -psql -h 127.0.0.1 -U $DB_USER -d template1 -c "DROP DATABASE IF EXISTS openstack_citest" -createdb -h 127.0.0.1 -U $DB_USER -l C -T template0 -E utf8 openstack_citest diff --git a/tools/update-rs-db.py b/tools/update-rs-db.py deleted file mode 100755 index b7a10625..00000000 --- a/tools/update-rs-db.py +++ /dev/null @@ -1,287 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2017 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Test result update & verify script for the local refstack database. -""" - -import argparse -import datetime -import json -import os -import sys - -from collections import namedtuple -import jwt -import requests - - -def generate_token(keyfile, _id): - # get private key in string format - with open(keyfile) as pemfile: - secret = pemfile.read().rstrip() - exp = datetime.timedelta(seconds=100500) - payload =\ - { - "user_openid": _id, - "exp": datetime.datetime.now() + exp - } - token = jwt.encode(payload, secret, algorithm="RS256") - with open("./token", "w+") as tokenfile: - tokenfile.write(str(token)) - print("token stored in the current working directory.") - - -def testAuth(link, tokenfile): - auth_test_url = link.split("/v1")[0] + "/#/profile" - with open(tokenfile) as tokenfile: - token = tokenfile.read().strip() - headers = {"Authorization": "Bearer " + token} - response = requests.get(auth_test_url, headers) - if response.status_code == 200: - return True, token - else: - print("token auth failed. 
status code = %d" % (response.status_code)) - print("auth failure detail: %s" % (response.text)) - return False, None - - -def getData(entry): - guidelines = ["2015.03", "2015.04", "2015.05", "2015.07", "2016.01", - "2016.08", "2017.01", "2017.09"] - # NOTE: Storage is an alias of Object - components = ["platform", "compute", "storage", "object"] - if len(entry) < 10: - return None, None, None - refstackLink = entry[9].strip() - guideline = entry[4].strip() - target = entry[5].lower().strip() - if refstackLink: - testId = refstackLink.split("/")[-1] - else: - refstackLink = None - testId = None - if guideline not in guidelines: - guideline = None - if target not in components: - target = None - elif target == "storage": - target = "object" - return testId, guideline, target - - -def linkChk(link, token): - """Check existence of and access to api result link""" - print("checking result with a test ID of: %s" % (link.split("/")[-1])) - if not link: - return False - try: - if " " in link: - return False - headers = {"Authorization": "Bearer " + token} - response = requests.get(link, headers) - if response.status_code == 200: - return json.loads(response.text) - elif response.status_code == 401 or response.status_code == 403: - print("Authentication Failed. link check response code: %d" % - (response.status_code)) - return False - elif response.status_code == 400: - print("Malformed Request. link response code: %d" % - (response.status_code)) - return False - else: - print("Link check response_status_code = %d" % - (response.status_code)) - print("Link check response detail: %s" % (response.text)) - return False - except requests.exceptions as err: - print(err) - return False - - -def updateField(header, apiLink, raw_data): - """Update a given metadata field""" - valid_keytype = ["shared", "guideline", "target"] - keytype = raw_data.type - keyval = raw_data.value - if keytype not in valid_keytype or not keyval: - updresult = "%s keypair does not exist" % (keytype) - return updresult, False - link = apiLink.strip() + "/meta/" + keytype - response = requests.post(link, data=keyval, headers=header) - if response.status_code != 201: - print("update response status code=%d" % - (response.status_code)) - print("update response text=%s" % (response.text)) - updresult = ("%s field update failed. 
reason: %s" % - (keytype, response.text.replace(",", " "))) - return updresult, False - else: - updresult = ("%s field update successful," % (keytype)) - return updresult, True - - -def updateResult(apiLink, target, guideline, token, record): - """Update metadata for result and verify if all updates are a success""" - MetadataField = namedtuple("MetadataField", ["type", "value"]) - success = [] - header = {"Authorization": "Bearer " + token} - with open(record, "a") as r: - r.write(str(datetime.datetime.now()) + "," + apiLink + ",") - # update the shared field - data = MetadataField("shared", "true") - shared_result, shared_status = updateField(header, apiLink, data) - r.write(shared_result) - success.append(shared_status) - # update the target field - data = MetadataField("target", target) - target_result, target_status = updateField(header, apiLink, data) - r.write(target_result) - success.append(target_status) - # update the guideline field - data = MetadataField("guideline", guideline + ".json") - gl_result, gl_status = updateField(header, apiLink, data) - r.write(gl_result) - success.append(gl_status) - if not all(success): - r.write("unable to verify.\n") - return False - # if there were no update failures, we can verify the result - # this is the operation most likely to fail, so extra checks are - # in order - print("Test Result updated successfully. Attempting verification.") - try: - response = requests.put(apiLink, - json={"verification_status": 1}, - headers=header) - except Exception as ex: - print("Exception raised while verifying test result: %s" % - (str(ex))) - r.write("verification failed: %s\n" % (str(ex))) - return False - updated = verification_chk(apiLink, header) - if response.status_code not in (200, 201): - print("verification failure status code=%d" % - (response.status_code)) - print("verification failure detail=%s" % - (response.text)) - r.write("verification unsuccessful: detail: %s\n" % - (response.text)) - return False - elif not updated: - print("verification_status field failed to update") - r.write("verification status update failed. detail: %s\n" % - (response.text)) - return False - else: - print("Test result verified!\n") - r.write("Test result successfully verified\n") - return True - - -def verification_chk(link, header): - try: - response = requests.get(link, header) - status = int(response.json()["verification_status"]) - if status == 1: - return True - else: - return False - except Exception as ex: - print( - "Exception raised while ensuring verification status update: %s" % - str(ex)) - return False - - -def main(): - linect = 0 - parser = argparse.ArgumentParser( - "Update the internal RefStack db using a csv file") - # token handling options- we either need the path of a working token, - # or the data to generate a new token - token_flags = parser.add_mutually_exclusive_group(required=True) - token_flags.add_argument("--tokenfile", type=str, action="store", - help=("Absolute path to a json web token to " - "use to auth to the RefStack API")) - token_flags.add_argument("--generate", nargs=2, - metavar=("ssh-key", "openstack-id"), - help=("data needed to create a new auth token " - "ssh - key should be an absolute path to " - "a rsa ssh key. openstack - id indicates " - "an openstackid url to use for auth. 
" - "example: " - "https://openstackid.org/")) - # non token-related flags - parser.add_argument("--file", "-f", metavar="f", type=str, action="store", - required=True, - help="csv source for the data to use in updates") - parser.add_argument( - "--endpoint", "-e", metavar="e", - type=str, action="store", required=True, - help="the base URL of the endpoint. ex: http://examplerefstack.com/v1") - parser.add_argument("--record", "-r", metavar="r", type=str, - action="store", default="verification_results.csv", - help=("name of file to output update & verification " - "run record data into")) - args = parser.parse_args() - infile = args.file - record = args.record - endpoint = args.endpoint - if args.generate: - keypath = args.generate[0] - _id = args.generate[1] - generate_token(keypath, _id) - tokenfile = "./token" - else: - tokenfile = args.tokenfile - auth_success, token = testAuth(endpoint, tokenfile) - if not auth_success: - print(("Please enter either a valid token or an openstackid and the " - "absolute path to an rsa ssh key.")) - sys.exit(1) - with open(infile) as f: - for line in f: - linect = linect + 1 - entry = line.split(",") - testId, guideline, target = getData(entry) - if None in (testId, guideline, target): - print(("entry found at line %d cannot be updated and " - "verified: entry incomplete.\n") % (linect)) - else: - apiLink = os.path.join(endpoint, "results", testId) - testResult = linkChk(apiLink, token) - if testResult: - if testResult.get("verification_status"): - print("Result has been verified.\n") - else: - print( - "Result link is valid. Updating result with ID %s" - % (testId)) - success = updateResult(apiLink, target, guideline, - token, record) - if not success: - print(("update of the results with the ID %s " - "failed. please recheck your spreadsheet " - "and try again" % (testId))) - else: - print(("the test result: % s cannot be updated or " - "verified due to a broken result link." % (testId))) - - -main() diff --git a/tools/update-rs-db.rst b/tools/update-rs-db.rst deleted file mode 100644 index 1eae3148..00000000 --- a/tools/update-rs-db.rst +++ /dev/null @@ -1,101 +0,0 @@ -####################################################################### -# update-rs-db.py # -####################################################################### - -This document contains some details that are necessary to know to be -successful in the usage of the script update-rs-db.py. - -The script can be run using the following formatting: -"./update-rs-db.py --file /tmp/datasource.csv --endpoint -http://example.com:8000/v1 --tokenfile ". In order to -successfully update and verify results, you will need admin rights -for the refstack server in question. Instructions on how to get -these for your local install can be found at -https://opendev.org/openinfra/refstack/src/master/doc/source/refstack.rst#optional-configure-foundation-organization-and-group - -This script updates RefStack tests as verified given a specific -spreadsheet. The columns in this spreadsheet are, in this order: - - * Company Name - - * Product Name - - * Type (Distribution, Public, or Private) - - * Region - - * Guideline - - * Component (Compute, Platform, or Object) - - * Reported Release - - * Passed Release - - * Federated identity (yes/no) - - * Refstack Link - - * Zendesk Link - - * Marketplace Link - - * License Date - - * Update Product (yes/no) - - * Contacts - - * Notes - - * License Link - - * Active (1 or 0) - - * Public (1 or 0) - -The data is pulled from a csv file. 
The default csv name is toadd.csv, -but using the -f flag, we can use csv of a different filename. - -The refstack database that we are pushing updates to is set via the "-e", -or "--endpoint flag. This flag specifies the refstack api endpoint to be -used to update the database. This is a required flag. - -Because editing arbitrary test results requires administrative privileges, -an auth token must be used with the RefStack API. This token can be -generated by entering the command "jwt --key="$( cat -)" --alg=RS256 user_openid= exp=+100500". This generates a -json web token, which we must link using the "--tokenfile" flag. Because -we cannot auth without this token, the token is a required flag. - -The script will go through each line of the CSV, grabbing the refstack link, -the guideline, and the component. It also uses the refstack result to get a -test result Id. - -It then uses that test ID to update the internal db using refstack's built -in RESTful api. - -Lastly, if at least one of the links has proven to be valid, we will -then use the same RESTful api and test ID to update the verification_status -field associated with that test result. - -The status of each of these steps will be output to "verification_status.csv" -by default. A '1' will denote that the resource was successfully updated while -a '0' will denote that the resource was not successfully updated. The order of -fields of this file are as follows: - - * Date modified - - * API link - - * Shared update status - - * Guideline - - * Guideline update success status - - * Target - - * Target update success status - - * Verification update success status diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 01070c7c..00000000 --- a/tox.ini +++ /dev/null @@ -1,96 +0,0 @@ -[tox] -envlist = py3,pep8,pip-check-reqs -minversion = 3.18 -skipsdist = True - -[testenv] -usedevelop = True -install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt} -U {opts} {packages} -setenv = - VIRTUAL_ENV={envdir} - LANG=en_US.UTF-8 - LANGUAGE=en_US:en - LC_ALL=C -allowlist_externals = - find - {toxinidir}/setup-mysql-tests.sh - {toxinidir}/tools/cover.sh -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt -commands = - find . -type f -name "*.pyc" -delete - stestr run {posargs} -distribute = false - -[testenv:functional] -basepython = python3 -setenv = SUBUNIT_TEST_PATH=./refstack/tests/api -# Integration/functional tests -# must not be run in parallel (--concurrency=1), -# because each of these tests -# require cleanup of database -commands = {toxinidir}/setup-mysql-tests.sh stestr run --test-path ./refstack/tests/api {posargs:--concurrency=1} - -[testenv:pep8] -commands = - flake8 {posargs} - flake8 --filename=refstack* bin -distribute = false - -[testenv:genconfig] -commands = - oslo-config-generator --output-file etc/refstack.conf.sample \ - --namespace refstack \ - --namespace oslo.db \ - --namespace oslo.log - -[testenv:venv] -commands = {posargs} - -[testenv:cover] -commands = {toxinidir}/tools/cover.sh {posargs} - -[testenv:docs] -deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} - -r{toxinidir}/doc/requirements.txt -commands = sphinx-build -b html doc/source doc/build/html - -[flake8] -# E125 is a won't fix until https://github.com/jcrocholl/pep8/issues/126 is resolved. 
For further detail see https://review.opendev.org/#/c/36788/ -# E123 skipped because it is ignored by default in the default pep8 -# E129 skipped because it is too limiting when combined with other rules -# W504 skipped because it is overeager and unnecessary -# H404 multi line docstring should start with a summary -# H405 skipped because it arbitrarily forces doctring "title" lines -ignore = E125,E123,E129,W504,H404,H405 -show-source = True -exclude = .git,.venv,.tox,dist,doc,*egg,build,*lib/python*,*alembic/versions* -enable-extensions = H106,H203,H904 -import-order-style = pep8 -builtins = _ - -[testenv:pip-check-reqs] -# Do not install test-requirements as that will pollute the virtualenv for -# determining missing packages. -# This also means that pip-check-reqs must be installed separately, outside -# of the requirements.txt files -deps = pip_check_reqs - -r{toxinidir}/requirements.txt -commands= - # PyMySQL is a python only runtime dep if using MySQL - pip-extra-reqs -d --ignore-file=refstack/tests/* --ignore-requirement=PyMySQL refstack tools - pip-missing-reqs -d --ignore-file=refstack/tests/* refstack tools - -[testenv:debug] -commands = oslo_debug_helper -t refstack/tests/unit {posargs} - -[testenv:bindep] -# Do not install any requirements. We want this to be fast and work even if -# system dependencies are missing, since it's used to tell you what system -# dependencies are missing! This also means that bindep must be installed -# separately, outside of the requirements files, and develop mode disabled -# explicitly to avoid unnecessarily installing the checked-out repo too (this -# further relies on "tox.skipsdist = True" above). -deps = bindep -commands = bindep test -usedevelop = False diff --git a/yarn.lock b/yarn.lock deleted file mode 100644 index e583d5a0..00000000 --- a/yarn.lock +++ /dev/null @@ -1,2743 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@bower_components/angular-animate@angular/bower-angular-animate#~1.3": - version "1.3.20" - resolved "https://codeload.github.com/angular/bower-angular-animate/tar.gz/a0d4208f770315df80866fcb449eff913efbbbdc" - -"@bower_components/angular-bootstrap@angular-ui/bootstrap-bower#0.14.3": - version "0.14.3" - resolved "https://codeload.github.com/angular-ui/bootstrap-bower/tar.gz/b5e11f5861a1591a300e78a9902c1a7e7918d75b" - -"@bower_components/angular-busy@cgross/angular-busy#4.1.3": - version "4.1.3" - resolved "https://codeload.github.com/cgross/angular-busy/tar.gz/f2e3261a0982da1f023881a6e7e3239cb626c9a4" - -"@bower_components/angular-confirm-modal@Schlogen/angular-confirm#1.2.3": - version "1.2.3" - resolved "https://codeload.github.com/Schlogen/angular-confirm/tar.gz/d458be429fb7695059057f29745701cc86bc1cee" - -"@bower_components/angular-mocks@angular/bower-angular-mocks#1.3.15": - version "1.3.15" - resolved "https://codeload.github.com/angular/bower-angular-mocks/tar.gz/1ffdfef850b10d40f2838c1bba41a95755c2d8da" - -"@bower_components/angular-resource@angular/bower-angular-resource#1.3.15": - version "1.3.15" - resolved "https://codeload.github.com/angular/bower-angular-resource/tar.gz/4cc3a759e2077f27aafb273a4a4929241aae1fd6" - -"@bower_components/angular-ui-router@angular-ui/angular-ui-router-bower#0.2.13": - version "0.0.0" - resolved "https://codeload.github.com/angular-ui/angular-ui-router-bower/tar.gz/2e580f271defdec34f464aab0cca519e41d1ee33" - -"@bower_components/angular@angular/bower-angular#1.3.15": - version "1.3.15" - resolved "https://codeload.github.com/angular/bower-angular/tar.gz/ba7abcfa409ba852146e6ba206693cf7bac3e359" - -"@bower_components/bootstrap@twbs/bootstrap#3.3.2": - version "3.3.2" - resolved "https://codeload.github.com/twbs/bootstrap/tar.gz/bcf7dd38b5ab180256e2e4fb5da0369551b3f082" - -"@bower_components/jquery@jquery/jquery-dist#>= 1.9.1": - version "3.3.1" - resolved "https://codeload.github.com/jquery/jquery-dist/tar.gz/9e8ec3d10fad04748176144f108d7355662ae75e" - -abbrev@1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" - -accepts@1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca" - dependencies: - mime-types "~2.1.11" - negotiator "0.6.1" - -acorn-jsx@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-3.0.1.tgz#afdf9488fb1ecefc8348f6fb22f464e32a58b36b" - dependencies: - acorn "^3.0.4" - -acorn@^3.0.4: - version "3.3.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a" - -acorn@^5.5.0: - version "5.6.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.6.2.tgz#b1da1d7be2ac1b4a327fb9eab851702c5045b4e7" - -after@0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f" - -ajv-keywords@^1.0.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c" - -ajv@^4.7.0: - version "4.11.8" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.11.8.tgz#82ffb02b29e662ae53bdc20af15947706739c536" - dependencies: - co "^4.6.0" - json-stable-stringify "^1.0.1" - -angular-mocks@^1.3.15: - version "1.7.0" - resolved "https://registry.yarnpkg.com/angular-mocks/-/angular-mocks-1.7.0.tgz#4957aaf966b6fc36edd6ac381a60814f7c05de3f" - 
-ansi-align@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f" - dependencies: - string-width "^2.0.0" - -ansi-escapes@^1.1.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-1.4.0.tgz#d3a8a83b319aa67793662b13e761c7911422306e" - -ansi-regex@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" - -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - -ansi-styles@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - dependencies: - color-convert "^1.9.0" - -anymatch@^1.3.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-1.3.2.tgz#553dcb8f91e3c889845dfdba34c77721b90b9d7a" - dependencies: - micromatch "^2.1.5" - normalize-path "^2.0.0" - -aproba@^1.0.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - -are-we-there-yet@~1.1.2: - version "1.1.5" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21" - dependencies: - delegates "^1.0.0" - readable-stream "^2.0.6" - -argparse@^1.0.7, argparse@~1.0.2: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - dependencies: - sprintf-js "~1.0.2" - -arr-diff@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-2.0.0.tgz#8f3b827f955a8bd669697e4a4256ac3ceae356cf" - dependencies: - arr-flatten "^1.0.1" - -arr-flatten@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - -array-find-index@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1" - -array-slice@^0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5" - -array-union@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - dependencies: - array-uniq "^1.0.1" - -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - -array-unique@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" - -arraybuffer.slice@0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz#f33b2159f0532a3f3107a272c0ccfbd1ad2979ca" - -arrify@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" - -async-each@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d" - -babel-code-frame@^6.16.0: - 
version "6.26.0" - resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" - dependencies: - chalk "^1.1.3" - esutils "^2.0.2" - js-tokens "^3.0.2" - -babel-polyfill@^7.0.0-beta.2: - version "7.0.0-beta.3" - resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-7.0.0-beta.3.tgz#053f0943cdf498910cc037de2ffc7b1f2e36223a" - dependencies: - core-js "^2.4.0" - regenerator-runtime "^0.11.0" - -backo2@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/backo2/-/backo2-1.0.2.tgz#31ab1ac8b129363463e35b3ebb69f4dfcfba7947" - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - -base64-arraybuffer@0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8" - -base64id@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6" - -better-assert@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/better-assert/-/better-assert-1.0.2.tgz#40866b9e1b9e0b55b481894311e68faffaebc522" - dependencies: - callsite "1.0.0" - -binary-extensions@^1.0.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.11.0.tgz#46aa1751fb6a2f93ee5e689bb1087d4b14c6c205" - -blob@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.4.tgz#bcf13052ca54463f30f9fc7e95b9a47630a94921" - -bluebird@^3.3.0: - version "3.5.1" - resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9" - -body-parser@^1.16.1: - version "1.18.3" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.18.3.tgz#5b292198ffdd553b3a0f20ded0592b956955c8b4" - dependencies: - bytes "3.0.0" - content-type "~1.0.4" - debug "2.6.9" - depd "~1.1.2" - http-errors "~1.6.3" - iconv-lite "0.4.23" - on-finished "~2.3.0" - qs "6.5.2" - raw-body "2.3.3" - type-is "~1.6.16" - -bower-away@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/bower-away/-/bower-away-1.1.2.tgz#c055445aa85f5e830f6b2484d9fcb90d2888b301" - dependencies: - babel-polyfill "^7.0.0-beta.2" - bower-config "^1.4.0" - bower-json "^0.8.1" - chalk "^2.0.1" - clone-deep "^1.0.0" - deep-is "^0.1.3" - difflet "^1.0.1" - execa "^0.8.0" - lodash "^4.17.4" - meow "^3.7.0" - update-notifier "^2.2.0" - which "^1.2.14" - -bower-config@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/bower-config/-/bower-config-1.4.1.tgz#85fd9df367c2b8dbbd0caa4c5f2bad40cd84c2cc" - dependencies: - graceful-fs "^4.1.3" - mout "^1.0.0" - optimist "^0.6.1" - osenv "^0.1.3" - untildify "^2.1.0" - -bower-json@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/bower-json/-/bower-json-0.8.1.tgz#96c14723241ae6466a9c52e16caa32623a883843" - dependencies: - deep-extend "^0.4.0" - ext-name "^3.0.0" - graceful-fs "^4.1.3" - intersect "^1.0.1" - -boxen@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/boxen/-/boxen-1.3.0.tgz#55c6c39a8ba58d9c61ad22cd877532deb665a20b" - dependencies: - ansi-align "^2.0.0" - camelcase "^4.0.0" - chalk "^2.0.1" - cli-boxes "^1.0.0" - string-width "^2.0.0" - term-size "^1.2.0" - widest-line "^2.0.0" - -brace-expansion@^1.1.7: - version "1.1.11" - resolved 
"https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^0.1.2: - version "0.1.5" - resolved "https://registry.yarnpkg.com/braces/-/braces-0.1.5.tgz#c085711085291d8b75fdd74eab0f8597280711e6" - dependencies: - expand-range "^0.1.0" - -braces@^1.8.2: - version "1.8.5" - resolved "https://registry.yarnpkg.com/braces/-/braces-1.8.5.tgz#ba77962e12dff969d6b76711e914b737857bf6a7" - dependencies: - expand-range "^1.8.1" - preserve "^0.2.0" - repeat-element "^1.1.2" - -buffer-from@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.0.tgz#87fcaa3a298358e0ade6e442cfce840740d1ad04" - -builtin-modules@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" - -bytes@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" - -caller-path@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f" - dependencies: - callsites "^0.2.0" - -callsite@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/callsite/-/callsite-1.0.0.tgz#280398e5d664bd74038b6f0905153e6e8af1bc20" - -callsites@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca" - -camelcase-keys@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7" - dependencies: - camelcase "^2.0.0" - map-obj "^1.0.0" - -camelcase@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f" - -camelcase@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" - -capture-stack-trace@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/capture-stack-trace/-/capture-stack-trace-1.0.0.tgz#4a6fa07399c26bba47f0b2496b4d0fb408c5550d" - -chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" - dependencies: - ansi-styles "^2.2.1" - escape-string-regexp "^1.0.2" - has-ansi "^2.0.0" - strip-ansi "^3.0.0" - supports-color "^2.0.0" - -chalk@^2.0.1: - version "2.4.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -charm@0.1.x: - version "0.1.2" - resolved "https://registry.yarnpkg.com/charm/-/charm-0.1.2.tgz#06c21eed1a1b06aeb67553cdc53e23274bac2296" - -chokidar@^1.4.1: - version "1.7.0" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-1.7.0.tgz#798e689778151c8076b4b360e5edd28cda2bb468" - dependencies: - anymatch "^1.3.0" - async-each "^1.0.0" - glob-parent "^2.0.0" - inherits "^2.0.1" - is-binary-path "^1.0.0" - is-glob "^2.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.0.0" - optionalDependencies: - fsevents "^1.0.0" - -chownr@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.0.1.tgz#e2a75042a9551908bebd25b8523d5f9769d79181" - -ci-info@^1.0.0: - version "1.1.3" - 
resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-1.1.3.tgz#710193264bb05c77b8c90d02f5aaf22216a667b2" - -circular-json@^0.3.1: - version "0.3.3" - resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.3.tgz#815c99ea84f6809529d2f45791bdf82711352d66" - -cli-boxes@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-1.0.0.tgz#4fa917c3e59c94a004cd61f8ee509da651687143" - -cli-cursor@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" - dependencies: - restore-cursor "^1.0.1" - -cli-width@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639" - -clone-deep@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-1.0.0.tgz#b2f354444b5d4a0ce58faca337ef34da2b14a6c7" - dependencies: - for-own "^1.0.0" - is-plain-object "^2.0.4" - kind-of "^5.0.0" - shallow-clone "^1.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - -color-convert@^1.9.0: - version "1.9.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed" - dependencies: - color-name "^1.1.1" - -color-name@^1.1.1: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - -colors@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.3.0.tgz#5f20c9fef6945cb1134260aab33bfbdc8295e04e" - -combine-lists@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/combine-lists/-/combine-lists-1.0.1.tgz#458c07e09e0d900fc28b70a3fec2dacd1d2cb7f6" - dependencies: - lodash "^4.5.0" - -component-bind@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1" - -component-emitter@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3" - -component-emitter@1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" - -component-inherit@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143" - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - -concat-stream@^1.5.2: - version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" - dependencies: - buffer-from "^1.0.0" - inherits "^2.0.3" - readable-stream "^2.2.2" - typedarray "^0.0.6" - -configstore@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/configstore/-/configstore-3.1.2.tgz#c6f25defaeef26df12dd33414b001fe81a543f8f" - dependencies: - dot-prop "^4.1.0" - graceful-fs "^4.1.2" - make-dir "^1.0.0" - unique-string "^1.0.0" - write-file-atomic "^2.0.0" - xdg-basedir "^3.0.0" - -connect@^3.6.0: - version "3.6.6" - resolved 
"https://registry.yarnpkg.com/connect/-/connect-3.6.6.tgz#09eff6c55af7236e137135a72574858b6786f524" - dependencies: - debug "2.6.9" - finalhandler "1.1.0" - parseurl "~1.3.2" - utils-merge "1.0.1" - -console-control-strings@^1.0.0, console-control-strings@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - -cookie@0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" - -core-js@^2.2.0, core-js@^2.4.0: - version "2.5.7" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.7.tgz#f972608ff0cead68b841a16a932d0b183791814e" - -core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - -create-error-class@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/create-error-class/-/create-error-class-3.0.2.tgz#06be7abef947a3f14a30fd610671d401bca8b7b6" - dependencies: - capture-stack-trace "^1.0.0" - -cross-spawn@^5.0.1: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - -crypto-random-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" - -currently-unhandled@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea" - dependencies: - array-find-index "^1.0.1" - -custom-event@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/custom-event/-/custom-event-1.0.1.tgz#5d02a46850adf1b4a317946a3928fccb5bfd0425" - -d@1: - version "1.0.0" - resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f" - dependencies: - es5-ext "^0.10.9" - -debug@2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da" - dependencies: - ms "0.7.1" - -debug@2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c" - dependencies: - ms "0.7.2" - -debug@2.6.9, debug@^2.1.1, debug@^2.1.2: - version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - dependencies: - ms "2.0.0" - -debug@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" - dependencies: - ms "2.0.0" - -decamelize@^1.1.2: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - -deep-extend@^0.4.0: - version "0.4.2" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" - -deep-extend@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" - -deep-is@0.1.x, deep-is@^0.1.3, deep-is@~0.1.3: - version "0.1.3" - resolved 
"https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - -del@^2.0.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/del/-/del-2.2.2.tgz#c12c981d067846c84bcaf862cff930d907ffd1a8" - dependencies: - globby "^5.0.0" - is-path-cwd "^1.0.0" - is-path-in-cwd "^1.0.0" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - rimraf "^2.2.8" - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - -detect-libc@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" - -di@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/di/-/di-0.0.1.tgz#806649326ceaa7caa3306d75d985ea2748ba913c" - -difflet@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/difflet/-/difflet-1.0.1.tgz#189f8f9039e4ee4ac3ea943d4de66d259965b13c" - dependencies: - charm "0.1.x" - deep-is "0.1.x" - traverse "0.6.x" - -doctrine@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - dependencies: - esutils "^2.0.2" - -dom-serialize@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/dom-serialize/-/dom-serialize-2.2.1.tgz#562ae8999f44be5ea3076f5419dcd59eb43ac95b" - dependencies: - custom-event "~1.0.0" - ent "~2.2.0" - extend "^3.0.0" - void-elements "^2.0.0" - -dot-prop@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.0.tgz#1f19e0c2e1aa0e32797c49799f2837ac6af69c57" - dependencies: - is-obj "^1.0.0" - -duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - -encodeurl@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - -ends-with@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/ends-with/-/ends-with-0.2.0.tgz#2f9da98d57a50cfda4571ce4339000500f4e6b8a" - -engine.io-client@1.8.3: - version "1.8.3" - resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-1.8.3.tgz#1798ed93451246453d4c6f635d7a201fe940d5ab" - dependencies: - component-emitter "1.2.1" - component-inherit "0.0.3" - debug "2.3.3" - engine.io-parser "1.3.2" - has-cors "1.1.0" - indexof "0.0.1" - parsejson "0.0.3" - parseqs "0.0.5" - parseuri "0.0.5" - ws "1.1.2" - xmlhttprequest-ssl "1.5.3" - yeast "0.1.2" - -engine.io-parser@1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-1.3.2.tgz#937b079f0007d0893ec56d46cb220b8cb435220a" - dependencies: - after "0.8.2" - arraybuffer.slice "0.0.6" - base64-arraybuffer "0.1.5" - blob "0.0.4" - has-binary "0.1.7" - wtf-8 "1.0.0" - -engine.io@1.8.3: - version "1.8.3" - resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-1.8.3.tgz#8de7f97895d20d39b85f88eeee777b2bd42b13d4" - dependencies: - accepts "1.3.3" - base64id "1.0.0" - cookie "0.3.1" - debug "2.3.3" - engine.io-parser "1.3.2" - ws "1.1.2" - -ent@~2.2.0: - version "2.2.0" - resolved 
"https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - -error-ex@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.1.tgz#f855a86ce61adc4e8621c3cda21e7a7612c3a8dc" - dependencies: - is-arrayish "^0.2.1" - -es5-ext@^0.10.14, es5-ext@^0.10.35, es5-ext@^0.10.9, es5-ext@~0.10.14: - version "0.10.45" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.45.tgz#0bfdf7b473da5919d5adf3bd25ceb754fccc3653" - dependencies: - es6-iterator "~2.0.3" - es6-symbol "~3.1.1" - next-tick "1" - -es6-iterator@^2.0.1, es6-iterator@~2.0.1, es6-iterator@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" - dependencies: - d "1" - es5-ext "^0.10.35" - es6-symbol "^3.1.1" - -es6-map@^0.1.3: - version "0.1.5" - resolved "https://registry.yarnpkg.com/es6-map/-/es6-map-0.1.5.tgz#9136e0503dcc06a301690f0bb14ff4e364e949f0" - dependencies: - d "1" - es5-ext "~0.10.14" - es6-iterator "~2.0.1" - es6-set "~0.1.5" - es6-symbol "~3.1.1" - event-emitter "~0.3.5" - -es6-set@~0.1.5: - version "0.1.5" - resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1" - dependencies: - d "1" - es5-ext "~0.10.14" - es6-iterator "~2.0.1" - es6-symbol "3.1.1" - event-emitter "~0.3.5" - -es6-symbol@3.1.1, es6-symbol@^3.1.1, es6-symbol@~3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77" - dependencies: - d "1" - es5-ext "~0.10.14" - -es6-weak-map@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.2.tgz#5e3ab32251ffd1538a1f8e5ffa1357772f92d96f" - dependencies: - d "1" - es5-ext "^0.10.14" - es6-iterator "^2.0.1" - es6-symbol "^3.1.1" - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - -escope@^3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/escope/-/escope-3.6.0.tgz#e01975e812781a163a6dadfdd80398dc64c889c3" - dependencies: - es6-map "^0.1.3" - es6-weak-map "^2.0.1" - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-config-openstack@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/eslint-config-openstack/-/eslint-config-openstack-4.0.1.tgz#0774075eccbd1689e8532f4bfc45015821f37b7e" - dependencies: - js-yaml "3.3.1" - -eslint-plugin-angular@1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-angular/-/eslint-plugin-angular-1.4.0.tgz#53917b7d979dbaa620140f9711e179d4916b5747" - -eslint@^3.0.0: - version "3.19.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-3.19.0.tgz#c8fc6201c7f40dd08941b87c085767386a679acc" - dependencies: - babel-code-frame "^6.16.0" - chalk "^1.1.3" - concat-stream "^1.5.2" - debug "^2.1.1" - doctrine "^2.0.0" - escope "^3.6.0" - espree "^3.4.0" - esquery "^1.0.0" - estraverse "^4.2.0" - esutils "^2.0.2" - file-entry-cache "^2.0.0" - glob "^7.0.3" - globals "^9.14.0" - ignore "^3.2.0" - imurmurhash "^0.1.4" - inquirer "^0.12.0" - is-my-json-valid "^2.10.0" - is-resolvable "^1.0.0" - js-yaml "^3.5.1" - json-stable-stringify "^1.0.0" - levn "^0.3.0" - lodash "^4.0.0" - 
mkdirp "^0.5.0" - natural-compare "^1.4.0" - optionator "^0.8.2" - path-is-inside "^1.0.1" - pluralize "^1.2.1" - progress "^1.1.8" - require-uncached "^1.0.2" - shelljs "^0.7.5" - strip-bom "^3.0.0" - strip-json-comments "~2.0.1" - table "^3.7.8" - text-table "~0.2.0" - user-home "^2.0.0" - -espree@^3.4.0: - version "3.5.4" - resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.4.tgz#b0f447187c8a8bed944b815a660bddf5deb5d1a7" - dependencies: - acorn "^5.5.0" - acorn-jsx "^3.0.0" - -esprima@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804" - -esprima@~2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-2.2.0.tgz#4292c1d68e4173d815fa2290dc7afc96d81fcd83" - -esquery@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.0.1.tgz#406c51658b1f5991a5f9b62b1dc25b00e3e5c708" - dependencies: - estraverse "^4.0.0" - -esrecurse@^4.1.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.2.1.tgz#007a3b9fdbc2b3bb87e4879ea19c92fdbd3942cf" - dependencies: - estraverse "^4.1.0" - -estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" - -esutils@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" - -event-emitter@~0.3.5: - version "0.3.5" - resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" - dependencies: - d "1" - es5-ext "~0.10.14" - -eventemitter3@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-3.1.0.tgz#090b4d6cdbd645ed10bf750d4b5407942d7ba163" - -execa@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -exit-hook@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8" - -expand-braces@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/expand-braces/-/expand-braces-0.1.2.tgz#488b1d1d2451cb3d3a6b192cfc030f44c5855fea" - dependencies: - array-slice "^0.2.3" - array-unique "^0.2.1" - braces "^0.1.2" - -expand-brackets@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-0.1.5.tgz#df07284e342a807cd733ac5af72411e581d1177b" - dependencies: - is-posix-bracket "^0.1.0" - -expand-range@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-0.1.1.tgz#4cb8eda0993ca56fa4f41fc42f3cbb4ccadff044" - dependencies: - is-number "^0.1.1" - repeat-string "^0.2.2" - -expand-range@^1.8.1: - version "1.8.2" - resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" - dependencies: - 
fill-range "^2.1.0" - -ext-list@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/ext-list/-/ext-list-2.2.2.tgz#0b98e64ed82f5acf0f2931babf69212ef52ddd37" - dependencies: - mime-db "^1.28.0" - -ext-name@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ext-name/-/ext-name-3.0.0.tgz#07e4418737cb1f513c32c6ea48d8b8c8e0471abb" - dependencies: - ends-with "^0.2.0" - ext-list "^2.0.0" - meow "^3.1.0" - sort-keys-length "^1.0.0" - -extend@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" - -extglob@^0.3.1: - version "0.3.2" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-0.3.2.tgz#2e18ff3d2f49ab2765cec9023f011daa8d8349a1" - dependencies: - is-extglob "^1.0.0" - -fast-levenshtein@~2.0.4: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - -figures@^1.3.5: - version "1.7.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-1.7.0.tgz#cbe1e3affcf1cd44b80cadfed28dc793a9701d2e" - dependencies: - escape-string-regexp "^1.0.5" - object-assign "^4.1.0" - -file-entry-cache@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-2.0.0.tgz#c392990c3e684783d838b8c84a45d8a048458361" - dependencies: - flat-cache "^1.2.1" - object-assign "^4.0.1" - -filename-regex@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/filename-regex/-/filename-regex-2.0.1.tgz#c1c4b9bee3e09725ddb106b75c1e301fe2f18b26" - -fill-range@^2.1.0: - version "2.2.4" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-2.2.4.tgz#eb1e773abb056dcd8df2bfdf6af59b8b3a936565" - dependencies: - is-number "^2.1.0" - isobject "^2.0.0" - randomatic "^3.0.0" - repeat-element "^1.1.2" - repeat-string "^1.5.2" - -finalhandler@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.0.tgz#ce0b6855b45853e791b2fcc680046d88253dd7f5" - dependencies: - debug "2.6.9" - encodeurl "~1.0.1" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.2" - statuses "~1.3.1" - unpipe "~1.0.0" - -find-up@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" - dependencies: - path-exists "^2.0.0" - pinkie-promise "^2.0.0" - -flat-cache@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481" - dependencies: - circular-json "^0.3.1" - del "^2.0.2" - graceful-fs "^4.1.2" - write "^0.2.1" - -follow-redirects@^1.0.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.0.tgz#234f49cf770b7f35b40e790f636ceba0c3a0ab77" - dependencies: - debug "^3.1.0" - -for-in@^0.1.3: - version "0.1.8" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-0.1.8.tgz#d8773908e31256109952b1fdb9b3fa867d2775e1" - -for-in@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - -for-own@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-0.1.5.tgz#5265c681a4f294dabbf17c9509b6763aa84510ce" - dependencies: - for-in "^1.0.1" - -for-own@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/for-own/-/for-own-1.0.0.tgz#c63332f415cedc4b04dbfe70cf836494c53cb44b" - dependencies: - for-in "^1.0.1" - -fs-access@^1.0.0: - 
version "1.0.1" - resolved "https://registry.yarnpkg.com/fs-access/-/fs-access-1.0.1.tgz#d6a87f262271cefebec30c553407fb995da8777a" - dependencies: - null-check "^1.0.0" - -fs-minipass@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.5.tgz#06c277218454ec288df77ada54a03b8702aacb9d" - dependencies: - minipass "^2.2.1" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - -fsevents@^1.0.0: - version "1.2.4" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.4.tgz#f41dcb1af2582af3692da36fc55cbd8e1041c426" - dependencies: - nan "^2.9.2" - node-pre-gyp "^0.10.0" - -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - dependencies: - aproba "^1.0.3" - console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" - signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" - -generate-function@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.0.0.tgz#6858fe7c0969b7d4e9093337647ac79f60dfbe74" - -generate-object-property@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/generate-object-property/-/generate-object-property-1.2.0.tgz#9c0e1c40308ce804f4783618b937fa88f99d50d0" - dependencies: - is-property "^1.0.0" - -get-stdin@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe" - -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - -glob-base@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" - dependencies: - glob-parent "^2.0.0" - is-glob "^2.0.0" - -glob-parent@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" - dependencies: - is-glob "^2.0.0" - -glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1: - version "7.1.2" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15" - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -global-dirs@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445" - dependencies: - ini "^1.3.4" - -globals@^9.14.0: - version "9.18.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" - -globby@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-5.0.0.tgz#ebd84667ca0dbb330b99bcfc68eac2bc54370e0d" - dependencies: - array-union "^1.0.1" - arrify "^1.0.0" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -got@^6.7.1: - version "6.7.1" - resolved "https://registry.yarnpkg.com/got/-/got-6.7.1.tgz#240cd05785a9a18e561dc1b44b41c763ef1e8db0" - dependencies: - create-error-class "^3.0.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-redirect "^1.0.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - lowercase-keys "^1.0.0" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - unzip-response "^2.0.1" - 
url-parse-lax "^1.0.0" - -graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.3: - version "4.1.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658" - -has-ansi@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" - dependencies: - ansi-regex "^2.0.0" - -has-binary@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/has-binary/-/has-binary-0.1.7.tgz#68e61eb16210c9545a0a5cce06a873912fe1e68c" - dependencies: - isarray "0.0.1" - -has-cors@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39" - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - -has-unicode@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - -hosted-git-info@^2.1.4: - version "2.6.0" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.6.0.tgz#23235b29ab230c576aab0d4f13fc046b0b038222" - -http-errors@1.6.3, http-errors@~1.6.3: - version "1.6.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.6.3.tgz#8b55680bb4be283a0b5bf4ea2e38580be1d9320d" - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - -http-proxy@^1.13.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.17.0.tgz#7ad38494658f84605e2f6db4436df410f4e5be9a" - dependencies: - eventemitter3 "^3.0.0" - follow-redirects "^1.0.0" - requires-port "^1.0.0" - -iconv-lite@0.4.23, iconv-lite@^0.4.4: - version "0.4.23" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.23.tgz#297871f63be507adcfbfca715d0cd0eed84e9a63" - dependencies: - safer-buffer ">= 2.1.2 < 3" - -ignore-walk@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.1.tgz#a83e62e7d272ac0e3b551aaa82831a19b69f82f8" - dependencies: - minimatch "^3.0.4" - -ignore@^3.2.0: - version "3.3.8" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.8.tgz#3f8e9c35d38708a3a7e0e9abb6c73e7ee7707b2b" - -import-lazy@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - -indent-string@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80" - dependencies: - repeating "^2.0.0" - -indexof@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/indexof/-/indexof-0.0.1.tgz#82dc336d232b9062179d05ab3293a66059fd435d" - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.3, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.1, inherits@~2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - -ini@^1.3.4, ini@~1.3.0: - version "1.3.5" - resolved 
"https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" - -inquirer@^0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-0.12.0.tgz#1ef2bfd63504df0bc75785fff8c2c41df12f077e" - dependencies: - ansi-escapes "^1.1.0" - ansi-regex "^2.0.0" - chalk "^1.0.0" - cli-cursor "^1.0.1" - cli-width "^2.0.0" - figures "^1.3.5" - lodash "^4.3.0" - readline2 "^1.0.1" - run-async "^0.1.0" - rx-lite "^3.1.2" - string-width "^1.0.1" - strip-ansi "^3.0.0" - through "^2.3.6" - -interpret@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614" - -intersect@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/intersect/-/intersect-1.0.1.tgz#332650e10854d8c0ac58c192bdc27a8bf7e7a30c" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - dependencies: - binary-extensions "^1.0.0" - -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - -is-builtin-module@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-1.0.0.tgz#540572d34f7ac3119f8f76c30cbc1b1e037affbe" - dependencies: - builtin-modules "^1.0.0" - -is-ci@^1.0.10: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-1.1.0.tgz#247e4162e7860cebbdaf30b774d6b0ac7dcfe7a5" - dependencies: - ci-info "^1.0.0" - -is-dotfile@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" - -is-equal-shallow@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz#2238098fc221de0bcfa5d9eac4c45d638aa1c534" - dependencies: - is-primitive "^2.0.0" - -is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - -is-extglob@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" - -is-finite@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.0.2.tgz#cc6677695602be550ef11e8b4aa6305342b6d0aa" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - dependencies: - number-is-nan "^1.0.0" - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - -is-glob@^2.0.0, is-glob@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" - dependencies: - is-extglob "^1.0.0" - -is-installed-globally@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.1.0.tgz#0dfd98f5a9111716dd535dda6492f67bf3d25a80" - dependencies: - global-dirs "^0.1.0" - is-path-inside "^1.0.0" - 
-is-my-ip-valid@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-my-ip-valid/-/is-my-ip-valid-1.0.0.tgz#7b351b8e8edd4d3995d4d066680e664d94696824" - -is-my-json-valid@^2.10.0: - version "2.17.2" - resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.17.2.tgz#6b2103a288e94ef3de5cf15d29dd85fc4b78d65c" - dependencies: - generate-function "^2.0.0" - generate-object-property "^1.1.0" - is-my-ip-valid "^1.0.0" - jsonpointer "^4.0.0" - xtend "^4.0.0" - -is-npm@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4" - -is-number@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-0.1.1.tgz#69a7af116963d47206ec9bd9b48a14216f1e3806" - -is-number@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-2.1.0.tgz#01fcbbb393463a548f2f466cce16dece49db908f" - dependencies: - kind-of "^3.0.2" - -is-number@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" - -is-obj@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" - -is-path-cwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" - -is-path-in-cwd@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz#5ac48b345ef675339bd6c7a48a912110b241cf52" - dependencies: - is-path-inside "^1.0.0" - -is-path-inside@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036" - dependencies: - path-is-inside "^1.0.1" - -is-plain-obj@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" - -is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - dependencies: - isobject "^3.0.1" - -is-posix-bracket@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" - -is-primitive@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-primitive/-/is-primitive-2.0.0.tgz#207bab91638499c07b2adf240a41a87210034575" - -is-property@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-property/-/is-property-1.0.2.tgz#57fe1c4e48474edd65b09911f26b1cd4095dda84" - -is-redirect@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24" - -is-resolvable@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" - -is-retry-allowed@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34" - -is-stream@^1.0.0, is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - -is-utf8@^0.2.0: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" - 
-isarray@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" - -isarray@1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - -isbinaryfile@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/isbinaryfile/-/isbinaryfile-3.0.2.tgz#4a3e974ec0cba9004d3fc6cde7209ea69368a621" - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - dependencies: - isarray "1.0.0" - -isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - -jasmine-core@2.8.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.8.0.tgz#bcc979ae1f9fd05701e45e52e65d3a5d63f1a24e" - -js-tokens@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" - -js-yaml@3.3.1: - version "3.3.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.3.1.tgz#ca1acd3423ec275d12140a7bab51db015ba0b3c0" - dependencies: - argparse "~1.0.2" - esprima "~2.2.0" - -js-yaml@^3.5.1: - version "3.12.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.12.0.tgz#eaed656ec8344f10f527c6bfa1b6e2244de167d1" - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -json-stable-stringify@^1.0.0, json-stable-stringify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af" - dependencies: - jsonify "~0.0.0" - -json3@3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1" - -jsonify@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" - -jsonpointer@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9" - -karma-chrome-launcher@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/karma-chrome-launcher/-/karma-chrome-launcher-2.2.0.tgz#cf1b9d07136cc18fe239327d24654c3dbc368acf" - dependencies: - fs-access "^1.0.0" - which "^1.2.1" - -karma-cli@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/karma-cli/-/karma-cli-1.0.1.tgz#ae6c3c58a313a1d00b45164c455b9b86ce17f960" - dependencies: - resolve "^1.1.6" - -karma-jasmine@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/karma-jasmine/-/karma-jasmine-1.1.2.tgz#394f2b25ffb4a644b9ada6f22d443e2fd08886c3" - -karma@^1.7.1: - version "1.7.1" - resolved "https://registry.yarnpkg.com/karma/-/karma-1.7.1.tgz#85cc08e9e0a22d7ce9cca37c4a1be824f6a2b1ae" - dependencies: - bluebird "^3.3.0" - body-parser "^1.16.1" - chokidar "^1.4.1" - colors "^1.1.0" - combine-lists "^1.0.0" - connect "^3.6.0" - core-js "^2.2.0" - di "^0.0.1" - dom-serialize "^2.2.0" - expand-braces "^0.1.1" - glob "^7.1.1" - graceful-fs "^4.1.2" - http-proxy "^1.13.0" - isbinaryfile "^3.0.0" - lodash "^3.8.0" - log4js "^0.6.31" - mime "^1.3.4" - minimatch "^3.0.2" - optimist "^0.6.1" - qjobs "^1.1.4" - range-parser 
"^1.2.0" - rimraf "^2.6.0" - safe-buffer "^5.0.1" - socket.io "1.7.3" - source-map "^0.5.3" - tmp "0.0.31" - useragent "^2.1.12" - -kind-of@^3.0.2: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - -kind-of@^6.0.0: - version "6.0.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" - -latest-version@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-3.1.0.tgz#a205383fea322b33b5ae3b18abee0dc2f356ee15" - dependencies: - package-json "^4.0.0" - -levn@^0.3.0, levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -load-json-file@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" - dependencies: - graceful-fs "^4.1.2" - parse-json "^2.2.0" - pify "^2.0.0" - pinkie-promise "^2.0.0" - strip-bom "^2.0.0" - -lodash@^3.8.0: - version "3.10.1" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6" - -lodash@^4.0.0, lodash@^4.17.4, lodash@^4.3.0, lodash@^4.5.0: - version "4.17.10" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.10.tgz#1b7793cf7259ea38fb3661d4d38b3260af8ae4e7" - -log4js@^0.6.31: - version "0.6.38" - resolved "https://registry.yarnpkg.com/log4js/-/log4js-0.6.38.tgz#2c494116695d6fb25480943d3fc872e662a522fd" - dependencies: - readable-stream "~1.0.2" - semver "~4.3.3" - -loud-rejection@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f" - dependencies: - currently-unhandled "^0.4.1" - signal-exit "^3.0.0" - -lowercase-keys@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - -lru-cache@4.1.x, lru-cache@^4.0.1: - version "4.1.3" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.3.tgz#a1175cf3496dfc8436c156c334b4955992bce69c" - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - -make-dir@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - dependencies: - pify "^3.0.0" - -map-obj@^1.0.0, map-obj@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" - -math-random@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/math-random/-/math-random-1.0.1.tgz#8b3aac588b8a66e4975e3cdea67f7bb329601fac" - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - -meow@^3.1.0, meow@^3.7.0: - version "3.7.0" - resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" - dependencies: - camelcase-keys "^2.0.0" - decamelize "^1.1.2" - loud-rejection "^1.0.0" - map-obj "^1.0.1" - minimist "^1.1.3" - normalize-package-data "^2.3.4" - object-assign "^4.0.1" - read-pkg-up "^1.0.1" - redent 
"^1.0.0" - trim-newlines "^1.0.0" - -micromatch@^2.1.5: - version "2.3.11" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565" - dependencies: - arr-diff "^2.0.0" - array-unique "^0.2.1" - braces "^1.8.2" - expand-brackets "^0.1.4" - extglob "^0.3.1" - filename-regex "^2.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.1" - kind-of "^3.0.2" - normalize-path "^2.0.1" - object.omit "^2.0.0" - parse-glob "^3.0.4" - regex-cache "^0.4.2" - -mime-db@^1.28.0: - version "1.34.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.34.0.tgz#452d0ecff5c30346a6dc1e64b1eaee0d3719ff9a" - -mime-db@~1.33.0: - version "1.33.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db" - -mime-types@~2.1.11, mime-types@~2.1.18: - version "2.1.18" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.18.tgz#6f323f60a83d11146f831ff11fd66e2fe5503bb8" - dependencies: - mime-db "~1.33.0" - -mime@^1.3.4: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - -minimatch@^3.0.2, minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - dependencies: - brace-expansion "^1.1.7" - -minimist@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d" - -minimist@^1.1.3, minimist@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" - -minimist@~0.0.1: - version "0.0.10" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.10.tgz#de3f98543dbf96082be48ad1a0c7cda836301dcf" - -minipass@^2.2.1, minipass@^2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.3.3.tgz#a7dcc8b7b833f5d368759cce544dccb55f50f233" - dependencies: - safe-buffer "^5.1.2" - yallist "^3.0.0" - -minizlib@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.1.0.tgz#11e13658ce46bc3a70a267aac58359d1e0c29ceb" - dependencies: - minipass "^2.2.1" - -mixin-object@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/mixin-object/-/mixin-object-2.0.1.tgz#4fb949441dab182540f1fe035ba60e1947a5e57e" - dependencies: - for-in "^0.1.3" - is-extendable "^0.1.1" - -mkdirp@^0.5.0, mkdirp@^0.5.1: - version "0.5.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903" - dependencies: - minimist "0.0.8" - -mout@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/mout/-/mout-1.1.0.tgz#0b29d41e6a80fa9e2d4a5be9d602e1d9d02177f6" - -ms@0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098" - -ms@0.7.2: - version "0.7.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - -mute-stream@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.5.tgz#8fbfabb0a98a253d3184331f9e8deb7372fac6c0" - -nan@^2.9.2: - version "2.10.0" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.10.0.tgz#96d0cd610ebd58d4b4de9cc0c6828cda99c7548f" - -natural-compare@^1.4.0: - version "1.4.0" - 
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - -needle@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/needle/-/needle-2.2.1.tgz#b5e325bd3aae8c2678902fa296f729455d1d3a7d" - dependencies: - debug "^2.1.2" - iconv-lite "^0.4.4" - sax "^1.2.4" - -negotiator@0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9" - -next-tick@1: - version "1.0.0" - resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c" - -node-pre-gyp@^0.10.0: - version "0.10.0" - resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.10.0.tgz#6e4ef5bb5c5203c6552448828c852c40111aac46" - dependencies: - detect-libc "^1.0.2" - mkdirp "^0.5.1" - needle "^2.2.0" - nopt "^4.0.1" - npm-packlist "^1.1.6" - npmlog "^4.0.2" - rc "^1.1.7" - rimraf "^2.6.1" - semver "^5.3.0" - tar "^4" - -nopt@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d" - dependencies: - abbrev "1" - osenv "^0.1.4" - -normalize-package-data@^2.3.2, normalize-package-data@^2.3.4: - version "2.4.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.4.0.tgz#12f95a307d58352075a04907b84ac8be98ac012f" - dependencies: - hosted-git-info "^2.1.4" - is-builtin-module "^1.0.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - -normalize-path@^2.0.0, normalize-path@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - dependencies: - remove-trailing-separator "^1.0.1" - -npm-bundled@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.0.3.tgz#7e71703d973af3370a9591bafe3a63aca0be2308" - -npm-packlist@^1.1.6: - version "1.1.10" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.1.10.tgz#1039db9e985727e464df066f4cf0ab6ef85c398a" - dependencies: - ignore-walk "^3.0.1" - npm-bundled "^1.0.1" - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - dependencies: - path-key "^2.0.0" - -npmlog@^4.0.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -null-check@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/null-check/-/null-check-1.0.0.tgz#977dffd7176012b9ec30d2a39db5cf72a0439edd" - -number-is-nan@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" - -object-assign@4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.0.tgz#7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0" - -object-assign@^4.0.1, object-assign@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - -object-component@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" - -object.omit@^2.0.0: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" - dependencies: - for-own "^0.1.4" - is-extendable "^0.1.1" - -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - dependencies: - ee-first "1.1.1" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - dependencies: - wrappy "1" - -onetime@^1.0.0: - version "1.1.0" - resolved "http://registry.npmjs.org/onetime/-/onetime-1.1.0.tgz#a1f7838f8314c516f05ecefcbc4ccfe04b4ed789" - -optimist@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.6.1.tgz#da3ea74686fa21a19a111c326e90eb15a0196686" - dependencies: - minimist "~0.0.1" - wordwrap "~0.0.2" - -optionator@^0.8.2: - version "0.8.2" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64" - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.4" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - wordwrap "~1.0.0" - -options@>=0.0.5: - version "0.0.6" - resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f" - -os-homedir@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" - -os-tmpdir@^1.0.0, os-tmpdir@~1.0.1, os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - -osenv@^0.1.3, osenv@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410" - dependencies: - os-homedir "^1.0.0" - os-tmpdir "^1.0.0" - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - -package-json@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/package-json/-/package-json-4.0.1.tgz#8869a0401253661c4c4ca3da6c2121ed555f5eed" - dependencies: - got "^6.7.1" - registry-auth-token "^3.0.1" - registry-url "^3.0.3" - semver "^5.1.0" - -parse-glob@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" - dependencies: - glob-base "^0.3.0" - is-dotfile "^1.0.0" - is-extglob "^1.0.0" - is-glob "^2.0.0" - -parse-json@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" - dependencies: - error-ex "^1.2.0" - -parsejson@0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/parsejson/-/parsejson-0.0.3.tgz#ab7e3759f209ece99437973f7d0f1f64ae0e64ab" - dependencies: - better-assert "~1.0.0" - -parseqs@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d" - dependencies: - better-assert "~1.0.0" - -parseuri@0.0.5: - version "0.0.5" - resolved "https://registry.yarnpkg.com/parseuri/-/parseuri-0.0.5.tgz#80204a50d4dbb779bfdc6ebe2778d90e4bce320a" - dependencies: - better-assert "~1.0.0" - -parseurl@~1.3.2: - version "1.3.2" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3" - -path-exists@^2.0.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" - dependencies: - pinkie-promise "^2.0.0" - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - -path-is-inside@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" - -path-key@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - -path-parse@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.5.tgz#3c1adf871ea9cd6c9431b6ea2bd74a0ff055c4c1" - -path-type@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" - dependencies: - graceful-fs "^4.1.2" - pify "^2.0.0" - pinkie-promise "^2.0.0" - -pify@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - -pify@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - -pluralize@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - -preserve@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b" - -process-nextick-args@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.0.tgz#a37d732f4271b4ab1ad070d35508e8290788ffaa" - -progress@^1.1.8: - version "1.1.8" - resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be" - -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - -qjobs@^1.1.4: - version "1.2.0" - resolved "https://registry.yarnpkg.com/qjobs/-/qjobs-1.2.0.tgz#c45e9c61800bd087ef88d7e256423bdd49e5d071" - -qs@6.5.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" - -randomatic@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.0.0.tgz#d35490030eb4f7578de292ce6dfb04a91a128923" - dependencies: - is-number "^4.0.0" - kind-of "^6.0.0" - math-random "^1.0.1" - -range-parser@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.0.tgz#f49be6b487894ddc40dcc94a322f611092e00d5e" - -raw-body@2.3.3: - version "2.3.3" - resolved 
"https://registry.yarnpkg.com/raw-body/-/raw-body-2.3.3.tgz#1b324ece6b5706e153855bc1148c65bb7f6ea0c3" - dependencies: - bytes "3.0.0" - http-errors "1.6.3" - iconv-lite "0.4.23" - unpipe "1.0.0" - -rc@^1.0.1, rc@^1.1.6, rc@^1.1.7: - version "1.2.8" - resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" - dependencies: - deep-extend "^0.6.0" - ini "~1.3.0" - minimist "^1.2.0" - strip-json-comments "~2.0.1" - -read-pkg-up@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" - dependencies: - find-up "^1.0.0" - read-pkg "^1.0.0" - -read-pkg@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" - dependencies: - load-json-file "^1.0.0" - normalize-package-data "^2.3.2" - path-type "^1.0.0" - -readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.2.2: - version "2.3.6" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readable-stream@~1.0.2: - version "1.0.34" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" - dependencies: - core-util-is "~1.0.0" - inherits "~2.0.1" - isarray "0.0.1" - string_decoder "~0.10.x" - -readdirp@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.1.0.tgz#4ed0ad060df3073300c48440373f72d1cc642d78" - dependencies: - graceful-fs "^4.1.2" - minimatch "^3.0.2" - readable-stream "^2.0.2" - set-immediate-shim "^1.0.1" - -readline2@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/readline2/-/readline2-1.0.1.tgz#41059608ffc154757b715d9989d199ffbf372e35" - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - mute-stream "0.0.5" - -rechoir@^0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" - dependencies: - resolve "^1.1.6" - -redent@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde" - dependencies: - indent-string "^2.1.0" - strip-indent "^1.0.1" - -regenerator-runtime@^0.11.0: - version "0.11.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" - -regex-cache@^0.4.2: - version "0.4.4" - resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd" - dependencies: - is-equal-shallow "^0.1.3" - -registry-auth-token@^3.0.1: - version "3.3.2" - resolved "https://registry.yarnpkg.com/registry-auth-token/-/registry-auth-token-3.3.2.tgz#851fd49038eecb586911115af845260eec983f20" - dependencies: - rc "^1.1.6" - safe-buffer "^5.0.1" - -registry-url@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-3.1.0.tgz#3d4ef870f73dde1d77f0cf9a381432444e174942" - dependencies: - rc "^1.0.1" - -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - 
-repeat-element@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" - -repeat-string@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-0.2.2.tgz#c7a8d3236068362059a7e4651fc6884e8b1fb4ae" - -repeat-string@^1.5.2: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - -repeating@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" - dependencies: - is-finite "^1.0.0" - -require-uncached@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/require-uncached/-/require-uncached-1.0.3.tgz#4e0d56d6c9662fd31e43011c4b95aa49955421d3" - dependencies: - caller-path "^0.1.0" - resolve-from "^1.0.0" - -requires-port@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" - -resolve-from@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" - -resolve@^1.1.6: - version "1.7.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3" - dependencies: - path-parse "^1.0.5" - -restore-cursor@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541" - dependencies: - exit-hook "^1.0.0" - onetime "^1.0.0" - -rimraf@^2.2.8, rimraf@^2.6.0, rimraf@^2.6.1: - version "2.6.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36" - dependencies: - glob "^7.0.5" - -run-async@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/run-async/-/run-async-0.1.0.tgz#c8ad4a5e110661e402a7d21b530e009f25f8e389" - dependencies: - once "^1.3.0" - -rx-lite@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-3.1.2.tgz#19ce502ca572665f3b647b10939f97fd1615f102" - -safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - -"safer-buffer@>= 2.1.2 < 3": - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - -sax@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" - -semver-diff@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36" - dependencies: - semver "^5.0.3" - -"semver@2 || 3 || 4 || 5", semver@^5.0.3, semver@^5.1.0, semver@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.5.0.tgz#dc4bbc7a6ca9d916dee5d43516f0092b58f7b8ab" - -semver@~4.3.3: - version "4.3.6" - resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da" - -set-blocking@~2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - -set-immediate-shim@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" - -setprototypeof@1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.0.tgz#d0bd85536887b6fe7c0d818cb962d9d91c54e656" - -shallow-clone@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-1.0.0.tgz#4480cd06e882ef68b2ad88a3ea54832e2c48b571" - dependencies: - is-extendable "^0.1.1" - kind-of "^5.0.0" - mixin-object "^2.0.1" - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - dependencies: - shebang-regex "^1.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - -shelljs@^0.7.5: - version "0.7.8" - resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.7.8.tgz#decbcf874b0d1e5fb72e14b164a9683048e9acb3" - dependencies: - glob "^7.0.0" - interpret "^1.0.0" - rechoir "^0.6.2" - -signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" - -slice-ansi@0.0.4: - version "0.0.4" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35" - -socket.io-adapter@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-0.5.0.tgz#cb6d4bb8bec81e1078b99677f9ced0046066bb8b" - dependencies: - debug "2.3.3" - socket.io-parser "2.3.1" - -socket.io-client@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-1.7.3.tgz#b30e86aa10d5ef3546601c09cde4765e381da377" - dependencies: - backo2 "1.0.2" - component-bind "1.0.0" - component-emitter "1.2.1" - debug "2.3.3" - engine.io-client "1.8.3" - has-binary "0.1.7" - indexof "0.0.1" - object-component "0.0.3" - parseuri "0.0.5" - socket.io-parser "2.3.1" - to-array "0.1.4" - -socket.io-parser@2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-2.3.1.tgz#dd532025103ce429697326befd64005fcfe5b4a0" - dependencies: - component-emitter "1.1.2" - debug "2.2.0" - isarray "0.0.1" - json3 "3.3.2" - -socket.io@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-1.7.3.tgz#b8af9caba00949e568e369f1327ea9be9ea2461b" - dependencies: - debug "2.3.3" - engine.io "1.8.3" - has-binary "0.1.7" - object-assign "4.1.0" - socket.io-adapter "0.5.0" - socket.io-client "1.7.3" - socket.io-parser "2.3.1" - -sort-keys-length@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/sort-keys-length/-/sort-keys-length-1.0.1.tgz#9cb6f4f4e9e48155a6aa0671edd336ff1479a188" - dependencies: - sort-keys "^1.0.0" - -sort-keys@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" - dependencies: - is-plain-obj "^1.0.0" - -source-map@^0.5.3: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - -spdx-correct@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.0.0.tgz#05a5b4d7153a195bc92c3c425b69f3b2a9524c82" - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - 
-spdx-exceptions@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.1.0.tgz#2c7ae61056c714a5b9b9b2b2af7d311ef5c78fe9" - -spdx-expression-parse@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz#99e119b7a5da00e05491c9fa338b7904823b41d0" - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.0.tgz#7a7cd28470cc6d3a1cfe6d66886f6bc430d3ac87" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - -"statuses@>= 1.4.0 < 2": - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - -statuses@~1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e" - -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - -string_decoder@~0.10.x: - version "0.10.31" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - dependencies: - ansi-regex "^2.0.0" - -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - dependencies: - ansi-regex "^3.0.0" - -strip-bom@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" - dependencies: - is-utf8 "^0.2.0" - -strip-bom@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - -strip-indent@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2" - dependencies: - get-stdin "^4.0.1" - -strip-json-comments@~2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - -supports-color@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" - -supports-color@^5.3.0: - version "5.4.0" - resolved 
"https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54" - dependencies: - has-flag "^3.0.0" - -table@^3.7.8: - version "3.8.3" - resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f" - dependencies: - ajv "^4.7.0" - ajv-keywords "^1.0.0" - chalk "^1.1.1" - lodash "^4.0.0" - slice-ansi "0.0.4" - string-width "^2.0.0" - -tar@^4: - version "4.4.4" - resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.4.tgz#ec8409fae9f665a4355cc3b4087d0820232bb8cd" - dependencies: - chownr "^1.0.1" - fs-minipass "^1.2.5" - minipass "^2.3.3" - minizlib "^1.1.0" - mkdirp "^0.5.0" - safe-buffer "^5.1.2" - yallist "^3.0.2" - -term-size@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/term-size/-/term-size-1.2.0.tgz#458b83887f288fc56d6fffbfad262e26638efa69" - dependencies: - execa "^0.7.0" - -text-table@~0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - -through@^2.3.6: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - -timed-out@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" - -tmp@0.0.31: - version "0.0.31" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.31.tgz#8f38ab9438e17315e5dbd8b3657e8bfb277ae4a7" - dependencies: - os-tmpdir "~1.0.1" - -tmp@0.0.x: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - dependencies: - os-tmpdir "~1.0.2" - -to-array@0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/to-array/-/to-array-0.1.4.tgz#17e6c11f73dd4f3d74cda7a4ff3238e9ad9bf890" - -traverse@0.6.x: - version "0.6.6" - resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.6.6.tgz#cbdf560fd7b9af632502fed40f918c157ea97137" - -trim-newlines@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613" - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - dependencies: - prelude-ls "~1.1.2" - -type-is@~1.6.16: - version "1.6.16" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.16.tgz#f89ce341541c672b25ee7ae3c73dee3b2be50194" - dependencies: - media-typer "0.3.0" - mime-types "~2.1.18" - -typedarray@^0.0.6: - version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - -ultron@1.0.x: - version "1.0.2" - resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa" - -unique-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" - dependencies: - crypto-random-string "^1.0.0" - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - -untildify@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/untildify/-/untildify-2.1.0.tgz#17eb2807987f76952e9c0485fc311d06a826a2e0" - dependencies: - os-homedir "^1.0.0" - -unzip-response@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/unzip-response/-/unzip-response-2.0.1.tgz#d2f0f737d16b0615e72a6935ed04214572d56f97" - -update-notifier@^2.2.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-2.5.0.tgz#d0744593e13f161e406acb1d9408b72cad08aff6" - dependencies: - boxen "^1.2.1" - chalk "^2.0.1" - configstore "^3.0.0" - import-lazy "^2.1.0" - is-ci "^1.0.10" - is-installed-globally "^0.1.0" - is-npm "^1.0.0" - latest-version "^3.0.0" - semver-diff "^2.0.0" - xdg-basedir "^3.0.0" - -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - dependencies: - prepend-http "^1.0.1" - -user-home@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/user-home/-/user-home-2.0.0.tgz#9c70bfd8169bc1dcbf48604e0f04b8b49cde9e9f" - dependencies: - os-homedir "^1.0.0" - -useragent@^2.1.12: - version "2.3.0" - resolved "https://registry.yarnpkg.com/useragent/-/useragent-2.3.0.tgz#217f943ad540cb2128658ab23fc960f6a88c9972" - dependencies: - lru-cache "4.1.x" - tmp "0.0.x" - -util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - -validate-npm-package-license@^3.0.1: - version "3.0.3" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.3.tgz#81643bcbef1bdfecd4623793dc4648948ba98338" - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - -void-elements@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/void-elements/-/void-elements-2.0.1.tgz#c066afb582bb1cb4128d60ea92392e94d5e9dbec" - -which@^1.2.1, which@^1.2.14, which@^1.2.9: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - dependencies: - isexe "^2.0.0" - -wide-align@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" - dependencies: - string-width "^1.0.2 || 2" - -widest-line@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.0.tgz#0142a4e8a243f8882c0233aa0e0281aa76152273" - dependencies: - string-width "^2.1.1" - -wordwrap@~0.0.2: - version "0.0.3" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" - -wordwrap@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - -write-file-atomic@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-2.3.0.tgz#1ff61575c2e2a4e8e510d6fa4e243cce183999ab" - dependencies: - graceful-fs "^4.1.11" - imurmurhash "^0.1.4" - signal-exit "^3.0.2" - -write@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/write/-/write-0.2.1.tgz#5fc03828e264cea3fe91455476f7a3c566cb0757" - dependencies: - mkdirp "^0.5.1" - -ws@1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.2.tgz#8a244fa052401e08c9886cf44a85189e1fd4067f" - dependencies: - options 
">=0.0.5" - ultron "1.0.x" - -wtf-8@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/wtf-8/-/wtf-8-1.0.0.tgz#392d8ba2d0f1c34d1ee2d630f15d0efb68e1048a" - -xdg-basedir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" - -xmlhttprequest-ssl@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.3.tgz#185a888c04eca46c3e4070d99f7b49de3528992d" - -xtend@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af" - -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - -yallist@^3.0.0, yallist@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.0.2.tgz#8452b4bb7e83c7c188d8041c1a837c773d6d8bb9" - -yeast@0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419"