Retire Sahara: remove repo content

The Sahara project is retiring:
- https://review.opendev.org/c/openstack/governance/+/919374

This commit removes the content of this project repo.

Depends-On: https://review.opendev.org/c/openstack/project-config/+/919376
Change-Id: I0995b857b4277b95946c5457180cdcfa9c3c7ec2
Ghanshyam Mann 2024-05-10 17:28:05 -07:00
parent 3ae7a19664
commit 3815c917a3
314 changed files with 8 additions and 19286 deletions


@@ -1,13 +0,0 @@
[run]
branch = True
source = sahara_tests
omit =
.tox/*
sahara_tests/unit/*
[paths]
source = sahara_tests
[report]
ignore_errors = True
precision = 3

.gitignore

@@ -1,27 +0,0 @@
*.egg-info
*.egg[s]
*.log
*.py[co]
.coverage
.testrepository
.tox
.stestr
.venv
AUTHORS
ChangeLog
build
cover
develop-eggs
dist
doc/build
doc/html
eggs
scenario_key-*
sdist
target
tools/lintstack.head.py
tools/pylint_exceptions
doc/source/sample.config
# Files created by releasenotes build
releasenotes/build


@@ -1,3 +0,0 @@
[DEFAULT]
test_path=sahara_tests/unit
group_regex=([^\.]+\.)+


@@ -1,325 +0,0 @@
- project:
queue: sahara
templates:
- publish-openstack-docs-pti
- openstack-python3-ussuri-jobs
- release-notes-jobs-python3
check:
jobs:
- openstack-tox-cover:
voting: false
- openstack-tox-pylint:
voting: false
- sahara-tests-scenario:
voting: false
- sahara-tests-scenario-v2:
voting: false
- sahara-tests-tempest:
voting: false
- sahara-tests-tempest-v2:
voting: false
- sahara-tests-scenario-wallaby:
voting: false
- sahara-tests-scenario-victoria:
voting: false
- sahara-tests-scenario-ussuri:
voting: false
- sahara-tests-scenario-train:
voting: false
- sahara-tests-scenario-stein:
voting: false
- openstack-tox-py36:
voting: false
- openstack-tox-py37:
voting: false
gate:
jobs:
- sahara-tests-scenario:
voting: false
- sahara-tests-scenario-v2:
voting: false
- sahara-tests-tempest:
voting: false
- sahara-tests-tempest-v2:
voting: false
- openstack-tox-py36:
voting: false
- openstack-tox-py37:
voting: false
experimental:
jobs:
- sahara-tests-scenario-multinode-spark
- job:
name: sahara-tests-tempest
description: |
Run Tempest tests from the Sahara plugin.
parent: devstack-tempest
required-projects:
- openstack/sahara-tests
- openstack/sahara
- openstack/sahara-plugin-ambari
- openstack/sahara-plugin-cdh
- openstack/sahara-plugin-mapr
- openstack/sahara-plugin-spark
- openstack/sahara-plugin-storm
- openstack/sahara-plugin-vanilla
- openstack/heat
# - openstack/ceilometer
vars:
tempest_test_regex: ^(sahara_tempest_plugin.tests.)
tox_envlist: all
devstack_localrc:
IMAGE_URLS: https://cloud-images.ubuntu.com/xenial/current/xenial-server-cloudimg-amd64-disk1.img
TEMPEST_PLUGINS: /opt/stack/sahara-tests
USE_PYTHON3: True
devstack_local_conf:
test-config:
$TEMPEST_CONFIG:
data_processing:
test_image_name: xenial-server-cloudimg-amd64-disk1
test_ssh_user: ubuntu
data-processing-feature-enabled:
s3: 'True'
devstack_plugins:
sahara: https://opendev.org/openstack/sahara
heat: https://opendev.org/openstack/heat
# ceilometer: https://opendev.org/openstack/ceilometer
devstack_services:
tls-proxy: false
irrelevant-files:
- ^.*\.rst$
- ^api-ref/.*$
- ^doc/.*$
- ^etc/.*$
- ^releasenotes/.*$
- ^sahara_tests/.*$
- job:
name: sahara-tests-tempest-v2
description: |
Run Tempest tests from the Sahara plugin against Sahara APIv2
and Python 3.
parent: sahara-tests-tempest
required-projects:
- openstack/python-saharaclient
branches: master
vars:
devstack_localrc:
USE_PYTHON3: 'True'
devstack_local_conf:
test-config:
$TEMPEST_CONFIG:
data-processing:
api_version_saharaclient: '2'
use_api_v2: 'True'
devstack_services:
s-account: false
s-container: false
s-object: false
s-proxy: false
# variant for pre-Rocky branches (no S3)
- job:
name: sahara-tests-tempest
branches:
- stable/ocata
- stable/pike
- stable/queens
vars:
devstack_localrc:
USE_PYTHON3: 'False'
devstack_local_conf:
test-config:
$TEMPEST_CONFIG:
data-processing-feature-enabled:
s3: 'False'
# variant for pre-Ussuri branches (Python 2 by default)
- job:
name: sahara-tests-tempest
branches:
- stable/rocky
- stable/stein
- stable/train
vars:
devstack_localrc:
USE_PYTHON3: 'False'
- job:
name: sahara-tests-scenario
description: |
Run scenario tests for Sahara.
parent: devstack
roles:
- zuul: openstack/devstack
- zuul: openstack/sahara-image-elements
required-projects:
- openstack/sahara-tests
- openstack/sahara
- openstack/sahara-plugin-ambari
- openstack/sahara-plugin-cdh
- openstack/sahara-plugin-mapr
- openstack/sahara-plugin-spark
- openstack/sahara-plugin-storm
- openstack/sahara-plugin-vanilla
- openstack/heat
# - openstack/ceilometer
- openstack/sahara-image-elements
- openstack/shade
run: playbooks/sahara-tests-scenario.yaml
host-vars:
controller:
devstack_plugins:
sahara: https://opendev.org/openstack/sahara
heat: https://opendev.org/openstack/heat
# ceilometer: https://opendev.org/openstack/ceilometer
shade: https://opendev.org/openstack/shade
group-vars:
subnode:
devstack_services:
tls-proxy: false
vars:
devstack_services:
tls-proxy: false
devstack_localrc:
# required to contain (almost any) custom-built image
SWIFT_LOOPBACK_DISK_SIZE: 8G
SWIFT_MAX_FILE_SIZE: 8589934592
USE_PYTHON3: True
devstack_local_conf:
post-config:
$SAHARA_CONF_FILE:
DEFAULT:
min_transient_cluster_active_time: 90
sahara_image_name: xenial-server
sahara_image_url: https://cloud-images.ubuntu.com/xenial/current/xenial-server-cloudimg-amd64-disk1.img
sahara_plugin: fake
sahara_plugin_version: '0.1'
sahara_scenario_test_template: fake.yaml.mako
sahara_scenario_tox_env: venv
irrelevant-files:
- ^.*\.rst$
- ^api-ref/.*$
- ^doc/.*$
- ^releasenotes/.*$
- ^sahara_tempest_plugin/.*$
- job:
name: sahara-tests-scenario-v2
parent: sahara-tests-scenario
vars:
sahara_scenario_use_api_v2: True
- job:
name: sahara-tests-scenario-wallaby
parent: sahara-tests-scenario-py3
override-checkout: stable/wallaby
- job:
name: sahara-tests-scenario-victoria
parent: sahara-tests-scenario-py3
override-checkout: stable/victoria
- job:
name: sahara-tests-scenario-ussuri
parent: sahara-tests-scenario-py3
nodeset: openstack-single-node-bionic
override-checkout: stable/ussuri
# pre-Ussuri scenario tests: fully-py3 based according to the rules
# (jobs running on master must use Python 3), but use RGW
# on pre-Train branches as Swift/py3 does not work there.
- job:
name: sahara-tests-scenario-train
parent: sahara-tests-scenario
nodeset: openstack-single-node-bionic
override-checkout: stable/train
- job:
name: sahara-tests-scenario-stein
parent: sahara-tests-scenario-py3
nodeset: openstack-single-node-bionic
override-checkout: stable/stein
- job:
name: sahara-tests-scenario-modwsgi
description: |
Run scenario tests on a Sahara deployment based on mod_wsgi.
parent: sahara-tests-scenario
vars:
devstack_localrc:
SAHARA_USE_MOD_WSGI: 'True'
# variant to be used on pre-Ussuri branches (Python 2 only)
- job:
name: sahara-tests-scenario
branches:
- stable/rocky
- stable/stein
- stable/train
vars:
devstack_localrc:
USE_PYTHON3: 'False'
- job:
name: sahara-tests-scenario-radosgw
description: |
Run scenario tests for Sahara, using RadosGW instead of Swift.
parent: sahara-tests-scenario
required-projects:
- openstack/devstack-plugin-ceph
host-vars:
controller:
devstack_plugins:
devstack-plugin-ceph: 'https://opendev.org/openstack/devstack-plugin-ceph'
vars:
devstack_localrc:
ENABLE_CEPH_CINDER: 'False'
ENABLE_CEPH_C_BAK: 'False'
ENABLE_CEPH_GLANCE: 'False'
ENABLE_CEPH_MANILA: 'False'
ENABLE_CEPH_NOVA: 'False'
ENABLE_CEPH_RGW: 'True'
devstack_local_conf:
test-config:
"$TEMPEST_CONFIG":
service_available:
swift: 'True'
devstack_services:
s-account: false
s-container: false
s-object: false
s-proxy: false
sahara_enable_s3: True
- job:
name: sahara-tests-scenario-py3
description: |
Run scenario tests on a Sahara deployment based on Python 3.
Required by some pre-Ussuri branches of sahara, which also
needs swift (not fully ported to Python 3 at the time).
parent: sahara-tests-scenario-radosgw
vars:
devstack_localrc:
USE_PYTHON3: 'True'
- job:
name: sahara-tests-scenario-multinode-spark
description: |
Run scenario tests based on Spark on a multinode Sahara deployment.
parent: sahara-tests-scenario-radosgw
nodeset: openstack-two-node
vars:
sahara_image_name: xenial-spark
sahara_image_url: '{{ ansible_user_dir }}/{{ zuul.projects["opendev.org/openstack/sahara-image-elements"].src_dir }}/ubuntu_sahara_spark_latest.qcow2'
sahara_plugin: spark
sahara_plugin_version: 1.6.0
sahara_scenario_test_template: spark-1.6.0.yaml.mako
sahara_flavors:
sah1.small:
id: 20
ram: 1536
disk: 20
vcpus: 1
ephemeral: 0


@@ -1,19 +0,0 @@
The source repository for this project can be found at:
https://opendev.org/openstack/sahara-tests
Pull requests submitted through GitHub are not monitored.
To start contributing to OpenStack, follow the steps in the contribution guide
to set up and use Gerrit:
https://docs.openstack.org/contributors/code-and-documentation/quick-start.html
Bugs should be filed on Storyboard:
https://storyboard.openstack.org/#!/project/openstack/sahara-tests
For more specific information about contributing to this repository, see the
sahara-tests contributor guide:
https://docs.openstack.org/sahara-tests/latest/contributor/contributing.html


@@ -1,53 +0,0 @@
Sahara Style Commandments
==========================
- Step 1: Read the OpenStack Style Commandments
https://docs.openstack.org/hacking/latest/
- Step 2: Read on
Sahara Specific Commandments
-----------------------------
Commit Messages
---------------
Using a common format for commit messages will help keep our git history
readable. Follow these guidelines:
- [S365] First, provide a brief summary of 50 characters or less. Summaries
of greater than 72 characters will be rejected by the gate.
- [S364] The first line of the commit message should provide an accurate
description of the change, not just a reference to a bug or blueprint.
Imports
-------
- [S366, S367] Organize your imports according to the ``Import order``
Dictionaries/Lists
------------------
- [S360] Ensure default arguments are not mutable.
- [S368] Must use a dict comprehension instead of a dict constructor with a
sequence of key-value pairs. For more information, please refer to
http://legacy.python.org/dev/peps/pep-0274/
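As a minimal sketch of this rule (the variable names are hypothetical):

.. sourcecode:: python

    # Preferred (S368): build the mapping with a dict comprehension.
    squares = {n: n * n for n in range(5)}

    # Flagged: dict() constructor fed a sequence of key-value pairs.
    squares = dict((n, n * n) for n in range(5))
..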
Logs
----
- [S369] Check LOG.info translations
- [S370] Check LOG.error translations
- [S371] Check LOG.warning translations
- [S372] Check LOG.critical translation
- [S373] LOG.debug never used for translations
- [S374] You used a deprecated log level
Importing json
--------------
- [S375] It's preferable to use ``jsonutils`` from ``oslo_serialization``
instead of ``json`` when operating on ``json`` objects.

LICENSE

@@ -1,175 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.


@@ -1,38 +1,10 @@
========================
Team and repository tags
========================
This project is no longer maintained.
.. image:: https://governance.openstack.org/tc/badges/sahara-tests.svg
:target: https://governance.openstack.org/tc/reference/tags/index.html
The contents of this repository are still available in the Git
source code management system. To see the contents of this
repository before it reached its end of life, please check out the
previous commit with "git checkout HEAD^1".
.. Change things from this point on
Tests for Sahara project
========================
.. image:: https://img.shields.io/pypi/v/sahara-tests.svg
:target: https://pypi.org/project/sahara-tests/
:alt: Latest Version
.. image:: https://img.shields.io/pypi/dm/sahara-tests.svg
:target: https://pypi.org/project/sahara-tests/
:alt: Downloads
The sahara_tests folder contains the following tools:
- Sahara-scenario framework
- Sahara tempest tests
* License: Apache License, Version 2.0
* `Sahara-scenario documentation`_
* `PyPi`_ - package installation
* `Storyboard project`_ - release management
* `Source`_
* `Specs`_
* `Release notes`_
.. _Sahara-scenario documentation: https://docs.openstack.org/sahara-tests/latest/scenario.html
.. _PyPi: https://pypi.org/project/sahara-tests
.. _Storyboard project: https://storyboard.openstack.org/#!/project/940
.. _Source: https://opendev.org/openstack/sahara-tests
.. _Specs: https://specs.openstack.org/openstack/sahara-specs/
.. _Release notes: https://docs.openstack.org/releasenotes/sahara-tests/unreleased.html
For any further questions, please email
openstack-discuss@lists.openstack.org or join #openstack-dev on
OFTC.


@@ -1,7 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
openstackdocstheme>=2.2.1 # Apache-2.0
reno>=3.1.0 # Apache-2.0
sphinx>=2.0.0,!=2.1.0 # BSD


@@ -1,239 +0,0 @@
# -*- coding: utf-8 -*-
#
# sahara-tests documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 21 21:22:00 2015.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'reno.sphinxext',
'openstackdocstheme',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/sahara-tests'
openstackdocs_auto_name = False
openstackdocs_use_storyboard = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sahara-tests'
copyright = u'2015, Sahara team'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = False
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sahara-testsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sahara-tests.tex', u'sahara-tests Documentation',
u'Sahara team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sahara-tests', u'sahara-tests Documentation',
[u'Sahara team'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sahara-tests', u'sahara-tests Documentation',
u'Sahara team', 'sahara-tests', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'


@@ -1,14 +0,0 @@
============================
So You Want to Contribute...
============================
For general information on contributing to OpenStack, please check out the
`contributor guide <https://docs.openstack.org/contributors/>`_ to get started.
It covers all the basics that are common to all OpenStack projects: the
accounts you need, the basics of interacting with our Gerrit review system, how
we communicate as a community, etc.
sahara-tests is maintained by the OpenStack Sahara project.
To understand our development process and how you can contribute to it, please
look at the Sahara project's general contributor's page:
http://docs.openstack.org/sahara/latest/contributor/contributing.html


@@ -1,52 +0,0 @@
Welcome to sahara-tests's documentation!
========================================
Tests for the
`Sahara project <https://docs.openstack.org/sahara/latest/>`_.
It provides the Sahara scenario test framework and Tempest tests.
User guide
----------
**Scenario Tests**
.. toctree::
:maxdepth: 1
scenario
**Tempest Plugin**
.. toctree::
:maxdepth: 1
tempest-plugin
**Contributor Guide**
.. toctree::
:maxdepth: 1
contributor/contributing
Source
------
* License: Apache License, Version 2.0
* `PyPi`_ - package installation
* `Storyboard project`_ - release management
* `Source`_
* `Specs`_
* :doc:`releasenotes`
.. _PyPi: https://pypi.org/project/sahara-tests
.. _Storyboard project: https://storyboard.openstack.org/#!/project/940
.. _Source: https://opendev.org/openstack/sahara-tests
.. _Specs: https://specs.openstack.org/openstack/sahara-specs/
Indices and tables
------------------
* :ref:`genindex`
* :ref:`search`


@@ -1,11 +0,0 @@
:orphan:
.. The :orphan: directive is required because
this file is not in the toctree
even if it is included by a :doc: directive.
=============
Release Notes
=============
.. release-notes::


@@ -1,792 +0,0 @@
System(scenario) tests for Sahara project
=========================================
_`Authentication`
-----------------
You need to be authenticated to run these tests. To authenticate, you should
create an openrc file (like in devstack) and source it.
.. sourcecode:: bash
#!/bin/sh
export OS_TENANT_NAME='admin'
export OS_PROJECT_NAME='admin'
export OS_USERNAME='admin'
export OS_PASSWORD='admin'
export OS_AUTH_URL='http://localhost:5000/v2.0'
..
You can also specify the authentication details for the Sahara tests using
flags in the run command:
.. sourcecode:: console
List of flags:
--os-username
--os-password
--os-project-name
--os-auth-url
..
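For instance, a hypothetical invocation passing the credentials explicitly
(every value below is a placeholder for a devstack-like cloud) could be:

.. sourcecode:: console

    $ tox -e venv -- sahara-scenario -p fake \
        --os-username admin --os-password secret \
        --os-project-name admin \
        --os-auth-url http://localhost:5000/v3
..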
The last way to set the authentication details for these tests is using a
``clouds.yaml`` file.
After creating the file, you can set ``OS_CLOUD`` variable or ``--os-cloud``
flag to the name of the cloud you have created and those values will be used.
We have an example of a ``clouds.yaml`` file, and you can find it in
``sahara_tests/unit/scenario/clouds.yaml``.
Using this example, you can create your own file with clouds instead of
setting the ``OS_CLOUD`` variable or the ``--os-cloud`` flag. Note that more
than one cloud can be defined in the same file.
Here you can find more information about
`clouds
<https://docs.openstack.org/os-client-config/latest/user/configuration.html#config-files>`_
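As an illustration only (not a copy of the bundled example file), a minimal
``clouds.yaml`` could look like the sketch below; the cloud name and all
credential values are placeholders:

.. sourcecode:: yaml

    clouds:
      devstack-sahara:
        auth:
          auth_url: http://localhost:5000/v3
          username: admin
          password: admin
          project_name: admin
          user_domain_name: Default
          project_domain_name: Default
        region_name: RegionOne
..

With such a file in place, ``OS_CLOUD=devstack-sahara`` or
``--os-cloud devstack-sahara`` selects these credentials.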
Template variables
------------------
You need to define these variables because they are used in the Mako template
files and replace the placeholder values in the scenario files. The variables
are passed to the test runner through the ``-V`` parameter, which points to a
special config file. The config file is an INI-style file, as accepted by the
Python ConfigParser module. The key/values must be specified in the DEFAULT
section.
Variables and defaults templates
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following variables are currently used by defaults templates:
+-----------------------------+--------+-------------------------+
| Variable | Type | Value |
+=============================+========+=========================+
| network_private_name | string | private network name |
| | | for OS_PROJECT_NAME |
+-----------------------------+--------+-------------------------+
| network_public_name | string | public network name |
+-----------------------------+--------+-------------------------+
| <plugin_name_version>_image | string | name of the image to be |
| | | used for the specific |
| | | plugin/version |
+-----------------------------+--------+-------------------------+
| {ci,medium,large}_flavor_id | string | IDs of flavor with |
| | | different size |
+-----------------------------+--------+-------------------------+
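To make the variables above concrete, a hypothetical ``templatevars.ini``
(the variable names, network names, image name and flavor IDs below are all
assumptions for a devstack-like cloud) might look like:

.. sourcecode:: ini

    [DEFAULT]
    network_private_name = private
    network_public_name = public
    # one <plugin_name_version>_image entry per plugin/version under test;
    # the variable and image names here are assumptions
    vanilla_image = sahara-liberty-vanilla-2.7.1-ubuntu-14.04
    ci_flavor_id = 2
    medium_flavor_id = 3
    large_flavor_id = 4
..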
After finishing with authentication and the configuration of the template
variables file, you can run the Sahara tests using the Sahara Scenario
Framework.
How to run
----------
The scenario framework has default templates for testing Sahara. To
use them, specify the plugin and version (for the transient check and the fake
plugin, the version is not necessary):
.. sourcecode:: console
$ tox -e venv -- sahara-scenario -p vanilla -v 2.7.1
..
Different OpenStack releases may require different configuration for the
same plugin and version. When you use the plugin and version flags and
want to use the configuration file for a specific OpenStack release
supported by sahara-scenario, you can also specify the ``-r RELEASE``
argument, where ``RELEASE`` is the official name of the OpenStack release.
By default only default configuration files for the specified plugin and
version (and release, if any) are included. Also, if any job configuration
is included, only jobs not tagged with any features will be executed.
In order to enable feature-specific configuration settings, pass
the list of requested features through the ``--feature`` (``-f``) parameter.
The parameter makes sure that:
* additional base configuration files which are feature-specific are included;
* in addition to non-tagged jobs, jobs which are tagged with the specified
features are included too.
Example:
.. sourcecode:: console
$ tox -e venv -- sahara-scenario -p vanilla -v 2.7.1 -f s3 -f myfeature -r rocky
..
Create the YAML and/or the YAML Mako template files for the scenario tests,
such as ``etc/scenario/simple-testcase.yaml``.
You can take a look at sample YAML files in `How to write scenario files`_.
The file ``templatevars.ini`` contains the values of the variables referenced
by any testcase you are going to run.
If you want to run tests for the Vanilla plugin with the Hadoop version 2.7.1,
you should create ``templatevars.ini`` with the appropriate values (see the
section `Variables and defaults templates`_) and use the following tox env:
.. sourcecode:: console
$ tox -e venv -- sahara-scenario -V templatevars.ini sahara_tests/scenario/defaults/vanilla-2.7.1.yaml.mako
..
Credentials are located in ``sahara_tests/scenario/defaults/credentials.yaml.mako``.
This file replaces the variables included in the testcase YAML or YAML Mako files
with the values defined in ``templatevars.ini``.
.. sourcecode:: console
$ tox -e venv -- sahara-scenario -V templatevars.ini sahara_tests/scenario/defaults/credentials.yaml.mako sahara_tests/scenario/defaults/vanilla-2.7.1.yaml.mako
..
The most convenient way to run the sahara-scenario tests for the Vanilla
plugin is:
.. sourcecode:: console
$ tox -e venv -- sahara-scenario -V templatevars.ini sahara_tests/scenario/defaults/credentials.yaml.mako -p vanilla -v 2.7.1
..
For more information about writing scenario YAML files, see the section
`How to write scenario files`_.
Virtual environment flags
-------------------------
You can pass the following flags to the Sahara scenario tests.
Optional arguments
~~~~~~~~~~~~~~~~~~
+-------------------+----------------------------+
| Arguments | Description |
+===================+============================+
| --help, -h | show help message and exit |
+-------------------+----------------------------+
| --variable_file, | path to the file with |
| -V | template variables |
+-------------------+----------------------------+
| --verbose | increase output verbosity |
+-------------------+----------------------------+
| --validate | validate yaml-files, |
| | tests will not be run |
+-------------------+----------------------------+
| --args ARGS | pairs of argument |
| | key:value |
+-------------------+----------------------------+
| --plugin, | specify plugin name |
| -p PLUGIN | |
+-------------------+----------------------------+
| --plugin_version, | specify plugin version |
| -v PLUGIN_VERSION | |
+-------------------+----------------------------+
| --release, | specify Sahara release |
| -r RELEASE | |
+-------------------+----------------------------+
| --report | write results to file |
+-------------------+----------------------------+
| --feature, | list of features |
| -f FEAT1 | that should be enabled |
| [-f FEAT2 ...] | |
+-------------------+----------------------------+
| --count COUNT | specify count of runs |
+-------------------+----------------------------+
| --os-cloud name | name of cloud to connect |
+-------------------+----------------------------+
| --os-auth-type, | |
| --os-auth-plugin | authentication type to use |
| name | |
+-------------------+----------------------------+
Authentication options
~~~~~~~~~~~~~~~~~~~~~~
Options specific to the password plugin.
+--------------------------+--------------------------------+
| Arguments | Description |
+==========================+================================+
| --os-auth-url | authentication URL |
| OS_AUTH_URL | |
+--------------------------+--------------------------------+
| --os-domain-id | domain ID to scope to |
| OS_DOMAIN_ID | |
+--------------------------+--------------------------------+
| --os-domain-name | domain name to scope to |
| OS_DOMAIN_NAME | |
+--------------------------+--------------------------------+
| --os-project-id | |
| --os-tenant-id | project ID to scope to |
| OS_PROJECT_ID | |
+--------------------------+--------------------------------+
| --os-project-name | |
| --os-tenant-name | project name to scope to |
| OS_PROJECT_NAME | |
+--------------------------+--------------------------------+
| --os-project-domain-id | domain ID containing project |
| OS_PROJECT_DOMAIN_ID | |
+--------------------------+--------------------------------+
| --os-project-domain-name | domain name containing project |
| OS_PROJECT_DOMAIN_NAME | |
+--------------------------+--------------------------------+
| --os-trust-id | trust ID |
| OS_TRUST_ID | |
+--------------------------+--------------------------------+
| | optional domain ID to use with |
| | v3 and v2 parameters. It will |
| --os-default-domain-id | be used for both the user and |
| OS_DEFAULT_DOMAIN_ID | project domain in v3 and |
| | ignored in v2 authentication. |
+--------------------------+--------------------------------+
| | optional domain name to use |
| | with v3 API and v2parameters. |
| --os-default-domain-name | It will be used for both |
| OS_DEFAULT_DOMAIN_NAME | the user and project domain |
| | in v3 and ignored in v2 |
| | authentication. |
+--------------------------+--------------------------------+
| --os-user-id | user ID |
| OS_USER_ID | |
+--------------------------+--------------------------------+
| --os-username, | |
| --os-user-name | username |
| OS_USERNAME | |
+--------------------------+--------------------------------+
| --os-user-domain-id | user's domain id |
| OS_USER_DOMAIN_ID | |
+--------------------------+--------------------------------+
| --os-user-domain-name | user's domain name |
| OS_USER_DOMAIN_NAME | |
+--------------------------+--------------------------------+
| --os-password | user's password |
| OS_PASSWORD | |
+--------------------------+--------------------------------+
API Connection Options
~~~~~~~~~~~~~~~~~~~~~~
Options controlling the HTTP API connections.
+--------------------------+--------------------------------------+
| Arguments | Description |
+==========================+======================================+
| | explicitly allow client to |
| | perform "insecure" TLS (https) |
| --insecure | requests. The server's |
| | certificate will not be verified |
| | against any certificate authorities. |
| | This option should be used with |
| | caution. |
+--------------------------+--------------------------------------+
| | specify a CA bundle file to use in |
| --os-cacert | verifying a TLS(https) server |
| <ca-certificate> | certificate. Defaults to env |
| | [OS_CACERT]. |
+--------------------------+--------------------------------------+
| --os-cert <certificate> | defaults to env[OS_CERT] |
+--------------------------+--------------------------------------+
| --os-key <key> | defaults to env[OS_KEY] |
+--------------------------+--------------------------------------+
| --timeout <seconds> | set request timeout (in seconds) |
+--------------------------+--------------------------------------+
Service Options
~~~~~~~~~~~~~~~
Options controlling the specialization of the API connection, based on
information found in the catalog.
+------------------------+----------------------------+
| Arguments | Description |
+========================+============================+
| --os-service-type | service type to request |
| <name> | from the catalog |
+------------------------+----------------------------+
| --os-service-name | service name to request |
| <name> | from the catalog |
+------------------------+----------------------------+
| --os-interface <name> | API Interface to use: |
| | [public, internal, admin] |
+------------------------+----------------------------+
| --os-region-name | region of the cloud to use |
| <name> | |
+------------------------+----------------------------+
| | endpoint to use instead |
| --os-endpoint-override | of the endpoint in the |
| <name> | catalog |
+------------------------+----------------------------+
| --os-api-version | which version of the |
| <name> | service API to use |
+------------------------+----------------------------+
_`How to write scenario files`
------------------------------
An example of a full scenario file with all these parameters can be found in
``etc/scenario/simple-testcase.yaml``.
You can write all sections in one or several files, which can be simple YAML
files or YAML-based Mako templates (.yaml.mako or .yml.mako). For example,
the most common sections can be kept in ``templatevars.ini`` and
``sahara_tests/scenario/defaults/credentials.yaml.mako``.
Field "concurrency"
-------------------
This field has an integer value and sets the concurrency for running tests.
For example:
``concurrency: 2``
For parallel testing, use the ``--count`` flag in the run command and
set the ``concurrency`` value.
Section "credentials"
---------------------
This section is dictionary-type.
+---------------------+--------+----------+----------------+----------------+
| Fields | Type | Required | Default | Value |
+=====================+========+==========+================+================+
| sahara_service_type | string | | data-processing| service type |
| | | | | for sahara |
+---------------------+--------+----------+----------------+----------------+
| sahara_url | string | | None | url of sahara |
+---------------------+--------+----------+----------------+----------------+
| ssl_cert | string | | None | ssl certificate|
| | | | | for all clients|
+---------------------+--------+----------+----------------+----------------+
| ssl_verify | boolean| | False | enable verify |
| | | | | ssl for sahara |
+---------------------+--------+----------+----------------+----------------+
Section "network"
-----------------
This section is dictionary-type.
+-----------------------------+---------+----------+---------+----------------+
| Fields | Type | Required | Default | Value |
+=============================+=========+==========+=========+================+
| private_network | string | True | private | name or id of |
| | | | | private network|
+-----------------------------+---------+----------+---------+----------------+
| public_network | string | | public | name or id of |
| | | | | public network |
+-----------------------------+---------+----------+---------+----------------+
| auto_assignment_floating_ip | boolean | | False | |
+-----------------------------+---------+----------+---------+----------------+
Section "clusters"
------------------
This section is an array-type.
.. list-table::
:header-rows: 1
* - Fields
- Type
- Required
- Default
- Value
* - plugin_name
- string
- True
-
- name of plugin
* - plugin_version
- string
- True
-
- version of plugin
* - image
- string
- True
-
- name or id of image
* - image_username
- string
-
-
- username for registering image
* - existing_cluster
- string
-
-
- cluster name or id for testing
* - key_name
- string
-
-
- name of registered ssh key for testing cluster
* - node_group_templates
- object
-
-
- see `section "node_group_templates"`_
* - cluster_template
- object
-
-
- see `section "cluster_template"`_
* - cluster
- object
-
-
- see `section "cluster"`_
* - scaling
- object
-
-
- see `section "scaling"`_
* - timeout_check_transient
- integer
-
- 300
- timeout for checking transient
* - timeout_poll_jobs_status
- integer
-
- 1800
- timeout for polling jobs state
* - timeout_delete_resource
- integer
-
- 300
- timeout for delete resource
* - timeout_poll_cluster_status
- integer
-
- 3600
- timeout for polling cluster state
* - scenario
- array
-
- ['run_jobs', 'scale', 'run_jobs']
- array of checks
* - edp_jobs_flow
- string, list
-
-
- names of the jobs defined under edp_jobs_flow to be executed on the cluster;
if list, each item may be a dict with fields
``name`` (string) and ``features`` (list), or a string
* - hdfs_username
- string
-
- hadoop
- username for hdfs
* - retain_resources
- boolean
-
- False
-
Section "node_group_templates"
------------------------------
This section is an array-type.
.. list-table::
:header-rows: 1
* - Fields
- Type
- Required
- Default
- Value
* - name
- string
- True
-
- name for node group template
* - flavor
- string or object
- True
-
- name or id of flavor, or see `section "flavor"`_
* - node_processes
- string
- True
-
- name of process
* - description
- string
-
- Empty
- description for node group
* - volumes_per_node
- integer
-
- 0
- minimum 0
* - volumes_size
- integer
-
- 0
- minimum 0
* - auto_security_group
- boolean
-
- True
-
* - security_group
- array
-
-
- security group
* - node_configs
- object
-
-
- name_of_config_section: config: value
* - availability_zone
- string
-
-
-
* - volumes_availability_zone
- string
-
-
-
* - volume_type
- string
-
-
-
* - is_proxy_gateway
- boolean
-
- False
- use this node as proxy gateway
* - edp_batching
- integer
-
- count jobs
- use for batching jobs
Section "flavor"
----------------
This section is a dictionary-type.
+----------------+---------+----------+---------------+-----------------+
| Fields | Type | Required | Default | Value |
+================+=========+==========+===============+=================+
| name | string | | auto-generate | name for flavor |
+----------------+---------+----------+---------------+-----------------+
| id | string | | auto-generate | id for flavor |
+----------------+---------+----------+---------------+-----------------+
| vcpus | integer | | 1 | number of VCPUs |
| | | | | for the flavor |
+----------------+---------+----------+---------------+-----------------+
| ram | integer | | 1 | memory in MB for|
| | | | | the flavor |
+----------------+---------+----------+---------------+-----------------+
| root_disk | integer | | 0 | size of local |
| | | | | disk in GB |
+----------------+---------+----------+---------------+-----------------+
| ephemeral_disk | integer | | 0 | ephemeral space |
| | | | | in MB |
+----------------+---------+----------+---------------+-----------------+
| swap_disk | integer | | 0 | swap space in MB|
+----------------+---------+----------+---------------+-----------------+
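For instance, a node group template entry that defines its flavor inline
(a sketch mirroring the ``sah1.small`` flavor used by the multinode Spark
gate job earlier in this repository) might look like:

.. sourcecode:: yaml

    - name: worker
      flavor:
        name: sah1.small
        id: 20
        vcpus: 1
        ram: 1536
        root_disk: 20
      node_processes:
        - datanode
..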
Section "cluster_template"
--------------------------
This section is dictionary-type.
.. list-table::
:header-rows: 1
* - Fields
- Type
- Required
- Default
- Value
* - name
- string
-
-
- name for cluster template
* - description
- string
-
- Empty
- description
* - cluster_configs
- object
-
-
- name_of_config_section: config: value
* - node_group_templates
- object
- True
-
- name_of_node_group: count
* - anti_affinity
- array
-
- Empty
- array of roles
Section "cluster"
-----------------
This section is dictionary-type.
+--------------+---------+----------+---------+------------------+
| Fields | Type | Required | Default | Value |
+==============+=========+==========+=========+==================+
| name | string | | Empty | name for cluster |
+--------------+---------+----------+---------+------------------+
| description | string | | Empty | description |
+--------------+---------+----------+---------+------------------+
| is_transient | boolean | | False | value |
+--------------+---------+----------+---------+------------------+
Section "scaling"
-----------------
This section is an array-type.
+------------+---------+----------+-----------+--------------------+
| Fields | Type | Required | Default | Value |
+============+=========+==========+===========+====================+
| operation | string | True | | "add" or "resize" |
+------------+---------+----------+-----------+--------------------+
| node_group | string | True | Empty | name of node group |
+------------+---------+----------+-----------+--------------------+
| size | integer | True | Empty | count node group |
+------------+---------+----------+-----------+--------------------+
Section "edp_jobs_flow"
-----------------------
This section contains objects whose names are referenced from the
`section "clusters"`_ field "edp_jobs_flow".
Each object is an array of job definitions.
Required: type
.. list-table::
:header-rows: 1
* - Fields
- Type
- Required
- Default
- Value
* - type
- string
- True
-
- "Pig", "Java", "MapReduce", "MapReduce.Streaming", "Hive", "Spark", "Shell"
* - input_datasource
- object
-
-
- see `section "input_datasource"`_
* - output_datasource
- object
-
-
- see `section "output_datasource"`_
* - main_lib
- object
-
-
- see `section "main_lib"`_
* - additional_libs
- object
-
-
- see `section "additional_libs"`_
* - configs
- dict
-
- Empty
- config: value
* - args
- array
-
- Empty
- array of args
Section "input_datasource"
--------------------------
Required: type, source
This section is dictionary-type.
+---------------+--------+----------+-----------+---------------------------+
| Fields | Type | Required | Default | Value |
+===============+========+==========+===========+===========================+
| type | string | True | | "swift", "hdfs", "maprfs" |
+---------------+--------+----------+-----------+---------------------------+
| hdfs_username | string | | | username for hdfs |
+---------------+--------+----------+-----------+---------------------------+
| source | string | True | | uri of source |
+---------------+--------+----------+-----------+---------------------------+
Section "output_datasource"
---------------------------
Required: type, destination
This section is dictionary-type.
+-------------+--------+----------+-----------+---------------------------+
| Fields | Type | Required | Default | Value |
+=============+========+==========+===========+===========================+
| type | string | True | | "swift", "hdfs", "maprfs" |
+-------------+--------+----------+-----------+---------------------------+
| destination | string | True | | uri of destination |
+-------------+--------+----------+-----------+---------------------------+
Section "main_lib"
------------------
Required: type, source
This section is dictionary-type.
+--------+--------+----------+-----------+----------------------+
| Fields | Type | Required | Default | Value |
+========+========+==========+===========+======================+
| type | string | True | | "swift" or "database" |
+--------+--------+----------+-----------+----------------------+
| source | string | True | | uri of source |
+--------+--------+----------+-----------+----------------------+
Section "additional_libs"
-------------------------
Required: type, source
This section is an array-type.
+--------+--------+----------+-----------+----------------------+
| Fields | Type | Required | Default | Value |
+========+========+==========+===========+======================+
| type | string | True | | "swift" or "database" |
+--------+--------+----------+-----------+----------------------+
| source | string | True | | uri of source |
+--------+--------+----------+-----------+----------------------+
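Putting the sections above together, a minimal hypothetical scenario file for
the fake plugin (the image and cluster names are placeholders; the EDP job
reuses the Pig example paths shown elsewhere in this repository) could be:

.. sourcecode:: yaml

    concurrency: 1
    network:
      private_network: private
      public_network: public
    clusters:
      - plugin_name: fake
        plugin_version: '0.1'
        image: fake-plugin-image
        node_group_templates:
          - name: aio
            flavor: m1.small
            node_processes:
              - namenode
              - jobtracker
              - datanode
              - tasktracker
        cluster_template:
          name: fake01
          node_group_templates:
            aio: 1
        cluster:
          name: fake-scenario-test
        scenario:
          - run_jobs
        edp_jobs_flow: fake
    edp_jobs_flow:
      fake:
        - type: Pig
          input_datasource:
            type: swift
            source: edp-examples/edp-pig/cleanup-string/data/input
          output_datasource:
            type: hdfs
            destination: /user/hadoop/edp-output
          main_lib:
            type: swift
            source: edp-examples/edp-pig/cleanup-string/example.pig
..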


@@ -1,76 +0,0 @@
Tempest Integration of Sahara
=============================
The Sahara Tempest plugin contains API, CLI and client tests.
There are several ways to run the Tempest tests: it is possible to run them
using your Devstack or using Rally.
Run Tempest tests on Devstack
-----------------------------
See how to configure Tempest at
`Tempest Configuration Guide <https://docs.openstack.org/tempest/latest/configuration.html>`_.
Tempest automatically discovers installed plugins. That's why you just need to
install the Python package that contains the Sahara Tempest plugin in the
same environment where Tempest is installed.
.. sourcecode:: console
$ git clone https://opendev.org/openstack/sahara-tests
$ cd sahara-tests/
$ pip install sahara-tests/
..
After that you can run Tempest tests. You can specify the name of
test or a more complex regular expression. While any ``testr``-based
test runner can be used, the official command for executing Tempest
tests is ``tempest run``.
For example, the following command will run a specific subset of tests:
.. sourcecode:: console
$ tempest run --regex '^sahara_tempest_plugin.tests.cli.test_scenario.Scenario.'
..
The full syntax of ``tempest run`` is described in `the relevant section of
the Tempest documentation <https://docs.openstack.org/tempest/latest/run.html>`_.
Other useful links:
* `Using Tempest plugins <https://docs.openstack.org/tempest/latest/plugin.html#using-plugins>`_.
* `Tempest Quickstart <https://docs.openstack.org/tempest/latest/overview.html#quickstart>`_.
Run Tempest tests using Rally
-----------------------------
First of all, be sure that Rally is installed and working. There should be
a Rally deployment with a correctly working Sahara service in it.
Full information can be found on the
`rally quick start guide <https://docs.openstack.org/rally/latest/quick_start/tutorial/step_9_verifying_cloud_via_tempest_verifier.html>`_.
Using this information, you can set up the ``rally verify`` tool and the
plugin for testing Sahara. After this you are free to run the Sahara Tempest
tests. Here is an example of how to run all the tests:
.. sourcecode:: console
$ rally verify start --pattern sahara_tempest_plugin.tests
..
If you want to run the client or CLI tests, you need to add the following line
to the generated config in the ``[data-processing]`` section:
.. sourcecode:: bash
test_image_name = IMAGE_NAME
..
where ``IMAGE_NAME`` is the name of the image on which you would like to run
the tests.
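For example, a hypothetical ``[data-processing]`` snippet of the generated
config (the values mirror the ones used by the gate jobs earlier in this
repository and are placeholders here) could be:

.. sourcecode:: ini

    [data-processing]
    test_image_name = xenial-server-cloudimg-amd64-disk1
    test_ssh_user = ubuntu
..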


@@ -1,3 +0,0 @@
network:
private_network: ${network_private_name}
public_network: ${network_public_name}


@@ -1,6 +0,0 @@
credentials:
s3_accesskey: ${s3_accesskey}
s3_secretkey: ${s3_secretkey}
s3_endpoint: ${s3_endpoint}
s3_endpoint_ssl: ${s3_endpoint_ssl}
s3_bucket_path: ${s3_bucket_path}


@@ -1,43 +0,0 @@
edp_jobs_flow:
fake:
- type: Pig
input_datasource:
type: swift
source: edp-examples/edp-pig/cleanup-string/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: edp-examples/edp-pig/cleanup-string/example.pig
additional_libs:
- type: swift
source: edp-examples/edp-pig/cleanup-string/edp-pig-udf-stringcleaner.jar
spark_pi:
- type: Spark
main_lib:
type: database
source: edp-examples/edp-spark/spark-pi.py
configs:
edp.java.main_class: main
args:
- 2
spark_wordcount:
- type: Spark
input_datasource:
type: swift
source: edp-examples/edp-spark/sample_input.txt
output_datasource:
type: swift
destination: edp-output
main_lib:
type: database
source: edp-examples/edp-spark/spark-wordcount.jar
configs:
edp.java.main_class: sahara.edp.spark.SparkWordCount
edp.spark.adapt_for_swift: true
fs.swift.service.sahara.username: ${os_username}
fs.swift.service.sahara.password: ${os_password}
args:
- '{input_datasource}'
- '{output_datasource}'

View File

@ -1,25 +0,0 @@
edp_jobs_flow:
spark_wordcount_s3:
- type: Spark
input_datasource:
type: s3
source: edp-examples/edp-spark/sample_input.txt
output_datasource:
type: swift
destination: edp-output
main_lib:
type: swift
source: edp-examples/edp-spark/spark-wordcount.jar
configs:
edp.java.main_class: sahara.edp.spark.SparkWordCount
edp.spark.adapt_for_swift: true
fs.swift.service.sahara.username: ${os_username}
fs.swift.service.sahara.password: ${os_password}
fs.s3a.access.key: ${s3_accesskey}
fs.s3a.secret.key: ${s3_secretkey}
fs.s3a.endpoint: ${s3_endpoint}
fs.s3a.connection.ssl.enabled: ${s3_endpoint_ssl}
fs.s3a.path.style.access: ${s3_bucket_path}
args:
- '{input_datasource}'
- '{output_datasource}'

View File

@ -1,35 +0,0 @@
<%page args="is_transient='false', use_auto_security_group='true', ci_flavor_id='m1.small'"/>
clusters:
- plugin_name: fake
plugin_version: "0.1"
image: ${plugin_image}
node_group_templates:
- name: aio
flavor: ${ci_flavor_id}
node_processes:
- namenode
- jobtracker
- datanode
- tasktracker
volumes_per_node: 2
volumes_size: 1
auto_security_group: ${use_auto_security_group}
- name: worker
flavor: ${ci_flavor_id}
node_processes:
- datanode
- jobtracker
auto_security_group: ${use_auto_security_group}
cluster_template:
name: fake01
node_group_templates:
aio: 1
cluster:
name: ${cluster_name}
is_transient: ${is_transient}
scaling:
- operation: add
node_group: worker
size: 1
edp_jobs_flow: fake

View File

@ -1,41 +0,0 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', cluster_name='ct', ci_flavor_id='m1.small'"/>
clusters:
- plugin_name: spark
plugin_version: 1.6.0
image: ${plugin_image}
node_group_templates:
- name: master
flavor: ${ci_flavor_id}
node_processes:
- master
- namenode
- datanode
- slave
auto_security_group: ${use_auto_security_group}
is_proxy_gateway: ${is_proxy_gateway}
- name: worker
flavor: ${ci_flavor_id}
node_processes:
- datanode
- slave
auto_security_group: ${use_auto_security_group}
cluster_template:
name: spark160
node_group_templates:
master: 1
cluster_configs:
HDFS:
dfs.replication: 1
scaling:
- operation: add
node_group: worker
size: 1
scenario:
- run_jobs
- scale
edp_jobs_flow:
- spark_pi
- spark_wordcount
cluster:
name: ${cluster_name}

View File

@ -1,89 +0,0 @@
concurrency: 1
network:
private_network: private
public_network: public
clusters:
- plugin_name: vanilla
plugin_version: 2.7.1
image: sahara-liberty-vanilla-2.7.1-ubuntu-14.04
edp_jobs_flow: test_flow
- plugin_name: hdp
plugin_version: 2.0.6
image: f3c4a228-9ba4-41f1-b100-a0587689d4dd
scaling:
- operation: resize
node_group: hdp-worker
size: 5
- plugin_name: cdh
plugin_version: 5.3.0
image: ubuntu-cdh-5.3.0
scaling:
- operation: add
node_group: cdh-worker
size: 1
edp_jobs_flow:
- test_flow
- name: test_manila
features: manila
edp_jobs_flow:
test_flow:
- type: Pig
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
main_lib:
type: swift
source: etc/edp-examples/edp-pig/top-todoers/example.pig
configs:
dfs.replication: 1
- type: Java
additional_libs:
- type: database
source: etc/edp-examples/hadoop2/edp-java/hadoop-mapreduce-examples-2.7.1.jar
configs:
edp.java.main_class: org.apache.hadoop.examples.QuasiMonteCarlo
args:
- 10
- 10
- type: MapReduce
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
additional_libs:
- type: database
source: etc/edp-examples/edp-mapreduce/edp-mapreduce.jar
configs:
mapred.mapper.class: org.apache.oozie.example.SampleMapper
mapred.reducer.class: org.apache.oozie.example.SampleReducer
- type: MapReduce.Streaming
input_datasource:
type: swift
source: etc/edp-examples/edp-pig/trim-spaces/data/input
output_datasource:
type: hdfs
destination: /user/hadoop/edp-output
configs:
edp.streaming.mapper: /bin/cat
edp.streaming.reducer: /usr/bin/wc
test_manila:
- type: Pig
input_datasource:
type: manila
source: etc/edp-examples/edp-pig/top-todoers/data/input
output_datasource:
type: manila
destination: /user/hadoop/edp-output
main_lib:
type: manila
source: etc/edp-examples/edp-pig/top-todoers/example.pig
configs:
dfs.replication: 1

View File

@ -1,19 +0,0 @@
- hosts: all
strategy: linear
roles:
- orchestrate-devstack
- hosts: controller
tasks:
- name: build the required image
include_role:
name: build-sahara-images-dib
when: sahara_plugin is defined and sahara_plugin != 'fake'
- name: setup the sahara Scenario test environment
include_role:
name: setup-sahara-scenario-env
- include_role:
name: ensure-tox
- name: run the Sahara scenario tests
include_role:
name: run-sahara-scenario

View File

@ -1,7 +0,0 @@
---
prelude: >
Added ability to use clouds.yaml with scenario tests
features:
- Users can now use a clouds.yaml file, in the format defined
by os-client-config, to specify the auth values
used by a scenario test.

View File

@ -1,3 +0,0 @@
---
features:
- New basic scenario test template for CDH 5.13.0.

View File

@ -1,6 +0,0 @@
---
prelude: >
Created a new CLI test in the Sahara Tempest plugin for node group templates
features:
- Node group templates can now be filtered by plugin using the column flag,
and the test checks that the filtering succeeded.

View File

@ -1,7 +0,0 @@
---
upgrade:
- |
Python 2.7 support has been dropped. The last release of sahara-tests
to support Python 2.7 is 0.9.1, the first version that can be used
with OpenStack Train.
The minimum version of Python now supported by sahara-tests is Python 3.5.

View File

@ -1,7 +0,0 @@
---
prelude: >
Clean resources created in CLI tests
fixes:
- Fixed a bug in which projects and networks created
for the CLI tests were not deleted
at the end of the run.

View File

@ -1,6 +0,0 @@
---
prelude: >
Discovery of data sources with relative paths is now fixed.
fixes:
- Datasources with relative paths are now properly found
from the default resources.

View File

@ -1,6 +0,0 @@
---
prelude: >
Fix default resource discovery from the installed package.
fixes:
- The default set of resources (test templates for each plugin, etc)
can now be properly discovered when the package is installed.

View File

@ -1,6 +0,0 @@
---
fixes:
- |
The node group template creation test for the CLI client does not fail
anymore when the Sahara plugin selected for testing provides more than
one version.

View File

@ -1,4 +0,0 @@
---
fixes:
- |
Fix the test template for storm 1.2.

View File

@ -1,6 +0,0 @@
---
fixes:
- |
Force the PEM format for the generated ssh keys,
because paramiko does not yet support the new one
(https://github.com/paramiko/paramiko/issues/602).

View File

@ -1,9 +0,0 @@
---
prelude: >
Client tests have been imported from the Sahara repository.
features:
- The tests for the official Python clients have been moved here
from the Sahara repository. They are based on the Tempest
libraries even though they do not follow the Tempest guidelines
(as they need to test the Python clients, they do not use
the native Tempest clients).

View File

@ -1,4 +0,0 @@
---
other:
- The default timeout for cluster polling was raised from 1800
to 3600 seconds.

View File

@ -1,3 +0,0 @@
---
prelude: >
Migrate auth system from keystoneclient to keystoneauth

View File

@ -1,3 +0,0 @@
---
prelude: >
Migrate sahara cli tests from saharaclient to sahara-tests

View File

@ -1,5 +0,0 @@
---
fixes:
- |
Properly handle more use cases when only Keystone v3 is enabled and/or
its service URI is missing the /v3 suffix.

View File

@ -1,6 +0,0 @@
---
prelude: >
Created new CLI tests in the Sahara Tempest plugin for job types
features:
- Job types can now be filtered using the column flag, and the config file of
any job type can be saved to a specified file.

View File

@ -1,3 +0,0 @@
---
upgrade:
- Migration from novaclient.v2.images to glanceclient

View File

@ -1,35 +0,0 @@
---
prelude: |
Fixes and additions for the API and CLI tests.
Support for CDH 5.9 in scenario tests.
Fewer parameters required for scenario tests.
features:
- The Tempest-based tests have received an increase in coverage
for both API and CLI tests (jobs, plugins, templates; negative
testing).
- CDH 5.9 can be tested thanks to the addition of the specific
templates.
- A few parameters are no longer required by the templates in scenario
tests; a default value is provided (name of templates, etc.).
- The flavors used in templates are now parameters too.
- If a flavor name is provided in addition to its specification and
a flavor with that name exists, it is used and not created again.
- The dependencies on non-public Tempest interfaces have been
removed.
upgrade:
- The names of the variables/parameters used for the names of
the images in the scenario tests have been changed to follow
a more consistent pattern.
critical:
- The Tempest plugin was fixed after the removal of
the data_processing plugin from the tempest repository.
Most of the work was in place, only a small change was
missing.
fixes:
- The artifacts created during the execution of CLI tests
are properly cleaned at the end of the tests.
other:
- The documentation was improved (scenario tests) and
extended (Tempest plugin).

View File

@ -1,6 +0,0 @@
---
upgrade:
- |
Removed nova-network configuration. Nova network has been fully
removed from the OpenStack codebase, in all releases supported
by sahara-tests.

View File

@ -1,6 +0,0 @@
---
prelude: >
YAML files for deprecated plugins were removed
fixes:
- Removed YAML files for the Kilo release
- Removed unused YAML files for the master branch

View File

@ -1,4 +0,0 @@
---
features:
- Sahara API tests have been imported from Tempest and
made available using the Tempest Plugin Interface.

View File

@ -1,5 +0,0 @@
---
fixes:
- |
Fix a strange error where the internally generated test does not start
because sahara_tests.scenario is not found when running inside a virtualenv.

View File

@ -1,10 +0,0 @@
---
prelude: >
Tempest tests now support APIv2.
features:
- |
The Tempest plugin provides an APIv2 DataProcessing client and
tempest tests can be executed against APIv2 too.
The type of API used is driven by a tempest.conf configuration key
(data_processing.use_api_v2 for API tests,
data_processing.api_version_saharaclient for client and CLI tests)

View File

@ -1,10 +0,0 @@
---
features:
- |
The ``api_version_saharaclient`` variable now controls the Sahara API
version used not only by the Tempest.lib-based clients tests,
but also by the Tempest CLI tests.
deprecations:
- |
The ``saharaclient_version`` option in the ``data-processing`` group
has been renamed to ``api_version_saharaclient``.

View File

@ -1,5 +0,0 @@
---
features:
- |
Allow enabling boot_from_volume on node group templates
when running scenario tests with APIv2.

View File

@ -1,11 +0,0 @@
---
features:
- |
sahara-scenario supports feature sets. When passing specific
feature tags to sahara-scenario, additional job templates and
EDP jobs marked with those tags will be loaded.
fixes:
- |
When passing the plugin/version parameters to sahara-scenario,
users can now specify additional YAML templates which will be merged
to the default YAMLs, instead of being ignored.

View File

@ -1,4 +0,0 @@
---
features:
- |
The fully generated YAML file is printed when the verbose mode is enabled.

View File

@ -1,8 +0,0 @@
---
features:
- |
sahara-scenario now supports testing the S3 API for job binaries
and data sources, a feature introduced in Rocky.
The code can be enabled using the "s3" feature and
various templates now run an S3-based job too when
the feature is enabled from the command line.

View File

@ -1,8 +0,0 @@
---
prelude: >
Scenario tests now support APIv2.
features:
- |
Scenario tests can be executed against APIv2.
The usage of APIv2 is enabled through a new command line argument
for sahara-scenario (--v2, -2).

View File

@ -1,7 +0,0 @@
---
prelude: >
Removed the need for a .testr.conf file when calling the
test runner.
fixes:
- A .testr.conf file was previously required in the runner
execution directory; this is now handled internally.

View File

@ -1,3 +0,0 @@
---
other:
- OpenStack reno integration was added for managing release notes

View File

@ -1,8 +0,0 @@
---
prelude: >
Tests no longer depend on the fake plugin to run
other:
- Adapted the Sahara Tests code to stop relying only on the fake
plugin and to use the default available plugin. However, it is
worth noting that, if available, the fake plugin will still
be used.

View File

@ -1,4 +0,0 @@
---
upgrade:
- |
sahara-scenario now requires stestr.

View File

@ -1,6 +0,0 @@
---
features:
- |
Added basic S3 API tests (job binaries and data sources) to the Tempest
plugin. The tests are disabled by default and can be enabled using
a new tempest.conf key (data-processing-feature-enabled.s3).

View File

@ -1,7 +0,0 @@
---
other:
- The dependency on tempest.scenario.manager.py has been
removed. There are still dependencies on internal
Tempest interfaces but they are more difficult to replace
due to the lack of tempest.lib alternatives, and
scenario.manager.py is undergoing a heavy refactoring.

View File

@ -1,15 +0,0 @@
---
prelude: >
Ocata test templates are now available, while Liberty
test templates have been removed.
features:
- A folder with test templates for Ocata has been created
and initialized starting from the templates in the main
directory, following the status of the jobs tested on
the Sahara CI.
deprecations:
- The Liberty-specific job templates have been removed.
This means that starting from this release Liberty is
not supported (it has been EOL for a while).
- The MapR 5.0.0 test template has been removed from
the master branch as well.

View File

@ -1,12 +0,0 @@
---
prelude: >
Pike test templates are now available, while Mitaka
test templates have been removed.
features:
- A folder with scenario templates for Pike was added.
It is a subset of the templates in the main directory.
- Some requirements have been raised (especially Tempest).
deprecations:
- The Mitaka-specific job templates have been removed.
This means that starting from this release Mitaka is
not supported (it has been EOL for a while).

View File

@ -1,13 +0,0 @@
---
prelude: >
Queens test templates are now available, while Newton
test templates have been removed.
features:
- A folder with scenario templates for Queens was added.
It is a subset of the templates in the main directory.
- The available templates now support Spark 2.2
and Vanilla 2.8.2.
deprecations:
- The Newton-specific job templates have been removed.
This means that starting from this release Newton is
not supported (it has been EOL for a while).

View File

@ -1,16 +0,0 @@
---
prelude: >
Rocky test templates are now available.
features:
- A folder with scenario templates for Rocky was added.
It is a subset of the templates in the main directory,
and includes all non-deprecated plugin/versions.
- The available default test templates now also support
Spark 2.3, Storm 1.2, Vanilla 2.7.5, and the Ambari-based
HDP 2.6 and 2.5.
fixes:
- The CDH 5.11 test template, previously only available for Queens,
was added to the main (unversioned) templates directory.
upgrade:
- All the templates for deprecated versions/plugins were removed
from the main (unversioned) templates directory.

View File

@ -1,11 +0,0 @@
---
prelude: >
Stein test templates are now available, while Ocata
test templates have been removed.
features:
- A folder with scenario templates for Stein has been added.
It is a subset of the templates in the main directory.
deprecations:
- The Ocata-specific job templates have been removed.
This means that starting from this release Ocata is
not supported (it is under Extended Maintenance now).

View File

@ -1,5 +0,0 @@
---
features:
- |
The public network field can be omitted from the configuration file of
the scenario, enabling testing when only the project network is used.

View File

@ -1,4 +0,0 @@
---
features:
- Capture and report the timestamp in scenario tests when
an event starts and when an exception is triggered.

View File

@ -1,6 +0,0 @@
---
other:
- |
Updated the list of supported plugin/versions used by some Tempest
and Tempest-based tests to cover all the combinations available in the
Sahara releases supported by sahara-tests.

View File

@ -1,7 +0,0 @@
---
prelude: >
Long overdue general updates of the test templates
fixes:
- The default templates used by the tests have been updated:
some were added (MapR, Ambari and Storm, and some versions of CDH),
others removed (obsolete versions of Vanilla and CDH).

View File

@ -1,8 +0,0 @@
---
prelude: >
The Sahara Tests plugin now uses the Tempest stable interface
other:
- The Sahara Tests plugin is adapted to use the in-tree client,
which was migrated from the Tempest code. Also, there is a
new stable interface for Service Clients in Tempest, so
this change adapts the code to use it.

View File

@ -1,253 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Sahara-tests Release Notes documentation build configuration file
extensions = [
'reno.sphinxext',
'openstackdocstheme'
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/sahara-tests'
openstackdocs_auto_name = False
openstackdocs_use_storyboard = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Sahara-tests'
copyright = u'2016, Sahara Developers'
# Release notes do not need a version number in the title, they
# cover multiple versions.
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Sahara-testsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Sahara-tests.tex', u'Sahara-tests Documentation',
u'Sahara Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sahara-tests', u'Sahara-tests Documentation',
[u'Sahara Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Sahara-tests', u'Sahara-tests Documentation',
u'Sahara Developers', 'Sahara-tests', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']

View File

@ -1,9 +0,0 @@
Welcome to Sahara-tests's documentation!
========================================
Contents:
.. toctree::
:maxdepth: 2
unreleased

View File

@ -1,5 +0,0 @@
==============================
Current Series Release Notes
==============================
.. release-notes::

View File

@ -1,27 +0,0 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=1.6 # Apache-2.0
Mako>=0.4.0 # MIT
botocore>=1.5.1 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD
jsonschema>=3.2.0 # MIT
keystoneauth1>=2.1.0 # Apache-2.0
oslo.concurrency>=3.5.0 # Apache-2.0
oslo.serialization>=1.10.0 # Apache-2.0
oslo.utils>=3.5.0 # Apache-2.0
oslotest>=1.10.0 # Apache-2.0
os-client-config>=1.13.1 # Apache-2.0
paramiko>=1.16.0 # LGPL
python-glanceclient>=2.0.0 # Apache-2.0
python-novaclient!=2.33.0,>=2.29.0 # Apache-2.0
python-saharaclient>=0.13.0 # Apache-2.0
python-swiftclient>=2.2.0 # Apache-2.0
python-neutronclient>=4.2.0 # Apache-2.0
rfc3986>=0.2.0 # Apache-2.0
six>=1.9.0 # MIT
stestr>=1.0.0 # Apache-2.0
tempest>=16.0.0 # Apache-2.0
testtools>=1.4.0 # MIT

View File

@ -1,8 +0,0 @@
---
sahara_tests_src_dir: "{{ zuul.projects['opendev.org/openstack/sahara-tests'].src_dir }}"
sahara_cloud_demo: 'devstack-admin'
sahara_scenario_conf: '{{ ansible_user_dir }}/template_vars.ini'
sahara_scenario_test_template: 'fake.yaml.mako'
sahara_scenario_tox_env: 'venv'
sahara_enable_s3: False
tox_executable: 'tox'

View File

@ -1,24 +0,0 @@
---
- name: run sahara-scenario
shell: |
{{ tox_executable }} -e {{ sahara_scenario_tox_env }} --sitepackages -- sahara-scenario --verbose -V {{ sahara_scenario_conf }} \
etc/scenario/gate/credentials.yaml.mako \
etc/scenario/gate/edp.yaml.mako \
{% if sahara_enable_s3 -%}
etc/scenario/gate/credentials_s3.yaml.mako \
etc/scenario/gate/edp_s3.yaml.mako \
{% endif -%}
etc/scenario/gate/{{ sahara_scenario_test_template }} \
--os-cloud {{ sahara_cloud_demo }} \
{% if sahara_scenario_use_api_v2|default(False) -%}
--v2 \
{% endif -%}
{% if sahara_enable_s3 -%}
--feature s3 \
{% endif -%}
| tee scenario.log
if grep -qE '(FAILED|ERROR:)' scenario.log; then
exit 1
fi
args:
chdir: "{{ sahara_tests_src_dir }}"

View File

@ -1,23 +0,0 @@
---
sahara_cloud_admin: 'devstack-admin'
sahara_cloud_demo: 'devstack-admin'
sahara_plugin: 'fake'
sahara_plugin_version: '0.1'
sahara_image_name: 'xenial-server'
sahara_image_user: 'ubuntu'
sahara_image_format: 'qcow2'
sahara_scenario_conf: '{{ ansible_user_dir }}/template_vars.ini'
sahara_enable_s3: False
sahara_network_type: 'neutron'
private_network_name: 'private'
public_network_name: 'public'
sahara_flavor_small: 'sah1.small'
sahara_cluster_transient: 'False'
sahara_auto_security_group: 'True'
sahara_flavors:
'sah1.small':
id: 20
ram: 512
disk: 10
vcpus: 1
ephemeral: 0

View File

@ -1,52 +0,0 @@
- block:
- name: set sahara_image_path based on the remote file
set_fact:
sahara_image_path: "{{ ansible_user_dir }}/{{ sahara_image_name }}.{{ sahara_image_format }}"
- name: download the remote image
get_url:
url: "{{ sahara_image_url }}"
dest: "{{ sahara_image_path }}"
when: sahara_image_url is defined and sahara_image_url!='' and sahara_image_url is search('^http')
- name: set sahara_image_path from the local file
set_fact:
sahara_image_path: "{{ sahara_image_url }}"
when: sahara_image_url is defined and sahara_image_url!='' and not sahara_image_url is search('^http')
# we cannot use os_image because Ansible 2.7 requires a newer version of
# openstacksdk than the one available in queens and pike.
- name: register the required image in Glance
command: |
openstack --os-cloud {{ sahara_cloud_demo }} image create \
--disk-format {{ sahara_image_format }} --file {{ sahara_image_path }} \
{{ sahara_image_name }}
- name: register the required image in Sahara
shell: |
openstack --os-cloud {{ sahara_cloud_demo }} dataprocessing image register \
--username {{ sahara_image_user }} {{ sahara_image_name }};
openstack --os-cloud {{ sahara_cloud_demo }} dataprocessing image tags add {{ sahara_image_name }} --tags \
{{ sahara_plugin_version }} {{ sahara_plugin }}
- name: S3 configuration
import_tasks: setup_s3.yaml
when: sahara_enable_s3
# we cannot use os_nova_flavor either (see above)
- name: create the required flavor(s)
command: |
openstack --os-cloud {{ sahara_cloud_demo }} flavor create \
--ram {{ item.value.ram }} \
--vcpus {{ item.value.vcpus|default('1') }} \
--disk {{ item.value.disk|default('10') }} \
{% if item.value.ephemeral|default(0) -%}
--ephemeral {{ item.value.ephemeral }} \
{% endif -%} \
{{ item.key }}
with_dict: "{{ sahara_flavors }}"
ignore_errors: true
- name: generate the configuration file for the scenario test
template:
src: sahara_scenario_conf.ini.j2
dest: "{{ sahara_scenario_conf }}"

View File

@ -1,29 +0,0 @@
- name: create the S3 credentials
shell: |
ACCESS_KEY=$(openstack --os-cloud {{ sahara_cloud_demo }} ec2 credentials list -f value -c Access | head -n1)
if [ -z "${ACCESS_KEY}" ]; then
ACCESS_KEY=$(openstack --os-cloud {{ sahara_cloud_demo }} ec2 credentials create -f value -c access)
fi
SECRET_KEY=$(openstack --os-cloud {{ sahara_cloud_demo }} ec2 credentials list -f value -c Secret | head -n1)
printf "${ACCESS_KEY}\n${SECRET_KEY}"
register: sahara_s3_credentials_out
# This task should not be needed normally and the endpoint should be discovered by default
- name: find the swift endpoint for S3
shell: |
ENDPOINT=$(openstack --os-cloud {{ sahara_cloud_admin }} endpoint list --service swift --interface public -c URL -f value)
ENDPOINT_PREFIX=$(echo "${ENDPOINT}" | awk -F'://' '{print $1}')
ENDPOINT_SSL="False"
if [ "${ENDPOINT_PREFIX}" = "https" ]; then
ENDPOINT_SSL="True"
fi
printf "${ENDPOINT}\n${ENDPOINT_SSL}"
register: sahara_s3_endpoint_out
- name: save the S3 access data
set_fact:
sahara_s3_accesskey: "{{ sahara_s3_credentials_out.stdout_lines[0] }}"
sahara_s3_secretkey: "{{ sahara_s3_credentials_out.stdout_lines[1] }}"
sahara_s3_endpoint: "{{ sahara_s3_endpoint_out.stdout_lines[0] }}"
sahara_s3_endpoint_ssl: "{{ sahara_s3_endpoint_out.stdout_lines[1] }}"
sahara_s3_bucket_path: True

View File

@ -1,16 +0,0 @@
[DEFAULT]
network_type: {{ sahara_network_type }}
network_private_name: {{ private_network_name }}
network_public_name: {{ public_network_name }}
plugin_image: {{ sahara_image_name }}
ci_flavor_id: {{ sahara_flavor_small }}
cluster_name: testc
is_transient: {{ sahara_cluster_transient }}
auto_security_group: {{ sahara_auto_security_group }}
{% if sahara_enable_s3 -%}
s3_accesskey: {{ sahara_s3_accesskey }}
s3_secretkey: {{ sahara_s3_secretkey }}
s3_endpoint: {{ sahara_s3_endpoint }}
s3_endpoint_ssl: {{ sahara_s3_endpoint_ssl }}
s3_bucket_path: {{ sahara_s3_bucket_path }}
{% endif -%}

View File

@ -1,5 +0,0 @@
===============================================
Tempest Integration of Sahara
===============================================
This directory contains Tempest tests to cover the Sahara project.

View File

@ -1,30 +0,0 @@
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.lib.services import clients
CONF = config.CONF
class Manager(clients.ServiceClients):
"""Tempest stable service clients and loaded plugins service clients"""
def __init__(self, credentials, service=None):
if CONF.identity.auth_version == 'v2':
identity_uri = CONF.identity.uri
else:
identity_uri = CONF.identity.uri_v3
super(Manager, self).__init__(credentials, identity_uri)

View File

@ -1,351 +0,0 @@
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import OrderedDict
import copy
from tempest import config
CONF = config.CONF
"""Default templates.
There should always be at least a master1 and a worker1 node
group template."""
BASE_VANILLA_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['namenode', 'resourcemanager',
'hiveserver']
},
'master2': {
'count': 1,
'node_processes': ['oozie', 'historyserver',
'secondarynamenode']
},
'worker1': {
'count': 1,
'node_processes': ['datanode', 'nodemanager'],
'node_configs': {
'MapReduce': {
'yarn.app.mapreduce.am.resource.mb': 256,
'yarn.app.mapreduce.am.command-opts': '-Xmx256m'
},
'YARN': {
'yarn.scheduler.minimum-allocation-mb': 256,
'yarn.scheduler.maximum-allocation-mb': 1024,
'yarn.nodemanager.vmem-check-enabled': False
}
}
}
},
'cluster_configs': {
'HDFS': {
'dfs.replication': 1
}
}
}
BASE_SPARK_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['namenode', 'master']
},
'worker1': {
'count': 1,
'node_processes': ['datanode', 'slave']
}
},
'cluster_configs': {
'HDFS': {
'dfs.replication': 1
}
}
}
BASE_CDH_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['CLOUDERA_MANAGER']
},
'master2': {
'count': 1,
'node_processes': ['HDFS_NAMENODE',
'YARN_RESOURCEMANAGER']
},
'master3': {
'count': 1,
'node_processes': ['OOZIE_SERVER', 'YARN_JOBHISTORY',
'HDFS_SECONDARYNAMENODE',
'HIVE_METASTORE', 'HIVE_SERVER2']
},
'worker1': {
'count': 1,
'node_processes': ['YARN_NODEMANAGER', 'HDFS_DATANODE']
}
},
'cluster_configs': {
'HDFS': {
'dfs_replication': 1
}
}
}
BASE_AMBARI_HDP_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['Ambari', 'MapReduce History Server',
'Spark History Server', 'NameNode',
'ResourceManager', 'SecondaryNameNode',
'YARN Timeline Server', 'ZooKeeper',
'Kafka Broker']
},
'master2': {
'count': 1,
'node_processes': ['Hive Metastore', 'HiveServer', 'Oozie']
},
'worker1': {
'count': 3,
'node_processes': ['DataNode', 'NodeManager']
}
},
'cluster_configs': {
'HDFS': {
'dfs.datanode.du.reserved': 0
}
}
}
BASE_MAPR_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['Metrics', 'Webserver', 'ZooKeeper',
'HTTPFS', 'Oozie', 'FileServer', 'CLDB',
'Flume', 'Hue', 'NodeManager', 'HistoryServer',
'ResourceManager', 'HiveServer2',
'HiveMetastore',
'Sqoop2-Client', 'Sqoop2-Server']
},
'worker1': {
'count': 1,
'node_processes': ['NodeManager', 'FileServer']
}
}
}
BASE_STORM_DESC = {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['nimbus']
},
'master2': {
'count': 1,
'node_processes': ['zookeeper']
},
'worker1': {
'count': 1,
'node_processes': ['supervisor']
}
}
}
DEFAULT_TEMPLATES = {
'fake': OrderedDict([
('0.1', {
'NODES': {
'master1': {
'count': 1,
'node_processes': ['namenode', 'jobtracker']
},
'worker1': {
'count': 1,
'node_processes': ['datanode', 'tasktracker'],
}
}
})
]),
'vanilla': OrderedDict([
('2.7.1', copy.deepcopy(BASE_VANILLA_DESC)),
('2.7.5', copy.deepcopy(BASE_VANILLA_DESC)),
('2.8.2', copy.deepcopy(BASE_VANILLA_DESC))
]),
'ambari': OrderedDict([
('2.3', copy.deepcopy(BASE_AMBARI_HDP_DESC)),
('2.4', copy.deepcopy(BASE_AMBARI_HDP_DESC)),
('2.5', copy.deepcopy(BASE_AMBARI_HDP_DESC)),
('2.6', copy.deepcopy(BASE_AMBARI_HDP_DESC))
]),
'spark': OrderedDict([
('1.3.1', copy.deepcopy(BASE_SPARK_DESC)),
('1.6.0', copy.deepcopy(BASE_SPARK_DESC)),
('2.1.0', copy.deepcopy(BASE_SPARK_DESC)),
('2.2', copy.deepcopy(BASE_SPARK_DESC)),
('2.3', copy.deepcopy(BASE_SPARK_DESC))
]),
'cdh': OrderedDict([
('5.4.0', copy.deepcopy(BASE_CDH_DESC)),
('5.5.0', copy.deepcopy(BASE_CDH_DESC)),
('5.7.0', copy.deepcopy(BASE_CDH_DESC)),
('5.9.0', copy.deepcopy(BASE_CDH_DESC)),
('5.11.0', copy.deepcopy(BASE_CDH_DESC)),
('5.13.0', copy.deepcopy(BASE_CDH_DESC))
]),
'mapr': OrderedDict([
('5.1.0.mrv2', copy.deepcopy(BASE_MAPR_DESC)),
('5.2.0.mrv2', copy.deepcopy(BASE_MAPR_DESC))
]),
'storm': OrderedDict([
('1.0.1', copy.deepcopy(BASE_STORM_DESC)),
('1.1.0', copy.deepcopy(BASE_STORM_DESC)),
('1.2', copy.deepcopy(BASE_STORM_DESC))
])
}
def get_plugin_data(plugin_name, plugin_version):
return DEFAULT_TEMPLATES[plugin_name][plugin_version]
def get_default_plugin():
"""Returns the default plugin used for testing."""
enabled_plugins = CONF.data_processing_feature_enabled.plugins
if len(enabled_plugins) == 0:
return None
# NOTE(raissa) if fake is available, use it first.
# this is to reduce load and should be removed
# once the fake plugin is no longer needed
if 'fake' in enabled_plugins:
return 'fake'
for plugin in enabled_plugins:
if plugin in DEFAULT_TEMPLATES.keys():
break
else:
plugin = ''
return plugin
def get_default_version(plugin):
"""Returns the default plugin version used for testing.
This is gathered separately from the plugin to allow
the usage of the plugin name in skip_checks. This method is
instead invoked from resource_setup, which allows API calls
and exceptions.
"""
default_plugin_name = get_default_plugin()
if not (plugin and default_plugin_name):
return None
for version in DEFAULT_TEMPLATES[default_plugin_name].keys():
if version in plugin['versions']:
break
else:
version = None
return version
def get_node_group_template(nodegroup='worker1',
default_version=None,
floating_ip_pool=None,
api_version='1.1'):
"""Returns a node group template for the default plugin."""
try:
flavor = CONF.compute.flavor_ref
default_plugin_name = get_default_plugin()
plugin_data = (
get_plugin_data(default_plugin_name, default_version)
)
nodegroup_data = plugin_data['NODES'][nodegroup]
node_group_template = {
'description': 'Test node group template',
'plugin_name': default_plugin_name,
'node_processes': nodegroup_data['node_processes'],
'flavor_id': flavor,
'floating_ip_pool': floating_ip_pool,
'node_configs': nodegroup_data.get('node_configs', {})
}
if api_version == '1.1':
node_group_template['hadoop_version'] = default_version
else:
node_group_template['plugin_version'] = default_version
return node_group_template
except (IndexError, KeyError):
return None
def get_cluster_template(node_group_template_ids=None,
default_version=None,
api_version='1.1'):
"""Returns a cluster template for the default plugin.
node_group_template_ids contains the type and ID of pre-defined
node group templates that have to be used in the cluster template
(instead of dynamically defining them with 'node_processes').
"""
flavor = CONF.compute.flavor_ref
default_plugin_name = get_default_plugin()
if node_group_template_ids is None:
node_group_template_ids = {}
try:
plugin_data = (
get_plugin_data(default_plugin_name, default_version)
)
all_node_groups = []
for ng_name, ng_data in plugin_data['NODES'].items():
node_group = {
'name': '%s-node' % (ng_name),
'flavor_id': flavor,
'count': ng_data['count']
}
if ng_name in node_group_template_ids.keys():
# node group already defined, use it
node_group['node_group_template_id'] = (
node_group_template_ids[ng_name]
)
else:
# node_processes list defined on-the-fly
node_group['node_processes'] = ng_data['node_processes']
if 'node_configs' in ng_data:
node_group['node_configs'] = ng_data['node_configs']
all_node_groups.append(node_group)
cluster_template = {
'description': 'Test cluster template',
'plugin_name': default_plugin_name,
'cluster_configs': plugin_data.get('cluster_configs', {}),
'node_groups': all_node_groups,
}
if api_version == '1.1':
cluster_template['hadoop_version'] = default_version
else:
cluster_template['plugin_version'] = default_version
return cluster_template
except (IndexError, KeyError):
return None
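# Illustrative usage sketch added for documentation purposes only; it is not
# part of the original module, and names such as 'client' are assumptions.
# It shows how the helpers above are meant to be combined with the v1.1
# DataProcessingClient defined elsewhere in this plugin.
def _example_template_flow(client, plugin, floating_ip_pool=None):
    # Pick the default plugin version, build a worker node group template,
    # register it, and reuse its id inside a cluster template.
    version = get_default_version(plugin)
    ngt = get_node_group_template('worker1', version, floating_ip_pool)
    resp = client.create_node_group_template(name='worker1-tmpl', **ngt)
    ngt_id = resp['node_group_template']['id']
    ct = get_cluster_template({'worker1': ngt_id}, version)
    return client.create_cluster_template(name='cluster-tmpl', **ct)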

View File

@ -1,84 +0,0 @@
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
service_option = cfg.BoolOpt("sahara",
default=False,
help="Whether or not sahara is expected to be "
"available")
data_processing_group = cfg.OptGroup(name="data-processing",
title="Data Processing options")
DataProcessingGroup = [
cfg.StrOpt('catalog_type',
default='data-processing',
help="Catalog type of the data processing service."),
cfg.StrOpt('endpoint_type',
default='publicURL',
choices=['public', 'admin', 'internal',
'publicURL', 'adminURL', 'internalURL'],
help="The endpoint type to use for the data processing "
"service."),
]
DataProcessingAdditionalGroup = [
cfg.IntOpt('cluster_timeout',
default=3600,
help='Timeout (in seconds) to wait for cluster deployment.'),
cfg.IntOpt('request_timeout',
default=10,
help='Timeout (in seconds) between status checks.'),
# FIXME: the default values here are a hack needed until it is possible
# to pass values from the job to tempest.conf (or a devstack plugin is
# written).
cfg.StrOpt('test_image_name',
default='xenial-server-cloudimg-amd64-disk1',
help='name of an image which is used for cluster creation.'),
cfg.StrOpt('test_ssh_user',
default='ubuntu',
help='username used to access the test image.'),
cfg.BoolOpt('use_api_v2',
default=False,
help='Run API tests against APIv2 instead of 1.1'),
cfg.StrOpt('api_version_saharaclient',
default='1.1',
help='Version of Sahara API used by saharaclient',
deprecated_name='saharaclient_version'),
cfg.StrOpt('sahara_url',
help='Sahara url as http://ip:port/api_version/tenant_id'),
# TODO(shuyingya): Delete this option once the Mitaka release is EOL.
cfg.BoolOpt('plugin_update_support',
default=True,
help='Does sahara support plugin update?'),
]
data_processing_feature_group = cfg.OptGroup(
name="data-processing-feature-enabled",
title="Enabled Data Processing features")
DataProcessingFeaturesGroup = [
cfg.ListOpt('plugins',
default=["vanilla", "cdh"],
help="List of enabled data processing plugins"),
# delete this and always execute the tests when Tempest and
# this Tempest plugin stop supporting Queens, the last version
# with missing or incomplete S3 support.
cfg.BoolOpt('s3',
default=False,
help='Does Sahara support S3?'),
]

View File

@ -1,73 +0,0 @@
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from tempest import config
from tempest.test_discover import plugins
from sahara_tempest_plugin import config as sahara_config
class SaharaTempestPlugin(plugins.TempestPlugin):
def load_tests(self):
base_path = os.path.split(os.path.dirname(
os.path.abspath(__file__)))[0]
test_dir = "sahara_tempest_plugin/tests"
full_test_dir = os.path.join(base_path, test_dir)
return full_test_dir, base_path
def register_opts(self, conf):
conf.register_opt(sahara_config.service_option,
group='service_available')
conf.register_group(sahara_config.data_processing_group)
conf.register_opts(sahara_config.DataProcessingGroup +
sahara_config.DataProcessingAdditionalGroup,
sahara_config.data_processing_group)
conf.register_group(sahara_config.data_processing_feature_group)
conf.register_opts(sahara_config.DataProcessingFeaturesGroup,
sahara_config.data_processing_feature_group)
def get_opt_lists(self):
return [
(sahara_config.data_processing_group.name,
sahara_config.DataProcessingGroup),
(sahara_config.data_processing_feature_group.name,
sahara_config.DataProcessingFeaturesGroup),
]
def get_service_clients(self):
data_processing_config = (
config.service_client_config('data-processing'))
params = {
'name': 'data_processing',
'service_version': 'data_processing.v1_1',
'module_path':
'sahara_tempest_plugin.services.data_processing.v1_1',
'client_names': ['DataProcessingClient']
}
params.update(data_processing_config)
params_v2 = {
'name': 'data_processing_v2',
'service_version': 'data_processing.v2',
'module_path':
'sahara_tempest_plugin.services.data_processing.v2',
'client_names': ['DataProcessingClient']
}
params_v2.update(data_processing_config)
return [params, params_v2]

View File

@ -1,50 +0,0 @@
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class BaseDataProcessingClient(rest_client.RestClient):
def _request_and_check_resp(self, request_func, uri, resp_status):
"""Make a request and check response status code.
It returns a ResponseBody.
"""
resp, body = request_func(uri)
self.expected_success(resp_status, resp.status)
return rest_client.ResponseBody(resp, body)
def _request_and_check_resp_data(self, request_func, uri, resp_status):
"""Make a request and check response status code.
It returns a pair: resp and the response data.
"""
resp, body = request_func(uri)
self.expected_success(resp_status, resp.status)
return resp, body
def _request_check_and_parse_resp(self, request_func, uri,
resp_status, *args, **kwargs):
"""Make a request, check response status code and parse response body.
It returns a ResponseBody.
"""
headers = {'Content-Type': 'application/json'}
resp, body = request_func(uri, headers=headers, *args, **kwargs)
self.expected_success(resp_status, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)

View File

@ -1,20 +0,0 @@
# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# flake8: noqa: E501
from sahara_tempest_plugin.services.data_processing.v1_1.data_processing_client import \
DataProcessingClient
__all__ = ['DataProcessingClient']

View File

@ -1,270 +0,0 @@
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from sahara_tempest_plugin.services.data_processing import base_client
class DataProcessingClient(base_client.BaseDataProcessingClient):
api_version = "v1.1"
def list_node_group_templates(self):
"""List all node group templates for a user."""
uri = 'node-group-templates'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_node_group_template(self, tmpl_id):
"""Returns the details of a single node group template."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_node_group_template(self, name, plugin_name, hadoop_version,
node_processes, flavor_id,
node_configs=None, **kwargs):
"""Creates node group template with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'node-group-templates'
body = kwargs.copy()
body.update({
'name': name,
'plugin_name': plugin_name,
'hadoop_version': hadoop_version,
'node_processes': node_processes,
'flavor_id': flavor_id,
'node_configs': node_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
body=json.dumps(body))
def delete_node_group_template(self, tmpl_id):
"""Deletes the specified node group template by id."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_and_check_resp(self.delete, uri, 204)
def list_plugins(self):
"""List all enabled plugins."""
uri = 'plugins'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_plugin(self, plugin_name, plugin_version=None):
"""Returns the details of a single plugin."""
uri = 'plugins/%s' % plugin_name
if plugin_version:
uri += '/%s' % plugin_version
return self._request_check_and_parse_resp(self.get, uri, 200)
def list_cluster_templates(self):
"""List all cluster templates for a user."""
uri = 'cluster-templates'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_cluster_template(self, tmpl_id):
"""Returns the details of a single cluster template."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_cluster_template(self, name, plugin_name, hadoop_version,
node_groups, cluster_configs=None,
**kwargs):
"""Creates cluster template with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'cluster-templates'
body = kwargs.copy()
body.update({
'name': name,
'plugin_name': plugin_name,
'hadoop_version': hadoop_version,
'node_groups': node_groups,
'cluster_configs': cluster_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
body=json.dumps(body))
def delete_cluster_template(self, tmpl_id):
"""Deletes the specified cluster template by id."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_and_check_resp(self.delete, uri, 204)
def update_cluster_template(self, tmpl_id, **kwargs):
"""Updates the specificed cluster template."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.put, uri, 202,
body=json.dumps(kwargs))
def list_data_sources(self):
"""List all data sources for a user."""
uri = 'data-sources'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_data_source(self, source_id):
"""Returns the details of a single data source."""
uri = 'data-sources/%s' % source_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_data_source(self, name, data_source_type, url, **kwargs):
"""Creates data source with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'data-sources'
body = kwargs.copy()
body.update({
'name': name,
'type': data_source_type,
'url': url
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def update_node_group_template(self, tmpl_id, **kwargs):
"""Updates the details of a single node group template."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.put, uri, 202,
body=json.dumps(kwargs))
def delete_data_source(self, source_id):
"""Deletes the specified data source by id."""
uri = 'data-sources/%s' % source_id
return self._request_and_check_resp(self.delete, uri, 204)
def update_data_source(self, source_id, **kwargs):
"""Updates a data source"""
uri = 'data-sources/%s' % source_id
return self._request_check_and_parse_resp(self.put, uri, 202,
body=json.dumps(kwargs))
def list_job_binary_internals(self):
"""List all job binary internals for a user."""
uri = 'job-binary-internals'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_binary_internal(self, job_binary_id):
"""Returns the details of a single job binary internal."""
uri = 'job-binary-internals/%s' % job_binary_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_binary_internal(self, name, data):
"""Creates job binary internal with specified params."""
uri = 'job-binary-internals/%s' % name
return self._request_check_and_parse_resp(self.put, uri, 202, data)
def delete_job_binary_internal(self, job_binary_id):
"""Deletes the specified job binary internal by id."""
uri = 'job-binary-internals/%s' % job_binary_id
return self._request_and_check_resp(self.delete, uri, 204)
def get_job_binary_internal_data(self, job_binary_id):
"""Returns data of a single job binary internal."""
uri = 'job-binary-internals/%s/data' % job_binary_id
return self._request_and_check_resp_data(self.get, uri, 200)
def list_job_binaries(self):
"""List all job binaries for a user."""
uri = 'job-binaries'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_binary(self, job_binary_id):
"""Returns the details of a single job binary."""
uri = 'job-binaries/%s' % job_binary_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_binary(self, name, url, extra=None, **kwargs):
"""Creates job binary with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'job-binaries'
body = kwargs.copy()
body.update({
'name': name,
'url': url,
'extra': extra or dict(),
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def delete_job_binary(self, job_binary_id):
"""Deletes the specified job binary by id."""
uri = 'job-binaries/%s' % job_binary_id
return self._request_and_check_resp(self.delete, uri, 204)
def get_job_binary_data(self, job_binary_id):
"""Returns data of a single job binary."""
uri = 'job-binaries/%s/data' % job_binary_id
return self._request_and_check_resp_data(self.get, uri, 200)
def list_jobs(self):
"""List all jobs for a user."""
uri = 'jobs'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job(self, job_id):
"""Returns the details of a single job."""
uri = 'jobs/%s' % job_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job(self, name, job_type, mains, libs=None, **kwargs):
"""Creates job with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'jobs'
body = kwargs.copy()
body.update({
'name': name,
'type': job_type,
'mains': mains,
'libs': libs or list(),
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def delete_job(self, job_id):
"""Deletes the specified job by id."""
uri = 'jobs/%s' % job_id
return self._request_and_check_resp(self.delete, uri, 204)
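# Illustrative usage sketch (not part of the deleted file): how the
# data-source / job-binary / job calls above chain together. The `client`
# argument is assumed to be an authenticated instance of this v1.1 client;
# all names and URLs are placeholders.
def exercise_v11_client(client):
    source = client.create_data_source(
        'example-input', 'swift',
        'swift://container.sahara/input')['data_source']
    binary = client.create_job_binary(
        'example-lib.jar',
        'swift://container.sahara/example-lib.jar')['job_binary']
    job = client.create_job('example-job', 'MapReduce',
                            mains=[], libs=[binary['id']])['job']
    # clean up in reverse order; the delete_* helpers only check for 204
    client.delete_job(job['id'])
    client.delete_job_binary(binary['id'])
    client.delete_data_source(source['id'])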

View File

@ -1,20 +0,0 @@
# Copyright (c) 2016 Hewlett-Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# flake8: noqa: E501
from sahara_tempest_plugin.services.data_processing.v2.data_processing_client import \
DataProcessingClient
__all__ = ['DataProcessingClient']
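# Illustrative only (not part of this file): the re-export above lets callers
# import the v2 client straight from the package.
from sahara_tempest_plugin.services.data_processing.v2 import (
    DataProcessingClient as V2Client)

assert V2Client.api_version == "v2"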

View File

@ -1,241 +0,0 @@
# Copyright (c) 2013 Mirantis Inc.
# Copyright (c) 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from sahara_tempest_plugin.services.data_processing import base_client
class DataProcessingClient(base_client.BaseDataProcessingClient):
api_version = "v2"
def list_node_group_templates(self):
"""List all node group templates for a user."""
uri = 'node-group-templates'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_node_group_template(self, tmpl_id):
"""Returns the details of a single node group template."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_node_group_template(self, name, plugin_name, plugin_version,
node_processes, flavor_id,
node_configs=None, **kwargs):
"""Creates node group template with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'node-group-templates'
body = kwargs.copy()
body.update({
'name': name,
'plugin_name': plugin_name,
'plugin_version': plugin_version,
'node_processes': node_processes,
'flavor_id': flavor_id,
'node_configs': node_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
body=json.dumps(body))
def delete_node_group_template(self, tmpl_id):
"""Deletes the specified node group template by id."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_and_check_resp(self.delete, uri, 204)
def update_node_group_template(self, tmpl_id, **kwargs):
"""Updates the details of a single node group template."""
uri = 'node-group-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.patch, uri, 202,
body=json.dumps(kwargs))
def list_plugins(self):
"""List all enabled plugins."""
uri = 'plugins'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_plugin(self, plugin_name, plugin_version=None):
"""Returns the details of a single plugin."""
uri = 'plugins/%s' % plugin_name
if plugin_version:
uri += '/%s' % plugin_version
return self._request_check_and_parse_resp(self.get, uri, 200)
def list_cluster_templates(self):
"""List all cluster templates for a user."""
uri = 'cluster-templates'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_cluster_template(self, tmpl_id):
"""Returns the details of a single cluster template."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_cluster_template(self, name, plugin_name, plugin_version,
node_groups, cluster_configs=None,
**kwargs):
"""Creates cluster template with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'cluster-templates'
body = kwargs.copy()
body.update({
'name': name,
'plugin_name': plugin_name,
'plugin_version': plugin_version,
'node_groups': node_groups,
'cluster_configs': cluster_configs or dict(),
})
return self._request_check_and_parse_resp(self.post, uri, 202,
body=json.dumps(body))
def delete_cluster_template(self, tmpl_id):
"""Deletes the specified cluster template by id."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_and_check_resp(self.delete, uri, 204)
def update_cluster_template(self, tmpl_id, **kwargs):
"""Updates the specificed cluster template."""
uri = 'cluster-templates/%s' % tmpl_id
return self._request_check_and_parse_resp(self.patch, uri, 202,
body=json.dumps(kwargs))
def list_data_sources(self):
"""List all data sources for a user."""
uri = 'data-sources'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_data_source(self, source_id):
"""Returns the details of a single data source."""
uri = 'data-sources/%s' % source_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_data_source(self, name, data_source_type, url, **kwargs):
"""Creates data source with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'data-sources'
body = kwargs.copy()
body.update({
'name': name,
'type': data_source_type,
'url': url
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def delete_data_source(self, source_id):
"""Deletes the specified data source by id."""
uri = 'data-sources/%s' % source_id
return self._request_and_check_resp(self.delete, uri, 204)
def update_data_source(self, source_id, **kwargs):
"""Updates a data source"""
uri = 'data-sources/%s' % source_id
return self._request_check_and_parse_resp(self.patch, uri, 202,
body=json.dumps(kwargs))
def list_job_binaries(self):
"""List all job binaries for a user."""
uri = 'job-binaries'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_binary(self, job_binary_id):
"""Returns the details of a single job binary."""
uri = 'job-binaries/%s' % job_binary_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_binary(self, name, url, extra=None, **kwargs):
"""Creates job binary with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'job-binaries'
body = kwargs.copy()
body.update({
'name': name,
'url': url,
'extra': extra or dict(),
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def delete_job_binary(self, job_binary_id):
"""Deletes the specified job binary by id."""
uri = 'job-binaries/%s' % job_binary_id
return self._request_and_check_resp(self.delete, uri, 204)
def get_job_binary_data(self, job_binary_id):
"""Returns data of a single job binary."""
uri = 'job-binaries/%s/data' % job_binary_id
return self._request_and_check_resp_data(self.get, uri, 200)
def list_job_templates(self):
"""List all jobs templates for a user."""
uri = 'job-templates'
return self._request_check_and_parse_resp(self.get, uri, 200)
def get_job_template(self, job_id):
"""Returns the details of a single job template."""
uri = 'job-templates/%s' % job_id
return self._request_check_and_parse_resp(self.get, uri, 200)
def create_job_template(self, name, job_type, mains, libs=None, **kwargs):
"""Creates job with specified params.
It supports passing additional params using kwargs and returns created
object.
"""
uri = 'job-templates'
body = kwargs.copy()
body.update({
'name': name,
'type': job_type,
'mains': mains,
'libs': libs or list(),
})
return self._request_check_and_parse_resp(self.post, uri,
202, body=json.dumps(body))
def delete_job_template(self, job_id):
"""Deletes the specified job by id."""
uri = 'job-templates/%s' % job_id
return self._request_and_check_resp(self.delete, uri, 204)
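# Illustrative sketch (not part of the deleted file) of the APIv2 client
# above: v2 takes 'plugin_version' instead of 'hadoop_version', renames jobs
# to job templates, and sends updates via PATCH. The `client` argument is
# assumed to be an authenticated DataProcessingClient; plugin, process and
# flavor values are placeholders.
def exercise_v2_client(client):
    ngt = client.create_node_group_template(
        'example-workers', 'fake', '0.1',
        node_processes=['datanode'],
        flavor_id='2')['node_group_template']
    # v2 updates send only the changed fields, through PATCH
    client.update_node_group_template(ngt['id'], name='example-workers-2')
    tmpl = client.create_job_template(
        'example-job', 'MapReduce', mains=[], libs=[])['job_template']
    client.delete_job_template(tmpl['id'])
    client.delete_node_group_template(ngt['id'])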

View File

@ -1,260 +0,0 @@
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.lib import exceptions
import tempest.test
from sahara_tempest_plugin import clients
from sahara_tempest_plugin.common import plugin_utils
CONF = config.CONF
class InvalidSaharaTestConfiguration(exceptions.TempestException):
message = "Invalid configuration for Sahara tests"
class BaseDataProcessingTest(tempest.test.BaseTestCase):
credentials = ['primary']
client_manager = clients.Manager
@classmethod
def skip_checks(cls):
super(BaseDataProcessingTest, cls).skip_checks()
if not CONF.service_available.sahara:
raise cls.skipException('Sahara support is required')
cls.default_plugin = plugin_utils.get_default_plugin()
@classmethod
def setup_clients(cls):
super(BaseDataProcessingTest, cls).setup_clients()
if not CONF.data_processing.use_api_v2:
cls.api_version = '1.1'
cls.client = cls.os_primary.data_processing.DataProcessingClient()
else:
cls.api_version = '2.0'
cls.client = \
cls.os_primary.data_processing_v2.DataProcessingClient()
@classmethod
def resource_setup(cls):
super(BaseDataProcessingTest, cls).resource_setup()
plugin = None
if cls.default_plugin:
plugin = cls.client.get_plugin(cls.default_plugin)['plugin']
cls.default_version = plugin_utils.get_default_version(plugin)
if cls.default_plugin is not None and cls.default_version is None:
raise InvalidSaharaTestConfiguration(
message="No known Sahara plugin version was found")
# add lists for watched resources
cls._node_group_templates = []
cls._cluster_templates = []
cls._data_sources = []
cls._job_binary_internals = []
cls._job_binaries = []
cls._jobs = []
@classmethod
def resource_cleanup(cls):
cls.cleanup_resources(getattr(cls, '_cluster_templates', []),
cls.client.delete_cluster_template)
cls.cleanup_resources(getattr(cls, '_node_group_templates', []),
cls.client.delete_node_group_template)
if cls.api_version == '1.1':
cls.cleanup_resources(getattr(cls, '_jobs', []),
cls.client.delete_job)
else:
cls.cleanup_resources(getattr(cls, '_jobs', []),
cls.client.delete_job_template)
cls.cleanup_resources(getattr(cls, '_job_binaries', []),
cls.client.delete_job_binary)
if cls.api_version == '1.1':
cls.cleanup_resources(getattr(cls, '_job_binary_internals', []),
cls.client.delete_job_binary_internal)
cls.cleanup_resources(getattr(cls, '_data_sources', []),
cls.client.delete_data_source)
super(BaseDataProcessingTest, cls).resource_cleanup()
@staticmethod
def cleanup_resources(resource_id_list, method):
for resource_id in resource_id_list:
try:
method(resource_id)
except exceptions.NotFound:
# ignore errors while auto removing created resource
pass
@classmethod
def create_node_group_template(cls, name, plugin_name, hadoop_version,
node_processes, flavor_id,
node_configs=None, **kwargs):
"""Creates watched node group template with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_node_group_template(name, plugin_name,
hadoop_version,
node_processes,
flavor_id,
node_configs,
**kwargs)
resp_body = resp_body['node_group_template']
# store id of created node group template
cls._node_group_templates.append(resp_body['id'])
return resp_body
@classmethod
def create_cluster_template(cls, name, plugin_name, hadoop_version,
node_groups, cluster_configs=None, **kwargs):
"""Creates watched cluster template with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_cluster_template(name, plugin_name,
hadoop_version,
node_groups,
cluster_configs,
**kwargs)
resp_body = resp_body['cluster_template']
# store id of created cluster template
cls._cluster_templates.append(resp_body['id'])
return resp_body
@classmethod
def create_data_source(cls, name, type, url, **kwargs):
"""Creates watched data source with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_data_source(name, type, url, **kwargs)
resp_body = resp_body['data_source']
# store id of created data source
cls._data_sources.append(resp_body['id'])
return resp_body
@classmethod
def create_job_binary_internal(cls, name, data):
"""Creates watched job binary internal with specified params.
It returns created object. All resources created in this method will
be automatically removed in tearDownClass method.
"""
resp_body = cls.client.create_job_binary_internal(name, data)
resp_body = resp_body['job_binary_internal']
# store id of created job binary internal
cls._job_binary_internals.append(resp_body['id'])
return resp_body
@classmethod
def create_job_binary(cls, name, url, extra=None, **kwargs):
"""Creates watched job binary with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_job_binary(name, url, extra, **kwargs)
resp_body = resp_body['job_binary']
# store id of created job binary
cls._job_binaries.append(resp_body['id'])
return resp_body
@classmethod
def create_job(cls, name, job_type, mains, libs=None, **kwargs):
"""Creates watched job (v1) with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_job(name,
job_type, mains, libs, **kwargs)
resp_body = resp_body['job']
# store id of created job
cls._jobs.append(resp_body['id'])
return resp_body
@classmethod
def create_job_template(cls, name, job_type, mains, libs=None, **kwargs):
"""Creates watched job template (v2) with specified params.
It supports passing additional params using kwargs and returns created
object. All resources created in this method will be automatically
removed in tearDownClass method.
"""
resp_body = cls.client.create_job_template(name, job_type, mains,
libs, **kwargs)
resp_body = resp_body['job_template']
# store id of created job template
cls._jobs.append(resp_body['id'])
return resp_body
@classmethod
def get_node_group_template(cls, nodegroup='worker1'):
"""Returns a node group template for the default plugin."""
return plugin_utils.get_node_group_template(nodegroup,
cls.default_version,
None,
cls.api_version)
@classmethod
def get_cluster_template(cls, node_group_template_ids=None):
"""Returns a cluster template for the default plugin.
node_group_template_ids contains the type and ID of pre-defined
node group templates that have to be used in the cluster template
(instead of dynamically defining them with 'node_processes').
"""
return plugin_utils.get_cluster_template(node_group_template_ids,
cls.default_version,
cls.api_version)
@classmethod
def wait_for_resource_deletion(cls, resource_id, get_resource):
"""Waits for a resource to be deleted.
The deletion of a resource depends on the client's is_resource_deleted
implementation, which varies slightly from resource to resource. The
get_resource param should be the function used to retrieve that type
of resource.
"""
def is_resource_deleted(resource_id):
try:
get_resource(resource_id)
except exceptions.NotFound:
return True
return False
cls.client.is_resource_deleted = is_resource_deleted
cls.client.wait_for_resource_deletion(resource_id)
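# Illustrative sketch (not from the original tree) of a test built on the
# base class above: resources registered through the create_* helpers are
# removed automatically in resource_cleanup(), and cleanup_resources()
# tolerates resources the test already deleted because it ignores NotFound.
# The idempotent id and data source URL below are placeholders.
from tempest.lib import decorators
from tempest.lib.common.utils import data_utils

from sahara_tempest_plugin.tests.api import base as dp_base


class ExampleDataSourceTest(dp_base.BaseDataProcessingTest):

    @decorators.idempotent_id('00000000-0000-0000-0000-000000000000')
    def test_data_source_delete(self):
        name = data_utils.rand_name('sahara-data-source')
        # watched resource: its id is stored for automatic cleanup
        source = self.create_data_source(name, 'hdfs', 'example-input')
        self.client.delete_data_source(source['id'])
        self.wait_for_resource_deletion(source['id'],
                                        self.client.get_data_source)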

View File

@ -1,162 +0,0 @@
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testtools import testcase as tc
from tempest.lib import decorators
from tempest.lib.common.utils import data_utils
from sahara_tempest_plugin.tests.api import base as dp_base
class ClusterTemplateTest(dp_base.BaseDataProcessingTest):
# NOTE: Links to the API documentation: https://docs.openstack.org/
# api-ref/data-processing/v1.1/#cluster-templates
# https://docs.openstack.org/api-ref/data-processing/v2/#cluster-templates
@classmethod
def skip_checks(cls):
super(ClusterTemplateTest, cls).skip_checks()
if cls.default_plugin is None:
raise cls.skipException("No Sahara plugins configured")
@classmethod
def resource_setup(cls):
super(ClusterTemplateTest, cls).resource_setup()
# pre-define a node group template
node_group_template_w = cls.get_node_group_template('worker1')
if node_group_template_w is None:
raise dp_base.InvalidSaharaTestConfiguration(
message="No known Sahara plugin was found")
node_group_template_w['name'] = data_utils.rand_name(
'sahara-ng-template')
# hack the arguments: keep the compatibility with the signature
# of self.create_node_group_template
if 'plugin_version' in node_group_template_w:
plugin_version_value = node_group_template_w['plugin_version']
del node_group_template_w['plugin_version']
node_group_template_w['hadoop_version'] = plugin_version_value
resp_body = cls.create_node_group_template(**node_group_template_w)
node_group_template_id = resp_body['id']
configured_node_group_templates = {'worker1': node_group_template_id}
cls.full_cluster_template = cls.get_cluster_template(
configured_node_group_templates)
# create cls.cluster_template variable to use for comparison to cluster
# template response body. The 'node_groups' field in the response body
# has some extra info that post body does not have. The 'node_groups'
# field in the response body is something like this
#
# 'node_groups': [
# {
# 'count': 3,
# 'name': 'worker-node',
# 'volume_mount_prefix': '/volumes/disk',
# 'created_at': '2014-05-21 14:31:37',
# 'updated_at': None,
# 'floating_ip_pool': None,
# ...
# },
# ...
# ]
cls.cluster_template = cls.full_cluster_template.copy()
del cls.cluster_template['node_groups']
def _create_cluster_template(self, template_name=None):
"""Creates Cluster Template with optional name specified.
It creates the template, verifies the template name and the response body,
and returns the id and name of the created template.
"""
if not template_name:
# generate random name if it's not specified
template_name = data_utils.rand_name('sahara-cluster-template')
# hack the arguments: keep the compatibility with the signature
# of self.create_cluster_template
full_cluster_template_w = self.full_cluster_template.copy()
if 'plugin_version' in full_cluster_template_w:
plugin_version_value = full_cluster_template_w['plugin_version']
del full_cluster_template_w['plugin_version']
full_cluster_template_w['hadoop_version'] = plugin_version_value
# create cluster template
resp_body = self.create_cluster_template(template_name,
**full_cluster_template_w)
# ensure that template created successfully
self.assertEqual(template_name, resp_body['name'])
self.assertDictContainsSubset(self.cluster_template, resp_body)
return resp_body['id'], template_name
@tc.attr('smoke')
@decorators.idempotent_id('3525f1f1-3f9c-407d-891a-a996237e728b')
def test_cluster_template_create(self):
self._create_cluster_template()
@tc.attr('smoke')
@decorators.idempotent_id('7a161882-e430-4840-a1c6-1d928201fab2')
def test_cluster_template_list(self):
template_info = self._create_cluster_template()
# check for cluster template in list
templates = self.client.list_cluster_templates()['cluster_templates']
templates_info = [(template['id'], template['name'])
for template in templates]
self.assertIn(template_info, templates_info)
@tc.attr('smoke')
@decorators.idempotent_id('2b75fe22-f731-4b0f-84f1-89ab25f86637')
def test_cluster_template_get(self):
template_id, template_name = self._create_cluster_template()
# check cluster template fetch by id
template = self.client.get_cluster_template(template_id)
template = template['cluster_template']
self.assertEqual(template_name, template['name'])
self.assertDictContainsSubset(self.cluster_template, template)
@tc.attr('smoke')
@decorators.idempotent_id('ff1fd989-171c-4dd7-91fd-9fbc71b09675')
def test_cluster_template_delete(self):
template_id, _ = self._create_cluster_template()
# delete the cluster template by id
self.client.delete_cluster_template(template_id)
get_resource = self.client.get_cluster_template
self.wait_for_resource_deletion(template_id, get_resource)
templates = self.client.list_cluster_templates()['cluster_templates']
templates_info = [template['id']
for template in templates]
self.assertNotIn(template_id, templates_info)
@tc.attr('smoke')
@decorators.idempotent_id('40235aa0-cd4b-494a-9c12-2d0e8a92157a')
def test_cluster_template_update(self):
template_id, _ = self._create_cluster_template()
new_template_name = data_utils.rand_name('sahara-cluster-template')
body = {'name': new_template_name}
updated_template = self.client.update_cluster_template(template_id,
**body)
updated_template = updated_template['cluster_template']
self.assertEqual(new_template_name, updated_template['name'])
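# Illustrative helper (not part of the deleted file) restating the
# compatibility hack used above: templates generated for APIv2 carry
# 'plugin_version', while the base-class create_* helpers still expect the
# older 'hadoop_version' keyword.
def to_hadoop_version_kwargs(template):
    body = template.copy()
    if 'plugin_version' in body:
        body['hadoop_version'] = body.pop('plugin_version')
    return body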

View File

@ -1,254 +0,0 @@
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from testtools import testcase as tc
from tempest import config
from tempest.lib import decorators
from tempest.lib.common.utils import data_utils
from sahara_tempest_plugin.tests.api import base as dp_base
CONF = config.CONF
class DataSourceTest(dp_base.BaseDataProcessingTest):
@classmethod
def resource_setup(cls):
super(DataSourceTest, cls).resource_setup()
cls.swift_data_source_with_creds = {
'url': 'swift://sahara-container.sahara/input-source',
'description': 'Test data source',
'credentials': {
'user': cls.os_primary.credentials.username,
'password': cls.os_primary.credentials.password
},
'type': 'swift'
}
cls.swift_data_source = cls.swift_data_source_with_creds.copy()
del cls.swift_data_source['credentials']
cls.s3_data_source_with_creds = {
'url': 's3://sahara-bucket/input-source',
'description': 'Test data source',
'credentials': {
'accesskey': 'username',
'secretkey': 'key',
'endpoint': 'localhost',
'bucket_in_path': False,
'ssl': False
},
'type': 's3'
}
cls.s3_data_source = cls.s3_data_source_with_creds.copy()
del cls.s3_data_source['credentials']
cls.local_hdfs_data_source = {
'url': 'input-source',
'description': 'Test data source',
'type': 'hdfs'
}
cls.external_hdfs_data_source = {
'url': 'hdfs://172.18.168.2:8020/usr/hadoop/input-source',
'description': 'Test data source',
'type': 'hdfs'
}
def _create_data_source(self, source_body, source_name=None):
"""Creates Data Source with optional name specified.
It creates a link to an input-source file (which may not exist), verifies
the source name and the response body, and returns the id and name of the
created source.
"""
if not source_name:
# generate random name if it's not specified
source_name = data_utils.rand_name('sahara-data-source')
# create data source
resp_body = self.create_data_source(source_name, **source_body)
# ensure that source created successfully
self.assertEqual(source_name, resp_body['name'])
if source_body['type'] == 'swift':
source_body = self.swift_data_source
elif source_body['type'] == 's3':
source_body = self.s3_data_source
self.assertDictContainsSubset(source_body, resp_body)
return resp_body['id'], source_name
def _list_data_sources(self, source_info):
# check for data source in list
sources = self.client.list_data_sources()['data_sources']
sources_info = [(source['id'], source['name']) for source in sources]
self.assertIn(source_info, sources_info)
def _get_data_source(self, source_id, source_name, source_body):
# check data source fetch by id
source = self.client.get_data_source(source_id)['data_source']
self.assertEqual(source_name, source['name'])
self.assertDictContainsSubset(source_body, source)
def _delete_data_source(self, source_id):
# delete the data source by id
self.client.delete_data_source(source_id)
self.wait_for_resource_deletion(source_id, self.client.get_data_source)
# assert data source does not exist anymore
sources = self.client.list_data_sources()['data_sources']
sources_ids = [source['id'] for source in sources]
self.assertNotIn(source_id, sources_ids)
def _update_data_source(self, source_id):
new_source_name = data_utils.rand_name('sahara-data-source')
body = {'name': new_source_name}
updated_source = self.client.update_data_source(source_id, **body)
source = updated_source['data_source']
self.assertEqual(new_source_name, source['name'])
@tc.attr('smoke')
@decorators.idempotent_id('9e0e836d-c372-4fca-91b7-b66c3e9646c8')
def test_swift_data_source_create(self):
self._create_data_source(self.swift_data_source_with_creds)
@tc.attr('smoke')
@decorators.idempotent_id('3cb87a4a-0534-4b97-9edc-8bbc822b68a0')
def test_swift_data_source_list(self):
source_info = (
self._create_data_source(self.swift_data_source_with_creds))
self._list_data_sources(source_info)
@tc.attr('smoke')
@decorators.idempotent_id('fc07409b-6477-4cb3-9168-e633c46b227f')
def test_swift_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.swift_data_source_with_creds))
self._get_data_source(source_id, source_name, self.swift_data_source)
@tc.attr('smoke')
@decorators.idempotent_id('df53669c-0cd1-4cf7-b408-4cf215d8beb8')
def test_swift_data_source_delete(self):
source_id, _ = (
self._create_data_source(self.swift_data_source_with_creds))
self._delete_data_source(source_id)
@tc.attr('smoke')
@decorators.idempotent_id('44398efb-c2a8-4a20-97cd-509c49b5d25a')
def test_swift_data_source_update(self):
source_id, _ = (
self._create_data_source(self.swift_data_source_with_creds))
self._update_data_source(source_id)
@decorators.idempotent_id('54b68270-74d2-4c93-a324-09c2dccb1208')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
'S3 not available')
def test_s3_data_source_create(self):
self._create_data_source(self.s3_data_source_with_creds)
@decorators.idempotent_id('5f67a8d1-e362-4204-88ec-674630a71019')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
'S3 not available')
def test_s3_data_source_list(self):
source_info = (
self._create_data_source(self.s3_data_source_with_creds))
self._list_data_sources(source_info)
@decorators.idempotent_id('84017749-b9d6-4542-9d12-1c73239e03b2')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
'S3 not available')
def test_s3_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.s3_data_source_with_creds))
self._get_data_source(source_id, source_name, self.s3_data_source)
@decorators.idempotent_id('fb8f9f44-17ea-4be9-8cec-e02f31a49bae')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
'S3 not available')
def test_s3_data_source_delete(self):
source_id, _ = (
self._create_data_source(self.s3_data_source_with_creds))
self._delete_data_source(source_id)
@decorators.idempotent_id('d069714a-86fb-45ce-8498-43901b065243')
@testtools.skipUnless(CONF.data_processing_feature_enabled.s3,
'S3 not available')
def test_s3_data_source_update(self):
source_id, _ = (
self._create_data_source(self.s3_data_source_with_creds))
self._update_data_source(source_id)
@tc.attr('smoke')
@decorators.idempotent_id('88505d52-db01-4229-8f1d-a1137da5fe2d')
def test_local_hdfs_data_source_create(self):
self._create_data_source(self.local_hdfs_data_source)
@tc.attr('smoke')
@decorators.idempotent_id('81d7d42a-d7f6-4d9b-b38c-0801a4dfe3c2')
def test_local_hdfs_data_source_list(self):
source_info = self._create_data_source(self.local_hdfs_data_source)
self._list_data_sources(source_info)
@tc.attr('smoke')
@decorators.idempotent_id('ec0144c6-db1e-4169-bb06-7abae14a8443')
def test_local_hdfs_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.local_hdfs_data_source))
self._get_data_source(
source_id, source_name, self.local_hdfs_data_source)
@tc.attr('smoke')
@decorators.idempotent_id('e398308b-4230-4f86-ba10-9b0b60a59c8d')
def test_local_hdfs_data_source_delete(self):
source_id, _ = self._create_data_source(self.local_hdfs_data_source)
self._delete_data_source(source_id)
@tc.attr('smoke')
@decorators.idempotent_id('16a71f3b-0095-431c-b542-c871e1f95e1f')
def test_local_hdfs_data_source_update(self):
source_id, _ = self._create_data_source(self.local_hdfs_data_source)
self._update_data_source(source_id)
@tc.attr('smoke')
@decorators.idempotent_id('bfd91128-e642-4d95-a973-3e536962180c')
def test_external_hdfs_data_source_create(self):
self._create_data_source(self.external_hdfs_data_source)
@tc.attr('smoke')
@decorators.idempotent_id('92e2be72-f7ab-499d-ae01-fb9943c90d8e')
def test_external_hdfs_data_source_list(self):
source_info = self._create_data_source(self.external_hdfs_data_source)
self._list_data_sources(source_info)
@tc.attr('smoke')
@decorators.idempotent_id('a31edb1b-6bc6-4f42-871f-70cd243184ac')
def test_external_hdfs_data_source_get(self):
source_id, source_name = (
self._create_data_source(self.external_hdfs_data_source))
self._get_data_source(
source_id, source_name, self.external_hdfs_data_source)
@tc.attr('smoke')
@decorators.idempotent_id('295924cd-a085-4b45-aea8-0707cdb2da7e')
def test_external_hdfs_data_source_delete(self):
source_id, _ = self._create_data_source(self.external_hdfs_data_source)
self._delete_data_source(source_id)
@tc.attr('smoke')
@decorators.idempotent_id('9b317861-95db-44bc-9b4b-80d23feade3f')
def test_external_hdfs_data_source_update(self):
source_id, _ = self._create_data_source(self.external_hdfs_data_source)
self._update_data_source(source_id)
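# Illustrative restatement (not part of the deleted file) of the comparison
# pattern used by these tests: the request body carries credentials, while
# the subset asserted against the response is the same body with
# 'credentials' removed (see _create_data_source above). Values are
# placeholders.
swift_with_creds = {
    'url': 'swift://sahara-container.sahara/input-source',
    'type': 'swift',
    'credentials': {'user': 'example-user', 'password': 'example-password'},
}
swift_expected = swift_with_creds.copy()
del swift_expected['credentials']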

Some files were not shown because too many files have changed in this diff.