Add murano dashboard selenium functional test

Depends-On: I12cf2dbc62a25c1a56f7f0f7d54b7df412c2b397
Change-Id: I4b8f7e30c3d78c662bfcd44a5a46cfffc4dd2227
Implements: blueprint dashboard-selenium-tests-migrate-to-infra-ci
commit 3ab9a10055
parent 873fd7ff85
.zuul.yaml (new file, 22 lines)
@@ -0,0 +1,22 @@
- project:
    name: openstack/murano-dashboard
    check:
      jobs:
        - murano-dashboard-sanity-check

- job:
    name: murano-dashboard-sanity-check
    parent: legacy-dsvm-base
    run: playbooks/legacy/murano-dashboard-sanity-check/run.yaml
    post-run: playbooks/legacy/murano-dashboard-sanity-check/post.yaml
    voting: false
    timeout: 10800
    required-projects:
      - openstack-infra/devstack-gate
      - openstack/heat
      - openstack/murano
      - openstack/murano-dashboard
      - openstack/python-heatclient
      - openstack/python-muranoclient
      - openstack/horizon
      - openstack/heat-dashboard
functional_tests/collect_results.sh (new executable file, 31 lines)
@@ -0,0 +1,31 @@
DEST=${DEST:-/opt/stack/new}
DASHBOARD_DIR=$DEST/murano-dashboard

function create_artifacts_dir() {
    dst="${WORKSPACE}/logs/artifacts"
    mkdir -p "${dst}"
}

function collect_screenshots() {
    # Copy screenshots for failed tests
    if [[ -d "$DASHBOARD_DIR/muranodashboard/tests/functional/screenshots/" ]]; then
        mkdir -p "${WORKSPACE}/logs/artifacts/screenshots"
        cp -Rv $DASHBOARD_DIR/muranodashboard/tests/functional/screenshots/* "${WORKSPACE}/logs/artifacts/screenshots/"
    fi
}

function generate_html_report() {
    local xml_report="${WORKSPACE}/logs/test_report.xml"
    local html_report="${WORKSPACE}/logs/test_report.html"

    if [[ -f "${WORKSPACE}/logs/test_report.xml" ]]; then
        $(which python) "$DASHBOARD_DIR/functional_tests/generate_html_report.py" "${xml_report}" "${html_report}"
        cp "${html_report}" "${WORKSPACE}/index.html"
    fi
}

function do_collect_results() {
    create_artifacts_dir
    collect_screenshots
    generate_html_report
}
functional_tests/env_pkg_prepare.sh (new executable file, 24 lines)
@@ -0,0 +1,24 @@
function prepare_packages() {

    sudo wget https://sourceforge.net/projects/ubuntuzilla/files/mozilla/apt/pool/main/f/firefox-mozilla-build/firefox-mozilla-build_46.0.1-0ubuntu1_amd64.deb/download -O firefox46.deb
    sudo dpkg -i firefox46.deb
    sudo rm -f firefox46.deb

    sudo apt-get update
    sudo apt-get install -y \
        libpq-dev \
        python-dev \
        libxml2-dev \
        libxslt1-dev \
        libffi-dev \
        make \
        gcc \
        ntpdate \
        xvfb \
        zip \
        python-openssl \
        python-crypto \
        libgtk-3-0 \
        libasound2 \
        libdbus-glib-1-2
}
functional_tests/generate_html_report.py (new file, 164 lines)
@@ -0,0 +1,164 @@
#!/usr/bin/python
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

from __future__ import with_statement

import os
import re
import sys
import uuid

import jinja2
import lxml.etree as et

if not __name__ == "__main__":
    sys.exit(1)
if not len(sys.argv) >= 3:
    sys.exit(1)
if not os.path.exists(sys.argv[1]):
    sys.exit(1)

# Raw strings so the escaped dashes and brackets are not treated as
# invalid string escapes.
LOG_LINE_PATTERN = (r"^(?P<date>20[0-9]{2}\-[0-9]{2}\-[0-9]{2}) "
                    r"(?P<time>[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+) "
                    r"(?P<pid>[0-9]+) (?P<level>[A-Z]+) "
                    r"(?P<package>.*?) \[\-\](?P<log>.*?)$")


def get_attr(element, attr):
    return element.attrib[attr] if attr in element.attrib.keys() else None


def parse_log_file(log_file_path):
    LOG_RECORDS = []
    LAST_LOG_ENTRY = None

    with open(log_file_path, 'r') as log_file:
        for line in log_file.readlines():
            match = re.match(LOG_LINE_PATTERN, line, re.S)
            if match:
                LAST_LOG_ENTRY = {
                    'date': match.group('date'),
                    'time': match.group('time'),
                    'pid': match.group('pid'),
                    'level': match.group('level'),
                    'package': match.group('package'),
                    'log': match.group('log')
                }
                LOG_RECORDS.append(LAST_LOG_ENTRY)
            elif LAST_LOG_ENTRY is not None:
                LAST_LOG_ENTRY['log'] += line

    return [log_record for log_record in LOG_RECORDS
            if log_record['level'] == "ERROR"]


STATS = {
    'total': 0,
    'success': 0,
    'skip': 0,
    'error': 0,
    'failure': 0,
}

REPORT = {}

et.set_default_parser(et.XMLParser(huge_tree=True))

tree = et.parse(sys.argv[1])
root = tree.getroot()

STATS['total'] = int(root.attrib['tests'])
STATS['failure'] = int(root.attrib['failures'])
STATS['error'] = int(root.attrib['errors'])
STATS['skip'] = int(root.attrib['skip'])
STATS['unsuccess'] = STATS['failure'] + STATS['error'] + STATS['skip']
STATS['success'] = STATS['total'] - STATS['unsuccess']

for case in root:
    class_name = case.attrib['classname']

    screenshot_file = 'logs/artifacts/screenshots/%s.png' % case.attrib['name']
    screenshot_path = os.path.join(
        os.environ.get('WORKSPACE'),
        screenshot_file
    )

    test = {
        'name': case.attrib['name'],
        'time': case.attrib['time'],
        'result': 'success',
        'exc_type': None,
        'exc_message': None,
        'traceback': None,
        'output': case.text,
        'uuid': str(uuid.uuid1()),
        'screenshot': None
    }

    for child in case:
        test['exc_type'] = get_attr(child, 'type')
        test['exc_message'] = get_attr(child, 'message')
        test['traceback'] = child.text
        if child.tag == 'error':
            test['result'] = 'error'
            if os.path.exists(screenshot_path):
                test['screenshot'] = screenshot_file
        elif child.tag == 'failure':
            test['result'] = 'failure'
            if os.path.exists(screenshot_path):
                test['screenshot'] = screenshot_file
        elif child.tag == 'skipped':
            test['result'] = 'skip'

    if class_name not in REPORT.keys():
        REPORT[class_name] = {
            'tests': [],
            'stats': {
                'total': 0,
                'failure': 0,
                'error': 0,
                'skip': 0,
                'success': 0,
            },
            'result': 'success',
            'uuid': str(uuid.uuid1()),
        }

    REPORT[class_name]['tests'].append(test)
    REPORT[class_name]['stats']['total'] += 1
    REPORT[class_name]['stats'][test['result']] += 1

for class_name in REPORT.keys():
    # Compare against each class's own totals; a class is "skip" only
    # when every one of its tests was skipped.
    if REPORT[class_name]['stats']['failure'] > 0:
        REPORT[class_name]['result'] = 'failure'
    elif REPORT[class_name]['stats']['error'] > 0:
        REPORT[class_name]['result'] = 'failure'
    elif REPORT[class_name]['stats']['skip'] == REPORT[class_name]['stats']['total']:
        REPORT[class_name]['result'] = 'skip'
    else:
        REPORT[class_name]['result'] = 'success'

jinja = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.join(
        os.path.dirname(__file__), 'templates')
    )
)

with open(sys.argv[2], 'w') as report_file:
    report_file.write(jinja.get_template(
        os.path.basename('report.template')
    ).render(
        report=REPORT,
        stats=STATS,
        coverage=False,
    ))
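
For context, the report generator above is a small standalone CLI: it takes the xunit XML path and the output HTML path as arguments and reads WORKSPACE from the environment to locate screenshots. A minimal invocation sketch, mirroring the call in collect_results.sh (the paths here are illustrative assumptions):

# Assumes WORKSPACE is exported and jinja2/lxml are installed.
export WORKSPACE=$HOME/workspace                     # illustrative path
python functional_tests/generate_html_report.py \
    "$WORKSPACE/logs/test_report.xml" \
    "$WORKSPACE/logs/test_report.html"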
functional_tests/post_test_hook.sh (new executable file, 27 lines)
@@ -0,0 +1,27 @@
#!/bin/bash

XTRACE=$(set +o | grep xtrace)
set -o xtrace

DEST=${DEST:-/opt/stack/new}
DASHBOARD_DIR=$DEST/murano-dashboard

source $DASHBOARD_DIR/functional_tests/collect_results.sh
source $DASHBOARD_DIR/functional_tests/run_test.sh

echo "Run murano-dashboard functional tests"
set +e
start_xvfb_session
run_tests
EXIT_CODE=$?
set -e

echo "Collect the test results"
do_collect_results

echo "Kill Xvfb"
sudo pkill Xvfb

# Restore the previous xtrace setting before propagating the test result.
$XTRACE

exit $EXIT_CODE
functional_tests/pre_test_hook.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash
DEST=${DEST:-/opt/stack/new}
DASHBOARD_DIR=$DEST/murano-dashboard

source $DASHBOARD_DIR/functional_tests/env_pkg_prepare.sh

XTRACE=$(set +o | grep xtrace)
set -o xtrace

prepare_packages
sync

$XTRACE
functional_tests/run_test.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
DEST=${DEST:-/opt/stack/new}
DASHBOARD_DIR=$DEST/murano-dashboard

function start_xvfb_session() {

    export VFB_DISPLAY_SIZE='1280x1024'
    export VFB_COLOR_DEPTH=16
    export VFB_DISPLAY_NUM=22

    export DISPLAY=:${VFB_DISPLAY_NUM}

    fonts_path="/usr/share/fonts/X11/misc/"

    # Start Xvfb session
    sudo Xvfb -fp "${fonts_path}" "${DISPLAY}" -screen 0 "${VFB_DISPLAY_SIZE}x${VFB_COLOR_DEPTH}" &
}

function run_nosetests() {
    local tests=$*

    export NOSETESTS_CMD="$(which nosetests)"

    $NOSETESTS_CMD -s -v \
        --with-xunit \
        --xunit-file="$WORKSPACE/logs/test_report.xml" \
        $tests
}

function run_tests() {
    sudo rm -f /tmp/parser_table.py
    sudo pip install "selenium<3.0.0,>=2.50.1"

    cd $DASHBOARD_DIR/muranodashboard/tests/functional

    run_nosetests sanity_check
}
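
As a usage note, these helpers can be exercised by hand in the same way post_test_hook.sh drives them. A minimal sketch, assuming a devstack node with murano-dashboard checked out under /opt/stack/new and nosetests on PATH (the WORKSPACE path is an assumption):

export WORKSPACE=$HOME/workspace && mkdir -p "$WORKSPACE/logs"
source /opt/stack/new/murano-dashboard/functional_tests/run_test.sh
start_xvfb_session        # starts Xvfb on display :22
run_tests                 # installs selenium<3.0.0 and runs the sanity_check suite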
functional_tests/templates/empty.template (new file, 72 lines)
@@ -0,0 +1,72 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="en-US">
<head>
    <title>Test Report</title>
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>

    <style type="text/css" media="screen">
        body { font-family: sans-serif; font-size: 10pt; }
        a { text-decoration: none; }
        tfoot, thead { background-color: #ccc; font-weight: bold; }
        tbody td { padding-left: 5px !important; }
        .number { text-align: center; }
        .info_header { font-weight: bold; margin-left: 2em; margin-top: 1em;
            color: black; }
        .info_content { white-space: pre-wrap; color: black;
            margin-left: 2em; margin-top: 0.5em; font-family: monospace; }
        .test_class_status_success { background-color: #6c6 !important; }
        .test_class_status_failure { background-color: #c60 !important; }
        .test_class_status_error { background-color: #c00 !important; }
        .test_class_status_skip { background-color: #59f !important; }
        .test_status_success { color: #6c6 !important; }
        .test_status_failure { color: #c60 !important; }
        .test_status_error { color: #c00 !important; }
        .test_status_skip { color: #006 !important; }
        .parent_success { display: none; }
        .parent_skip { display: none; }
        .parent_error { display: inline; }
        .parent_failure { display: inline; }
        .test_class_row { cursor: pointer; }
        .test_name { margin-left: 2em; }
    </style>
    <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
    <script type="text/javascript" src="https://cdn.datatables.net/1.10.2/js/jquery.dataTables.min.js"></script>
    <link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.2/css/jquery.dataTables.min.css">

    <script type="text/javascript">
        $(document).ready(function () {
            $(".expand_button").click(function(event) {
                event.preventDefault();
                event.stopPropagation();
                $("#info_" + this.id).toggle();
            });
            $(".test_class_row").click(function(event) {
                event.preventDefault();
                event.stopPropagation();
                $("." + this.id + "_child").toggle();
            });
            $("#report").dataTable({
                "autoWidth": false,
                "paging": false,
                "ordering": false,
                "info": false,
                "columnDefs": [
                    { className: "number", "targets": [2, 3, 4, 5, 6] }
                ]
            });
        });
    </script>
</head>
<body>

<h1>Test Report</h1>

<div>
    Summary: unable to run tests
</div>

<a href="artifacts/">View Artifacts</a> | <a href="console.html">View Full Log</a>

</body>
</html>
functional_tests/templates/report.template (new file, 155 lines)
@@ -0,0 +1,155 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" dir="ltr" lang="en-US">
<head>
    <title>Test Report</title>
    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>

    <style type="text/css" media="screen">
        body { font-family: sans-serif; font-size: 10pt; }
        a { text-decoration: none; }
        tfoot, thead { background-color: #ccc; font-weight: bold; }
        tbody td { padding-left: 5px !important; }
        .number { text-align: center; }
        .info_header { font-weight: bold; margin-left: 2em; margin-top: 1em;
            color: black; }
        .info_content { white-space: pre-wrap; color: black;
            margin-left: 2em; margin-top: 0.5em; font-family: monospace; }
        .test_class_status_success { background-color: #6c6 !important; }
        .test_class_status_failure { background-color: #c60 !important; }
        .test_class_status_error { background-color: #c00 !important; }
        .test_class_status_skip { background-color: #59f !important; }
        .test_status_success { color: #6c6 !important; }
        .test_status_failure { color: #c60 !important; }
        .test_status_error { color: #c00 !important; }
        .test_status_skip { color: #006 !important; }
        .parent_success { display: none; }
        .parent_skip { display: none; }
        .parent_error { display: inline; }
        .parent_failure { display: inline; }
        .test_class_row { cursor: pointer; }
        .test_name { margin-left: 2em; }
    </style>
    <script type="text/javascript" src="https://code.jquery.com/jquery-2.1.1.min.js"></script>
    <script type="text/javascript" src="https://cdn.datatables.net/1.10.2/js/jquery.dataTables.min.js"></script>
    <link rel="stylesheet" type="text/css" href="https://cdn.datatables.net/1.10.2/css/jquery.dataTables.min.css">

    <script type="text/javascript">
        $(document).ready(function () {
            $(".expand_button").click(function(event) {
                event.preventDefault();
                event.stopPropagation();
                $("#info_" + this.id).toggle();
            });
            $(".test_class_row").click(function(event) {
                event.preventDefault();
                event.stopPropagation();
                $("." + this.id + "_child").toggle();
            });
            $("#report").dataTable({
                "autoWidth": false,
                "paging": false,
                "ordering": false,
                "info": false,
                "columnDefs": [
                    { className: "number", "targets": [2, 3, 4, 5, 6] }
                ]
            });
        });
    </script>
</head>
<body>

<h1>Test Report</h1>

<div>
    Summary:
    success - {{ stats.success }},
    skip - {{ stats.skip }},
    error - {{ stats.error }},
    failure - {{ stats.failure }}
</div>

<a href="logs/">View Artifacts</a> | <a href="job-output.txt.gz">View Full Log</a>

<table id="report" class="display compact">
    <thead>
        <tr id='header_row'>
            <td>Test Group/Test case</td>
            <td>Status</td>
            <td>Count</td>
            <td>Success</td>
            <td>Failure</td>
            <td>Error</td>
            <td>Skip</td>
        </tr>
    </thead>
    <tbody>
        {% for class, group in report.items() %}
        <tr class="test_class_status_{{ group.result }} test_class_row" id="{{ group.uuid }}">
            <td class="test_class">{{ class }}</td>
            <td>{{ group.result }}</td>
            <td>{{ group.stats.total }}</td>
            <td>{{ group.stats.success }}</td>
            <td>{{ group.stats.failure }}</td>
            <td>{{ group.stats.error }}</td>
            <td>{{ group.stats.skip }}</td>
        </tr>
        {% for test in group.tests %}
        <tr class="test_status_{{ test.result }} {{ group.uuid }}_child parent_{{ group.result }}">
            <td>
                <div class="test_name">
                    <a href="#" class="expand_button" id="{{ test.uuid }}">{{ test.name }}</a>
                    {% if test.screenshot %}
                    (<a href="{{ test.screenshot }}">screenshot</a>)
                    {% endif %}
                </div>
                <div id="info_{{ test.uuid }}" style="display: none;">
                    {% if test.output %}
                    <div class="info_header">
                        Output:
                    </div>
                    <div class="info_content">
                        {{ test.output }}
                    </div>
                    {% endif %}{% if test.exc_type %}
                    <div class="info_header">
                        {{ test.exc_type }}:
                    </div>
                    <div class="info_content">
                        {{ test.exc_message }}
                    </div>
                    {% endif %}{% if test.traceback %}
                    <div class="info_header">
                        Traceback:
                    </div>
                    <div class="info_content">
                        {{ test.traceback }}
                    </div>
                    {% endif %}
                </div>
            </td>
            <td>{{ test.result }}</td>
            <td></td>
            <td></td>
            <td></td>
            <td></td>
            <td></td>
        </tr>
        {% endfor %}
        {% endfor %}
    </tbody>
    <tfoot>
        <tr id='total_row'>
            <td>Total</td>
            <td>{% if stats.failure + stats.error > 0 %}failure{% else %}success{% endif %}</td>
            <td>{{ stats.total }}</td>
            <td>{{ stats.success }}</td>
            <td>{{ stats.failure }}</td>
            <td>{{ stats.error }}</td>
            <td>{{ stats.skip }}</td>
        </tr>
    </tfoot>
</table>
</body>
</html>
playbooks/legacy/murano-dashboard-sanity-check/post.yaml (new file, 67 lines)
@@ -0,0 +1,67 @@
- hosts: primary
  tasks:

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/logs/**
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=**/*index.html
          - --include=**/*index.html.gz
          - --include=index.html
          - --include=index.html.gz

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=**/*testr_results.html.gz
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=**/*nose_results.html
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs

    - name: Copy files from {{ ansible_user_dir }}/workspace/ on node
      synchronize:
        src: '{{ ansible_user_dir }}/workspace/'
        dest: '{{ zuul.executor.log_root }}'
        mode: pull
        copy_links: true
        verify_host: true
        rsync_opts:
          - --include=/.testrepository/tmp*
          - --include=*/
          - --exclude=*
          - --prune-empty-dirs
playbooks/legacy/murano-dashboard-sanity-check/run.yaml (new file, 66 lines)
@@ -0,0 +1,66 @@
- hosts: all
  name: Legacy murano-dashboard-sanity-check job (autoconverted from the old gate-tempest-dsvm-murano-api-ubuntu-xenial job)
  tasks:

    - name: Ensure legacy workspace directory
      file:
        path: '{{ ansible_user_dir }}/workspace'
        state: directory

    - shell:
        cmd: |
          set -e
          set -x
          cat > clonemap.yaml << EOF
          clonemap:
            - name: openstack-infra/devstack-gate
              dest: devstack-gate
          EOF
          /usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
              git://git.openstack.org \
              openstack-infra/devstack-gate
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'

    - shell:
        cmd: |
          set -e
          set -x
          export PYTHONUNBUFFERED=true
          export DEVSTACK_GATE_TEMPEST=0
          export DEVSTACK_GATE_NEUTRON=1
          export PROJECTS="openstack/heat $PROJECTS"
          export PROJECTS="openstack/python-heatclient $PROJECTS"
          export PROJECTS="openstack/murano $PROJECTS"
          export PROJECTS="openstack/murano-dashboard $PROJECTS"
          export PROJECTS="openstack/python-muranoclient $PROJECTS"
          export PROJECTS="openstack/horizon $PROJECTS"
          export ENABLED_SERVICES=horizon
          export PROJECTS="openstack/heat-dashboard $PROJECTS"
          export DEVSTACK_LOCAL_CONFIG="enable_plugin heat git://git.openstack.org/openstack/heat"
          export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin murano git://git.openstack.org/openstack/murano"
          export DEVSTACK_LOCAL_CONFIG+=$'\n'"enable_plugin heat-dashboard git://git.openstack.org/openstack/heat-dashboard"
          export BRANCH_OVERRIDE=default
          if [ "$BRANCH_OVERRIDE" != "default" ] ; then
              export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
          fi

          function pre_test_hook {
              cd /opt/stack/new/murano-dashboard/functional_tests
              ./pre_test_hook.sh
          }
          export -f pre_test_hook

          function post_test_hook {
              cd /opt/stack/new/murano-dashboard/functional_tests
              ./post_test_hook.sh
          }
          export -f post_test_hook

          cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
          ./safe-devstack-vm-gate-wrap.sh
        executable: /bin/bash
        chdir: '{{ ansible_user_dir }}/workspace'
      environment: '{{ zuul | zuul_legacy_vars }}'