Merge "Add elastic-recheck data querying"

This commit is contained in:
Jenkins 2016-05-20 21:35:11 +00:00 committed by Gerrit Code Review
commit f6f12919af
8 changed files with 257 additions and 41 deletions

View File

@ -86,6 +86,55 @@ something like::
That will startup a uwsgi server running the rest api on port 5000.
Elastic Recheck Configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are certain API operations which will use the `elastic-recheck`_ project
to pull in additional information about failures that occur during a run.
However, since elastic-recheck is not widely deployed this is an optional
feature and is only enabled if elastic-recheck is installed (and importable
by the API server). Also note that elastic-recheck is not published on pypi and
must be manually installed via git. Additionally, after you install
elastic-recheck you also need to configure the location of the queries by
using the `query_dir` configuration option. If this is not set then the
elastic-recheck support will be disabled. Optionally, if you need to set
the URL of your elasticsearch API endpoint you can set this with the `es_url`
configuration option. By default it is configured to talk to openstack-infra's
elasticsearch server at http://logstash.openstack.org/elasticsearch
.. _elastic-recheck: http://git.openstack.org/cgit/openstack-infra/elastic-recheck/
Caching Configuration
^^^^^^^^^^^^^^^^^^^^^
Since the introduction of elastic recheck querying dogpile.cache has been
used to cache any request that hits elasticsearch. This is because the
query times for using elastic-recheck are quite variable and often very slow.
(at least for talking to openstack-infra's elasticsearch) To enable reasonable
interactive response times we cache the api response from requests using
elasticsearch data. Note, that this caching is enabled regardless of whether
elastic-recheck is enabled or not.
There are three configuration options available around configuring caching.
While the defaults were picked to work in most situations depending on your
specific deployment specifics there are other choices that might make more
sense.
The first is `cache_backend` which is used to set the python class for the
`dogpile.cache.api.CacheBackend`_ to use. By default this is set to
`dogpile.cache.dbm` which uses a DBM file on disk. You can effectively disable
all caching by setting this value to `dogpile.cache.null`.
.. _dogpile.cache.api.CacheBackend: http://dogpilecache.readthedocs.io/en/latest/api.html#dogpile.cache.api.CacheBackend
The second option is `cache_expiration` which is used to set the timeout value
to use for any cached responses. This is an integer for the number of seconds
to keep a response cached. By default this is set to 30mins.
The third option is `cache_file` which is used to set the file path when
the DBM backend is used. By default this is configured to use
TEMPDIR/openstack-health.dbm
Frontend
--------
The production application can be built using::

View File

@ -4,6 +4,17 @@ var controllersModule = require('./_index');
var d3 = require('d3');
/**
 * Return a copy of arr with duplicate entries removed, keeping the order
 * of first appearance. Entries are compared by their object-key string
 * form (the same coercion the original hasOwnProperty lookup performed),
 * so e.g. 1 and '1' are considered the same entry.
 */
function unique(arr) {
  var seen = {};
  return arr.filter(function(item) {
    if (seen.hasOwnProperty(item)) {
      return false;
    }
    seen[item] = 1;
    return true;
  });
}
/**
* @ngInject
*/
@ -122,12 +133,26 @@ function HomeController(
healthService.getRecentFailedTests().then(function(response) {
vm.recentTests = response.data;
vm.recentRuns = {};
angular.forEach(vm.recentTests, function(test) {
if (!vm.recentRuns[test.link]) {
vm.recentRuns[test.link] = [];
angular.forEach(vm.recentTests.test_runs, function(test) {
if (typeof vm.recentRuns[test.link] === 'undefined') {
vm.recentRuns[test.link] = {};
vm.recentRuns[test.link].bugs = [];
vm.recentRuns[test.link].fails = [];
}
vm.recentRuns[test.link].push(test);
if (vm.recentTests.bugs[test.uuid] && vm.recentTests.bugs[test.uuid].length > 0) {
vm.recentRuns[test.link].bugs.push(vm.recentTests.bugs[test.uuid]);
vm.recentRuns[test.link].bugs = unique(vm.recentRuns[test.link].bugs);
}
vm.recentRuns[test.link].fails.push(test);
});
for (var link in vm.recentRuns) {
if (vm.recentRuns[link].bugs.length === 0) {
vm.recentRuns[link].bugs = '';
} else {
vm.recentRuns[link].bugs = 'Likely bugs: ' + vm.recentRuns[link].bugs.join();
}
}
});
config.get().then(function(config) {
vm.apiRoot = config.apiRoot;

View File

@ -70,9 +70,9 @@
<div ng-repeat="(key, value) in home.recentRuns">
<uib-accordion-group
template-url="templates/accordion-group-run.html"
heading="{{ key }}"
heading="{{ key }};{{ value.bugs }}"
is-open="false">
<table table-sort data="value"
<table table-sort data="value.fails"
class="table table-hover default-cols">
<thead>
<tr>
@ -82,7 +82,7 @@
</tr>
</thead>
<tbody>
<tr ng-repeat="(k, v) in value">
<tr ng-repeat="(k, v) in value.fails">
<td><a ui-sref="test({ testId: v.test_id })"}>
{{ v.test_id }}</a>
</td>

View File

@ -4,10 +4,13 @@
<a role="button" data-toggle="collapse" href aria-expanded="{{isOpen}}" aria-controls="{{::panelId}}" tabindex="0" class="accordion-toggle" ng-click="toggleOpen()" uib-accordion-transclude="heading">
<span uib-accordion-header ng-class="{'text-muted': isDisabled}">
<i ng-class="{'fa fa-minus-square-o': isOpen, 'fa fa-plus-square-o': !isOpen}"></i>
{{heading | split:'/' | last:2 | join:'/'}}
{{heading | split:';' | first | join:'' | split:'/' | last:2 | join:'/'}}
</span>
</a>
<span class="text-info"><a target="_blank" href="{{heading}}"><fa name="external-link"></fa></a></span>
<span class="text-info"><a target="_blank" href="{{heading | split:';' | first | join:''}}"><fa name="external-link"></fa></a></span>
<span class="pull-right">
{{heading | split:';' | last | join:''}}
</span>
</h4>
</div>
<div id="{{::panelId}}" aria-labelledby="{{::headingId}}" aria-hidden="{{!isOpen}}" role="tabpanel" class="panel-collapse collapse" uib-collapse="!isOpen">

View File

@ -15,12 +15,17 @@
import argparse
from contextlib import contextmanager
import datetime
from dateutil import parser as date_parser
import itertools
import os
import six
from six.moves import configparser as ConfigParser
from six.moves.urllib import parse
import tempfile
import threading
import dogpile.cache
from feedgen import feed
import flask
from flask import abort
@ -36,13 +41,21 @@ from subunit2sql.db import api
from openstack_health.run_aggregator import RunAggregator
from openstack_health import test_run_aggregator
try:
from elastic_recheck import elasticRecheck as er
except ImportError:
er = None
app = flask.Flask(__name__)
app.config['PROPAGATE_EXCEPTIONS'] = True
config = None
engine = None
Session = None
query_dir = None
classifier = None
rss_opts = {}
feeds = {'last runs': {}}
region = None
def get_app():
@ -62,6 +75,10 @@ def _config_get(config_func, section, option, default_val=None):
@app.before_first_request
def _setup():
    # Flask hook: run the module-level setup() once before the first request
    # is served, so the handlers find their globals initialized.
    setup()
def setup():
global config
if not config:
@ -81,6 +98,41 @@ def setup():
except ConfigParser.Error:
rss_opts['frontend_url'] = ('http://status.openstack.org/'
'openstack-health')
try:
query_dir = config.get('default', 'query_dir')
except ConfigParser.Error:
pass
try:
es_url = config.get('default', 'es_url')
except ConfigParser.Error:
es_url = None
if query_dir and er:
global classifier
classifier = er.Classifier(query_dir, es_url=es_url)
try:
backend = config.get('default', 'cache_backend')
except ConfigParser.Error:
backend = 'dogpile.cache.dbm'
try:
expire = config.get('default', 'cache_expiration')
except ConfigParser.Error:
expire = datetime.timedelta(minutes=30)
try:
cache_file = config.get('default', 'cache_file')
except ConfigParser.Error:
cache_file = os.path.join(tempfile.gettempdir(),
'openstack-health.dbm')
global region
if backend == 'dogpile.cache.dbm':
args = {'filename': cache_file}
else:
args = {}
region = dogpile.cache.make_region().configure(backend,
expiration_time=expire,
arguments=args)
def get_session():
@ -408,19 +460,49 @@ def get_recent_failed_runs_rss(run_metadata_key, value):
# NOTE(review): this span comes from a rendered diff with the +/- markers
# stripped. It appears to contain BOTH the pre-change implementation (the
# first session_scope block ending in the first `return jsonify(output)`)
# AND its replacement (`bug_dict = {}` onward). In the applied change only
# the latter survives -- confirm against the repository before reading this
# as a single function. Indentation was also flattened by the diff renderer.
@app.route('/tests/recent/<string:status>', methods=['GET'])
def get_recent_test_status(status):
# Return recent test runs with the given status and, when an elastic-recheck
# classifier is configured, a run-id -> likely-bugs mapping alongside them.
global region
# `region` (the dogpile.cache region) is created in setup(); run setup here
# if the before_first_request hook has not fired yet.
if not region:
setup()
status = parse.unquote(status)
# How many recent failed runs to inspect; client-tunable via ?num_runs=.
num_runs = flask.request.args.get('num_runs', 10)
# ---- apparent OLD implementation (removed by this change) ----
with session_scope() as session:
failed_runs = api.get_recent_failed_runs(num_runs, session)
test_runs = api.get_test_runs_by_status_for_run_ids(status,
failed_runs,
session=session)
output = []
for run in test_runs:
run['start_time'] = run['start_time'].isoformat()
run['stop_time'] = run['stop_time'].isoformat()
output.append(run)
return jsonify(output)
# ---- apparent NEW implementation ----
# Accumulator written by the per-run classifier worker threads below.
bug_dict = {}
query_threads = []
def _populate_bug_dict(change_num, patch_num, short_uuid, run):
# Worker-thread body: ask the elastic-recheck classifier for likely bugs
# for a single run and record them under the run's id.
bug_dict[run] = classifier.classify(change_num, patch_num,
short_uuid, recent=True)
# Cache the whole (slow, elasticsearch-backed) response in the dogpile
# region so repeated requests do not re-query elasticsearch.
@region.cache_on_arguments()
def _get_recent(status):
with session_scope() as session:
failed_runs = api.get_recent_failed_runs(num_runs, session)
global classifier
if classifier:
# Spawn one classifier query thread per failed run, fed with the
# gerrit change/patchset/short-uuid pulled from the run metadata.
for run in failed_runs:
metadata = api.get_run_metadata(run, session=session)
for meta in metadata:
if meta.key == 'build_short_uuid':
short_uuid = meta.value
elif meta.key == 'build_change':
change_num = meta.value
elif meta.key == 'build_patchset':
patch_num = meta.value
# NOTE(review): if any of the three metadata keys is missing,
# short_uuid/change_num/patch_num is unbound (or carries a stale
# value from the previous loop iteration) -- confirm the metadata
# always contains all three keys.
query_thread = threading.Thread(
target=_populate_bug_dict, args=(change_num, patch_num,
short_uuid, run))
query_threads.append(query_thread)
query_thread.start()
test_runs = api.get_test_runs_by_status_for_run_ids(
status, failed_runs, session=session, include_run_id=True)
output = []
for run in test_runs:
# Serialize datetimes for the JSON response.
run['start_time'] = run['start_time'].isoformat()
run['stop_time'] = run['stop_time'].isoformat()
output.append(run)
# Wait for every classifier thread so bug_dict is fully populated
# before the response is built (and cached).
for thread in query_threads:
thread.join()
return {'test_runs': output, 'bugs': bug_dict}
return jsonify(_get_recent(status))
@app.route('/run/<string:run_id>/tests', methods=['GET'])

View File

@ -813,19 +813,72 @@ class TestRestAPI(base.TestCase):
'stop_time': timestamp_b,
}
])
# NOTE(review): rendered-diff artifact -- the next two `def` lines are the
# old and the new name of the same test; only the `_no_es` variant exists
# after the change. The same old/new duplication affects the
# `session=api.Session()` lines and the two `expected_resp = ` openings
# (and their closing brackets) below. Indentation was flattened by the
# diff renderer. Confirm against the repository.
def test_get_recent_test_failures(self, db_mock, recent_mock):
def test_get_recent_test_failures_no_es(self, db_mock, recent_mock):
# /tests/recent/<status> with no elastic-recheck classifier configured:
# the endpoint must still work and return an empty bugs mapping.
setup_mock = mock.patch('openstack_health.api.setup')
setup_mock.start()
self.addCleanup(setup_mock.stop)
# No classifier -> the handler skips elasticsearch queries entirely.
api.classifier = None
# Replace the dogpile cache decorator with an identity function so the
# handler body runs uncached.
api.region = mock.MagicMock()
api.region.cache_on_arguments = mock.MagicMock()
api.region.cache_on_arguments.return_value = lambda x: x
res = self.app.get('/tests/recent/fail')
self.assertEqual(200, res.status_code)
db_mock.assert_called_once_with('fail', ['a_convincing_id'],
session=api.Session())
session=api.Session(),
include_run_id=True)
response_data = json.loads(res.data.decode('utf-8'))
expected_resp = [
{
expected_resp = {
'bugs': {},
'test_runs': [{
'test_id': u'fake_test',
'link': u'fake_url',
'start_time': timestamp_a.isoformat(),
'stop_time': timestamp_b.isoformat(),
}]
}]}
self.assertEqual(expected_resp, response_data)
# Mock out the three subunit2sql DB calls the endpoint makes:
# - get_recent_failed_runs returns a single run id,
# - get_test_runs_by_status_for_run_ids returns one failing test run,
# - get_run_metadata returns the change/patchset/short-uuid triple that
#   the API hands to the elastic-recheck classifier.
# NOTE(review): indentation was flattened by the diff renderer.
@mock.patch('subunit2sql.db.api.get_recent_failed_runs',
return_value=['a_convincing_id'])
@mock.patch('subunit2sql.db.api.get_test_runs_by_status_for_run_ids',
return_value=[
{
'test_id': u'fake_test',
'link': u'fake_url',
'start_time': timestamp_a,
'stop_time': timestamp_b,
}
])
@mock.patch('subunit2sql.db.api.get_run_metadata',
return_value=[
models.RunMetadata(key='build_short_uuid', value='abcd'),
models.RunMetadata(key='build_change', value='1234'),
models.RunMetadata(key='build_patchset', value='3'),
])
def test_get_recent_test_failures_with_es(self, meta_mock, db_mock,
recent_mock):
# /tests/recent/<status> with a classifier configured: the response must
# include the classifier's bug result keyed by run id.
setup_mock = mock.patch('openstack_health.api.setup')
setup_mock.start()
self.addCleanup(setup_mock.stop)
# Replace the dogpile cache decorator with an identity function so the
# handler body runs uncached.
api.region = mock.MagicMock()
api.region.cache_on_arguments = mock.MagicMock()
api.region.cache_on_arguments.return_value = lambda x: x
# Stub the elastic-recheck classifier to report bug 12345 for every run.
api.classifier = mock.MagicMock()
api.classifier.classify = mock.MagicMock()
api.classifier.classify.return_value = ['12345']
res = self.app.get('/tests/recent/fail')
self.assertEqual(200, res.status_code)
db_mock.assert_called_once_with('fail', ['a_convincing_id'],
session=api.Session(),
include_run_id=True)
response_data = json.loads(res.data.decode('utf-8'))
# Response carries the serialized test runs plus the classified bugs.
expected_resp = {
'bugs': {'a_convincing_id': ['12345']},
'test_runs': [{
'test_id': u'fake_test',
'link': u'fake_url',
'start_time': timestamp_a.isoformat(),
'stop_time': timestamp_b.isoformat(),
}]}
self.assertEqual(expected_resp, response_data)
def test__gen_feed(self):

View File

@ -3,7 +3,8 @@
# process, which may cause wedges in the gate later.
pbr>=1.6 # Apache-2.0
Flask<1.0,>=0.10 # BSD
subunit2sql>=1.5.0 # Apache-2.0
dogpile.cache>=0.5.7 # BSD
subunit2sql>=1.6.0 # Apache-2.0
SQLAlchemy<1.1.0,>=1.0.10 # MIT
flask-jsonpify # MIT
PyMySQL>=0.6.2 # MIT License

View File

@ -14,20 +14,23 @@ describe('HomeController', function() {
}
}
};
var mockRecentFailed = [
{
'link': 'http://logs.openstack.org/97/280597/1/gate/gate-tempest-dsvm-ironic-pxe_ipa/61f4153',
'start_time': '2016-02-17T11:38:43.185384',
'stop_time': '2016-02-17T11:50:04.465870',
'test_id': 'ironic.test_baremetal_basic_ops.BaremetalBasicOps.test_baremetal_server_ops'
},
{
'link': 'http://logs.openstack.org/49/277949/2/gate/gate-tempest-dsvm-ironic-pxe_ipa/8ac452c',
'start_time': '2016-02-17T10:29:32.448360',
'stop_time': '2016-02-17T10:44:33.880733',
'test_id': 'ironic.test_baremetal_basic_ops.BaremetalBasicOps.test_baremetal_server_ops'
}
];
var mockRecentFailed = {
'bugs': {},
'test_runs': [
{
'link': 'http://logs.openstack.org/97/280597/1/gate/gate-tempest-ironic-pxe_ipa/61f4153',
'start_time': '2016-02-17T11:38:43.185384',
'stop_time': '2016-02-17T11:50:04.465870',
'test_id': 'ironic.test_baremetal_basic_ops.BaremetalBasicOps.test_baremetal_server_ops'
},
{
'link': 'http://logs.openstack.org/49/277949/2/gate/gate-tempest-ironic-pxe_ipa/8ac452c',
'start_time': '2016-02-17T10:29:32.448360',
'stop_time': '2016-02-17T10:44:33.880733',
'test_id': 'ironic.test_baremetal_basic_ops.BaremetalBasicOps.test_baremetal_server_ops'
}
]
};
beforeEach(inject(function($rootScope, _$controller_) {
$scope = $rootScope.$new();
@ -44,7 +47,7 @@ describe('HomeController', function() {
},
getRecentFailedTests: function() {
return {
then: function(callback) { callback(mockRecentFailed); }
then: function(callback) { callback({'data': mockRecentFailed}); }
};
}
};