Add support to filter results by failure test_ids
This commit adds a new field to the query yaml, ``test_ids``, which is a list of test_ids that will be used to query the subunit2sql db to verify that at least one of them failed for the failed uuid. Change-Id: If3668709e3294b5d6bf9e1f082396fbc39c08512
This commit is contained in:
parent
108cbdb31e
commit
d83ef2e5ea
23
README.rst
23
README.rst
@ -65,6 +65,29 @@ and notifications by adding ``suppress-graph: true`` or
|
|||||||
``suppress-notification: true`` to the yaml file. These can be used to make
|
``suppress-notification: true`` to the yaml file. These can be used to make
|
||||||
sure expected failures don't show up on the unclassified page.
|
sure expected failures don't show up on the unclassified page.
|
||||||
|
|
||||||
|
If the only signature available is overly broad and adding additional logging
|
||||||
|
can't reasonably make a good signature, you can also filter the results of a
|
||||||
|
query based on the test_ids that failed for the run being checked.
|
||||||
|
This can be done by adding a ``test_ids`` keyword to the query file and then a
|
||||||
|
list of the test_ids to verify failed. The test_id should also exclude any
|
||||||
|
attrs; these are the attrs appended to the test_id between '[]'. For
|
||||||
|
example, 'smoke', 'slow', any service tags, etc. This is how subunit-trace
|
||||||
|
prints the test ids by default if you're using it. If any of the listed
|
||||||
|
test_ids match as failing for the run being checked with the query it will
|
||||||
|
return a match. Since filtering leverages subunit2sql which only receives
|
||||||
|
results from the gate pipeline, this technique will only work on the gate
|
||||||
|
queue. For example, if your query yaml file looked like::
|
||||||
|
|
||||||
|
query: >-
|
||||||
|
message:"ExceptionA"
|
||||||
|
test_ids:
|
||||||
|
- tempest.api.compute.servers.test_servers.test_update_server_name
|
||||||
|
- tempest.api.compute.servers.test_servers_negative.test_server_set_empty_name
|
||||||
|
|
||||||
|
this will only match the bug if the logstash query had a hit for the run and
|
||||||
|
either test_update_server_name or test_server_set_empty_name failed during the
|
||||||
|
run.
|
||||||
|
|
||||||
In order to support rapidly added queries, it's considered socially acceptable
|
In order to support rapidly added queries, it's considered socially acceptable
|
||||||
to approve changes that only add 1 new bug query, and to even self approve
|
to approve changes that only add 1 new bug query, and to even self approve
|
||||||
those changes by core reviewers.
|
those changes by core reviewers.
|
||||||
|
@ -15,6 +15,9 @@
|
|||||||
import dateutil.parser as dp
|
import dateutil.parser as dp
|
||||||
import gerritlib.gerrit
|
import gerritlib.gerrit
|
||||||
import pyelasticsearch
|
import pyelasticsearch
|
||||||
|
import sqlalchemy
|
||||||
|
from sqlalchemy import orm
|
||||||
|
from subunit2sql.db import api as db_api
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import logging
|
import logging
|
||||||
@ -26,6 +29,7 @@ import elastic_recheck.query_builder as qb
|
|||||||
from elastic_recheck import results
|
from elastic_recheck import results
|
||||||
|
|
||||||
ES_URL = "http://logstash.openstack.org/elasticsearch"
|
ES_URL = "http://logstash.openstack.org/elasticsearch"
|
||||||
|
DB_URI = 'mysql+pymysql://query:query@logstash.openstack.org/subunit2sql'
|
||||||
|
|
||||||
|
|
||||||
def required_files(job):
|
def required_files(job):
|
||||||
@ -346,6 +350,16 @@ class Stream(object):
|
|||||||
self.gerrit.review(event.project, event.name(), msg)
|
self.gerrit.review(event.project, event.name(), msg)
|
||||||
|
|
||||||
|
|
||||||
|
def check_failed_test_ids_for_job(build_uuid, test_ids, session):
    """Check whether any of the given test_ids failed for a job run.

    Queries the subunit2sql database for the test_ids that failed in the
    run identified by ``build_uuid`` and reports whether at least one of
    the supplied ``test_ids`` is among them.

    :param build_uuid: the build_short_uuid identifying the run to check.
    :param test_ids: list of test_ids (without attrs) to look for.
    :param session: an active SQLAlchemy session bound to the subunit2sql
        database.
    :returns: True if any entry of ``test_ids`` failed in the run, False
        otherwise (including when ``test_ids`` is empty).
    """
    failing_test_ids = db_api.get_failing_test_ids_from_runs_by_key_value(
        'build_short_uuid', build_uuid, session)
    # any() replaces the original for/else construct: the else clause
    # always executed when no match returned True early, so this is
    # behaviorally equivalent and clearer.
    return any(test_id in failing_test_ids for test_id in test_ids)
|
||||||
|
|
||||||
|
|
||||||
class Classifier(object):
|
class Classifier(object):
|
||||||
"""Classify failed tempest-devstack jobs based.
|
"""Classify failed tempest-devstack jobs based.
|
||||||
|
|
||||||
@ -384,6 +398,9 @@ class Classifier(object):
|
|||||||
# Reload each time
|
# Reload each time
|
||||||
self.queries = loader.load(self.queries_dir)
|
self.queries = loader.load(self.queries_dir)
|
||||||
bug_matches = []
|
bug_matches = []
|
||||||
|
engine = sqlalchemy.create_engine(DB_URI)
|
||||||
|
Session = orm.sessionmaker(bind=engine)
|
||||||
|
session = Session()
|
||||||
for x in self.queries:
|
for x in self.queries:
|
||||||
if x.get('suppress-notification'):
|
if x.get('suppress-notification'):
|
||||||
continue
|
continue
|
||||||
@ -394,5 +411,15 @@ class Classifier(object):
|
|||||||
build_short_uuid)
|
build_short_uuid)
|
||||||
results = self.es.search(query, size='10', recent=recent)
|
results = self.es.search(query, size='10', recent=recent)
|
||||||
if len(results) > 0:
|
if len(results) > 0:
|
||||||
bug_matches.append(x['bug'])
|
if x.get('test_ids', None):
|
||||||
|
test_ids = x['test_ids']
|
||||||
|
self.log.debug(
|
||||||
|
"For bug %s checking subunit2sql for failures on "
|
||||||
|
"test_ids: %s" % (x['bug'], test_ids))
|
||||||
|
if check_failed_test_ids_for_job(build_short_uuid,
|
||||||
|
test_ids, session):
|
||||||
|
bug_matches.append(x['bug'])
|
||||||
|
else:
|
||||||
|
bug_matches.append(x['bug'])
|
||||||
|
|
||||||
return bug_matches
|
return bug_matches
|
||||||
|
@ -0,0 +1,6 @@
|
|||||||
|
query: >
|
||||||
|
message:" 503"
|
||||||
|
AND filename:"logs/syslog.txt"
|
||||||
|
AND syslog_program:"proxy-server"
|
||||||
|
test_ids:
|
||||||
|
- tempest.api.object_storage.test_account_services.test_list_containers
|
@ -12,6 +12,8 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
import mock
|
||||||
|
|
||||||
from elastic_recheck import elasticRecheck as er
|
from elastic_recheck import elasticRecheck as er
|
||||||
from elastic_recheck.tests import unit
|
from elastic_recheck.tests import unit
|
||||||
|
|
||||||
@ -32,3 +34,46 @@ class TestElasticRecheck(unit.UnitTestCase):
|
|||||||
self.assertEqual(len(results), 20)
|
self.assertEqual(len(results), 20)
|
||||||
self.assertEqual(results.took, 46)
|
self.assertEqual(results.took, 46)
|
||||||
self.assertEqual(results.timed_out, False)
|
self.assertEqual(results.timed_out, False)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSubunit2sqlCrossover(unit.UnitTestCase):
    """Tests for the subunit2sql test_id filtering crossover logic."""

    @mock.patch(
        'subunit2sql.db.api.get_failing_test_ids_from_runs_by_key_value',
        return_value=['test1', 'test2', 'test3'])
    def test_check_failed_test_ids_for_job_matches(self, mock_db_api):
        """A run whose failing tests overlap the query's test_ids matches."""
        # 'test1' is in the mocked failing set, so a match is expected.
        res = er.check_failed_test_ids_for_job('fake_uuid',
                                               ['test1', 'test4'],
                                               mock.sentinel.session)
        self.assertTrue(res)

    @mock.patch(
        'subunit2sql.db.api.get_failing_test_ids_from_runs_by_key_value',
        return_value=['test23', 'test12', 'test300'])
    def test_check_failed_test_ids_for_job_no_matches(self, mock_db_api):
        """A run with no overlapping failing tests does not match."""
        # Neither 'test1' nor 'test4' is in the mocked failing set.
        res = er.check_failed_test_ids_for_job('fake_uuid',
                                               ['test1', 'test4'],
                                               mock.sentinel.session)
        self.assertFalse(res)

    @mock.patch.object(er, 'check_failed_test_ids_for_job', return_value=True)
    def test_classify_with_test_id_filter_match(self, mock_id_check):
        """classify() reports the bug when the test_id filter confirms it."""
        c = er.Classifier('./elastic_recheck/tests/unit/queries_with_filters')
        # Force an ES hit so classification proceeds to the test_id check.
        es_mock = mock.patch.object(c.es, 'search', return_value=[1, 2, 3])
        es_mock.start()
        self.addCleanup(es_mock.stop)
        res = c.classify(1234, 1, 'fake')
        self.assertEqual(res, ['1234567'],
                         "classify() returned %s when it should have returned "
                         "a list with one bug id: '1234567'" % res)

    @mock.patch.object(er, 'check_failed_test_ids_for_job', return_value=False)
    def test_classify_with_test_id_filter_no_match(self, mock_id_check):
        """classify() suppresses the bug when the test_id filter rejects it."""
        c = er.Classifier('./elastic_recheck/tests/unit/queries_with_filters')
        # ES hit alone is not enough: the failed-test_id check must pass too.
        es_mock = mock.patch.object(c.es, 'search', return_value=[1, 2, 3])
        es_mock.start()
        self.addCleanup(es_mock.stop)
        res = c.classify(1234, 1, 'fake')
        self.assertEqual(res, [],
                         "classify() returned bug matches %s when none should "
                         "have been found" % res)
|
||||||
|
@ -13,3 +13,6 @@ launchpadlib
|
|||||||
Jinja2
|
Jinja2
|
||||||
argparse
|
argparse
|
||||||
requests
|
requests
|
||||||
|
subunit2sql>=0.9.0
|
||||||
|
SQLAlchemy>=0.9.7,<1.1.0
|
||||||
|
PyMySQL>=0.6.2 # MIT License
|
||||||
|
Loading…
Reference in New Issue
Block a user