Merge "Keep going on network timeouts or service errors"

This commit is contained in:
Jenkins 2016-01-23 03:17:19 +00:00 committed by Gerrit Code Review
commit 0ded0c866c
2 changed files with 37 additions and 16 deletions

View File

@ -22,6 +22,7 @@ import os
 import sys
 from launchpadlib import launchpad
+import pyelasticsearch
 import pytz
 import requests
@ -50,10 +51,10 @@ LOG = logging.getLogger('ergraph')
 def get_launchpad_bug(bug):
-    lp = launchpad.Launchpad.login_anonymously('grabbing bugs',
-                                               'production',
-                                               LPCACHEDIR)
     try:
+        lp = launchpad.Launchpad.login_anonymously('grabbing bugs',
+                                                   'production',
+                                                   LPCACHEDIR)
         lp_bug = lp.bugs[bug]
         bugdata = {'name': lp_bug.title}
         projects = ", ".join(map(lambda x: "(%s - %s)" %
@ -65,6 +66,10 @@ def get_launchpad_bug(bug):
         # if someone makes a bug private, we lose access to it.
         bugdata = dict(name='Unknown (Private Bug)',
                        affects='Unknown (Private Bug)', reviews=[])
+    except requests.exceptions.RequestException:
+        LOG.exception("Failed to get Launchpad data for bug %s" % bug)
+        bugdata = dict(name='Unable to get launchpad data',
+                       affects='Unknown', reviews=[])
     return bugdata
@ -175,9 +180,18 @@ def main():
                        fails24=0,
                        data=[])
         buglist.append(bug)
-        results = classifier.hits_by_query(query['query'],
-                                           args.queue,
-                                           size=3000)
+        try:
+            results = classifier.hits_by_query(query['query'],
+                                               args.queue,
+                                               size=3000)
+        except pyelasticsearch.exceptions.InvalidJsonResponseError:
+            LOG.exception("Invalid Json while collecting metrics for query %s"
+                          % query['query'])
+            continue
+        except requests.exceptions.ReadTimeout:
+            LOG.exception("Timeout while collecting metrics for query %s" %
+                          query['query'])
+            continue
         facets_for_fail = er_results.FacetSet()
         facets_for_fail.detect_facets(results,

View File

@ -18,8 +18,10 @@ import argparse
 import collections
 import ConfigParser
 import datetime
+import logging
 import operator
 import re
+import requests
 import dateutil.parser as dp
 import jinja2
@ -42,6 +44,8 @@ EXCLUDED_JOBS = (
 EXCLUDED_JOBS_REGEX = re.compile('(' + '|'.join(EXCLUDED_JOBS) + ')')
+
+LOG = logging.getLogger('eruncategorized')
 
 def get_options():
     parser = argparse.ArgumentParser(
@ -253,16 +257,19 @@ def _failure_percentage(hits, fails):
 def collect_metrics(classifier, fails):
     data = {}
     for q in classifier.queries:
-        results = classifier.hits_by_query(q['query'], size=30000)
-        hits = _status_count(results)
-        data[q['bug']] = {
-            'fails': _failure_count(hits),
-            'hits': hits,
-            'percentages': _failure_percentage(results, fails),
-            'query': q['query'],
-            'failed_jobs': _failed_jobs(results)
-        }
+        try:
+            results = classifier.hits_by_query(q['query'], size=30000)
+            hits = _status_count(results)
+            data[q['bug']] = {
+                'fails': _failure_count(hits),
+                'hits': hits,
+                'percentages': _failure_percentage(results, fails),
+                'query': q['query'],
+                'failed_jobs': _failed_jobs(results)
+            }
+        except requests.exceptions.ReadTimeout:
+            LOG.exception("Failed to collection metrics for query %s" %
+                          q['query'])
     return data