Optimize algorithm that retrieves CI results from Gerrit

The new algorithm iterates over drivers and branches and polls the
corresponding reviews from Gerrit. Reviews are scanned until a vote or a
comment matching the configured pattern is found; a condensed sketch follows.
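
In outline (condensed from update_drivers() and find_ci_result() in the
diff below; _release_of() is a shorthand stand-in for the real
_get_release_by_branch() helper):

    branches = ['stable/' + r['id'].lower() for r in releases] + ['master']
    for driver in drivers.values():
        if 'ci' not in driver:
            continue  # driver has no CI configured, nothing to poll
        for branch in branches:
            # ask Gerrit only for reviews the driver's CI account commented on
            reviews = rcs_inst.log(project=driver['project_id'],
                                   branch=branch,
                                   reviewer=driver['ci']['id'])
            # stop at the first review carrying a matching vote or comment
            result = find_ci_result(reviews, driver['ci'])
            if result:
                driver['releases'][_release_of(branch)] = result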

Also:
 * CI results are merged into drivers during retrieval; drivers are stored
   in memcached as part of default-data (see the storage sketch after this
   list)
 * Enforced capitalized release ids (e.g. "Icehouse")
 * Removed unused code from the dashboard
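
After this change the processor keeps only three keys in memcached (a
sketch of what process() in the diff below writes):

    memcached_inst.set('driverlog:default_data', default_data)
    memcached_inst.set('driverlog:default_data_hash', new_dd_hash)
    memcached_inst.set('driverlog:update_time', time.time())

The dashboard compares driverlog:update_time against its cached copy and
rebuilds its vault when the value changes.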

Closes bug 1319293
Closes bug 1318051

Change-Id: Id8893deb1fcb7d206830678c2aefe6f5e5751c71
Ilya Shakhat 2014-05-14 18:45:29 +04:00
parent 7fe83f00a5
commit d61ed36d50
12 changed files with 292 additions and 602 deletions


@@ -1,154 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import six
-
-MEMORY_STORAGE_CACHED = 0
-
-
-class MemoryStorage(object):
-    def __init__(self):
-        pass
-
-
-class CachedMemoryStorage(MemoryStorage):
-    def __init__(self):
-        super(CachedMemoryStorage, self).__init__()
-
-        # common indexes
-        self.records = {}
-        self.primary_key_index = {}
-        self.record_types_index = {}
-        self.module_index = {}
-        self.user_id_index = {}
-        self.company_index = {}
-        self.release_index = {}
-        self.blueprint_id_index = {}
-        self.company_name_mapping = {}
-
-        self.indexes = {
-            'primary_key': self.primary_key_index,
-            'record_type': self.record_types_index,
-            'company_name': self.company_index,
-            'module': self.module_index,
-            'user_id': self.user_id_index,
-            'release': self.release_index,
-        }
-
-    def _save_record(self, record):
-        if record.get('company_name') == '*robots':
-            return
-        self.records[record['record_id']] = record
-        for key, index in six.iteritems(self.indexes):
-            self._add_to_index(index, record, key)
-        for bp_id in (record.get('blueprint_id') or []):
-            if bp_id in self.blueprint_id_index:
-                self.blueprint_id_index[bp_id].add(record['record_id'])
-            else:
-                self.blueprint_id_index[bp_id] = set([record['record_id']])
-
-    def update(self, records):
-        have_updates = False
-
-        for record in records:
-            have_updates = True
-            record_id = record['record_id']
-            if record_id in self.records:
-                # remove existing record from indexes
-                self._remove_record_from_index(self.records[record_id])
-            self._save_record(record)
-
-        if have_updates:
-            self.company_name_mapping = dict(
-                (c.lower(), c) for c in self.company_index.keys())
-
-        return have_updates
-
-    def _remove_record_from_index(self, record):
-        for key, index in six.iteritems(self.indexes):
-            index[record[key]].remove(record['record_id'])
-
-    def _add_to_index(self, record_index, record, key):
-        record_key = record[key]
-        if record_key in record_index:
-            record_index[record_key].add(record['record_id'])
-        else:
-            record_index[record_key] = set([record['record_id']])
-
-    def _get_record_ids_from_index(self, items, index):
-        record_ids = set()
-        for item in items:
-            if item in index:
-                record_ids |= index[item]
-        return record_ids
-
-    def get_record_ids_by_modules(self, modules):
-        return self._get_record_ids_from_index(modules, self.module_index)
-
-    def get_record_ids_by_companies(self, companies):
-        return self._get_record_ids_from_index(
-            map(self.get_original_company_name, companies),
-            self.company_index)
-
-    def get_record_ids_by_user_ids(self, launchpad_ids):
-        return self._get_record_ids_from_index(launchpad_ids,
-                                               self.user_id_index)
-
-    def get_record_ids_by_releases(self, releases):
-        return self._get_record_ids_from_index(releases, self.release_index)
-
-    def get_record_ids_by_blueprint_ids(self, blueprint_ids):
-        return self._get_record_ids_from_index(blueprint_ids,
-                                               self.blueprint_id_index)
-
-    def get_record_ids(self):
-        return self.records.keys()
-
-    def get_record_ids_by_type(self, record_type):
-        return self.record_types_index.get(record_type, set())
-
-    def get_records(self, record_ids):
-        for i in record_ids:
-            yield self.records[i]
-
-    def get_record_by_primary_key(self, primary_key):
-        if primary_key in self.primary_key_index:
-            record_id = list(self.primary_key_index[primary_key])
-            if record_id:
-                return self.records[record_id[0]]
-        return None
-
-    def get_original_company_name(self, company_name):
-        normalized = company_name.lower()
-        if normalized not in self.company_name_mapping:
-            return normalized
-        return self.company_name_mapping[normalized]
-
-    def get_companies(self):
-        return self.company_index.keys()
-
-    def get_modules(self):
-        return self.module_index.keys()
-
-    def get_user_ids(self):
-        return self.user_id_index.keys()
-
-
-def get_memory_storage(memory_storage_type):
-    if memory_storage_type == MEMORY_STORAGE_CACHED:
-        return CachedMemoryStorage()
-    else:
-        raise Exception('Unknown memory storage type %s' %
-                        memory_storage_type)


@@ -197,18 +197,24 @@ function show_summary(base_url) {
                 tableData[i].driver_info += "<div>" + tableData[i].description + "</div>";
             }
-            tableData[i].in_trunk = "";
+            var releases_list = [];
             for (var j = 0; j < tableData[i].releases_info.length; j++) {
-                tableData[i].in_trunk += "<a href=\"" + tableData[i].releases_info[j].wiki + "\" target=\"_blank\">" +
-                        tableData[i].releases_info[j].name + "</a> ";
+                releases_list.push("<a href=\"" + tableData[i].releases_info[j].wiki + "\" target=\"_blank\">" +
+                        tableData[i].releases_info[j].name + "</a>");
             }
+            tableData[i].in_trunk = releases_list.join(" ");

             tableData[i].ci_tested = "";
-            if (tableData[i].os_versions_map["master"]) {
-                var master = tableData[i].os_versions_map["master"];
-                if (master.review_url) {
-                    tableData[i].ci_tested = "<a href=\"" + master.review_url +
-                            "\" target=\"_blank\" title=\"Click for details\"><span style=\"color: #008000\">&#x2714;</span></a>";
+            if (tableData[i].ci) {
+                if (tableData[i].releases_info.length > 0) {
+                    var last_release = tableData[i].releases_info[tableData[i].releases_info.length - 1].release_id;
+                    var master = tableData[i].releases[last_release];
+                    if (master.review_url) {
+                        tableData[i].ci_tested = "<a href=\"" + master.review_url +
+                                "\" target=\"_blank\" title=\"Click for details\"><span style=\"color: #008000\">&#x2714;</span></a>";
+                    } else {
+                        tableData[i].ci_tested = "<span style=\"color: #808080\">&#x2714;</span>";
+                    }
                 } else {
                     tableData[i].ci_tested = "<span style=\"color: #808080\">&#x2714;</span>";
                 }
@@ -216,21 +222,22 @@ function show_summary(base_url) {
                 tableData[i].ci_tested = "<span style=\"color: darkred\">&#x2716;</span>";
             }
-            tableData[i].maintainers_info = "";
+            var maintainers_list = [];
             if (tableData[i].maintainers) {
                 for (j = 0; j < tableData[i].maintainers.length; j++) {
                     var maintainer = tableData[i].maintainers[j];
                     var mn = maintainer.name;
                     if (maintainer.launchpad_id) {
-                        tableData[i].maintainers_info = "<a href=\"http://stackalytics.com/?user_id=" +
-                                maintainer.launchpad_id + "\" target=\"_blank\">" + mn + "</a>";
+                        maintainers_list.push("<a href=\"http://stackalytics.com/?user_id=" +
+                                maintainer.launchpad_id + "\" target=\"_blank\">" + mn + "</a>");
                     }
                     else if (maintainer.irc) {
-                        tableData[i].maintainers_info = "<a href=\"irc:" + maintainer.irc + "\">" + mn + "</a>";
+                        maintainers_list.push("<a href=\"irc:" + maintainer.irc + "\">" + mn + "</a>");
                     } else {
-                        tableData[i].maintainers_info = mn;
+                        maintainers_list.push(mn);
                     }
                 }
+                tableData[i].maintainers_info = maintainers_list.join(", ");
             } else {
                 tableData[i].maintainers_info = "";
             }


@@ -18,7 +18,6 @@ import re
 import flask
 import memcache

-from driverlog.dashboard import memory_storage
 from driverlog.openstack.common import log as logging
@@ -35,19 +34,14 @@ def _build_projects_map(default_data):
 def _build_releases_map(default_data):
     releases_map = {}
     for release in default_data['releases']:
-        releases_map[release['id']] = release
+        releases_map[release['id'].lower()] = release
     return releases_map


 def _extend_drivers_info():
     for driver in get_vault()['drivers_map'].values():
         releases_info = []
-        for release in driver['os_versions_map'].keys():
-            release = release.lower()
-            if release.find('/') > 0:
-                release = release.split('/')[1]
-            if release == 'master':
-                release = get_vault()['default_data']['releases'][-1]['id']
+        for release in driver['releases'].keys():
             if release in get_vault()['releases_map']:
                 releases_info.append(
                     {
@@ -61,35 +55,20 @@ def _extend_drivers_info():
         if 'email' in driver['maintainer']:
             del driver['maintainer']['email']

-
-def _build_drivers_map(default_data, projects_map):
-    driver_map = {}
-    for driver in default_data['drivers']:
-        driver['project_name'] = projects_map[driver['project_id']]['name']
-        key = (driver['project_id'].lower(),
-               driver['vendor'].lower(),
-               driver['name'].lower())
-        driver_map[key] = driver
-    return driver_map
+        driver['project_name'] = (get_vault()['projects_map']
+                                  [driver['project_id']]['name'])


 def get_vault():
     vault = getattr(flask.current_app, 'driverlog_vault', None)
     if not vault:
         try:
-            vault = {}
-            vault['memory_storage'] = memory_storage.get_memory_storage(
-                memory_storage.MEMORY_STORAGE_CACHED)
-
             if 'CONF' not in flask.current_app.config:
                 LOG.critical('Configure environment variable DRIVERLOG_CONF '
                              'with path to config file')
                 flask.abort(500)
+            vault = {}

             conf = flask.current_app.config['CONF']
             MEMCACHED_URI_PREFIX = r'^memcached:\/\/'
@@ -110,37 +89,21 @@ def get_vault():
     flask.request.driverlog_updated = True

     memcached = vault['memcached']
-    hashes = memcached.get_multi(['default_data_hash', 'update_hash'],
-                                 key_prefix='driverlog:')
+    update_time = memcached.get('driverlog:update_time')

-    if vault.get('default_data_hash') != hashes.get('default_data_hash'):
-        vault['default_data_hash'] = hashes['default_data_hash']
-        vault['default_data'] = memcached.get('driverlog:default_data')
+    if vault.get('update_time') != update_time:
+        vault['update_time'] = update_time

-        projects_map = _build_projects_map(vault['default_data'])
+        default_data = memcached.get('driverlog:default_data')
+        vault['default_data'] = default_data
+
+        projects_map = _build_projects_map(default_data)
         vault['projects_map'] = projects_map

-        releases_map = _build_releases_map(vault['default_data'])
+        releases_map = _build_releases_map(default_data)
         vault['releases_map'] = releases_map

-        drivers_map = _build_drivers_map(
-            vault['default_data'], projects_map)
-        vault['drivers_map'] = drivers_map
-
-        _extend_drivers_info()
-
-    if vault.get('update_hash') != hashes.get('update_hash'):
-        vault['update_hash'] = hashes['update_hash']
-        update = memcached.get('driverlog:update')
-
-        for proj_vendor_driver, os_versions_map in update.iteritems():
-            ovm = os_versions_map['os_versions_map']
-            if proj_vendor_driver not in vault['drivers_map']:
-                LOG.info('Unknown driver %s, ignoring', proj_vendor_driver)
-            else:
-                vault['drivers_map'][proj_vendor_driver][
-                    'os_versions_map'].update(ovm)
+        vault['drivers_map'] = default_data['drivers']

         _extend_drivers_info()
@@ -149,7 +112,3 @@ def get_vault():
                      'Please run the processor')

     return vault
-
-
-def get_memory_storage():
-    return get_vault()['memory_storage']


@@ -14,7 +14,6 @@
 # limitations under the License.

 import os
-import urllib

 import flask
 from flask.ext import gravatar as gravatar_ext
@@ -23,7 +22,6 @@ import six

 from driverlog.dashboard import api
 from driverlog.dashboard import decorators
-from driverlog.dashboard import vault
 from driverlog.openstack.common import log as logging
 from driverlog.processor import config
@@ -61,35 +59,6 @@ def summary():
     pass


-@app.route('/details')
-@decorators.templated()
-def details():
-    project_id = flask.request.args.get('project_id') or ''
-    vendor = flask.request.args.get('vendor') or ''
-    driver_name = flask.request.args.get('driver_name') or ''
-
-    drivers_map = vault.get_vault()['drivers_map']
-    key = (urllib.unquote_plus(project_id).lower(),
-           urllib.unquote_plus(vendor).lower(),
-           urllib.unquote_plus(driver_name).lower())
-    if key not in drivers_map:
-        flask.abort(404)
-
-    driver = drivers_map[key]
-    os_versions_list = []
-    for os_version, os_version_info in driver['os_versions_map'].iteritems():
-        os_version_info['os_version'] = os_version
-        os_versions_list.append(os_version_info)
-    sorted(os_versions_list, key=lambda x: x['os_version'])
-    driver['os_versions'] = os_versions_list
-
-    return {
-        'driver': driver,
-    }
-
-
 @app.errorhandler(404)
 @decorators.templated('404.html', 404)
 def page_not_found(e):


@@ -13,16 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import collections
-import hashlib
-import json
 import re
 import time

 import memcache
 from oslo.config import cfg
-from six.moves.urllib import parse

 from driverlog.openstack.common import log as logging
 from driverlog.processor import config
@@ -33,19 +28,10 @@ from driverlog.processor import utils
 LOG = logging.getLogger(__name__)


-def find_comment(review, ci):
-    patch_number = review['currentPatchSet']['number']
-
-    for comment in reversed(review.get('comments') or []):
-        prefix = 'Patch Set %s:' % patch_number
-        if ((comment['reviewer'].get('username') == ci) and
-                (comment['message'].find(prefix) == 0)):
-            return comment['message'][len(prefix):].strip()
-    return None
-
-
-def find_vote(review, ci_id):
+def _find_vote(review, ci_id):
+    """
+    Finds vote corresponding to ci_id
+    """
     for approval in (review['currentPatchSet'].get('approvals') or []):
         if approval['type'] not in ['Verified', 'VRIF']:
             continue
@@ -56,164 +42,139 @@ def find_vote(review, ci_id):
     return None


-def process_reviews(review_iterator, ci_ids_map, project_id):
-    branch_ci_set = set()
+def find_ci_result(review_iterator, ci):
+    """
+    For a given stream of reviews finds result left by specified ci
+    """
     for review in review_iterator:
         review_url = review['url']
-        branch = review['branch']

         for comment in reversed(review.get('comments') or []):
-            ci_id = comment['reviewer'].get('username')
-            if ci_id not in ci_ids_map:
+            if comment['reviewer'].get('username') != ci['id']:
                 continue

-            branch_ci = (branch, ci_id)
-            if branch_ci in branch_ci_set:
-                continue  # already seen, ignore
-            branch_ci_set.add(branch_ci)
-
             message = comment['message']
             prefix = 'Patch Set %s:' % review['currentPatchSet']['number']
             if comment['message'].find(prefix) != 0:
                 break  # all comments from the latest patch set passed
             message = message[len(prefix):].strip()

-            for one_ci in ci_ids_map[ci_id]:
-                result = None
-
-                # try to get result by parsing comment message
-                success_pattern = one_ci.get('success_pattern')
-                failure_pattern = one_ci.get('failure_pattern')
-                result = None
-
-                if success_pattern and re.search(success_pattern, message):
-                    result = True
-                elif failure_pattern and re.search(failure_pattern, message):
-                    result = False
-
-                # try to get result from vote
-                if result is None:
-                    result = find_vote(review, ci_id)
-
-                if result is not None:
-                    yield {
-                        (project_id,
-                         one_ci['vendor'].lower(),
-                         one_ci['driver_name'].lower()): {
-                            'os_versions_map': {
-                                branch: {
-                                    'comment': message,
-                                    'timestamp': comment['timestamp'],
-                                    'review_url': review_url
-                                }
-                            }
-                        }
-                    }
-
-
-def update_generator(memcached_inst, default_data, ci_ids_map,
-                     force_update=False):
-    for project in default_data['projects']:
-        project_id = project['id'].lower()
-        rcs_inst = rcs.get_rcs(project_id, cfg.CONF.review_uri)
-        rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
-                       username=cfg.CONF.ssh_username)
-
-        LOG.debug('Processing reviews for project: %s', project_id)
-
-        rcs_key = 'driverlog:rcs:' + parse.quote_plus(project_id)
-        last_id = None
-        if not force_update:
-            last_id = memcached_inst.get(rcs_key)
-
-        review_iterator = rcs_inst.log(last_id)
-        for item in process_reviews(review_iterator, ci_ids_map, project_id):
-            yield item
-
-        last_id = rcs_inst.get_last_id()
-        LOG.debug('RCS last id is: %s', last_id)
-        memcached_inst.set(rcs_key, last_id)
-
-
-def _get_hash(data):
-    h = hashlib.new('sha1')
-    h.update(json.dumps(data))
-    return h.hexdigest()
-
-
-def build_ci_map(drivers):
-    ci_map = collections.defaultdict(list)
-    for driver in drivers:
-        if 'ci' in driver:
-            value = {
-                'vendor': driver['vendor'],
-                'driver_name': driver['name'],
-            }
-            ci = driver['ci']
-            if 'success_pattern' in ci:
-                value['success_pattern'] = ci['success_pattern']
-            if 'failure_pattern' in ci:
-                value['failure_pattern'] = ci['failure_pattern']
-
-            ci_map[ci['id']].append(value)
-    return ci_map
-
-
-def transform_default_data(default_data):
-    for driver in default_data['drivers']:
-        driver['os_versions_map'] = {}
-        if 'releases' in driver:
-            for release in driver['releases']:
-                driver['os_versions_map'][release] = {
-                    'success': True,
-                    'comment': 'self-tested verification'
-                }
-
-
-def store_default_data(default_data, memcached_inst):
-    transform_default_data(default_data)
-    memcached_inst.set('driverlog:default_data', default_data)
-
-    old_dd_hash = memcached_inst.get('driverlog:default_data_hash')
-    new_dd_hash = _get_hash(default_data)
-    memcached_inst.set('driverlog:default_data_hash', new_dd_hash)
-
-    return new_dd_hash != old_dd_hash
-
-
-def calculate_update(memcached_inst, default_data, force_update):
-    update = {}
-    if not force_update:
-        update = memcached_inst.get('driverlog:update') or {}
-
-    ci_ids_map = build_ci_map(default_data['drivers'])
-    need_update = force_update
-
-    for record in update_generator(memcached_inst, default_data, ci_ids_map,
-                                   force_update=force_update):
-        LOG.info('Got new record from Gerrit: %s', record)
-        need_update = True
-
-        key = record.keys()[0]
-        if key not in update:
-            update.update(record)
-        else:
-            os_version = record[key]['os_versions_map'].keys()[0]
-            info = record[key]['os_versions_map'].values()[0]
-            if os_version in update[key]['os_versions_map']:
-                update[key]['os_versions_map'][os_version].update(info)
-            else:
-                update[key]['os_versions_map'][os_version] = info
-
-    # write update into memcache
-    memcached_inst.set('driverlog:update', update)
-
-    if need_update:
-        memcached_inst.set('driverlog:update_hash', time.time())
+            result = None
+
+            # try to get result by parsing comment message
+            success_pattern = ci.get('success_pattern')
+            failure_pattern = ci.get('failure_pattern')
+
+            if success_pattern and re.search(success_pattern, message):
+                result = True
+            elif failure_pattern and re.search(failure_pattern, message):
+                result = False
+
+            # try to get result from vote
+            if result is None:
+                result = _find_vote(review, ci['id'])
+
+            if result is not None:
+                return {
+                    'ci_result': result,
+                    'comment': message,
+                    'timestamp': comment['timestamp'],
+                    'review_url': review_url,
+                }
+
+
+def _get_release_by_branch(releases, branch):
+    """
+    Translates branch name into release_id
+    """
+    release = branch.lower()
+    if release.find('/') > 0:
+        return release.split('/')[1]
+    elif release == 'master':
+        return releases[-1]['id'].lower()
+
+
+def update_drivers(drivers, releases):
+    """
+    Iterates all drivers and searches for results produced by their CIs
+
+    Returns True if info was updated
+    """
+    branches = [('stable/' + r['id'].lower()) for r in releases] + ['master']
+
+    rcs_inst = rcs.get_rcs(cfg.CONF.review_uri)
+    rcs_inst.setup(key_filename=cfg.CONF.ssh_key_filename,
+                   username=cfg.CONF.ssh_username)
+
+    has_updates = False
+
+    for driver in drivers.values():
+        if 'ci' not in driver:
+            continue
+
+        project_id = driver['project_id']
+        ci_id = driver['ci']['id']
+
+        for branch in branches:
+            LOG.debug('Searching reviews for project: %(project_id)s, '
+                      'branch: %(branch)s, ci_id: %(ci_id)s',
+                      {'project_id': project_id, 'branch': branch,
+                       'ci_id': ci_id})
+
+            review_iterator = rcs_inst.log(project=project_id, branch=branch,
+                                           reviewer=ci_id)
+            ci_result = find_ci_result(review_iterator, driver['ci'])
+            if ci_result:
+                LOG.debug('Found CI result: %s', ci_result)
+                has_updates = True
+
+                key = (project_id, driver['vendor'], driver['name'])
+                os_version = _get_release_by_branch(releases, branch)
+                ci_result['ci_tested'] = True
+                drivers[key]['releases'][os_version] = ci_result
+
+    return has_updates
+
+
+def transform_default_data(default_data):
+    transformed_drivers = {}
+
+    for driver in default_data['drivers']:
+        transformed_releases = {}
+        if 'releases' in driver:
+            for release in driver['releases']:
+                transformed_releases[release.lower()] = {
+                    'ci_tested': False,
+                }
+        driver['releases'] = transformed_releases
+
+        key = (driver['project_id'], driver['vendor'], driver['name'])
+        transformed_drivers[key] = driver
+
+    default_data['drivers'] = transformed_drivers
+
+
+def process(memcached_inst, default_data, force_update):
+    old_dd_hash = memcached_inst.get('driverlog:default_data_hash')
+    new_dd_hash = utils.calc_hash(default_data)
+
+    need_update = False
+
+    if (new_dd_hash != old_dd_hash) or force_update:
+        transform_default_data(default_data)
+        need_update = True
+    else:
+        default_data = memcached_inst.get('driverlog:default_data')
+
+    need_update |= update_drivers(default_data['drivers'],
+                                  default_data['releases'])
+
+    if need_update:
+        # write default data into memcache
+        memcached_inst.set('driverlog:default_data', default_data)
+        memcached_inst.set('driverlog:default_data_hash', new_dd_hash)
+        memcached_inst.set('driverlog:update_time', time.time())
@@ -239,10 +200,7 @@ def main():
         LOG.critical('Unable to load default data')
         return not 0

-    dd_update = store_default_data(default_data, memcached_inst)
-    calculate_update(memcached_inst, default_data,
-                     cfg.CONF.force_update or dd_update)
+    process(memcached_inst, default_data, cfg.CONF.force_update)


 if __name__ == '__main__':


@@ -17,6 +17,7 @@ import json
 import re

 import paramiko
+import six

 from driverlog.openstack.common import log as logging
@@ -25,12 +26,12 @@
 DEFAULT_PORT = 29418
 GERRIT_URI_PREFIX = r'^gerrit:\/\/'
-PAGE_LIMIT = 100
+PAGE_LIMIT = 5


 class Rcs(object):
-    def __init__(self, project_id, uri):
-        self.project_id = project_id
+    def __init__(self, uri):
+        pass

     def setup(self, **kwargs):
         pass
@@ -43,8 +44,8 @@ class Rcs(object):

 class Gerrit(Rcs):
-    def __init__(self, project_id, uri):
-        super(Gerrit, self).__init__(project_id, uri)
+    def __init__(self, uri):
+        super(Gerrit, self).__init__(uri)

         stripped = re.sub(GERRIT_URI_PREFIX, '', uri)
         if stripped:
@@ -83,11 +84,14 @@ class Gerrit(Rcs):
             LOG.exception(e)
             return False

-    def _get_cmd(self, project_id, sort_key=None, limit=PAGE_LIMIT):
+    def _get_cmd(self, sort_key=None, limit=PAGE_LIMIT, **kwargs):
+        params = ' '.join([(k + ':\'' + v + '\'')
+                           for k, v in six.iteritems(kwargs)])
         cmd = ('gerrit query --format JSON '
-               'project:\'%(project_id)s\' limit:%(limit)s '
+               '%(params)s limit:%(limit)s '
                '--current-patch-set --comments ' %
-               {'project_id': project_id, 'limit': limit})
+               {'params': params, 'limit': limit})
         cmd += ' is:merged'
         if sort_key:
             cmd += ' resume_sortkey:%016x' % sort_key
@@ -102,11 +106,11 @@
             LOG.exception(e)
             return False

-    def _poll_reviews(self, project_id, start_id=None, last_id=None):
+    def _poll_reviews(self, start_id=None, last_id=None, **kwargs):
         sort_key = start_id

         while True:
-            cmd = self._get_cmd(project_id, sort_key)
+            cmd = self._get_cmd(sort_key, **kwargs)
             LOG.debug('Executing command: %s', cmd)
             exec_result = self._exec_command(cmd)
             if not exec_result:
@@ -124,55 +128,27 @@
                     break

                 proceed = True
-                review['project_id'] = project_id
                 yield review

             if not proceed:
                 break

-    def log(self, last_id):
+    def log(self, **kwargs):
         if not self._connect():
             return

         # poll new merged reviews from the top down to last_id
-        LOG.debug('Poll new reviews for project: %s', self.project_id)
-        for review in self._poll_reviews(self.project_id, last_id=last_id):
+        for review in self._poll_reviews(**kwargs):
             yield review

         self.client.close()

-    def get_last_id(self):
-        if not self._connect():
-            return None
-
-        LOG.debug('Get last id for project: %s', self.project_id)
-
-        cmd = self._get_cmd(self.project_id, limit=1)
-        LOG.debug('Executing command: %s', cmd)
-        exec_result = self._exec_command(cmd)
-        if not exec_result:
-            return None
-
-        stdin, stdout, stderr = exec_result
-
-        last_id = None
-        for line in stdout:
-            review = json.loads(line)
-            if 'sortKey' in review:
-                last_id = int(review['sortKey'], 16)
-            break
-
-        self.client.close()
-
-        LOG.debug('Project %(project_id)s last id is %(id)s',
-                  {'project_id': self.project_id, 'id': last_id})
-        return last_id
-

-def get_rcs(project_id, uri):
+def get_rcs(uri):
     LOG.debug('Review control system is requested for uri %s' % uri)
     match = re.search(GERRIT_URI_PREFIX, uri)
     if match:
-        return Gerrit(project_id, uri)
+        return Gerrit(uri)
     else:
         LOG.warning('Unsupported review control system, fallback to dummy')
-        return Rcs(project_id, uri)
+        return Rcs(uri)


@@ -15,6 +15,7 @@
 import cgi
 import datetime
+import hashlib
 import json
 import re
 import time
@@ -181,3 +182,9 @@ def copy_dict(source, include=None, exclude=None):
     exclude = exclude or []
     return dict([(k, v) for k, v in six.iteritems(source)
                  if k in include and k not in exclude])
+
+
+def calc_hash(data):
+    h = hashlib.new('sha1')
+    h.update(json.dumps(data))
+    return h.hexdigest()


@@ -23,43 +23,43 @@
     ],
     "releases": [
         {
-            "id": "austin",
+            "id": "Austin",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Austin"
         },
         {
-            "id": "bexar",
+            "id": "Bexar",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Bexar"
         },
         {
-            "id": "cactus",
+            "id": "Cactus",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Cactus"
         },
         {
-            "id": "diablo",
+            "id": "Diablo",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Diablo"
         },
        {
-            "id": "essex",
+            "id": "Essex",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Essex"
         },
         {
-            "id": "folsom",
+            "id": "Folsom",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Folsom"
         },
         {
-            "id": "grizzly",
+            "id": "Grizzly",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Grizzly"
         },
         {
-            "id": "havana",
+            "id": "Havana",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Havana"
         },
         {
-            "id": "icehouse",
+            "id": "Icehouse",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Icehouse"
         },
         {
-            "id": "juno",
+            "id": "Juno",
             "wiki": "https://wiki.openstack.org/wiki/Releases"
         }
     ],
@@ -857,8 +857,8 @@
             "wiki": "http://openstack.redhat.com/OpenDaylight_integration",
             "ci": {
                 "id": "odl-jenkins",
-                "success_pattern": "success",
-                "failure_pattern": "fail"
+                "success_pattern": "SUCCESS",
+                "failure_pattern": "FAILURE"
             }
         },
         {
@@ -1014,8 +1014,8 @@
             "wiki": "http://wiki.cloudbase.it/hyperv-tempest-exclusions",
             "ci": {
                 "id": "hyper-v-ci",
-                "success_pattern": "Successful",
-                "failure_pattern": "Failed"
+                "success_pattern": "Build succeeded",
+                "failure_pattern": "Build failed"
             },
             "releases": ["Folsom", "Grizzly", "Havana", "Icehouse"]
         },


@@ -26,8 +26,7 @@
             "type": "object",
             "properties": {
                 "id": {
-                    "type": "string",
-                    "pattern": "^[a-z]+$"
+                    "$ref": "#/definitions/release_id"
                 },
                 "wiki": {
                     "type": "string"
@@ -65,8 +64,7 @@
                 "releases": {
                     "type": "array",
                     "items": {
-                        "type": "string",
-                        "pattern": "^[\\w]+$"
+                        "$ref": "#/definitions/release_id"
                     }
                 },
                 "ci": {
@@ -118,6 +116,10 @@
             },
             "required": ["id"],
             "additionalProperties": false
+        },
+        "release_id": {
+            "type": "string",
+            "pattern": "^[A-Z][a-z]+$"
         }
     }
 }


@@ -37,13 +37,14 @@ class TestCIConfigValidity(testtools.TestCase):
     def test_ci_config_matches_sample_review(self):
         def verify_single_driver(driver_name):
-            ci_ids_map = main.build_ci_map(self.default_data['drivers'])
-            records = list(main.process_reviews(
-                [self.review], ci_ids_map, 'openstack/neutron'))
-            records = [r for r in records
-                       if r.keys()[0][2] == driver_name.lower()]
-            self.assertEqual(1, len(records), '1 record is expected for '
-                                              'driver %s' % driver_name)
+            for driver in self.default_data['drivers']:
+                if driver['name'] == driver_name:
+                    result = main.find_ci_result([self.review], driver['ci'])
+                    self.assertIsNotNone(result, 'CI result should be found '
+                                                 'for driver %s' % driver_name)
+                    return
+            self.fail('No result parsed for driver %s' % driver_name)

         verify_single_driver('Cisco Nexus Plugin')
         verify_single_driver('Neutron ML2 Driver For Cisco Nexus Devices')


@@ -15,15 +15,15 @@
     ],
     "releases": [
         {
-            "id": "havana",
+            "id": "Havana",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Havana"
         },
         {
-            "id": "icehouse",
+            "id": "Icehouse",
             "wiki": "https://wiki.openstack.org/wiki/ReleaseNotes/Icehouse"
         },
         {
-            "id": "juno",
+            "id": "Juno",
             "wiki": "https://wiki.openstack.org/wiki/Releases"
         }
     ],


@@ -18,89 +18,78 @@ import memcache
 import mock

 from driverlog.processor import main
+from driverlog.processor import utils

 import testtools


+def _read_sample_review():
+    with open('tests/unit/test_data/sample_review.json') as fd:
+        return json.load(fd)
+
+
+def _read_sample_default_data():
+    with open('tests/unit/test_data/sample_default_data.json') as fd:
+        return json.load(fd)
+
+
 class TestMain(testtools.TestCase):
     def setUp(self):
         super(TestMain, self).setUp()

-        with open('tests/unit/test_data/sample_review.json') as fd:
-            self.review = json.load(fd)
-
-        with open('tests/unit/test_data/sample_default_data.json') as fd:
-            self.default_data = json.load(fd)
-
-    def test_build_ci_map(self):
-        ci_map = main.build_ci_map(self.default_data['drivers'])
-        self.assertTrue('arista-test' in ci_map)
-        self.assertEqual([{
-            'vendor': 'Arista',
-            'driver_name': 'Arista Neutron ML2 Driver'
-        }], ci_map['arista-test'])
-
     def test_process_reviews_ci_vote_and_comment(self):
         # check that vote and matching comment are found
-        ci_ids_map = main.build_ci_map(self.default_data['drivers'])
-        records = list(main.process_reviews(
-            [self.review], ci_ids_map, 'openstack/neutron'))
-        records = [r for r in records if r.keys()[0][1] == 'arista']
+        result = main.find_ci_result([_read_sample_review()],
+                                     {'id': 'arista-test'})

-        self.assertEqual(1, len(records), 'One record is expected')
+        self.assertIsNotNone(result, 'CI result should be found')

         expected_record = {
-            ('openstack/neutron', 'arista', 'arista neutron ml2 driver'): {
-                'os_versions_map': {
-                    'master': {
-                        'comment': 'Verified+1\n\n'
-                                   'Arista third party testing PASSED '
-                                   '[ https://arista.box.com/s/x8z0 ]',
-                        'timestamp': 1399478047,
-                        'review_url': 'https://review.openstack.org/92468',
-                    }
-                }
-            }
+            'ci_result': True,
+            'comment': 'Verified+1\n\nArista third party testing PASSED '
+                       '[ https://arista.box.com/s/x8z0 ]',
+            'timestamp': 1399478047,
+            'review_url': 'https://review.openstack.org/92468',
         }
-        self.assertEqual(expected_record, records[0])
+        self.assertEqual(expected_record, result)

     def test_process_reviews_ci_only_comments(self):
         # check that comment is found and parsed correctly
-        ci_ids_map = main.build_ci_map(self.default_data['drivers'])
-        records = list(main.process_reviews(
-            [self.review], ci_ids_map, 'openstack/neutron'))
-        records = [r for r in records if r.keys()[0][1] == 'cisco']
+        result = main.find_ci_result([_read_sample_review()], {
+            'id': 'cisco_neutron_ci',
+            'success_pattern': 'neutron_zuul \\S+ : SUCCESS',
+            'failure_pattern': 'neutron_zuul \\S+ : FAILURE',
+        })

-        self.assertEqual(2, len(records), '2 records are expected '
-                                          '(since there are 2 cisco entries)')
+        self.assertIsNotNone(result, 'CI result should be found')

         expected_record = {
-            (
-                'openstack/neutron', 'cisco',
-                'neutron ml2 driver for cisco nexus devices'
-            ): {
-                'os_versions_map': {
-                    'master': {
-                        'comment': 'Build succeeded.\n\n'
-                                   '- neutron_zuul http://128.107.233.28:8080/'
-                                   'job/neutron_zuul/263 : SUCCESS in 18m 52s',
-                        'timestamp': 1399481091,
-                        'review_url': 'https://review.openstack.org/92468',
-                    }
-                }
-            }
+            'ci_result': True,
+            'comment': 'Build succeeded.\n\n- neutron_zuul '
+                       'http://128.107.233.28:8080/job/neutron_zuul/263 : '
+                       'SUCCESS in 18m 52s',
+            'timestamp': 1399481091,
+            'review_url': 'https://review.openstack.org/92468',
         }
-        self.assertEqual(expected_record, records[0])
+        self.assertEqual(expected_record, result)

-    def test_tranform_default_data(self):
+    def test_transform_default_data(self):
         driver = {
-            "project_id": "openstack/neutron",
-            "releases": ["Grizzly", "Havana", "Icehouse"], }
+            'project_id': 'openstack/neutron',
+            'vendor': 'Cisco',
+            'name': 'Cisco Nexus Plugin',
+            'releases': ['Grizzly', 'Havana', 'Icehouse'], }
         dd = {'drivers': [driver]}

         main.transform_default_data(dd)

-        self.assertTrue('Grizzly' in driver['os_versions_map'],
+        self.assertIn(('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin'),
+                      dd['drivers'].keys())
+        driver = dd['drivers'][
+            ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')]
+        self.assertTrue('grizzly' in driver['releases'],
                         'Grizzly should be copied from releases into '
                         'os_version_map')
@@ -119,12 +108,15 @@ class TestMain(testtools.TestCase):
         return memcached_inst

     def _patch_rcs(self, rcs_getter):
-        def _get_rcs(project_id, review_uri):
+        def _patch_log(**kwargs):
+            if (kwargs['project'] == 'openstack/neutron' and
+                    kwargs['branch'] == 'master'):
+                return [_read_sample_review()]
+            return []
+
+        def _get_rcs(review_uri):
             rcs_inst = mock.Mock()
-            if project_id == 'openstack/neutron':
-                rcs_inst.log.return_value = [self.review]
-            else:
-                rcs_inst.log.return_value = []
+            rcs_inst.log.side_effect = _patch_log
             return rcs_inst

         rcs_getter.side_effect = _get_rcs
@@ -136,16 +128,16 @@ class TestMain(testtools.TestCase):
         self._patch_rcs(rcs_getter)

         # run!
-        main.calculate_update(memcached_inst, self.default_data, False)
+        main.process(memcached_inst, _read_sample_default_data(), False)

         # verify
-        update = memcached_inst.get('driverlog:update')
-        driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
-        self.assertIn(driver_key, update)
-        self.assertIn('master', update[driver_key]['os_versions_map'])
+        update = memcached_inst.get('driverlog:default_data')['drivers']
+        driver_key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
+        self.assertIn(driver_key, update.keys())
+        self.assertIn('havana', update[driver_key]['releases'].keys())
         self.assertEqual('https://review.openstack.org/92468',
-                         (update[driver_key]['os_versions_map']['master']
+                         (update[driver_key]['releases']['juno']
                           ['review_url']))

     @mock.patch('oslo.config.cfg.CONF')
@@ -154,72 +146,45 @@ class TestMain(testtools.TestCase):
         # checks that existing data will be overwritten with update
         # preserving data for other versions

+        # put default data with some updates into memory storage
+        dd = _read_sample_default_data()
+        main.transform_default_data(dd)
+
+        key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
+        dd['drivers'][key]['releases'].update({
+            'juno': {
+                'comment': 'Build succeeded.',
+                'timestamp': 1234567890,
+                'review_url': 'https://review.openstack.org/11111'
+            },
+            'havana': {
+                'comment': 'Build succeeded.',
+                'timestamp': 1234567890,
+                'review_url': 'https://review.openstack.org/22222'
+            }})
+
+        # put hash from default data to emulate that file is not changed
+        default_data_from_file = _read_sample_default_data()
         memcached_inst = self._make_test_memcached({
-            'driverlog:update': {
-                ('openstack/neutron', 'cisco', 'cisco nexus plugin'): {
-                    'os_versions_map': {
-                        'master': {
-                            'comment': 'Build succeeded.',
-                            'timestamp': 1234567890,
-                            'review_url': 'https://review.openstack.org/11111'
-                        },
-                        'stable/havana': {
-                            'comment': 'Build succeeded.',
-                            'timestamp': 1234567890,
-                            'review_url': 'https://review.openstack.org/22222'
-                        }
-                    }}}})
+            'driverlog:default_data': dd,
+            'driverlog:default_data_hash': utils.calc_hash(
+                default_data_from_file)})

         self._patch_rcs(rcs_getter)

         # run!
-        main.calculate_update(memcached_inst, self.default_data, False)
+        main.process(memcached_inst, default_data_from_file, False)

         # verify
-        update = memcached_inst.get('driverlog:update')
-        driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
-        self.assertIn(driver_key, update)
-        self.assertIn('master', update[driver_key]['os_versions_map'])
+        update = memcached_inst.get('driverlog:default_data')['drivers']
+        driver_key = ('openstack/neutron', 'Cisco', 'Cisco Nexus Plugin')
+        self.assertIn(driver_key, update.keys())
+        self.assertIn('juno', update[driver_key]['releases'])
         self.assertEqual('https://review.openstack.org/92468',
-                         (update[driver_key]['os_versions_map']['master']
+                         (update[driver_key]['releases']['juno']
                           ['review_url']))
-        self.assertIn('stable/havana', update[driver_key]['os_versions_map'])
+        self.assertIn('havana', update[driver_key]['releases'])
         self.assertEqual('https://review.openstack.org/22222',
-                         (update[driver_key]['os_versions_map']
-                          ['stable/havana']['review_url']))
-
-    @mock.patch('oslo.config.cfg.CONF')
-    @mock.patch('driverlog.processor.rcs.get_rcs')
-    def test_calculate_update_insert_version_data(self, rcs_getter, conf):
-        # checks that existing data will be overwritten with update
-        memcached_inst = self._make_test_memcached({
-            'driverlog:update': {
-                ('openstack/neutron', 'cisco', 'cisco nexus plugin'): {
-                    'os_versions_map': {
-                        'stable/havana': {
-                            'comment': 'Build succeeded.',
-                            'timestamp': 1234567890,
-                            'review_url': 'https://review.openstack.org/22222'
-                        }
-                    }}}})
-
-        self._patch_rcs(rcs_getter)
-
-        # run!
-        main.calculate_update(memcached_inst, self.default_data, False)
-
-        # verify
-        update = memcached_inst.get('driverlog:update')
-        driver_key = ('openstack/neutron', 'cisco', 'cisco nexus plugin')
-        self.assertIn(driver_key, update)
-        self.assertIn('master', update[driver_key]['os_versions_map'])
-        self.assertEqual('https://review.openstack.org/92468',
-                         (update[driver_key]['os_versions_map']['master']
-                          ['review_url']))
-        self.assertIn('stable/havana', update[driver_key]['os_versions_map'])
-        self.assertEqual('https://review.openstack.org/22222',
-                         (update[driver_key]['os_versions_map']
-                          ['stable/havana']['review_url']))