Optimize performance of compact records
* Make compact records pure namedtuples (remove all dict behaviour)
* Add a profiler decorator for performance analysis of dashboard requests
* Introduce a new parameter 'collect_profiler_stats' naming the file where profiler stats are stored
* Fix py33 test failures

Change-Id: Ic5c900047a493541510974e9bc9c161f5606739e
commit a7eb7d024b
parent 8ab2a069b4
committed by Ilya Shakhat
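For orientation, the core of the change is that CompactRecord drops its UserDict.DictMixin base and becomes a plain namedtuple, so every dict-style lookup in the dashboard code turns into attribute access (or getattr() when the field name is held in a variable). A minimal sketch with an invented subset of the real fields:

    import collections

    # Stand-in for CompactRecord with only a few of the real fields.
    CompactRecord = collections.namedtuple('CompactRecord',
                                           ['record_id', 'date', 'loc', 'user_id'])

    record = CompactRecord(record_id='r1', date=1390000000, loc=42, user_id='john_doe')

    total_loc = record.loc                 # was: record['loc']
    param_id = 'user_id'
    value = getattr(record, param_id)      # was: record[param_id]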
@@ -54,4 +54,7 @@
 # default_project_type = openstack
 
 # The interval specifies how frequently dashboard should check for updates in seconds
-# dashboard-update-interval = 3600
+# dashboard_update_interval = 3600
+
+# Name of file to store python profiler data. This option works for dashboard only
+# collect_profiler_stats =
@@ -13,10 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import cProfile
 import functools
 import json
+import operator
 
 import flask
+from oslo.config import cfg
 import six
 from werkzeug import exceptions
 
@@ -210,23 +213,23 @@ def record_filter(ignore=None):
 
 
 def incremental_filter(result, record, param_id, context):
-    result[record[param_id]]['metric'] += 1
+    result[getattr(record, param_id)]['metric'] += 1
 
 
 def loc_filter(result, record, param_id, context):
-    result[record[param_id]]['metric'] += record['loc']
+    result[getattr(record, param_id)]['metric'] += record.loc
 
 
 def mark_filter(result, record, param_id, context):
-    result_by_param = result[record[param_id]]
-    if record['type'] == 'Workflow' and record['value'] == 1:
+    result_by_param = result[getattr(record, param_id)]
+    if record.type == 'Workflow' and record.value == 1:
         value = 'A'
     else:
-        value = record['value']
+        value = record.value
     result_by_param['metric'] += 1
     result_by_param[value] = result_by_param.get(value, 0) + 1
 
-    if record.get('disagreement'):
+    if record.disagreement:
         result_by_param['disagreements'] = (
             result_by_param.get('disagreements', 0) + 1)
 
@@ -265,22 +268,21 @@ def mark_finalize(record):
 
 
 def person_day_filter(result, record, param_id, context):
-    if record['record_type'] == 'commit' or record['record_type'] == 'member':
+    record_type = record.record_type
+    if record_type == 'commit' or record_type == 'member':
         # 1. commit is attributed with the date of the merge which is not an
         # effort of the author (author's effort is represented in patches)
         # 2. registration on openstack.org is not an effort
         return
 
-    day = utils.timestamp_to_day(record['date'])
+    day = utils.timestamp_to_day(record.date)
     # fact that record-days are grouped by days in some order is used
     if context.get('last_processed_day') != day:
         context['last_processed_day'] = day
         context['counted_user_ids'] = set()
 
-    user = vault.get_user_from_runtime_storage(record['user_id'])
-    user_id = user['seq']
-
-    value = record[param_id]
+    user_id = record.user_id
+    value = getattr(record, param_id)
     if user_id not in context['counted_user_ids']:
         context['counted_user_ids'].add(user_id)
         result[value]['metric'] += 1
@@ -288,9 +290,14 @@ def person_day_filter(result, record, param_id, context):
 
 def generate_records_for_person_day(record_ids):
     memory_storage_inst = vault.get_memory_storage()
-    for values in memory_storage_inst.day_index.values():
-        for record in memory_storage_inst.get_records(record_ids & values):
-            yield record
+    id_dates = []
+    for record in memory_storage_inst.get_records(record_ids):
+        id_dates.append((record.date, record.record_id))
+
+    id_dates.sort(key=operator.itemgetter(0))
+    for record in memory_storage_inst.get_records(
+            record_id for date, record_id in id_dates):
+        yield record
 
 
 def aggregate_filter():
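The rewritten generator above fetches the records once and orders them explicitly by date instead of walking day_index buckets. A standalone illustration of the same (date, record_id) sorting step, with invented sample data:

    import operator

    id_dates = [(1391212800, 'r2'), (1390003200, 'r1'), (1390521600, 'r3')]
    id_dates.sort(key=operator.itemgetter(0))            # order by timestamp only
    ordered_ids = [record_id for _, record_id in id_dates]
    assert ordered_ids == ['r1', 'r3', 'r2']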
@@ -438,9 +445,33 @@ def jsonify(root='data'):
     return decorator
 
 
+def profiler_decorator(func):
+    @functools.wraps(func)
+    def profiler_decorated_function(*args, **kwargs):
+        profiler = None
+        profile_filename = cfg.CONF.collect_profiler_stats
+
+        if profile_filename:
+            LOG.debug('Profiler is enabled')
+            profiler = cProfile.Profile()
+            profiler.enable()
+
+        result = func(*args, **kwargs)
+
+        if profile_filename:
+            profiler.disable()
+            profiler.dump_stats(profile_filename)
+            LOG.debug('Profiler stats is written to file %s', profile_filename)
+
+        return result
+
+    return profiler_decorated_function
+
+
 def response():
     def decorator(func):
         @functools.wraps(func)
+        @profiler_decorator
        def response_decorated_function(*args, **kwargs):
            callback = flask.app.request.args.get('callback', False)
            data = func(*args, **kwargs)
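When collect_profiler_stats is set, the decorator above dumps cProfile data for a profiled dashboard request into that file. The dump can be inspected afterwards with the standard pstats module; the path below is only an example of what the option might be set to:

    import pstats

    stats = pstats.Stats('/tmp/dashboard.prof')        # assumed value of collect_profiler_stats
    stats.sort_stats('cumulative').print_stats(20)     # top 20 entries by cumulative time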
@@ -113,12 +113,16 @@ def extend_user(user):
 
 def get_activity(records, start_record, page_size, query_message=None):
     if query_message:
+        # note that all records are now dicts!
+        key_func = operator.itemgetter('date')
         records = [vault.extend_record(r) for r in records]
         records = [r for r in records
                    if (r.get('message') and
                        r.get('message').find(query_message) > 0)]
-    records_sorted = sorted(records, key=operator.itemgetter('date'),
-                            reverse=True)
+    else:
+        key_func = operator.attrgetter('date')
 
+    records_sorted = sorted(records, key=key_func, reverse=True)
+
     result = []
     for record in records_sorted[start_record:]:
@@ -53,35 +53,35 @@ class CachedMemoryStorage(MemoryStorage):
         }
 
     def _save_record(self, record):
-        if record.get('company_name') == '*robots':
+        if record.company_name == '*robots':
             return
-        self.records[record['record_id']] = record
+        self.records[record.record_id] = record
         for key, index in six.iteritems(self.indexes):
             self._add_to_index(index, record, key)
-        for bp_id in (record.get('blueprint_id') or []):
+        for bp_id in (record.blueprint_id or []):
             if bp_id in self.blueprint_id_index:
-                self.blueprint_id_index[bp_id].add(record['record_id'])
+                self.blueprint_id_index[bp_id].add(record.record_id)
             else:
-                self.blueprint_id_index[bp_id] = set([record['record_id']])
+                self.blueprint_id_index[bp_id] = set([record.record_id])
 
-        record_day = utils.timestamp_to_day(record['date'])
+        record_day = utils.timestamp_to_day(record.date)
         if record_day in self.day_index:
-            self.day_index[record_day].add(record['record_id'])
+            self.day_index[record_day].add(record.record_id)
         else:
-            self.day_index[record_day] = set([record['record_id']])
+            self.day_index[record_day] = set([record.record_id])
 
-        mr = (record['module'], record['release'])
+        mr = (record.module, record.release)
         if mr in self.module_release_index:
-            self.module_release_index[mr].add(record['record_id'])
+            self.module_release_index[mr].add(record.record_id)
         else:
-            self.module_release_index[mr] = set([record['record_id']])
+            self.module_release_index[mr] = set([record.record_id])
 
     def update(self, records):
         have_updates = False
 
         for record in records:
             have_updates = True
-            record_id = record['record_id']
+            record_id = record.record_id
             if record_id in self.records:
                 # remove existing record from indexes
                 self._remove_record_from_index(self.records[record_id])
@@ -95,19 +95,19 @@ class CachedMemoryStorage(MemoryStorage):
 
     def _remove_record_from_index(self, record):
         for key, index in six.iteritems(self.indexes):
-            index[record[key]].remove(record['record_id'])
+            index[getattr(record, key)].remove(record.record_id)
 
-        record_day = utils.timestamp_to_day(record['date'])
-        self.day_index[record_day].remove(record['record_id'])
+        record_day = utils.timestamp_to_day(record.date)
+        self.day_index[record_day].remove(record.record_id)
         self.module_release_index[
-            (record['module'], record['release'])].remove(record['record_id'])
+            (record.module, record.release)].remove(record.record_id)
 
     def _add_to_index(self, record_index, record, key):
-        record_key = record[key]
+        record_key = getattr(record, key)
         if record_key in record_index:
-            record_index[record_key].add(record['record_id'])
+            record_index[record_key].add(record.record_id)
         else:
-            record_index[record_key] = set([record['record_id']])
+            record_index[record_key] = set([record.record_id])
 
     def _get_record_ids_from_index(self, items, index):
         record_ids = set()
@@ -37,7 +37,7 @@ METRIC_LABELS = {
     'bpc': 'Completed Blueprints',
     'filed-bugs': 'Filed Bugs',
     'resolved-bugs': 'Resolved Bugs',
-    # 'person-day': "Person-day effort"
+    'person-day': "Person-day effort",
     'ci': 'CI votes',
 }
 
@@ -103,9 +103,9 @@ def open_reviews(module):
     total_open = 0
 
     for review in memory_storage_inst.get_records(review_ids):
-        if review['status'] == 'NEW':
+        if review.status == 'NEW':
             total_open += 1
-            if review['value'] in [1, 2]:
+            if review.value in [1, 2]:
                 waiting_on_reviewer.append(vault.extend_record(review))
 
     return {
@@ -163,7 +163,7 @@ def _get_punch_card_data(records):
     for wday in six.moves.range(0, 7):
         punch_card_raw.append([0] * 24)
     for record in records:
-        tt = datetime.datetime.fromtimestamp(record['date']).timetuple()
+        tt = datetime.datetime.fromtimestamp(record.date).timetuple()
         punch_card_raw[tt.tm_wday][tt.tm_hour] += 1
 
     punch_card_data = []  # format for jqplot bubble renderer
@@ -15,10 +15,8 @@
 
 import collections
 import os
-import UserDict
 
 import flask
-import itertools
 from oslo.config import cfg
 import six
 
@@ -37,27 +35,8 @@ RECORD_FIELDS_FOR_AGGREGATE = ['record_id', 'primary_key', 'record_type',
                                'disagreement', 'value', 'status',
                                'blueprint_id']
 
-_CompactRecordTuple = collections.namedtuple('CompactRecord',
-                                             RECORD_FIELDS_FOR_AGGREGATE)
+CompactRecord = collections.namedtuple('CompactRecord',
+                                       RECORD_FIELDS_FOR_AGGREGATE)
 
 
-class CompactRecord(_CompactRecordTuple, UserDict.DictMixin):
-    __slots__ = ()
-
-    def __getitem__(self, key):
-        if isinstance(key, str):
-            return getattr(self, key)
-        else:
-            return super(CompactRecord, self).__getitem__(key)
-
-    def keys(self):
-        return RECORD_FIELDS_FOR_AGGREGATE
-
-    def has_key(self, key):
-        return key in RECORD_FIELDS_FOR_AGGREGATE
-
-    def iteritems(self):
-        return itertools.izip(RECORD_FIELDS_FOR_AGGREGATE, self)
-
-
 def compact_records(records):
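With the DictMixin subclass gone, item access on a record is no longer available at all; callers use attributes, and the stock namedtuple API covers the rare case where a real mapping is wanted. A small sketch with a stand-in type (not part of this change):

    import collections

    Rec = collections.namedtuple('Rec', ['record_id', 'loc'])   # stand-in for CompactRecord
    r = Rec(record_id='r1', loc=10)

    r.loc            # supported: attribute access
    r._asdict()      # namedtuple API: mapping of field name to value
    try:
        r['loc']     # the removed DictMixin behaviour
    except TypeError:
        pass         # pure namedtuples reject string indices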
@@ -70,7 +49,7 @@ def compact_records(records):
 def extend_record(record):
     runtime_storage_inst = get_vault()['runtime_storage']
     return runtime_storage_inst.get_by_key(
-        runtime_storage_inst._get_record_name(record['record_id']))
+        runtime_storage_inst._get_record_name(record.record_id))
 
 
 def get_vault():
@@ -72,19 +72,21 @@ def _get_aggregated_stats(records, metric_filter, keys, param_id,
                           param_title=None, finalize_handler=None):
     param_title = param_title or param_id
     result = dict((c, {'metric': 0, 'id': c}) for c in keys)
-    context = {}
+    context = {'vault': vault.get_vault()}
     if metric_filter:
         for record in records:
             metric_filter(result, record, param_id, context)
-            result[record[param_id]]['name'] = record[param_title]
+            result[getattr(record, param_id)]['name'] = (
+                getattr(record, param_title))
     else:
         for record in records:
-            record_param_id = record[param_id]
+            record_param_id = getattr(record, param_id)
             result[record_param_id]['metric'] += 1
-            result[record_param_id]['name'] = record[param_title]
+            result[record_param_id]['name'] = getattr(record, param_title)
 
     response = [r for r in result.values() if r['metric']]
-    response = [item for item in map(finalize_handler, response) if item]
+    if finalize_handler:
+        response = [item for item in map(finalize_handler, response) if item]
     response.sort(key=lambda x: x['metric'], reverse=True)
     utils.add_index(response, item_filter=lambda x: x['id'] != '*independent')
     return response
@@ -102,8 +104,8 @@ def get_new_companies(records, **kwargs):
 
     result = {}
     for record in records:
-        company_name = record['company_name']
-        date = record['date']
+        company_name = record.company_name
+        date = record.date
 
         if company_name not in result or result[company_name] > date:
             result[company_name] = date
@@ -204,21 +206,21 @@ def get_engineers_extended(records, **kwargs):
         return record
 
     def record_processing(result, record, param_id):
-        result_row = result[record[param_id]]
-        record_type = record['record_type']
+        result_row = result[getattr(record, param_id)]
+        record_type = record.record_type
         result_row[record_type] = result_row.get(record_type, 0) + 1
         if record_type == 'mark':
             decorators.mark_filter(result, record, param_id, {})
 
     result = {}
     for record in records:
-        user_id = record['user_id']
+        user_id = record.user_id
         if user_id not in result:
             result[user_id] = {'id': user_id, 'mark': 0, 'review': 0,
                                'commit': 0, 'email': 0, 'patch': 0,
                                'metric': 0}
         record_processing(result, record, 'user_id')
-        result[user_id]['name'] = record['author_name']
+        result[user_id]['name'] = record.author_name
 
     response = result.values()
     response = [item for item in map(postprocessing, response) if item]
@@ -237,9 +239,9 @@ def get_engineers_extended(records, **kwargs):
 def get_distinct_engineers(records, **kwargs):
     result = {}
     for record in records:
-        result[record['user_id']] = {
-            'author_name': record['author_name'],
-            'author_email': record['author_email'],
+        result[record.user_id] = {
+            'author_name': record.author_name,
+            'author_email': record.author_email,
         }
     return result
 
@@ -526,7 +528,7 @@ def timeline(records, **kwargs):
     week_stat_commits_hl = dict((c, 0) for c in weeks)
 
     if ('commits' in metric) or ('loc' in metric):
-        handler = lambda record: record['loc']
+        handler = lambda record: record.loc
     else:
         handler = lambda record: 0
 
@@ -536,13 +538,13 @@ def timeline(records, **kwargs):
         release_stat = collections.defaultdict(set)
         all_stat = collections.defaultdict(set)
         for record in records:
-            if ((record['record_type'] in ['commit', 'member']) or
-                    (record['week'] not in weeks)):
+            if ((record.record_type in ['commit', 'member']) or
+                    (record.week not in weeks)):
                 continue
 
-            day = utils.timestamp_to_day(record['date'])
-            user = vault.get_user_from_runtime_storage(record['user_id'])
-            if record['release'] == release_name:
+            day = utils.timestamp_to_day(record.date)
+            user = vault.get_user_from_runtime_storage(record.user_id)
+            if record.release == release_name:
                 release_stat[day] |= set([user['seq']])
             all_stat[day] |= set([user['seq']])
         for day, users in six.iteritems(release_stat):
@@ -553,15 +555,15 @@ def timeline(records, **kwargs):
                 week_stat_commits[week] += len(users)
     else:
         for record in records:
-            week = record['week']
+            week = record.week
             if week in weeks:
                 week_stat_loc[week] += handler(record)
                 week_stat_commits[week] += 1
                 if 'members' in metric:
-                    if record['date'] >= start_date:
+                    if record.date >= start_date:
                         week_stat_commits_hl[week] += 1
                 else:
-                    if record['release'] == release_name:
+                    if record.release == release_name:
                         week_stat_commits_hl[week] += 1
 
     if 'all' == release_name and 'members' not in metric:
@@ -62,4 +62,7 @@ OPTS = [
     cfg.IntOpt('dashboard-update-interval', default=3600,
                help='The interval specifies how frequently dashboard should '
                     'check for updates in seconds'),
+    cfg.StrOpt('collect-profiler-stats',
+               help='Name of file to store python profiler data. This option '
+                    'works for dashboard only'),
 ]
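Note how the option names line up across the hunks: the option is registered as 'collect-profiler-stats', and oslo.config exposes dashes as underscores on CONF, which is why the decorator reads cfg.CONF.collect_profiler_stats. A sketch of that lookup in isolation:

    from oslo.config import cfg

    OPTS = [
        cfg.StrOpt('collect-profiler-stats',
                   help='Name of file to store python profiler data. This option '
                        'works for dashboard only'),
    ]
    cfg.CONF.register_opts(OPTS)

    profile_filename = cfg.CONF.collect_profiler_stats   # None unless set in the config file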
@@ -15,6 +15,7 @@
 
 import contextlib
 import itertools
+import json
 import uuid
 
 import mock
@@ -172,3 +173,7 @@ def algebraic_product(**kwargs):
         for position, key in six.iteritems(position_to_key):
             result[key] = chain[position]
         yield result
+
+
+def load_json(api_response):
+    return json.loads(api_response.data.decode('utf8'))
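The new load_json() helper appears to be what addresses the py33 failures mentioned in the commit message: under Python 3 the test client's response.data is bytes, and json.loads on Python 3.3 only accepts text, so the helper decodes first. A quick illustration of the difference it papers over:

    import json

    payload = b'{"data": [1, 2, 3]}'       # response.data is bytes on Python 3
    json.loads(payload.decode('utf8'))     # works on both Python 2 and 3
    # json.loads(payload) raises TypeError on Python 3.3, which is presumably
    # what the shared helper guards against.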
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from tests.api import test_api
 
 
@@ -64,14 +62,14 @@ class TestAPICompanies(test_api.TestAPI):
 
             response = self.app.get('/api/1.0/companies?metric=commits&'
                                     'module=glance')
-            companies = json.loads(response.data)['data']
+            companies = test_api.load_json(response)['data']
             self.assertEqual([{'id': 'ibm', 'text': 'IBM'},
                               {'id': 'nec', 'text': 'NEC'},
                               {'id': 'ntt', 'text': 'NTT'}], companies)
 
             response = self.app.get('/api/1.0/companies?metric=marks&'
                                     'module=glance')
-            companies = json.loads(response.data)['data']
+            companies = test_api.load_json(response)['data']
             self.assertEqual([{'id': 'ibm', 'text': 'IBM'},
                               {'id': 'nec', 'text': 'NEC'}], companies)
 
@@ -90,7 +88,7 @@ class TestAPICompanies(test_api.TestAPI):
                 company_name=['NEC', 'IBM', 'NTT'])):
 
             response = self.app.get('/api/1.0/companies/nec?module=glance')
-            company = json.loads(response.data)['company']
+            company = test_api.load_json(response)['company']
             self.assertEqual({'id': 'nec', 'text': 'NEC'}, company)
 
             response = self.app.get('/api/1.0/companies/google?module=glance')
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from tests.api import test_api
 
 
@@ -50,7 +48,7 @@ class TestAPIModules(test_api.TestAPI):
 
             response = self.app.get('/api/1.0/modules?'
                                     'project_type=all&metric=commits')
-            modules = json.loads(response.data)['data']
+            modules = test_api.load_json(response)['data']
             self.assertEqual(
                 [{'id': 'glance', 'text': 'glance', 'tag': 'module'},
                  {'id': 'nova', 'text': 'nova', 'tag': 'module'},
@@ -63,7 +61,7 @@ class TestAPIModules(test_api.TestAPI):
 
             response = self.app.get('/api/1.0/modules?module=nova-group&'
                                     'project_type=integrated&metric=commits')
-            modules = json.loads(response.data)['data']
+            modules = test_api.load_json(response)['data']
             self.assertEqual(
                 [{'id': 'glance', 'text': 'glance', 'tag': 'module'},
                  {'id': 'nova', 'text': 'nova', 'tag': 'module'},
@@ -89,12 +87,12 @@ class TestAPIModules(test_api.TestAPI):
                 test_api.make_records(record_type=['commit'])):
 
             response = self.app.get('/api/1.0/modules/nova')
-            module = json.loads(response.data)['module']
+            module = test_api.load_json(response)['module']
             self.assertEqual(
                 {'id': 'nova', 'text': 'nova', 'tag': 'module'}, module)
 
             response = self.app.get('/api/1.0/modules/nova-group')
-            module = json.loads(response.data)['module']
+            module = test_api.load_json(response)['module']
             self.assertEqual(
                 {'tag': 'group', 'id': 'nova-group', 'text': 'nova-group'},
                 module)
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from tests.api import test_api
 
 
@@ -28,7 +26,7 @@ class TestAPIReleases(test_api.TestAPI):
                  {'release_name': 'icehouse', 'end_date': 1397692800}]},
                 test_api.make_records(record_type=['commit'])):
             response = self.app.get('/api/1.0/releases')
-            releases = json.loads(response.data)['data']
+            releases = test_api.load_json(response)['data']
             self.assertEqual(3, len(releases))
             self.assertIn({'id': 'all', 'text': 'All'}, releases)
             self.assertIn({'id': 'icehouse', 'text': 'Icehouse'}, releases)
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from tests.api import test_api
 
 
@@ -50,7 +48,7 @@ class TestAPIStats(test_api.TestAPI):
                 module=['glance'])):
             response = self.app.get('/api/1.0/stats/modules?metric=loc&'
                                     'project_type=all')
-            stats = json.loads(response.data)['stats']
+            stats = test_api.load_json(response)['stats']
             self.assertEqual(2, len(stats))
             self.assertEqual(600, stats[0]['metric'])
             self.assertEqual('glance', stats[0]['id'])
@@ -106,7 +104,7 @@ class TestAPIStats(test_api.TestAPI):
                 user_id=['john_doe', 'bill'])):
             response = self.app.get('/api/1.0/stats/engineers?metric=loc&'
                                     'project_type=all')
-            stats = json.loads(response.data)['stats']
+            stats = test_api.load_json(response)['stats']
             self.assertEqual(1, len(stats))
             self.assertEqual(660, stats[0]['metric'])
 
@@ -163,7 +161,7 @@ class TestAPIStats(test_api.TestAPI):
                 user_id=['smith'])):
             response = self.app.get('/api/1.0/stats/engineers_extended?'
                                     'project_type=all')
-            stats = json.loads(response.data)['stats']
+            stats = test_api.load_json(response)['stats']
             self.assertEqual(2, len(stats))
             self.assertEqual(2, stats[0]['mark'])
             self.assertEqual('john_doe', stats[0]['id'])
@@ -13,8 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import json
-
 from tests.api import test_api
 
 
@@ -41,7 +39,7 @@ class TestAPIUsers(test_api.TestAPI):
                 user_id=['john_doe', 'bill_smith'])):
             response = self.app.get('/api/1.0/users?'
                                     'module=nova&metric=commits')
-            users = json.loads(response.data)['data']
+            users = test_api.load_json(response)['data']
             self.assertEqual(2, len(users))
             self.assertIn({'id': 'john_doe', 'text': 'John Doe'}, users)
             self.assertIn({'id': 'bill_smith', 'text': 'Bill Smith'}, users)
@@ -55,7 +53,7 @@ class TestAPIUsers(test_api.TestAPI):
                 test_api.make_records(record_type=['commit'], module=['nova'],
                                       user_name=['John Doe', 'Bill Smith'])):
             response = self.app.get('/api/1.0/users/john_doe')
-            user = json.loads(response.data)['user']
+            user = test_api.load_json(response)['user']
             self.assertEqual('john_doe', user['user_id'])
 
     def test_user_not_found(self):
tox.ini
@@ -27,7 +27,12 @@ commands = python -m testtools.run \
     tests.unit.test_mls \
     tests.unit.test_record_processor \
     tests.unit.test_utils \
-    tests.unit.test_vcs
+    tests.unit.test_vcs \
+    tests.api.test_companies \
+    tests.api.test_modules \
+    tests.api.test_releases \
+    tests.api.test_stats \
+    tests.api.test_users
 
 [testenv:pep8]
 commands = flake8