Fix py33 compatibility errors

Part of blueprint py33

Change-Id: Ib24bb7e5147a6d241d0392889832ed34e1c856eb
Ilya Shakhat authored 2014-06-27 22:25:19 +04:00, committed by Ilya Shakhat
parent 5ad1cbe79c
commit 8ab2a069b4
12 changed files with 118 additions and 49 deletions


@@ -20,6 +20,7 @@ import operator
 import time

 import flask
+import six

 from stackalytics.dashboard import decorators
 from stackalytics.dashboard import helpers
@@ -159,15 +160,15 @@ def members():

 def _get_punch_card_data(records):
     punch_card_raw = []  # matrix days x hours
-    for wday in xrange(0, 7):
+    for wday in six.moves.range(0, 7):
         punch_card_raw.append([0] * 24)
     for record in records:
         tt = datetime.datetime.fromtimestamp(record['date']).timetuple()
         punch_card_raw[tt.tm_wday][tt.tm_hour] += 1

     punch_card_data = []  # format for jqplot bubble renderer
-    for wday in xrange(0, 7):
-        for hour in xrange(0, 24):
+    for wday in six.moves.range(0, 7):
+        for hour in six.moves.range(0, 24):
             v = punch_card_raw[wday][hour]
             if v:
                 punch_card_data.append([hour, wday, v, v])


@@ -14,9 +14,7 @@
 #    limitations under the License.

 from email import utils as email_utils
-import gzip
 import re
-import StringIO

 import six
 from six.moves import http_client
@@ -85,8 +83,8 @@ def _retrieve_mails(uri):
     if not content:
         LOG.error('Error reading mail archive from uri: %s', uri)
         return
-    gzip_fd = gzip.GzipFile(fileobj=StringIO.StringIO(content))
-    content = gzip_fd.read()
+    content = utils.gzip_decompress(content)

     LOG.debug('Mail archive is loaded, start processing')

     content += TRAILING_RECORD


@@ -33,7 +33,7 @@ CNT_EMPTY_MEMBERS = 50

 def _convert_str_fields_to_unicode(result):
-    for field, value in result.iteritems():
+    for field, value in six.iteritems(result):
         if type(value) is str:
             try:
                 value = six.text_type(value, 'utf8')


@@ -33,9 +33,9 @@ def _normalize_user(user):
        elif y["end_date"] == 0:
            return -1
        else:
-            return cmp(x["end_date"], y["end_date"])
+            return x["end_date"] - y["end_date"]

-    user['companies'].sort(cmp=end_date_comparator)
+    user['companies'].sort(key=utils.cmp_to_key(end_date_comparator))
     user['user_id'] = user['launchpad_id']


@@ -108,7 +108,7 @@ class Gerrit(Rcs):
         return False

     def _poll_reviews(self, project_organization, module, branch,
-                      start_id=None, last_id=None, is_open=False,
+                      start_id=0, last_id=0, is_open=False,
                       grab_comments=False):
         sort_key = start_id


@@ -127,23 +127,29 @@ class MemcachedStorage(RuntimeStorage):
         return self.memcached.incr('user:count')

     def get_all_users(self):
-        for n in xrange(0, self.get_by_key('user:count') + 1):
+        for n in six.moves.range(0, self.get_by_key('user:count') + 1):
             user = self.get_by_key('user:%s' % n)
             if user:
                 yield user

     def get_by_key(self, key):
-        return self.memcached.get(key.encode('utf8'))
+        if six.PY2:
+            key = key.encode('utf8')
+        return self.memcached.get(key)

     def set_by_key(self, key, value):
-        if not self.memcached.set(key.encode('utf8'), value):
+        if six.PY2:
+            key = key.encode('utf8')
+        if not self.memcached.set(key, value):
             LOG.critical('Failed to store data in memcached: '
                          'key %(key)s, value %(value)s',
                          {'key': key, 'value': value})
             raise Exception('Memcached set failed')

     def delete_by_key(self, key):
-        if not self.memcached.delete(key.encode('utf8')):
+        if six.PY2:
+            key = key.encode('utf8')
+        if not self.memcached.delete(key):
             LOG.critical('Failed to delete data from memcached: key %s', key)
             raise Exception('Memcached delete failed')
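
The same PY2-only guard now appears in all three key accessors. A standalone sketch of the pattern (the helper name is hypothetical, not part of this change):

    import six

    def _prepare_memcached_key(key):
        # The memcached client used on Python 2 expects byte-string keys,
        # so text keys are encoded there; on Python 3 the text key is
        # passed through unchanged.
        if six.PY2:
            key = key.encode('utf8')
        return key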


@@ -15,14 +15,13 @@

 import cgi
 import datetime
+import gzip
 import json
 import re
 import time

 import iso8601
 import six
-from six.moves.urllib import parse
-from six.moves.urllib import request

 from stackalytics.openstack.common import log as logging
@@ -89,8 +88,8 @@ def check_email_validity(email):

 def read_uri(uri):
     try:
-        fd = request.urlopen(uri)
-        raw = fd.read()
+        fd = six.moves.urllib.request.urlopen(uri)
+        raw = fd.read().decode('utf8')
         fd.close()
         return raw
     except Exception as e:
@@ -106,12 +105,50 @@ def read_json_from_uri(uri):
                   {'error': e, 'uri': uri})


+def gzip_decompress(content):
+    if six.PY3:
+        return gzip.decompress(content)
+    else:
+        gzip_fd = gzip.GzipFile(fileobj=six.moves.StringIO.StringIO(content))
+        return gzip_fd.read()
+
+
+def cmp_to_key(mycmp):  # ported from python 3
+    """Convert a cmp= function into a key= function."""
+    class K(object):
+        __slots__ = ['obj']
+
+        def __init__(self, obj):
+            self.obj = obj
+
+        def __lt__(self, other):
+            return mycmp(self.obj, other.obj) < 0
+
+        def __gt__(self, other):
+            return mycmp(self.obj, other.obj) > 0
+
+        def __eq__(self, other):
+            return mycmp(self.obj, other.obj) == 0
+
+        def __le__(self, other):
+            return mycmp(self.obj, other.obj) <= 0
+
+        def __ge__(self, other):
+            return mycmp(self.obj, other.obj) >= 0
+
+        def __ne__(self, other):
+            return mycmp(self.obj, other.obj) != 0
+
+        __hash__ = None
+    return K
+
+
 def make_range(start, stop, step):
     last_full = stop - ((stop - start) % step)
-    for i in six.moves.xrange(start, last_full, step):
-        yield six.moves.xrange(i, i + step)
+    for i in six.moves.range(start, last_full, step):
+        yield six.moves.range(i, i + step)
     if stop > last_full:
-        yield six.moves.xrange(last_full, stop)
+        yield six.moves.range(last_full, stop)


 def store_user(runtime_storage_inst, user):
@@ -200,7 +237,7 @@ def add_index(sequence, start=1, item_filter=lambda x: True):

 def safe_encode(s):
-    return parse.quote(s.encode('utf-8'))
+    return six.moves.urllib.parse.quote(s.encode('utf-8'))


 def make_module_group(module_group_id, name=None, modules=None, tag='module'):
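
A minimal usage sketch of the two helpers introduced above (illustrative only; it assumes the module is importable as stackalytics.processor.utils and uses made-up data):

    import gzip

    from stackalytics.processor import utils

    # cmp_to_key() wraps an old-style comparator so it can be passed as
    # key=, the only form list.sort()/sorted() accept on Python 3.
    companies = [{'end_date': 5}, {'end_date': 1}, {'end_date': 3}]
    companies.sort(key=utils.cmp_to_key(lambda x, y: x['end_date'] - y['end_date']))

    # gzip_decompress() hides the stdlib difference: Python 3 has
    # gzip.decompress(), Python 2 needs a GzipFile over a file-like object.
    if hasattr(gzip, 'compress'):  # gzip.compress() exists only on Python 3
        assert utils.gzip_decompress(gzip.compress(b'payload')) == b'payload'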


@@ -190,18 +190,18 @@ class Git(Vcs):
         try:
             output = sh.git('log', '--pretty=' + GIT_LOG_FORMAT, '--shortstat',
                             '-M', '--no-merges', commit_range, _tty_out=False,
-                            _decode_errors='ignore')
+                            _decode_errors='ignore', _encoding='utf8')
         except sh.ErrorReturnCode as e:
             LOG.error('Unable to get log of git repo %s. Ignore it',
                       self.repo['uri'])
             LOG.exception(e)
             return

-        for rec in re.finditer(GIT_LOG_PATTERN, str(output)):
+        for rec in re.finditer(GIT_LOG_PATTERN, six.text_type(output)):
             i = 1
             commit = {}
             for param in GIT_LOG_PARAMS:
-                commit[param[0]] = six.text_type(rec.group(i), 'utf8')
+                commit[param[0]] = rec.group(i)
                 i += 1

             if not utils.check_email_validity(commit['author_email']):


@@ -13,9 +13,11 @@
 #    See the License for the specific language governing permissions and
 #    limitations under the License.

+import functools
 import json

 import jsonschema
+import six
 import testtools

@@ -24,17 +26,25 @@ class TestConfigFiles(testtools.TestCase):
         super(TestConfigFiles, self).setUp()

     def _read_file(self, file_name):
-        with open(file_name, 'r') as content_file:
+        if six.PY3:
+            opener = functools.partial(open, encoding='utf8')
+        else:
+            opener = open
+        with opener(file_name, 'r') as content_file:
             content = content_file.read()
             return json.loads(content)

     def _verify_ordering(self, array, key, msg):
-        sorted_array = sorted(array, key=key)
-        diff_msg = None
-        for i in range(len(array)):
-            if array[i] != sorted_array[i]:
-                diff_msg = ('First differing element %s:\n%s\n%s' %
-                            (i, array[i], sorted_array[i]))
+        comparator = lambda x, y: (x > y) - (x < y)
+
+        diff_msg = ''
+        for i in range(len(array) - 1):
+            if comparator(key(array[i]), key(array[i + 1])) > 0:
+                diff_msg = ('Order fails at index %(index)s, '
+                            'elements:\n%(first)s:\n%(second)s' %
+                            {'index': i, 'first': array[i],
+                             'second': array[i + 1]})
+                break
         if diff_msg:
             self.fail(msg + '\n' + diff_msg)
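
For context, the builtin cmp() is gone in Python 3; the lambda above reproduces its -1/0/1 contract on both interpreters, since booleans subtract as integers. For example:

    compare = lambda x, y: (x > y) - (x < y)

    assert compare(1, 2) == -1
    assert compare(2, 2) == 0
    assert compare(3, 2) == 1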


@@ -743,11 +743,11 @@ class TestRecordProcessor(testtools.TestCase):
             'user_name': 'John Doe',
             'emails': ['john_doe@ibm.com', 'john_doe@gmail.com'],
             'companies': [{'company_name': 'IBM', 'end_date': 0}]}
-        self.assertEqual(user, utils.load_user(
+        self.assertUsersMatch(user, utils.load_user(
             record_processor_inst.runtime_storage_inst, 'john_doe'))
-        self.assertEqual(user, utils.load_user(
+        self.assertUsersMatch(user, utils.load_user(
             record_processor_inst.runtime_storage_inst, 'john_doe@gmail.com'))
-        self.assertEqual(user, utils.load_user(
+        self.assertUsersMatch(user, utils.load_user(
             record_processor_inst.runtime_storage_inst, 'john_doe@ibm.com'))

     def test_merge_users(self):
@@ -866,10 +866,10 @@
                   'companies': [{'company_name': '*independent',
                                  'end_date': 0}]}
         runtime_storage_inst = record_processor_inst.runtime_storage_inst
-        self.assertEqual(user_1, utils.load_user(runtime_storage_inst,
-                                                 'john_doe'))
-        self.assertEqual(user_2, utils.load_user(runtime_storage_inst,
-                                                 'homer'))
+        self.assertUsersMatch(user_1, utils.load_user(runtime_storage_inst,
+                                                      'john_doe'))
+        self.assertUsersMatch(user_2, utils.load_user(runtime_storage_inst,
+                                                      'homer'))

     def test_process_commit_with_coauthors(self):
         record_processor_inst = self.make_record_processor(
@@ -1337,6 +1337,16 @@ class TestRecordProcessor(testtools.TestCase):
             self.assertEqual(value, actual[key],
                              'Values for key %s do not match' % key)

+    def assertUsersMatch(self, expected, actual):
+        match = True
+        for key, value in six.iteritems(expected):
+            if key == 'emails':
+                match = (set(value) == set(actual[key]))
+            else:
+                match = (value == actual[key])
+        self.assertTrue(match, 'User %s should match %s' % (actual, expected))
+
     # Helpers

     def make_record_processor(self, users=None, companies=None, releases=None,
@@ -1415,7 +1425,7 @@ def make_runtime_storage(users=None, companies=None, releases=None,
         return count

     def get_all_users():
-        for n in six.moves.xrange(
+        for n in six.moves.range(
                 0, (runtime_storage_cache.get('user:count') or 0) + 1):
             u = runtime_storage_cache.get('user:%s' % n)
             if u:


@@ -150,9 +150,12 @@ diff_stat:
         self.assertEqual(0, commits[4]['lines_deleted'])
         self.assertFalse('coauthor' in commits[4])

-        self.assertEqual(
-            [{'author_name': 'Tupac Shakur',
-              'author_email': 'tupac.shakur@openstack.com'},
-             {'author_name': 'Bob Dylan',
-              'author_email': 'bob.dylan@openstack.com'}],
-            commits[5]['coauthor'])
+        self.assertIn(
+            {'author_name': 'Tupac Shakur',
+             'author_email': 'tupac.shakur@openstack.com'},
+            commits[5]['coauthor'])
+
+        self.assertIn(
+            {'author_name': 'Bob Dylan',
+             'author_email': 'bob.dylan@openstack.com'},
+            commits[5]['coauthor'])

tox.ini

@@ -21,13 +21,17 @@ deps = -r{toxinidir}/requirements-py3.txt
        -r{toxinidir}/test-requirements.txt

 # to be removed once all tests passed
 commands = python -m testtools.run \
-    tests.unit.test_utils \
+    tests.unit.test_config_files \
+    tests.unit.test_default_data_processor \
     tests.unit.test_mps \
-    tests.unit.test_record_processor
+    tests.unit.test_mls \
+    tests.unit.test_record_processor \
+    tests.unit.test_utils \
+    tests.unit.test_vcs

 [testenv:pep8]
 commands = flake8
-  {toxinidir}/tools/requirements_style_check.sh requirements.txt test-requirements.txt
+  {toxinidir}/tools/requirements_style_check.sh requirements.txt requirements-py3.txt test-requirements.txt
 distribute = false

 [testenv:venv]