Merge "Remove six.text_type, six.binary_type usage"

Zuul 2022-02-02 21:55:05 +00:00 committed by Gerrit Code Review
commit 2b6a089d89
33 changed files with 102 additions and 169 deletions

View File

@ -15,7 +15,6 @@ glance Specific Commandments
- [G318] Change assertEqual(A, None) or assertEqual(None, A) by optimal assert
like assertIsNone(A)
- [G319] Validate that debug level logs are not translated
- [G320] For python 3 compatibility, use six.text_type() instead of unicode()
- [G327] Prevent use of deprecated contextlib.nested
- [G328] Must use a dict comprehension instead of a dict constructor with
a sequence of key-value pairs
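
The G320 rule removed above only existed to bridge Python 2 and 3; an illustrative check (not part of the change) shows why it is obsolete now that only Python 3 is supported:

# Illustrative only: on Python 3 the built-in str already is the unicode
# text type that six.text_type aliased, so G320 has nothing left to guard.
assert isinstance(u'glance', str)
assert str(b'glance'.decode('utf-8')) == u'glance'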

View File

@ -22,7 +22,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
import six
import webob.exc
import glance.api.policy
@ -573,7 +572,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
response.headers['Content-MD5'] = image.checksum
# NOTE(markwash): "response.app_iter = ..." also erroneously resets the
# content-length
response.headers['Content-Length'] = six.text_type(chunk_size)
response.headers['Content-Length'] = str(chunk_size)
def upload(self, response, result):
response.status_int = 204
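
On Python 3, str() produces the native text WebOb expects for header values, so the six.text_type() cast above was redundant. A minimal sketch with illustrative values:

# Minimal sketch, illustrative values only: header values just need native str.
import webob

response = webob.Response()
chunk_size = 4096
response.headers['Content-Length'] = str(chunk_size)
assert response.headers['Content-Length'] == '4096'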

View File

@ -20,7 +20,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
import webob
@ -411,13 +410,13 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def create(self, response, image_member):
image_member_view = self._format_image_member(image_member)
body = jsonutils.dumps(image_member_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def update(self, response, image_member):
image_member_view = self._format_image_member(image_member)
body = jsonutils.dumps(image_member_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def index(self, response, image_members):
@ -429,13 +428,13 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
totalview = dict(members=image_members_view)
totalview['schema'] = '/v2/schemas/members'
body = jsonutils.dumps(totalview, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def show(self, response, image_member):
image_member_view = self._format_image_member(image_member)
body = jsonutils.dumps(image_member_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
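
The same pattern repeats through the serializers below: jsonutils.dumps() already returns native text on Python 3, so wrapping the body in six.text_type() was a no-op. A hedged sketch:

# Hedged sketch: jsonutils.dumps() returns native str, so no cast is needed
# before assigning the body to response.unicode_body.
from oslo_serialization import jsonutils

image_member_view = {'member_id': 'fake-tenant', 'status': 'pending'}
body = jsonutils.dumps(image_member_view, ensure_ascii=False)
assert isinstance(body, str)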

View File

@ -27,7 +27,6 @@ from oslo_serialization import jsonutils as json
from oslo_utils import encodeutils
from oslo_utils import timeutils as oslo_timeutils
import requests
import six
from six.moves import http_client as http
import six.moves.urllib.parse as urlparse
import webob.exc
@ -1148,8 +1147,7 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
for key in cls._disallowed_properties:
if key in image:
msg = _("Attribute '%s' is read-only.") % key
raise webob.exc.HTTPForbidden(
explanation=six.text_type(msg))
raise webob.exc.HTTPForbidden(explanation=msg)
def create(self, request):
body = self._get_request_body(request)
@ -1278,10 +1276,10 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
path_root = change['path'][0]
if path_root in self._readonly_properties:
msg = _("Attribute '%s' is read-only.") % path_root
raise webob.exc.HTTPForbidden(explanation=six.text_type(msg))
raise webob.exc.HTTPForbidden(explanation=msg)
if path_root in self._reserved_properties:
msg = _("Attribute '%s' is reserved.") % path_root
raise webob.exc.HTTPForbidden(explanation=six.text_type(msg))
raise webob.exc.HTTPForbidden(explanation=msg)
if any(path_root.startswith(ns) for ns in self._reserved_namespaces):
msg = _("Attribute '%s' is reserved.") % path_root
raise webob.exc.HTTPForbidden(explanation=msg)
@ -1315,7 +1313,7 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
if len(path) != limits.get(op, 1):
msg = _("Invalid JSON pointer for this resource: "
"'/%s'") % '/'.join(path)
raise webob.exc.HTTPBadRequest(explanation=six.text_type(msg))
raise webob.exc.HTTPBadRequest(explanation=msg)
def _parse_json_schema_change(self, raw_change, draft_version):
if draft_version == 10:
@ -1656,14 +1654,12 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def show(self, response, image):
image_view = self._format_image(image)
body = json.dumps(image_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = json.dumps(image_view, ensure_ascii=False)
response.content_type = 'application/json'
def update(self, response, image):
image_view = self._format_image(image)
body = json.dumps(image_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = json.dumps(image_view, ensure_ascii=False)
response.content_type = 'application/json'
def index(self, response, result):
@ -1681,8 +1677,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
params['marker'] = result['next_marker']
next_query = urlparse.urlencode(params)
body['next'] = '/v2/images?%s' % next_query
response.unicode_body = six.text_type(json.dumps(body,
ensure_ascii=False))
response.unicode_body = json.dumps(body, ensure_ascii=False)
response.content_type = 'application/json'
def delete_from_store(self, response, result):
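
WebOb's HTTP exceptions take a text explanation directly, and an oslo.i18n Message behaves like str on Python 3, so the explicit six.text_type(msg) casts could be dropped. A small sketch with a plain string standing in for the translated message:

# Plain string stands in for _("Attribute '%s' is read-only.") % key.
import webob.exc

msg = "Attribute 'id' is read-only."
exc = webob.exc.HTTPForbidden(explanation=msg)
assert exc.explanation == msg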

View File

@ -17,7 +17,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
import six.moves.urllib.parse as urlparse
import webob.exc
@ -686,7 +685,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def __render(self, json_data, response, response_status=None):
body = jsonutils.dumps(json_data, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
if response_status:
response.status_int = response_status

View File

@ -16,7 +16,6 @@
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
import webob.exc
from wsme.rest import json
@ -434,7 +433,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def show(self, response, metadata_object):
metadata_object_json = json.tojson(MetadefObject, metadata_object)
body = jsonutils.dumps(metadata_object_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def update(self, response, metadata_object):
@ -445,7 +444,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
result.schema = "v2/schemas/metadefs/objects"
metadata_objects_json = json.tojson(MetadefObjects, result)
body = jsonutils.dumps(metadata_objects_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def delete(self, response, result):

View File

@ -16,7 +16,6 @@
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
import webob.exc
from wsme.rest import json
@ -322,13 +321,13 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def show(self, response, result):
property_type_json = json.tojson(PropertyType, result)
body = jsonutils.dumps(property_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def index(self, response, result):
property_type_json = json.tojson(PropertyTypes, result)
body = jsonutils.dumps(property_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def create(self, response, result):

View File

@ -15,7 +15,6 @@
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
from six.moves import http_client as http
import webob.exc
from wsme.rest import json
@ -241,20 +240,20 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def show(self, response, result):
resource_type_json = json.tojson(ResourceTypeAssociations, result)
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def index(self, response, result):
resource_type_json = json.tojson(ResourceTypes, result)
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def create(self, response, result):
resource_type_json = json.tojson(ResourceTypeAssociation, result)
response.status_int = http.CREATED
body = jsonutils.dumps(resource_type_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def delete(self, response, result):

View File

@ -16,7 +16,6 @@
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
import webob.exc
from wsme.rest import json
@ -472,13 +471,13 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
response.status_int = http.CREATED
metadata_tags_json = json.tojson(MetadefTags, result)
body = jsonutils.dumps(metadata_tags_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def show(self, response, metadata_tag):
metadata_tag_json = json.tojson(MetadefTag, metadata_tag)
body = jsonutils.dumps(metadata_tag_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def update(self, response, metadata_tag):
@ -488,7 +487,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def index(self, response, result):
metadata_tags_json = json.tojson(MetadefTags, result)
body = jsonutils.dumps(metadata_tags_json, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = body
response.content_type = 'application/json'
def delete(self, response, result):

View File

@ -325,8 +325,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def get(self, response, task):
task_view = self._format_task(self.task_schema, task)
body = json.dumps(task_view, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = json.dumps(task_view, ensure_ascii=False)
response.content_type = 'application/json'
def index(self, response, result):
@ -345,8 +344,7 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
params['marker'] = result['next_marker']
next_query = urlparse.urlencode(params)
body['next'] = '/v2/tasks?%s' % next_query
response.unicode_body = six.text_type(json.dumps(body,
ensure_ascii=False))
response.unicode_body = json.dumps(body, ensure_ascii=False)
response.content_type = 'application/json'

View File

@ -25,7 +25,6 @@ from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import timeutils
from oslo_utils import units
import six
import taskflow
from taskflow.patterns import linear_flow as lf
from taskflow import retry
@ -704,12 +703,12 @@ class _CompleteTask(task.Task):
# necessary
log_msg = _LE("Task ID %(task_id)s failed. Error: %(exc_type)s: "
"%(e)s")
LOG.exception(log_msg, {'exc_type': six.text_type(type(e)),
LOG.exception(log_msg, {'exc_type': str(type(e)),
'e': encodeutils.exception_to_unicode(e),
'task_id': task.task_id})
err_msg = _("Error: %(exc_type)s: %(e)s")
task.fail(err_msg % {'exc_type': six.text_type(type(e)),
task.fail(err_msg % {'exc_type': str(type(e)),
'e': encodeutils.exception_to_unicode(e)})
finally:
self.task_repo.save(task)
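
The substitution above relies on str(type(e)) rendering the exception class while encodeutils.exception_to_unicode(e) renders its message. A self-contained sketch of the same formatting:

# Self-contained sketch of the error formatting used in _CompleteTask.
from oslo_utils import encodeutils

try:
    raise ValueError('boom')
except ValueError as e:
    err_msg = "Error: %(exc_type)s: %(e)s" % {
        'exc_type': str(type(e)),
        'e': encodeutils.exception_to_unicode(e),
    }
assert err_msg == "Error: <class 'ValueError'>: boom"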

View File

@ -23,7 +23,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
import six
from stevedore import named
from taskflow.patterns import linear_flow as lf
from taskflow import retry
@ -388,12 +387,12 @@ class _CompleteTask(task.Task):
# necessary
log_msg = _LE("Task ID %(task_id)s failed. Error: %(exc_type)s: "
"%(e)s")
LOG.exception(log_msg, {'exc_type': six.text_type(type(e)),
LOG.exception(log_msg, {'exc_type': str(type(e)),
'e': encodeutils.exception_to_unicode(e),
'task_id': task.task_id})
err_msg = _("Error: %(exc_type)s: %(e)s")
task.fail(err_msg % {'exc_type': six.text_type(type(e)),
task.fail(err_msg % {'exc_type': str(type(e)),
'e': encodeutils.exception_to_unicode(e)})
finally:
self.task_repo.save(task)

View File

@ -43,7 +43,6 @@ from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import encodeutils
import six
from glance.common import config
from glance.common import exception
@ -561,7 +560,7 @@ def main():
v = getattr(CONF.command, 'action_kwarg_' + k)
if v is None:
continue
if isinstance(v, six.string_types):
if isinstance(v, str):
v = encodeutils.safe_decode(v)
func_kwargs[k] = v
func_args = [encodeutils.safe_decode(arg)
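
On Python 3, six.string_types is simply (str,), so the isinstance check above collapses to isinstance(v, str). A minimal sketch of the kwarg handling with illustrative values:

# Illustrative values only: string kwargs still pass through safe_decode().
from oslo_utils import encodeutils

func_kwargs = {}
for k, v in {'id': 'abc123', 'limit': 20, 'marker': None}.items():
    if v is None:
        continue
    if isinstance(v, str):
        v = encodeutils.safe_decode(v)
    func_kwargs[k] = v
assert func_kwargs == {'id': 'abc123', 'limit': 20}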

View File

@ -390,7 +390,7 @@ class BaseClient(object):
if value is None:
del params[key]
continue
if not isinstance(value, six.string_types):
if not isinstance(value, str):
value = str(value)
params[key] = encodeutils.safe_encode(value)
query = urlparse.urlencode(params)

View File

@ -63,8 +63,7 @@ def urlsafe_encrypt(key, plaintext, blocksize=16):
cypher = Cipher(algorithms.AES(key), modes.CBC(init_vector),
backend=backend)
encryptor = cypher.encryptor()
padded = encryptor.update(
pad(six.binary_type(plaintext))) + encryptor.finalize()
padded = encryptor.update(pad(plaintext)) + encryptor.finalize()
encoded = base64.urlsafe_b64encode(init_vector + padded)
if six.PY3:
encoded = encoded.decode('ascii')
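
six.binary_type is just an alias for bytes on Python 3, so plaintext that is already bytes needs no cast before padding. An illustrative block-size pad (the real pad() in glance.common.crypt may differ in detail):

# Illustrative pad only, not the glance implementation.
def pad(value, blocksize=16):
    remainder = len(value) % blocksize
    if remainder == 0:
        return value
    return value + b'\x00' * (blocksize - remainder)

plaintext = b'swift+https://user:key@auth/v1/container/obj'
assert isinstance(plaintext, bytes)        # no six.binary_type() cast needed
assert len(pad(plaintext)) % 16 == 0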

View File

@ -16,7 +16,6 @@
"""Glance exception subclasses"""
import six
import six.moves.urllib.parse as urlparse
from glance.i18n import _
@ -58,7 +57,7 @@ class GlanceException(Exception):
# NOTE(flwang): By default, self.msg is an instance of Message, which
# can't be converted by str(). Based on the definition of
# __unicode__, it should return unicode always.
return six.text_type(self.msg)
return str(self.msg)
class MissingCredentialError(GlanceException):
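
The __str__ change works because both plain strings and lazily translated oslo.i18n Messages convert cleanly with the built-in str() on Python 3. A stand-in sketch of the pattern, not the real class:

# Stand-in sketch of the GlanceException pattern.
class FakeGlanceException(Exception):
    message = "An unknown exception occurred"

    def __init__(self, message=None):
        self.msg = message or self.message
        super().__init__(self.msg)

    def __str__(self):
        # str() handles plain strings and translated Message objects alike
        return str(self.msg)

assert str(FakeGlanceException('test: 500')) == 'test: 500'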

View File

@ -21,7 +21,6 @@ from oslo_concurrency import lockutils
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
import six
from glance.api.v2 import images as v2_api
from glance.common import exception
@ -63,7 +62,7 @@ def _execute(t_id, task_repo, image_repo, image_factory):
# TODO(nikhil): need to bring back save_and_reraise_exception when
# necessary
err_msg = ("Error: " + six.text_type(type(e)) + ': ' +
err_msg = ("Error: " + str(type(e)) + ': ' +
encodeutils.exception_to_unicode(e))
log_msg = err_msg + ("Task ID %s" % task.task_id)
LOG.exception(log_msg)

View File

@ -21,7 +21,6 @@ from oslo_concurrency import lockutils
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
import six
from glance.api.v2 import images as v2_api
from glance.common import exception
@ -65,7 +64,7 @@ def _execute(t_id, task_repo, image_repo, image_factory):
# TODO(nikhil): need to bring back save_and_reraise_exception when
# necessary
err_msg = ("Error: " + six.text_type(type(e)) + ': ' +
err_msg = ("Error: " + str(type(e)) + ': ' +
encodeutils.exception_to_unicode(e))
log_msg = _LE(err_msg + ("Task ID %s" % task.task_id)) # noqa
LOG.exception(log_msg)

View File

@ -313,9 +313,9 @@ def image_meta_to_http_headers(image_meta):
for pk, pv in v.items():
if pv is not None:
headers["x-image-meta-property-%s"
% pk.lower()] = six.text_type(pv)
% pk.lower()] = str(pv)
else:
headers["x-image-meta-%s" % k.lower()] = six.text_type(v)
headers["x-image-meta-%s" % k.lower()] = str(v)
return headers
@ -516,8 +516,10 @@ def no_4byte_params(f):
def wrapper(*args, **kwargs):
def _is_match(some_str):
return (isinstance(some_str, six.text_type) and
REGEX_4BYTE_UNICODE.findall(some_str) != [])
return (
isinstance(some_str, str) and
REGEX_4BYTE_UNICODE.findall(some_str) != []
)
def _check_dict(data_dict):
# a dict of dicts has to be checked recursively
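
Both hunks above lean on str being the only text type: header values are cast with str(), and the 4-byte-unicode guard needs nothing more than isinstance(some_str, str). A runnable sketch of the guard with an illustrative pattern (the real REGEX_4BYTE_UNICODE is defined elsewhere in glance.common.utils):

# Illustrative pattern; the real REGEX_4BYTE_UNICODE lives at module level.
import re

REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]')

def _is_match(some_str):
    return (
        isinstance(some_str, str) and
        REGEX_4BYTE_UNICODE.findall(some_str) != []
    )

assert _is_match(u'supplementary \U0001F37A')   # outside the BMP, 4 bytes in UTF-8
assert not _is_match('plain ascii')
assert not _is_match(b'bytes are skipped')      # only text values are checked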

View File

@ -1375,7 +1375,7 @@ class Resource(object):
self.dispatch(self.serializer, action, response, action_result)
# encode all headers in response to utf-8 to prevent unicode errors
for name, value in list(response.headers.items()):
if six.PY2 and isinstance(value, six.text_type):
if six.PY2 and isinstance(value, str):
response.headers[name] = encodeutils.safe_encode(value)
return response
except webob.exc.WSGIHTTPException as e:

View File

@ -32,7 +32,6 @@ from oslo_log import log as logging
from oslo_utils import excutils
import osprofiler.sqlalchemy
from retrying import retry
import six
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
import sqlalchemy
@ -1527,7 +1526,7 @@ def purge_deleted_rows(context, age_in_days, max_rows, session=None):
except (db_exception.DBError, db_exception.DBReferenceError) as ex:
LOG.exception(_LE('DBError detected when force purging '
'table=%(table)s: %(error)s'),
{'table': ti, 'error': six.text_type(ex)})
{'table': ti, 'error': str(ex)})
raise
rows = result.rowcount
@ -1572,7 +1571,7 @@ def purge_deleted_rows(context, age_in_days, max_rows, session=None):
with excutils.save_and_reraise_exception():
LOG.error(_LE('DBError detected when purging from '
"%(tablename)s: %(error)s"),
{'tablename': tbl, 'error': six.text_type(ex)})
{'tablename': tbl, 'error': str(ex)})
rows = result.rowcount
LOG.info(_LI('Deleted %(rows)d row(s) from table %(tbl)s'),
@ -1616,7 +1615,7 @@ def purge_deleted_rows_from_images(context, age_in_days, max_rows,
with excutils.save_and_reraise_exception():
LOG.error(_LE('DBError detected when purging from '
"%(tablename)s: %(error)s"),
{'tablename': tbl, 'error': six.text_type(ex)})
{'tablename': tbl, 'error': str(ex)})
rows = result.rowcount
LOG.info(_LI('Deleted %(rows)d row(s) from table %(tbl)s'),

View File

@ -22,7 +22,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
import six
from glance.common import exception
from glance.common import timeutils
@ -396,9 +395,9 @@ class Task(object):
@message.setter
def message(self, message):
if message:
self._message = six.text_type(message)
self._message = str(message)
else:
self._message = six.text_type('')
self._message = ''
def _validate_task_status_transition(self, cur_status, new_status):
valid_transitions = {
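
The setter above coerces any truthy message to native text and collapses falsy ones to the empty string. A stand-in sketch, not the real Task class:

# Stand-in sketch of the message setter.
class FakeTask(object):
    _message = ''

    @property
    def message(self):
        return self._message

    @message.setter
    def message(self, message):
        if message:
            self._message = str(message)
        else:
            self._message = ''

t = FakeTask()
t.message = 42
assert t.message == '42'
t.message = None
assert t.message == ''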

View File

@ -98,16 +98,6 @@ def no_translate_debug_logs(logical_line, filename):
yield(0, "G319: Don't translate debug level logs")
@core.flake8ext
def no_direct_use_of_unicode_function(logical_line):
"""Check for use of unicode() builtin
G320
"""
if unicode_func_re.match(logical_line):
yield(0, "G320: Use six.text_type() instead of unicode()")
@core.flake8ext
def check_no_contextlib_nested(logical_line):
msg = ("G327: contextlib.nested is deprecated since Python 2.7. See "
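
For reference, the removed G320 check amounted to the following; unicode_func_re is an assumption here, since the module-level pattern is not shown in the hunk:

# Reference sketch of the removed check; unicode_func_re is assumed.
import re

unicode_func_re = re.compile(r"(\s|\W|^)unicode\(")

def no_direct_use_of_unicode_function(logical_line):
    """Check for use of unicode() builtin

    G320
    """
    if unicode_func_re.match(logical_line):
        yield (0, "G320: Use six.text_type() instead of unicode()")

assert len(list(no_direct_use_of_unicode_function("unicode('x')"))) == 1
assert len(list(no_direct_use_of_unicode_function("str('x')"))) == 0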

View File

@ -476,7 +476,7 @@ def set_xattr(path, key, value):
If xattrs aren't supported by the file-system, we skip setting the value.
"""
namespaced_key = _make_namespaced_xattr_key(key)
if not isinstance(value, six.binary_type):
if not isinstance(value, bytes):
value = str(value)
if six.PY3:
value = value.encode('utf-8')
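
With six.binary_type gone, the check reads isinstance(value, bytes); the six.PY3 branch that remains is always taken on Python 3. A compact sketch of the resulting coercion, as an illustrative helper rather than glance code:

# Illustrative helper: non-bytes values end up UTF-8 encoded.
def _coerce_xattr_value(value):
    if not isinstance(value, bytes):
        value = str(value).encode('utf-8')
    return value

assert _coerce_xattr_value(123) == b'123'
assert _coerce_xattr_value(b'raw') == b'raw'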

View File

@ -13,7 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
import time
from oslo_serialization import jsonutils
@ -43,8 +42,7 @@ def verify_image_hashes_and_status(
test_obj.assertEqual(checksum, image['checksum'])
if os_hash_value:
# make sure we're using the hashing_algorithm we expect
test_obj.assertEqual(six.text_type(os_hash_algo),
image['os_hash_algo'])
test_obj.assertEqual(str(os_hash_algo), image['os_hash_algo'])
test_obj.assertEqual(os_hash_value, image['os_hash_value'])
test_obj.assertEqual(status, image['status'])
test_obj.assertEqual(size, image['size'])

View File

@ -26,7 +26,6 @@ from oslo_serialization import jsonutils
from oslo_utils.secretutils import md5
from oslo_utils import units
import requests
import six
from six.moves import http_client as http
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
@ -209,9 +208,8 @@ class TestImages(functional.FunctionalTest):
status='active',
max_sec=10,
delay_sec=0.2)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -353,9 +351,8 @@ class TestImages(functional.FunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -732,9 +729,8 @@ class TestImages(functional.FunctionalTest):
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(http.NO_CONTENT, response.status_code)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self, image_id, expect_c,
expect_h, 'active',
size=len(image_data))
@ -1176,9 +1172,8 @@ class TestImages(functional.FunctionalTest):
image_data = b'ZZZZZ'
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(http.NO_CONTENT, response.status_code)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
expect_c,
@ -1191,9 +1186,8 @@ class TestImages(functional.FunctionalTest):
image_data = b'WWWWW'
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(http.NO_CONTENT, response.status_code)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image2_id,
expect_c,
@ -4636,9 +4630,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
status='active',
max_sec=15,
delay_sec=0.2)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -4802,9 +4795,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
status='active',
max_sec=15,
delay_sec=0.2)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -4967,9 +4959,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -5131,9 +5122,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -5294,9 +5284,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -5459,9 +5448,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -5684,9 +5672,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -5943,9 +5930,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -6086,9 +6072,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(http.NO_CONTENT, response.status_code)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -6261,9 +6246,8 @@ class TestImagesMultipleBackend(functional.MultipleBackendFunctionalTest):
response = requests.put(path, headers=headers, data=image_data)
self.assertEqual(http.NO_CONTENT, response.status_code)
expect_c = six.text_type(md5(image_data,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(image_data).hexdigest())
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
@ -6798,9 +6782,8 @@ class TestCopyImagePermissions(functional.MultipleBackendFunctionalTest):
delay_sec=0.2,
start_delay_sec=1)
with requests.get(image_data_uri) as r:
expect_c = six.text_type(md5(r.content,
usedforsecurity=False).hexdigest())
expect_h = six.text_type(hashlib.sha512(r.content).hexdigest())
expect_c = str(md5(r.content, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(r.content).hexdigest())
func_utils.verify_image_hashes_and_status(self,
image_id,
checksum=expect_c,
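
Every checksum change above follows one pattern: hexdigest() already returns native str on Python 3, so the remaining str() wrappers are harmless identities. A hedged sketch:

# Hedged sketch: hexdigest() already yields native str on Python 3.
import hashlib
from oslo_utils.secretutils import md5

image_data = b'ZZZZZ'
expect_c = str(md5(image_data, usedforsecurity=False).hexdigest())
expect_h = str(hashlib.sha512(image_data).hexdigest())
assert isinstance(expect_c, str) and isinstance(expect_h, str)
assert len(expect_c) == 32 and len(expect_h) == 128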

View File

@ -13,7 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
import testtools
from glance.db.migration import CURRENT_RELEASE
@ -29,8 +28,7 @@ class TestDataMigrationVersion(testtools.TestCase):
# by rule, release names must be composed of the 26 letters of the
# ISO Latin alphabet (ord('A')==65, ord('Z')==90)
release_letter = six.text_type(CURRENT_RELEASE[:1].upper()).encode(
'ascii')
release_letter = str(CURRENT_RELEASE[:1].upper()).encode('ascii')
# Convert release letter into an int in [1:26]. The first
# glance release was 'Bexar'.
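
A compact, self-contained version of the letter-to-number conversion the test performs (the CURRENT_RELEASE value here is illustrative only):

# Illustrative value; the real CURRENT_RELEASE comes from glance.db.migration.
CURRENT_RELEASE = 'yoga'
release_letter = str(CURRENT_RELEASE[:1].upper()).encode('ascii')
release_number = ord(release_letter) - ord('A') + 1
assert release_number == 25                    # 'Y' is the 25th letter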

View File

@ -52,17 +52,6 @@ class HackingTestCase(utils.BaseTestCase):
self.assertEqual(0, len(list(checks.no_translate_debug_logs(
"LOG.info(_('foo'))", "glance/store/foo.py"))))
def test_no_direct_use_of_unicode_function(self):
self.assertEqual(1, len(list(checks.no_direct_use_of_unicode_function(
"unicode('the party dont start til the unicode walks in')"))))
self.assertEqual(1, len(list(checks.no_direct_use_of_unicode_function(
"""unicode('something '
'something else"""))))
self.assertEqual(0, len(list(checks.no_direct_use_of_unicode_function(
"six.text_type('party over')"))))
self.assertEqual(0, len(list(checks.no_direct_use_of_unicode_function(
"not_actually_unicode('something completely different')"))))
def test_no_contextlib_nested(self):
self.assertEqual(1, len(list(checks.check_no_contextlib_nested(
"with contextlib.nested("))))

View File

@ -14,7 +14,6 @@
# under the License.
from oslo_utils import encodeutils
import six
from six.moves import http_client as http
from glance.common import exception
@ -49,6 +48,5 @@ class GlanceExceptionTestCase(test_utils.BaseTestCase):
self.assertIn('test: 500', encodeutils.exception_to_unicode(msg))
def test_non_unicode_error_msg(self):
exc = exception.GlanceException(str('test'))
self.assertIsInstance(encodeutils.exception_to_unicode(exc),
six.text_type)
exc = exception.GlanceException('test')
self.assertIsInstance(encodeutils.exception_to_unicode(exc), str)

View File

@ -659,7 +659,7 @@ class TestHelpers(test_utils.BaseTestCase):
'properties': {'distro': 'Ubuntu 10.04 LTS'}}
headers = utils.image_meta_to_http_headers(fixture)
for k, v in headers.items():
self.assertIsInstance(v, six.text_type)
self.assertIsInstance(v, str)
def test_data_passed_properly_through_headers(self):
"""

View File

@ -5390,14 +5390,14 @@ class TestImagesSerializerWithUnicode(test_utils.BaseTestCase):
u'size': 1024,
u'virtual_size': 3072,
u'checksum': u'ca425b88f047ce8ec45ee90e813ada91',
u'os_hash_algo': six.text_type(FAKEHASHALGO),
u'os_hash_value': six.text_type(MULTIHASH1),
u'os_hash_algo': str(FAKEHASHALGO),
u'os_hash_value': str(MULTIHASH1),
u'container_format': u'ami',
u'disk_format': u'ami',
u'min_ram': 128,
u'min_disk': 10,
u'created_at': six.text_type(ISOTIME),
u'updated_at': six.text_type(ISOTIME),
u'created_at': str(ISOTIME),
u'updated_at': str(ISOTIME),
u'self': u'/v2/images/%s' % UUID1,
u'file': u'/v2/images/%s/file' % UUID1,
u'schema': u'/v2/schemas/image',
@ -5430,14 +5430,14 @@ class TestImagesSerializerWithUnicode(test_utils.BaseTestCase):
u'size': 1024,
u'virtual_size': 3072,
u'checksum': u'ca425b88f047ce8ec45ee90e813ada91',
u'os_hash_algo': six.text_type(FAKEHASHALGO),
u'os_hash_value': six.text_type(MULTIHASH1),
u'os_hash_algo': str(FAKEHASHALGO),
u'os_hash_value': str(MULTIHASH1),
u'container_format': u'ami',
u'disk_format': u'ami',
u'min_ram': 128,
u'min_disk': 10,
u'created_at': six.text_type(ISOTIME),
u'updated_at': six.text_type(ISOTIME),
u'created_at': str(ISOTIME),
u'updated_at': str(ISOTIME),
u'self': u'/v2/images/%s' % UUID1,
u'file': u'/v2/images/%s/file' % UUID1,
u'schema': u'/v2/schemas/image',
@ -5464,14 +5464,14 @@ class TestImagesSerializerWithUnicode(test_utils.BaseTestCase):
u'size': 1024,
u'virtual_size': 3072,
u'checksum': u'ca425b88f047ce8ec45ee90e813ada91',
u'os_hash_algo': six.text_type(FAKEHASHALGO),
u'os_hash_value': six.text_type(MULTIHASH1),
u'os_hash_algo': str(FAKEHASHALGO),
u'os_hash_value': str(MULTIHASH1),
u'container_format': u'ami',
u'disk_format': u'ami',
u'min_ram': 128,
u'min_disk': 10,
u'created_at': six.text_type(ISOTIME),
u'updated_at': six.text_type(ISOTIME),
u'created_at': str(ISOTIME),
u'updated_at': str(ISOTIME),
u'self': u'/v2/images/%s' % UUID1,
u'file': u'/v2/images/%s/file' % UUID1,
u'schema': u'/v2/schemas/image',
@ -5500,14 +5500,14 @@ class TestImagesSerializerWithUnicode(test_utils.BaseTestCase):
u'size': 1024,
u'virtual_size': 3072,
u'checksum': u'ca425b88f047ce8ec45ee90e813ada91',
u'os_hash_algo': six.text_type(FAKEHASHALGO),
u'os_hash_value': six.text_type(MULTIHASH1),
u'os_hash_algo': str(FAKEHASHALGO),
u'os_hash_value': str(MULTIHASH1),
u'container_format': u'ami',
u'disk_format': u'ami',
u'min_ram': 128,
u'min_disk': 10,
u'created_at': six.text_type(ISOTIME),
u'updated_at': six.text_type(ISOTIME),
u'created_at': str(ISOTIME),
u'updated_at': str(ISOTIME),
u'self': u'/v2/images/%s' % UUID1,
u'file': u'/v2/images/%s/file' % UUID1,
u'schema': u'/v2/schemas/image',

View File

@ -551,7 +551,7 @@ def start_http_server(image_id, image_data):
self.send_response(http.OK)
self.send_header('Content-Length', str(len(fixture)))
self.end_headers()
self.wfile.write(six.b(fixture))
self.wfile.write(fixture.encode('latin-1'))
return
def do_HEAD(self):
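
six.b('text') is defined as 'text'.encode('latin-1') on Python 3, which is why the replacement encodes explicitly instead of relying on a default codec. A one-line check of the equivalence:

# Equivalence behind the replacement above.
fixture = 'binary image data'
assert fixture.encode('latin-1') == b'binary image data'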

View File

@ -144,7 +144,6 @@ extension =
G317 = checks:assert_equal_type
G318 = checks:assert_equal_none
G319 = checks:no_translate_debug_logs
G320 = checks:no_direct_use_of_unicode_function
G327 = checks:check_no_contextlib_nested
G328 = checks:dict_constructor_with_list_copy
G329 = checks:check_python3_xrange