Remove six

The six Python 2/3 compatibility library is no longer needed in a Python 3-only world.

Change-Id: I38f57f755cde76e5bbdcc40d63b77a867b248028
This commit is contained in:
jacky06 2020-05-04 20:14:46 +08:00
parent e276e92057
commit 7cf3b161fb
28 changed files with 121 additions and 158 deletions

View File

@ -15,6 +15,5 @@ Searchlight Specific Commandments
- [SL318] Change assertEqual(A, None) or assertEqual(None, A) by optimal assert like
assertIsNone(A)
- [SL319] Validate that logs are not translated
- [SL320] For python 3 compatibility, use six.text_type() instead of unicode()
- [SL327] Prevent use of deprecated contextlib.nested
- [SL343] Check for common double word typos

View File

@ -39,7 +39,6 @@ reno==2.5.0
requests==2.21.0
Routes==2.3.1
simplejson==3.5.1
six==1.10.0
sphinx==1.6.2
stestr==2.0.0
stevedore==1.20.0

View File

@ -30,7 +30,6 @@ Paste>=2.0.2 # MIT
python-keystoneclient>=3.8.0 # Apache-2.0
pyOpenSSL>=17.1.0 # Apache-2.0
# Required by openstack.common libraries
six>=1.10.0 # MIT
oslo.i18n>=3.15.3 # Apache-2.0
oslo.log>=3.36.0 # Apache-2.0

View File

@ -18,7 +18,6 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
import six
import webob.exc
from searchlight.api import policy
@ -361,7 +360,7 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
return {'query': es_query}
def _get_sort_order(self, sort_order):
if isinstance(sort_order, (six.text_type, dict)):
if isinstance(sort_order, (str, dict)):
# Elasticsearch expects a list
sort_order = [sort_order]
elif not isinstance(sort_order, list):
@ -371,7 +370,7 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
def replace_sort_field(sort_field):
# Make some alterations for fields that have a 'raw' field so
# that documents aren't sorted by tokenized values
if isinstance(sort_field, six.text_type):
if isinstance(sort_field, str):
# Raw field name
if sort_field in searchlight.elasticsearch.RAW_SORT_FIELDS:
return sort_field + ".raw"
@ -482,11 +481,11 @@ class RequestDeserializer(wsgi.JSONRequestDeserializer):
if 'include' in _source:
query_params['_source_include'] = _source['include']
if 'exclude' in _source:
if isinstance(_source['exclude'], six.text_type):
if isinstance(_source['exclude'], str):
source_exclude.append(_source['exclude'])
else:
source_exclude.extend(_source['exclude'])
elif isinstance(_source, (list, six.text_type)):
elif isinstance(_source, (list, str)):
query_params['_source_include'] = _source
else:
msg = _("'_source' must be a string, dict or list")
@ -553,17 +552,17 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
def search(self, response, query_result):
body = jsonutils.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = str(body)
response.content_type = 'application/json'
def plugins_info(self, response, query_result):
body = jsonutils.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = str(body)
response.content_type = 'application/json'
def facets(self, response, query_result):
body = jsonutils.dumps(query_result, ensure_ascii=False)
response.unicode_body = six.text_type(body)
response.unicode_body = str(body)
response.content_type = 'application/json'

View File

@ -16,7 +16,6 @@
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
from six.moves import http_client
import webob.dec
@ -65,8 +64,7 @@ class Controller(object):
status=http_client.MULTIPLE_CHOICES,
content_type='application/json')
json = jsonutils.dumps(dict(versions=version_objs))
if six.PY3:
json = json.encode('utf-8')
json = json.encode('utf-8')
response.body = json
return response

View File

@ -16,7 +16,6 @@
import concurrent.futures
import copy
import signal
import six
import sys
import time
@ -87,7 +86,7 @@ class IndexCommands(object):
re-indexes to occur simultaneously. We may need to cleanup. See
sig_handler() for more info.
"""
for group in six.iterkeys(index_names):
for group in index_names.keys():
# Grab the correct tuple as a list, convert list to a
# single tuple, extract second member (the search
# alias) of tuple.
@ -206,7 +205,7 @@ class IndexCommands(object):
_plugins_without_notification = []
_type = utils.expand_type_matches(
_type, six.viewkeys(search_plugins))
_type, search_plugins.keys())
LOG.debug("After expansion, 'type' argument: %s", ", ".join(_type))
group_set = set(group)
@ -377,7 +376,7 @@ class IndexCommands(object):
"should be disabled.\n")
if not notification_less:
ans = six.moves.input(
ans = input(
"\nUse '--force' to suppress this message.\n"
"OK to continue? [y/n]: ")
if ans.lower() != 'y':
@ -649,7 +648,7 @@ def main():
v = getattr(CONF.command, 'action_kwarg_' + k)
if v is None:
continue
if isinstance(v, six.string_types):
if isinstance(v, str):
v = encodeutils.safe_decode(v)
func_kwargs[k] = v
func_args = [encodeutils.safe_decode(arg)

View File

@ -16,7 +16,6 @@
"""Searchlight exception subclasses"""
import six
from searchlight.i18n import _
@ -52,7 +51,7 @@ class SearchlightException(Exception):
# NOTE(flwang): By default, self.msg is an instance of Message, which
# can't be converted by str(). Based on the definition of
# __unicode__, it should return unicode always.
return six.text_type(self.msg)
return str(self.msg)
class NotFound(SearchlightException):

View File

@ -14,7 +14,6 @@
from collections import OrderedDict
import re
import six
from six.moves import configparser
from oslo_config import cfg
@ -25,13 +24,7 @@ import searchlight.api.policy
from searchlight.common import exception
from searchlight.i18n import _
# NOTE(bourke): The default dict_type is collections.OrderedDict in py27, but
# we must set manually for compatibility with py26
# SafeConfigParser was deprecated in Python 3.2
if six.PY3:
CONFIG = configparser.ConfigParser(dict_type=OrderedDict)
else:
CONFIG = configparser.SafeConfigParser(dict_type=OrderedDict)
CONFIG = configparser.ConfigParser(dict_type=OrderedDict)
LOG = logging.getLogger(__name__)
property_opts = [

View File

@ -40,7 +40,6 @@ from oslo_utils import excutils
from oslo_utils import netutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from webob import exc
from searchlight.common import exception
@ -320,8 +319,6 @@ def get_test_suite_socket():
if SEARCHLIGHT_TEST_SOCKET_FD_STR in os.environ:
fd = int(os.environ[SEARCHLIGHT_TEST_SOCKET_FD_STR])
sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
if six.PY2:
sock = socket.SocketType(_sock=sock)
sock.listen(CONF.api.backlog)
del os.environ[SEARCHLIGHT_TEST_SOCKET_FD_STR]
os.close(fd)
@ -405,7 +402,7 @@ def no_4byte_params(f):
def wrapper(*args, **kwargs):
def _is_match(some_str):
return (isinstance(some_str, six.text_type) and
return (isinstance(some_str, str) and
REGEX_4BYTE_UNICODE.findall(some_str) != [])
def _check_dict(data_dict):

View File

@ -21,7 +21,6 @@ from oslo_config import types
from oslo_utils import encodeutils
from oslo_utils import timeutils
import re
import six
from searchlight.common import exception
import searchlight.elasticsearch
@ -60,8 +59,7 @@ CONF = cfg.CONF
CONF.register_opts(indexer_opts, group='resource_plugin')
@six.add_metaclass(abc.ABCMeta)
class IndexBase(plugin.Plugin):
class IndexBase(plugin.Plugin, metaclass=abc.ABCMeta):
NotificationHandlerCls = None
def __init__(self):
@ -663,8 +661,7 @@ class IndexBase(plugin.Plugin):
return "resource_plugin:%s" % config_name
@six.add_metaclass(abc.ABCMeta)
class NotificationBase(object):
class NotificationBase(object, metaclass=abc.ABCMeta):
def __init__(self, index_helper, options):
self.index_helper = index_helper
@ -672,7 +669,7 @@ class NotificationBase(object):
def get_notification_supported_events(self):
"""Get the list of event types this plugin responds to."""
return list(six.iterkeys(self.get_event_handlers()))
return list(self.get_event_handlers().keys())
def get_log_fields(self, event_type, payload):
"""Return an iterable of key value pairs in payload that will be

View File

@ -14,7 +14,6 @@
# limitations under the License.
import logging
import six
from searchlight.elasticsearch.plugins import openstack_clients
@ -26,7 +25,7 @@ BLACKLISTED_FIELDS = set((u'links', u'manager', '_loaded', '_info'))
def serialize_cinder_volume(volume):
"""volume can be an id or a 'volume' object from cinderclient"""
if isinstance(volume, six.string_types):
if isinstance(volume, str):
cinder_client = openstack_clients.get_cinderclient()
volume = cinder_client.volumes.get(volume)
@ -47,7 +46,7 @@ def serialize_cinder_volume(volume):
def serialize_cinder_snapshot(snapshot):
"""snapshot can be an id or a 'Snapshot' object from cinderclient"""
if isinstance(snapshot, six.string_types):
if isinstance(snapshot, str):
cinder_client = openstack_clients.get_cinderclient()
snapshot = cinder_client.volume_snapshots.get(snapshot)

View File

@ -18,7 +18,6 @@ import logging
import operator
import glanceclient.exc
import six
from searchlight.elasticsearch.plugins import openstack_clients
from searchlight.elasticsearch.plugins import utils
@ -56,7 +55,7 @@ def _normalize_visibility(image_doc):
def serialize_glance_image(image):
# If we're being asked to index an ID, retrieve the full image information
if isinstance(image, six.text_type):
if isinstance(image, str):
g_client = openstack_clients.get_glanceclient()
image = g_client.images.get(image)

View File

@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
import functools
def serialize_resource(resource, fields):
@ -37,7 +37,7 @@ def obj_payload(payload):
def versioned_payload(func):
@six.wraps(func)
@functools.wraps(func)
def wrapper(self, event_type, payload, timestamp):
return func(self, event_type, obj_payload(payload), timestamp)
return wrapper

View File

@ -17,7 +17,6 @@ import copy
import logging
import novaclient.exceptions
import novaclient.v2.flavors
import six
from oslo_serialization import jsonutils
@ -62,7 +61,7 @@ def _get_flavor_access(flavor):
def serialize_nova_server(server):
nc_client = openstack_clients.get_novaclient()
if isinstance(server, six.text_type):
if isinstance(server, str):
server = nc_client.servers.get(server)
LOG.debug("Serializing server %s for project %s",

View File

@ -18,7 +18,6 @@ from elasticsearch import exceptions as es_exc
from elasticsearch import helpers
import logging
import oslo_utils
import six
from oslo_config import cfg
from oslo_utils import encodeutils
@ -503,7 +502,7 @@ def find_missing_types(index_type_mapping):
es_engine = searchlight.elasticsearch.get_api()
for index in six.iterkeys(index_type_mapping):
for index in index_type_mapping.keys():
for doc_type in index_type_mapping[index]:
try:
mapping = es_engine.indices.get_mapping(index, doc_type)

View File

@ -103,7 +103,7 @@ def no_direct_use_of_unicode_function(logical_line):
SL320
"""
if unicode_func_re.match(logical_line):
yield(0, "SL320: Use six.text_type() instead of unicode()")
yield(0, "SL320: Use str() instead of unicode()")
@core.flake8ext

View File

@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from oslo_config import cfg
from oslo_log import log as logging
@ -145,7 +144,7 @@ class ListenerService(os_service.Service):
topic_exchanges = (
handler.get_notification_topics_exchanges())
for plugin_topic in topic_exchanges:
if isinstance(plugin_topic, six.string_types):
if isinstance(plugin_topic, str):
raise Exception(
_("Plugin %s should return a list of topic "
"exchange pairs") %

View File

@ -18,7 +18,6 @@ import abc
from oslo_config import cfg
import oslo_messaging
import six
notifier_opts = [
@ -95,8 +94,7 @@ class NotificationBase(object):
_send_notification(self.notifier.info, notification_id, payload)
@six.add_metaclass(abc.ABCMeta)
class NotificationProxy(NotificationBase):
class NotificationProxy(NotificationBase, metaclass=abc.ABCMeta):
def __init__(self, repo, context, notifier):
self.repo = repo
self.context = context
@ -110,8 +108,7 @@ class NotificationProxy(NotificationBase):
pass
@six.add_metaclass(abc.ABCMeta)
class NotificationRepoProxy(NotificationBase):
class NotificationRepoProxy(NotificationBase, metaclass=abc.ABCMeta):
def __init__(self, repo, context, notifier):
self.repo = repo
self.context = context
@ -131,8 +128,7 @@ class NotificationRepoProxy(NotificationBase):
pass
@six.add_metaclass(abc.ABCMeta)
class NotificationFactoryProxy(object):
class NotificationFactoryProxy(object, metaclass=abc.ABCMeta):
def __init__(self, factory, context, notifier):
kwargs = {'context': context, 'notifier': notifier}

View File

@ -15,7 +15,6 @@ import abc
from oslo_config import cfg
from oslo_log import log as logging
import six
from stevedore import extension
@ -23,8 +22,7 @@ LOG = logging.getLogger(__name__)
CONF = cfg.CONF
@six.add_metaclass(abc.ABCMeta)
class Plugin(object):
class Plugin(object, metaclass=abc.ABCMeta):
"""This class exists as a point for plugins to define
config options.
"""
@ -48,7 +46,7 @@ class Plugin(object):
for e in mgr:
for group, opts in e.plugin.get_cfg_opts():
if isinstance(group, six.string_types):
if isinstance(group, str):
group = cfg.OptGroup(name=group)
CONF.register_group(group)

View File

@ -13,11 +13,11 @@
# License for the specific language governing permissions and limitations
# under the License.
import builtins as __builtin__
import logging
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables tests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
# Run the fix_greendns_ipv6 function

View File

@ -31,7 +31,6 @@ import platform
import requests
import shutil
import signal
import six
import socket
import sys
import tempfile
@ -556,7 +555,7 @@ class FunctionalTest(test_utils.BaseTestCase):
def _get_hit_source(self, es_response):
"""Parse the _source from the elasticsearch hits"""
if isinstance(es_response, six.string_types):
if isinstance(es_response, str):
es_response = jsonutils.loads(es_response)
return [h["_source"] for h in es_response["hits"]["hits"]]

View File

@ -14,7 +14,6 @@
# limitations under the License.
import os
import six
import time
from unittest import mock
@ -231,7 +230,7 @@ class TestSearchApi(functional.FunctionalTest):
doc_type="OS::Nova::Server")
self.assertEqual(200, response.status_code)
self.assertEqual(set(["doc_count"]),
set(six.iterkeys(json_content["OS::Nova::Server"])))
set(json_content["OS::Nova::Server"].keys()))
self.assertEqual(0, json_content["OS::Nova::Server"]["doc_count"])
response, json_content = self._facet_request(
@ -335,7 +334,7 @@ class TestSearchApi(functional.FunctionalTest):
u'type': u'string'
}
status_facet = list(six.moves.filter(
status_facet = list(filter(
lambda f: f['name'] == 'status',
json_content['OS::Nova::Server']['facets']
))[0]
@ -356,7 +355,7 @@ class TestSearchApi(functional.FunctionalTest):
u'type': u'string'
}
status_facet = list(six.moves.filter(
status_facet = list(filter(
lambda f: f['name'] == 'status',
json_content['OS::Nova::Server']['facets']
))[0]
@ -478,7 +477,7 @@ class TestSearchApi(functional.FunctionalTest):
self.assertEqual(2, json_content['OS::Nova::Server']['doc_count'])
self.assertEqual(['OS::Nova::Server'],
list(six.iterkeys(json_content)))
list(json_content.keys()))
# server1 has two fixed addresses (which should be rolled up into one
# match). server2 has fixed and floating addresses.
@ -491,7 +490,7 @@ class TestSearchApi(functional.FunctionalTest):
u'type': u'string',
u'nested': True
}
fixed_network_facet = list(six.moves.filter(
fixed_network_facet = list(filter(
lambda f: f['name'] == 'networks.OS-EXT-IPS:type',
json_content['OS::Nova::Server']['facets']
))[0]
@ -510,7 +509,7 @@ class TestSearchApi(functional.FunctionalTest):
{u'doc_count': 2, u'key': u'a'}
]
}
image_facet = list(six.moves.filter(
image_facet = list(filter(
lambda f: f['name'] == 'image.id',
json_content['OS::Nova::Server']['facets']
))[0]

View File

@ -16,7 +16,6 @@
import copy
from elasticsearch import exceptions as es_exceptions
from oslo_config import cfg
import six
from unittest import mock
from searchlight.elasticsearch.plugins import helper
@ -108,7 +107,7 @@ class TestIndexingHelper(test_utils.BaseTestCase):
indexing_helper = helper.IndexingHelper(plugin)
_, mapping = six.next(plugin.get_full_mapping())
_, mapping = next(plugin.get_full_mapping())
self.assertIn('region_name', mapping['properties'])
count = len(plugin.get_objects())
@ -125,7 +124,7 @@ class TestIndexingHelper(test_utils.BaseTestCase):
resource_plugin_conf.include_region_name = False
mock_bulk.reset_mock()
_, mapping = six.next(plugin.get_full_mapping())
_, mapping = next(plugin.get_full_mapping())
self.assertNotIn('region_name', mapping['properties'])
indexing_helper.save_documents(plugin.get_objects(),
fake_versions)

View File

@ -16,7 +16,6 @@
import collections
import copy
import operator
import six
import types
from unittest import mock
@ -194,7 +193,7 @@ class TestPlugin(test_utils.BaseTestCase):
with mock.patch.object(plugin, 'get_mapping',
return_value=test_doc_value_mapping):
# get_full_mapping is a generator
doc_type, mapping = six.next(plugin.get_full_mapping())
doc_type, mapping = next(plugin.get_full_mapping())
props = mapping['properties']
# These fields should all have doc_values. Explicitly testing
@ -226,7 +225,7 @@ class TestPlugin(test_utils.BaseTestCase):
def test_rbac_field_doc_values(self):
mock_engine = mock.Mock()
plugin = fake_plugins.FakeSimplePlugin(es_engine=mock_engine)
doc_Type, mapping = six.next(plugin.get_full_mapping())
doc_Type, mapping = next(plugin.get_full_mapping())
props = mapping['properties']
self.assertEqual(True, props[ROLE_USER_FIELD]['doc_values'])
@ -234,7 +233,7 @@ class TestPlugin(test_utils.BaseTestCase):
mock_engine = mock.Mock()
plugin = fake_plugins.FakeSimplePlugin(es_engine=mock_engine)
doc_type, mapping = six.next(plugin.get_full_mapping())
doc_type, mapping = next(plugin.get_full_mapping())
self.assertEqual(True, mapping['properties']['id']['doc_values'])
# Test the same but disabling doc values for the plugin
@ -242,7 +241,7 @@ class TestPlugin(test_utils.BaseTestCase):
'mapping_use_doc_values',
new_callable=mock.PropertyMock) as conf_mock:
conf_mock.return_value = False
doc_type, mapping = six.next(plugin.get_full_mapping())
doc_type, mapping = next(plugin.get_full_mapping())
self.assertNotIn('doc_values', mapping['properties']['id'])
@mock.patch('searchlight.elasticsearch.plugins.base.'

View File

@ -16,7 +16,6 @@
from elasticsearch import exceptions as es_exc
import operator
from oslo_serialization import jsonutils
import six
from unittest import mock
import webob.exc
@ -318,18 +317,18 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_single_index(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'index': 'searchlight-search',
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['searchlight-search'], output['index'])
def test_single_type(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': 'OS::Glance::Image',
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['searchlight-search'], output['index'])
@ -338,7 +337,7 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_empty_request(self):
"""Tests that ALL registered resource types are searched"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({}))
request.body = jsonutils.dumps({}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['searchlight-search'], output['index'])
@ -370,29 +369,29 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_forbidden_schema(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'schema': {},
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_forbidden_self(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'self': {},
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPForbidden, self.deserializer.search,
request)
def test_fields_restriction(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': ['description'],
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['searchlight-search'], output['index'])
@ -401,14 +400,14 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_fields_include_exclude(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': {
'include': ['some', 'thing.*'],
'exclude': ['other.*', 'thing']
}
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertNotIn('_source', output)
@ -421,35 +420,35 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
"""Test various forms for source_exclude"""
role_field = searchlight.elasticsearch.ROLE_USER_FIELD
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': {
'exclude': ['something', 'other thing']
}
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual([role_field, 'something', 'other thing'],
output['_source_exclude'])
# Test with a single field
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': {
'exclude': "something"
}
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual([role_field, 'something'],
output['_source_exclude'])
# Test with a single field
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': "includeme"
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual([role_field],
output['_source_exclude'])
@ -457,11 +456,11 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
output['_source_include'])
# Test with a single field
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': ["includeme", "andme"]
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual([role_field],
output['_source_exclude'])
@ -470,11 +469,11 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_bad_field_include(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'_source': 1234,
}))
}).encode("latin-1")
self.assertRaisesRegex(
webob.exc.HTTPBadRequest,
@ -492,7 +491,7 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
# Apply highlighting to 'name' explicitly setting require_field_match
# and 'content' explicitly setting a highlight_query
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'highlight': {
@ -503,7 +502,7 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
}
}
}
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['searchlight-search'], output['index'])
@ -529,44 +528,44 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_invalid_limit(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'limit': 'invalid',
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_limit(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'limit': -1,
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_invalid_offset(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'offset': 'invalid',
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
def test_negative_offset(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'offset': -1,
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
@ -574,12 +573,12 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_offset_from_error(self):
"""Test that providing offset and from cause errors"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'offset': 10,
'from': 10
}))
}).encode("latin-1")
self.assertRaisesRegex(
webob.exc.HTTPBadRequest,
"Provide 'offset' or 'from', but not both",
@ -588,12 +587,12 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_limit_size_error(self):
"""Test that providing limit and size cause errors"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'size': 10,
'limit': 10
}))
}).encode("latin-1")
self.assertRaisesRegex(
webob.exc.HTTPBadRequest,
"Provide 'limit' or 'size', but not both",
@ -601,12 +600,12 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_limit_and_offset(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'limit': 1,
'offset': 2,
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(1, output['size'])
@ -614,12 +613,12 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_from_and_size(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'size': 1,
'from': 2,
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(1, output['size'])
self.assertEqual(2, output['from_'])
@ -627,10 +626,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_single_sort(self):
"""Test that a single sort field is correctly transformed"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'sort': 'status'
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(['status'], output['query']['sort'])
@ -638,10 +637,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_single_sort_dir(self):
"""Test that a single sort field & dir is correctly transformed"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'sort': {'status': 'desc'}
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual([{'status': 'desc'}], output['query']['sort'])
@ -649,14 +648,14 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_multiple_sort(self):
"""Test multiple sort fields"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'sort': [
'status',
{'created_at': 'desc'},
{'members': {'order': 'asc', 'mode': 'max'}}
]
}))
}).encode("latin-1")
output = self.deserializer.search(request)
expected = [
@ -669,13 +668,13 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_raw_field_sort(self):
"""Some fields (like name) are treated separately"""
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'sort': [
'name',
{'name': {'order': 'desc'}}
]
}))
}).encode("latin-1")
output = self.deserializer.search(request)
expected = [
@ -686,12 +685,12 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_bad_sort(self):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'index': ['glance'],
'type': ['OS::Glance::Image'],
'query': {'match_all': {}},
'sort': 1234
}))
}).encode("latin-1")
self.assertRaises(webob.exc.HTTPBadRequest, self.deserializer.search,
request)
@ -700,9 +699,9 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
'ServerIndex.get_query_filters')
def test_rbac_exception(self, mock_query_filters):
request = unit_test_utils.get_fake_request()
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
}))
}).encode("latin-1")
mock_query_filters.side_effect = Exception("Bad RBAC")
@ -715,10 +714,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_rbac_non_admin(self):
"""Test that a non-admin request results in an RBACed query"""
request = unit_test_utils.get_fake_request(is_admin=False)
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'type': 'OS::Nova::Server',
}))
}).encode("latin-1")
output = self.deserializer.search(request)
tenant_id = '6838eb7b-6ded-dead-beef-b344c77fe8df'
@ -764,10 +763,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_rbac_admin(self):
"""Test that admins have RBAC applied"""
request = unit_test_utils.get_fake_request(is_admin=True)
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'type': 'OS::Nova::Server',
}))
}).encode("latin-1")
output = self.deserializer.search(request)
tenant_id = '6838eb7b-6ded-dead-beef-b344c77fe8df'
nova_rbac_filter = {
@ -811,11 +810,11 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
self.assertEqual(expected_query, output['query'])
# Now test with all_projects
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'type': 'OS::Nova::Server',
'all_projects': True,
}))
}).encode("latin-1")
# Test that if a plugin doesn't allow RBAC to be ignored,
# it isn't. Do it with mocking, because mocking is best
@ -862,10 +861,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
def test_search_version(self):
request = unit_test_utils.get_fake_request(is_admin=True)
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'query': {'match_all': {}},
'version': True
}))
}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(True, output['version'])
@ -877,19 +876,19 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
# Apply highlighting to 'name' explicitly setting require_field_match
# and 'content' explicitly setting a highlight_query
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'aggregations': aggs}))
'aggregations': aggs}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(aggs, output['query']['aggregations'])
# Test 'aggs' too
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'aggs': aggs}))
'aggs': aggs}).encode("latin-1")
output = self.deserializer.search(request)
self.assertEqual(aggs, output['query']['aggregations'])
@ -906,10 +905,10 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
'aggregations': {'name': {'terms': {'field': 'name'}}}
}
}
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'aggregations': aggs}))
'aggregations': aggs}).encode("latin-1")
self.assertRaises(
webob.exc.HTTPForbidden, self.deserializer.search,
@ -920,11 +919,11 @@ class TestSearchDeserializer(test_utils.BaseTestCase):
request = unit_test_utils.get_fake_request()
aggs = {"something": "something"}
request.body = six.b(jsonutils.dumps({
request.body = jsonutils.dumps({
'type': ['OS::Glance::Metadef'],
'query': {'match_all': {}},
'aggregations': aggs,
'aggs': aggs}))
'aggs': aggs}).encode("latin-1")
self.assertRaisesRegex(
webob.exc.HTTPBadRequest,

View File

@ -18,6 +18,8 @@
import copy
import errno
import functools
from http import server as BaseHTTPServer
import io
import os
import shlex
import shutil
@ -27,8 +29,6 @@ from unittest import mock
import fixtures
from oslo_config import cfg
import six
from six.moves import BaseHTTPServer
import testtools
import webob
@ -497,7 +497,7 @@ class FakeAuthMiddleware(wsgi.Middleware):
class FakeHTTPResponse(object):
def __init__(self, status=200, headers=None, data=None, *args, **kwargs):
data = data or 'I am a teapot, short and stout\n'
self.data = six.StringIO(data)
self.data = io.StringIO(data)
self.read = self.data.read
self.status = status
self.headers = headers or {'content-length': len(data)}

View File

@ -15,11 +15,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from urllib import parse as urlparse
import os
from oslo_config import cfg
import oslo_messaging
from oslo_serialization import jsonutils
import six.moves.urllib.parse as urlparse
import sys
import time

View File

@ -45,7 +45,6 @@ import heapq
import sys
import unittest
import six
import subunit
import testtools
@ -272,7 +271,7 @@ class SubunitTestResult(testtools.TestResult):
self.stopTestRun()
def stopTestRun(self):
for cls in six.iterkeys(self.results):
for cls in self.results.keys():
self.writeTestCase(cls)
self.stream.writeln()
self.writeSlowTests()