Merge "Replace six.BytesIO, six.StringIO usage"
commit fb875de154
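The change is mechanical: every six.StringIO / six.BytesIO use becomes the equivalent class from the standard-library io module, which provides the same in-memory file-like interface on Python 3. A minimal sketch of the pattern (the variable names below are illustrative, not taken from the Glance tree):

    import io

    # Text-mode in-memory stream, e.g. for capturing sys.stderr in a test.
    captured = io.StringIO()
    captured.write("some log output\n")
    assert captured.getvalue() == "some log output\n"

    # Binary in-memory stream, e.g. a fake image payload fed to a mock.
    payload = io.BytesIO(b"TEST_IMAGE")
    assert payload.read() == b"TEST_IMAGE"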
@@ -9,13 +9,14 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 import sys
 from unittest import mock
+
 import glance_store as store
 from oslo_config import cfg
 from oslo_log import log as logging
-import six
 
 import glance.cmd.api
 import glance.cmd.cache_cleaner
@@ -48,7 +49,7 @@ class TestGlanceApiCmd(test_utils.BaseTestCase):
         super(TestGlanceApiCmd, self).setUp()
         self.__argv_backup = sys.argv
         sys.argv = ['glance-api']
-        self.stderr = six.StringIO()
+        self.stderr = io.StringIO()
         sys.stderr = self.stderr
 
         store.register_opts(CONF)
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 import json
 import os
 from unittest import mock
@@ -20,7 +21,6 @@ from unittest import mock
 import glance_store
 from oslo_concurrency import processutils
 from oslo_config import cfg
-import six
 
 from glance.async_.flows import convert
 from glance.async_ import taskflow_executor
@@ -173,7 +173,7 @@ class TestImportTask(test_utils.BaseTestCase):
             return ("", None)
 
         with mock.patch.object(script_utils, 'get_image_data_iter') as dmock:
-            dmock.return_value = six.BytesIO(b"TEST_IMAGE")
+            dmock.return_value = io.BytesIO(b"TEST_IMAGE")
 
             with mock.patch.object(processutils, 'execute') as exc_mock:
                 exc_mock.side_effect = fake_execute
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 import json
 import os
 from unittest import mock
@@ -20,7 +21,6 @@ from unittest import mock
 import glance_store
 from oslo_concurrency import processutils as putils
 from oslo_config import cfg
-import six
 from six.moves import urllib
 from taskflow import task
 from taskflow.types import failure
@@ -123,7 +123,7 @@ class TestImportTask(test_utils.BaseTestCase):
         img_factory.new_image.side_effect = create_image
 
         with mock.patch.object(script_utils, 'get_image_data_iter') as dmock:
-            dmock.return_value = six.BytesIO(b"TEST_IMAGE")
+            dmock.return_value = io.BytesIO(b"TEST_IMAGE")
 
             with mock.patch.object(putils, 'trycmd') as tmock:
                 tmock.return_value = (json.dumps({
@@ -166,7 +166,7 @@ class TestImportTask(test_utils.BaseTestCase):
         img_factory.new_image.side_effect = create_image
 
         with mock.patch.object(script_utils, 'get_image_data_iter') as dmock:
-            dmock.return_value = six.BytesIO(b"TEST_IMAGE")
+            dmock.return_value = io.BytesIO(b"TEST_IMAGE")
 
             with mock.patch.object(import_flow._ImportToFS, 'execute') as emk:
                 executor.begin_processing(self.task.task_id)
@@ -237,7 +237,7 @@ class TestImportTask(test_utils.BaseTestCase):
         img_factory.new_image.side_effect = create_image
 
         with mock.patch.object(script_utils, 'get_image_data_iter') as dmock:
-            dmock.return_value = six.BytesIO(b"TEST_IMAGE")
+            dmock.return_value = io.BytesIO(b"TEST_IMAGE")
 
             with mock.patch.object(putils, 'trycmd') as tmock:
                 tmock.return_value = (json.dumps({
@@ -287,7 +287,7 @@ class TestImportTask(test_utils.BaseTestCase):
         img_factory.new_image.side_effect = create_image
 
         with mock.patch.object(script_utils, 'get_image_data_iter') as dmock:
-            dmock.return_value = six.BytesIO(b"TEST_IMAGE")
+            dmock.return_value = io.BytesIO(b"TEST_IMAGE")
 
             with mock.patch.object(putils, 'trycmd') as tmock:
                 tmock.return_value = (json.dumps({
@@ -340,7 +340,7 @@ class TestImportTask(test_utils.BaseTestCase):
 
         with mock.patch.object(urllib.request, 'urlopen') as umock:
             content = b"TEST_IMAGE"
-            umock.return_value = six.BytesIO(content)
+            umock.return_value = io.BytesIO(content)
 
             with mock.patch.object(import_flow, "_get_import_flows") as imock:
                 imock.return_value = (x for x in [])
@@ -14,6 +14,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 import tempfile
 from unittest import mock
 
@@ -21,7 +22,6 @@ import glance_store as store
 from glance_store._drivers import cinder
 from oslo_config import cfg
 from oslo_log import log as logging
-import six
 import webob
 
 from glance.common import exception
@@ -30,7 +30,6 @@ from glance.common import utils
 from glance.tests.unit import base
 from glance.tests import utils as test_utils
 
-
 CONF = cfg.CONF
 
 
@@ -276,14 +275,14 @@ class TestUtils(test_utils.BaseTestCase):
         """Ensure limiting reader class accesses all bytes of file"""
         BYTES = 1024
         bytes_read = 0
-        data = six.StringIO("*" * BYTES)
+        data = io.StringIO("*" * BYTES)
         for chunk in utils.LimitingReader(data, BYTES):
             bytes_read += len(chunk)
 
         self.assertEqual(BYTES, bytes_read)
 
         bytes_read = 0
-        data = six.StringIO("*" * BYTES)
+        data = io.StringIO("*" * BYTES)
         reader = utils.LimitingReader(data, BYTES)
         byte = reader.read(1)
         while len(byte) != 0:
@@ -298,7 +297,7 @@ class TestUtils(test_utils.BaseTestCase):
 
         def _consume_all_iter():
             bytes_read = 0
-            data = six.StringIO("*" * BYTES)
+            data = io.StringIO("*" * BYTES)
             for chunk in utils.LimitingReader(data, BYTES - 1):
                 bytes_read += len(chunk)
 
@@ -306,7 +305,7 @@ class TestUtils(test_utils.BaseTestCase):
 
         def _consume_all_read():
             bytes_read = 0
-            data = six.StringIO("*" * BYTES)
+            data = io.StringIO("*" * BYTES)
             reader = utils.LimitingReader(data, BYTES - 1)
             byte = reader.read(1)
             while len(byte) != 0:
@@ -11,6 +11,8 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 from unittest import mock
+
 import copy
@@ -20,7 +22,6 @@ import uuid
 
 import fixtures
 from oslo_serialization import jsonutils
-import six
 from six import moves
 from six.moves import http_client as http
 import webob
@@ -110,7 +111,7 @@ class FakeHTTPConnection(object):
 class FakeResponse(object):
     def __init__(self, args):
         (code, body, headers) = args
-        self.body = six.StringIO(body)
+        self.body = io.StringIO(body)
         self.headers = headers
         self.status = code
 
@@ -246,7 +247,7 @@ class ImageServiceTestCase(test_utils.BaseTestCase):
         class FakeHttpResponse(object):
             def __init__(self, headers, data):
                 self.headers = headers
-                self.data = six.BytesIO(data)
+                self.data = io.BytesIO(data)
 
             def getheaders(self):
                 return self.headers
@@ -356,7 +357,7 @@ class ReplicationCommandsTestCase(test_utils.BaseTestCase):
 
         stdout = sys.stdout
         orig_img_service = glance_replicator.get_image_service
-        sys.stdout = six.StringIO()
+        sys.stdout = io.StringIO()
         try:
             glance_replicator.get_image_service = get_image_service
             glance_replicator.replication_size(options, args)
@@ -15,6 +15,7 @@
 
 from contextlib import contextmanager
 import datetime
+import io
 import os
 import tempfile
 import time
@@ -25,7 +26,6 @@ import glance_store as store
 from oslo_config import cfg
 from oslo_utils import secretutils
 from oslo_utils import units
-import six
 # NOTE(jokke): simplified transition to py3, behaves like py2 xrange
 from six.moves import range
 
@@ -48,7 +48,7 @@ CONF = cfg.CONF
 class ImageCacheTestCase(object):
 
     def _setup_fixture_file(self):
-        FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+        FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
 
         self.assertFalse(self.cache.is_cached(1))
 
@@ -71,7 +71,7 @@ class ImageCacheTestCase(object):
         """
         self._setup_fixture_file()
 
-        buff = six.BytesIO()
+        buff = io.BytesIO()
         with self.cache.open_for_read(1) as cache_file:
             for chunk in cache_file:
                 buff.write(chunk)
@@ -85,7 +85,7 @@ class ImageCacheTestCase(object):
         """
        self._setup_fixture_file()
 
-        buff = six.BytesIO()
+        buff = io.BytesIO()
         with self.cache.open_for_read(1) as cache_file:
             for chunk in cache_file:
                 buff.write(chunk)
@@ -119,7 +119,7 @@ class ImageCacheTestCase(object):
             self.assertFalse(self.cache.is_cached(image_id))
 
         for image_id in (1, 2):
-            FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+            FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
             self.assertTrue(self.cache.cache_image_file(image_id,
                                                         FIXTURE_FILE))
 
@@ -189,21 +189,21 @@ class ImageCacheTestCase(object):
         # prune. We should see only 5 images left after pruning, and the
         # images that are least recently accessed should be the ones pruned...
         for x in range(10):
-            FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+            FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
             self.assertTrue(self.cache.cache_image_file(x, FIXTURE_FILE))
 
         self.assertEqual(10 * units.Ki, self.cache.get_cache_size())
 
         # OK, hit the images that are now cached...
         for x in range(10):
-            buff = six.BytesIO()
+            buff = io.BytesIO()
             with self.cache.open_for_read(x) as cache_file:
                 for chunk in cache_file:
                     buff.write(chunk)
 
         # Add a new image to cache.
         # This is specifically to test the bug: 1438564
-        FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+        FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
         self.assertTrue(self.cache.cache_image_file(99, FIXTURE_FILE))
 
         self.cache.prune()
@@ -231,13 +231,13 @@ class ImageCacheTestCase(object):
         """
         self.assertEqual(0, self.cache.get_cache_size())
 
-        FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+        FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
         self.assertTrue(self.cache.cache_image_file('xxx', FIXTURE_FILE))
 
         self.assertEqual(1024, self.cache.get_cache_size())
 
         # OK, hit the image that is now cached...
-        buff = six.BytesIO()
+        buff = io.BytesIO()
         with self.cache.open_for_read('xxx') as cache_file:
             for chunk in cache_file:
                 buff.write(chunk)
@@ -257,7 +257,7 @@ class ImageCacheTestCase(object):
         self.assertFalse(self.cache.is_cached(1))
         self.assertFalse(self.cache.is_queued(1))
 
-        FIXTURE_FILE = six.BytesIO(FIXTURE_DATA)
+        FIXTURE_FILE = io.BytesIO(FIXTURE_DATA)
 
         self.assertTrue(self.cache.queue_image(1))
 
@@ -12,10 +12,11 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 from unittest import mock
+
 import fixtures
-from six.moves import StringIO
 
 from glance.cmd import manage
 from glance.common import exception
@@ -163,7 +164,7 @@ class TestManage(TestManageBase):
     def setUp(self):
         super(TestManage, self).setUp()
         self.db = manage.DbCommands()
-        self.output = StringIO()
+        self.output = io.StringIO()
         self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.output))
 
     def test_db_complex_password(self):
@@ -12,6 +12,8 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import io
 from unittest import mock
 import uuid
+
@@ -19,7 +21,6 @@ from cursive import exception as cursive_exception
 import glance_store
 from glance_store._drivers import filesystem
 from oslo_config import cfg
-import six
 from six.moves import http_client as http
 import webob
 
@@ -683,7 +684,7 @@ class TestImageDataDeserializer(test_utils.BaseTestCase):
         request.headers['Content-Type'] = 'application/octet-stream'
         # If we use body_file, webob assumes we want to do a chunked upload,
         # ignoring the Content-Length header
-        request.body_file = six.StringIO('YYY')
+        request.body_file = io.StringIO('YYY')
         output = self.deserializer.upload(request)
         data = output.pop('data')
         self.assertEqual('YYY', data.read())
@@ -693,7 +694,7 @@ class TestImageDataDeserializer(test_utils.BaseTestCase):
     def test_upload_chunked_with_content_length(self):
         request = unit_test_utils.get_fake_request()
         request.headers['Content-Type'] = 'application/octet-stream'
-        request.body_file = six.BytesIO(b'YYY')
+        request.body_file = io.BytesIO(b'YYY')
         # The deserializer shouldn't care if the Content-Length is
         # set when the user is attempting to send chunked data.
         request.headers['Content-Length'] = 3
@@ -734,7 +735,7 @@ class TestImageDataDeserializer(test_utils.BaseTestCase):
         req = unit_test_utils.get_fake_request()
         req.headers['Content-Type'] = 'application/octet-stream'
         req.headers['Content-Length'] = 4
-        req.body_file = six.BytesIO(b'YYYY')
+        req.body_file = io.BytesIO(b'YYYY')
         output = self.deserializer.stage(req)
         data = output.pop('data')
         self.assertEqual(b'YYYY', data.read())
@@ -17,6 +17,7 @@
 
 import errno
 import functools
+import io
 import os
 import shlex
 import shutil
@@ -35,7 +36,6 @@ from oslo_log.fixture import logging_error as log_fixture
 from oslo_log import log
 from oslo_utils import timeutils
 from oslo_utils import units
-import six
 from six.moves import BaseHTTPServer
 from six.moves import http_client as http
 import testtools
@@ -619,7 +619,7 @@ class FakeHTTPResponse(object):
     def __init__(self, status=http.OK, headers=None, data=None,
                  *args, **kwargs):
         data = data or b'I am a teapot, short and stout\n'
-        self.data = six.BytesIO(data)
+        self.data = io.BytesIO(data)
         self.read = self.data.read
         self.status = status
         self.headers = headers or {'content-length': len(data)}