s3api: Include '-' in multipart ETags
Multipart uploads in AWS (seem to) have ETags like:

    '"' + MD5_hex(MD5(part1) + ... + MD5(partN)) + '-' + N + '"'

On the other hand, Swift SLOs have Etags like:

    MD5_hex(MD5_hex(part1) + ... + MD5_hex(partN))

(In both examples, MD5 gets the raw 16-byte digest while MD5_hex gets
the 32-byte hex-encoded digest.)

Some clients (such as aws-sdk-java) use the presence of a dash to decide
whether to perform client-side validation of downloads. Other clients
(like s3cmd) use the presence of a dash *in bucket listings* to decide
whether or not to perform additional HEAD requests to look for MD5
metadata that can be used to compare against the MD5s of local files.

Now we include a dash as well, to prevent spurious errors like

    > Unable to verify integrity of data download. Client calculated
    > content hash didn't match hash calculated by Amazon S3. The data
    > may be corrupt.

or unnecessary uploads/downloads because the client assumes data has
changed that hasn't.

For new multipart-uploads via the S3 API, the ETag that is stored will
be calculated in the same way that AWS uses. This ETag will be used in
GET/HEAD responses, bucket listings, and conditional requests via the
S3 API. Accessing the same object via the Swift API will use the SLO
Etag; however, in JSON container listings the multipart upload etag
will be exposed in a new "s3_etag" key.

New SLOs and pre-existing multipart-uploads will continue to behave as
before; there is no data migration or mitigation as part of this patch.

Change-Id: Ibe68c44bef6c17605863e9084503e8f5dc577fab
Closes-Bug: 1522578
parent cfc4f30d63
commit 84b85f03b4
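For illustration, here is a minimal, self-contained Python 3 sketch of the two
calculations described in the commit message (the part bodies are made up; only
hashlib is assumed):

    import hashlib

    parts = [b'first part data', b'second part data']  # hypothetical part bodies

    # AWS-style multipart ETag: hex MD5 of the concatenated *raw* part digests,
    # with '-<number of parts>' appended
    raw_digests = b''.join(hashlib.md5(p).digest() for p in parts)
    s3_etag = '"%s-%d"' % (hashlib.md5(raw_digests).hexdigest(), len(parts))

    # Swift SLO Etag: hex MD5 of the concatenated *hex* part digests
    hex_digests = ''.join(hashlib.md5(p).hexdigest() for p in parts)
    slo_etag = hashlib.md5(hex_digests.encode('ascii')).hexdigest()

    print(s3_etag)   # includes the dash and part count, e.g. "...-2"
    print(slo_etag)  # plain 32-character hex digest, no dash

The only difference that matters to the clients mentioned above is the trailing
'-N', which is what their dash-sniffing logic keys off of.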
@@ -185,7 +185,12 @@ class BucketController(Controller):
             SubElement(contents, 'Key').text = o['name']
             SubElement(contents, 'LastModified').text = \
                 o['last_modified'][:-3] + 'Z'
-            SubElement(contents, 'ETag').text = '"%s"' % o['hash']
+            if 's3_etag' in o:
+                # New-enough MUs are already in the right format
+                etag = o['s3_etag']
+            else:
+                etag = '"%s"' % o['hash']
+            SubElement(contents, 'ETag').text = etag
             SubElement(contents, 'Size').text = str(o['bytes'])
             if fetch_owner or not is_v2:
                 owner = SubElement(contents, 'Owner')
@@ -59,6 +59,7 @@ Static Large Object when the multipart upload is completed.

 """

+from hashlib import md5
 import os
 import re

@@ -570,6 +571,7 @@ class UploadController(Controller):
                          'etag': o['hash'],
                          'size_bytes': o['bytes']}) for o in objinfo)

+        s3_etag_hasher = md5()
         manifest = []
         previous_number = 0
         try:
@@ -597,6 +599,7 @@ class UploadController(Controller):
                     raise InvalidPart(upload_id=upload_id,
                                       part_number=part_number)

+                s3_etag_hasher.update(etag.decode('hex'))
                 info['size_bytes'] = int(info['size_bytes'])
                 manifest.append(info)
         except (XMLSyntaxError, DocumentInvalid):
@@ -607,6 +610,12 @@ class UploadController(Controller):
             self.logger.error(e)
             raise

+        s3_etag = '%s-%d' % (s3_etag_hasher.hexdigest(), len(manifest))
+        headers[sysmeta_header('object', 'etag')] = s3_etag
+        # Leave base header value blank; SLO will populate
+        c_etag = '; s3_etag=%s' % s3_etag
+        headers['X-Object-Sysmeta-Container-Update-Override-Etag'] = c_etag
+
         # Check the size of each segment except the last and make sure they are
         # all more than the minimum upload chunk size
         for info in manifest[:-1]:
@@ -660,7 +669,8 @@ class UploadController(Controller):
         SubElement(result_elem, 'Location').text = host_url + req.path
         SubElement(result_elem, 'Bucket').text = req.container_name
         SubElement(result_elem, 'Key').text = req.object_name
-        SubElement(result_elem, 'ETag').text = resp.etag
+        SubElement(result_elem, 'ETag').text = '"%s"' % s3_etag
+        del resp.headers['ETag']

         resp.body = tostring(result_elem)
         resp.status = 200
@@ -14,10 +14,11 @@
 # limitations under the License.

 from swift.common.http import HTTP_OK, HTTP_PARTIAL_CONTENT, HTTP_NO_CONTENT
+from swift.common.request_helpers import update_etag_is_at_header
 from swift.common.swob import Range, content_range_header_value
 from swift.common.utils import public

-from swift.common.middleware.s3api.utils import S3Timestamp
+from swift.common.middleware.s3api.utils import S3Timestamp, sysmeta_header
 from swift.common.middleware.s3api.controllers.base import Controller
 from swift.common.middleware.s3api.s3response import S3NotImplemented, \
     InvalidRange, NoSuchKey, InvalidArgument, HTTPNoContent
@@ -61,6 +62,11 @@ class ObjectController(Controller):
         return resp

     def GETorHEAD(self, req):
+        if any(match_header in req.headers
+               for match_header in ('if-match', 'if-none-match')):
+            # Update where to look
+            update_etag_is_at_header(req, sysmeta_header('object', 'etag'))
+
         resp = req.get_response(self.app)

         if req.method == 'HEAD':
@@ -82,9 +82,14 @@ https://github.com/swiftstack/s3compat in detail.

 """

+from cgi import parse_header
+import json
 from paste.deploy import loadwsgi

-from swift.common.wsgi import PipelineWrapper, loadcontext
+from swift.common.constraints import valid_api_version
+from swift.common.middleware.listing_formats import \
+    MAX_CONTAINER_LISTING_CONTENT_LENGTH
+from swift.common.wsgi import PipelineWrapper, loadcontext, WSGIContext

 from swift.common.middleware.s3api.exception import NotS3Request, \
     InvalidSubresource
@@ -92,11 +97,86 @@ from swift.common.middleware.s3api.s3request import get_request_class
 from swift.common.middleware.s3api.s3response import ErrorResponse, \
     InternalError, MethodNotAllowed, S3ResponseBase, S3NotImplemented
 from swift.common.utils import get_logger, register_swift_info, \
-    config_true_value, config_positive_int_value
+    config_true_value, config_positive_int_value, split_path, \
+    closing_if_possible
 from swift.common.middleware.s3api.utils import Config
 from swift.common.middleware.s3api.acl_handlers import get_acl_handler


+class ListingEtagMiddleware(object):
+    def __init__(self, app):
+        self.app = app
+
+    def __call__(self, env, start_response):
+        # a lot of this is cribbed from listing_formats / swob.Request
+        if env['REQUEST_METHOD'] != 'GET':
+            # Nothing to translate
+            return self.app(env, start_response)
+
+        try:
+            v, a, c = split_path(env.get('SCRIPT_NAME', '') +
+                                 env['PATH_INFO'], 3, 3)
+            if not valid_api_version(v):
+                raise ValueError
+        except ValueError:
+            # not a container request; pass through
+            return self.app(env, start_response)
+
+        ctx = WSGIContext(self.app)
+        resp_iter = ctx._app_call(env)
+
+        content_type = content_length = cl_index = None
+        for index, (header, value) in enumerate(ctx._response_headers):
+            header = header.lower()
+            if header == 'content-type':
+                content_type = value.split(';', 1)[0].strip()
+                if content_length:
+                    break
+            elif header == 'content-length':
+                cl_index = index
+                try:
+                    content_length = int(value)
+                except ValueError:
+                    pass  # ignore -- we'll bail later
+                if content_type:
+                    break
+
+        if content_type != 'application/json' or content_length is None or \
+                content_length > MAX_CONTAINER_LISTING_CONTENT_LENGTH:
+            start_response(ctx._response_status, ctx._response_headers,
+                           ctx._response_exc_info)
+            return resp_iter
+
+        # We've done our sanity checks, slurp the response into memory
+        with closing_if_possible(resp_iter):
+            body = b''.join(resp_iter)
+
+        try:
+            listing = json.loads(body)
+            for item in listing:
+                if 'subdir' in item:
+                    continue
+                value, params = parse_header(item['hash'])
+                if 's3_etag' in params:
+                    item['s3_etag'] = '"%s"' % params.pop('s3_etag')
+                    item['hash'] = value + ''.join(
+                        '; %s=%s' % kv for kv in params.items())
+        except (TypeError, KeyError, ValueError):
+            # If anything goes wrong above, drop back to original response
+            start_response(ctx._response_status, ctx._response_headers,
+                           ctx._response_exc_info)
+            return [body]
+
+        body = json.dumps(listing)
+        ctx._response_headers[cl_index] = (
+            ctx._response_headers[cl_index][0],
+            str(len(body)),
+        )
+        start_response(ctx._response_status, ctx._response_headers,
+                       ctx._response_exc_info)
+        return [body]
+
+
 class S3ApiMiddleware(object):
     """S3Api: S3 compatibility middleware"""
     def __init__(self, app, conf, *args, **kwargs):
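To make the listing translation above concrete, here is a hedged sketch of just
the hash-splitting step that ListingEtagMiddleware performs on each
container-listing entry (the sample hash value is invented):

    from cgi import parse_header

    # A listing entry for an object completed as an S3 multipart upload; SLO
    # stores the s3_etag as a parameter appended to the container etag value.
    item = {'name': 'obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; s3_etag=deadbeef-2'}

    value, params = parse_header(item['hash'])
    if 's3_etag' in params:
        item['s3_etag'] = '"%s"' % params.pop('s3_etag')
        item['hash'] = value + ''.join(
            '; %s=%s' % kv for kv in params.items())

    print(item)
    # {'name': 'obj', 'hash': 'd41d8cd98f00b204e9800998ecf8427e',
    #  's3_etag': '"deadbeef-2"'}

Entries without an s3_etag parameter (plain objects and pre-existing SLOs) pass
through unchanged.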
@@ -267,6 +347,6 @@ def filter_factory(global_conf, **local_conf):
     )

     def s3api_filter(app):
-        return S3ApiMiddleware(app, conf)
+        return S3ApiMiddleware(ListingEtagMiddleware(app), conf)

     return s3api_filter
@@ -21,7 +21,8 @@ from swift.common import swob
 from swift.common.utils import config_true_value
 from swift.common.request_helpers import is_sys_meta

-from swift.common.middleware.s3api.utils import snake_to_camel, sysmeta_prefix
+from swift.common.middleware.s3api.utils import snake_to_camel, \
+    sysmeta_prefix, sysmeta_header
 from swift.common.middleware.s3api.etree import Element, SubElement, tostring


@@ -79,10 +80,6 @@ class S3Response(S3ResponseBase, swob.Response):
     def __init__(self, *args, **kwargs):
         swob.Response.__init__(self, *args, **kwargs)

-        if self.etag:
-            # add double quotes to the etag header
-            self.etag = self.etag
-
         sw_sysmeta_headers = swob.HeaderKeyDict()
         sw_headers = swob.HeaderKeyDict()
         headers = HeaderKeyDict()
@@ -134,7 +131,20 @@ class S3Response(S3ResponseBase, swob.Response):
                 # for delete slo
                 self.is_slo = config_true_value(val)

+        # Check whether we stored the AWS-style etag on upload
+        override_etag = sw_sysmeta_headers.get(
+            sysmeta_header('object', 'etag'))
+        if override_etag is not None:
+            # Multipart uploads in AWS have ETags like
+            #   <MD5(part_etag1 || ... || part_etagN)>-<number of parts>
+            headers['etag'] = override_etag
+
         self.headers = headers
+
+        if self.etag:
+            # add double quotes to the etag header
+            self.etag = self.etag
+
         # Used for pure swift header handling at the request layer
         self.sw_headers = sw_headers
         self.sysmeta_headers = sw_sysmeta_headers
@@ -78,7 +78,7 @@ class TestS3ApiBucket(S3ApiBase):
         self.assertEqual(status, 200)

         self.assertCommonResponseHeaders(headers)
-        self.assertTrue(headers['content-type'] is not None)
+        self.assertIsNotNone(headers['content-type'])
         self.assertEqual(headers['content-length'], str(len(body)))
         # TODO; requires consideration
         # self.assertEqual(headers['transfer-encoding'], 'chunked')
@@ -110,24 +110,24 @@ class TestS3ApiBucket(S3ApiBase):
         resp_objects = elem.findall('./Contents')
         self.assertEqual(len(list(resp_objects)), 2)
         for o in resp_objects:
-            self.assertTrue(o.find('Key').text in req_objects)
-            self.assertTrue(o.find('LastModified').text is not None)
+            self.assertIn(o.find('Key').text, req_objects)
+            self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegexpMatches(
                 o.find('LastModified').text,
                 r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
-            self.assertTrue(o.find('ETag').text is not None)
-            self.assertTrue(o.find('Size').text is not None)
-            self.assertTrue(o.find('StorageClass').text is not None)
-            self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
-            self.assertTrue(o.find('Owner/DisplayName').text,
+            self.assertIsNotNone(o.find('ETag').text)
+            self.assertIsNotNone(o.find('Size').text)
+            self.assertIsNotNone(o.find('StorageClass').text)
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)

         # HEAD Bucket
         status, headers, body = self.conn.make_request('HEAD', bucket)
         self.assertEqual(status, 200)

         self.assertCommonResponseHeaders(headers)
-        self.assertTrue(headers['content-type'] is not None)
+        self.assertIsNotNone(headers['content-type'])
         self.assertEqual(headers['content-length'], str(len(body)))
         # TODO; requires consideration
         # self.assertEqual(headers['transfer-encoding'], 'chunked')
@@ -202,16 +202,16 @@ class TestS3ApiBucket(S3ApiBase):
         self.assertEqual(len(list(resp_objects)), len(expect_objects))
         for i, o in enumerate(resp_objects):
             self.assertEqual(o.find('Key').text, expect_objects[i])
-            self.assertTrue(o.find('LastModified').text is not None)
+            self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegexpMatches(
                 o.find('LastModified').text,
                 r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
-            self.assertTrue(o.find('ETag').text is not None)
-            self.assertTrue(o.find('Size').text is not None)
+            self.assertIsNotNone(o.find('ETag').text)
+            self.assertIsNotNone(o.find('Size').text)
             self.assertEqual(o.find('StorageClass').text, 'STANDARD')
-            self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
-            self.assertTrue(o.find('Owner/DisplayName').text,
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)
         resp_prefixes = elem.findall('CommonPrefixes')
         self.assertEqual(len(resp_prefixes), len(expect_prefixes))
         for i, p in enumerate(resp_prefixes):
@@ -248,16 +248,16 @@ class TestS3ApiBucket(S3ApiBase):
         self.assertEqual(len(list(resp_objects)), len(expect_objects))
         for i, o in enumerate(resp_objects):
             self.assertEqual(o.find('Key').text, expect_objects[i])
-            self.assertTrue(o.find('LastModified').text is not None)
+            self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegexpMatches(
                 o.find('LastModified').text,
                 r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
-            self.assertTrue(o.find('ETag').text is not None)
-            self.assertTrue(o.find('Size').text is not None)
+            self.assertIsNotNone(o.find('ETag').text)
+            self.assertIsNotNone(o.find('Size').text)
             self.assertEqual(o.find('StorageClass').text, 'STANDARD')
-            self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
-            self.assertTrue(o.find('Owner/DisplayName').text,
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)

     def test_get_bucket_with_max_keys(self):
         bucket = 'bucket'
@@ -277,16 +277,16 @@ class TestS3ApiBucket(S3ApiBase):
         self.assertEqual(len(list(resp_objects)), len(expect_objects))
         for i, o in enumerate(resp_objects):
             self.assertEqual(o.find('Key').text, expect_objects[i])
-            self.assertTrue(o.find('LastModified').text is not None)
+            self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegexpMatches(
                 o.find('LastModified').text,
                 r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
-            self.assertTrue(o.find('ETag').text is not None)
-            self.assertTrue(o.find('Size').text is not None)
+            self.assertIsNotNone(o.find('ETag').text)
+            self.assertIsNotNone(o.find('Size').text)
             self.assertEqual(o.find('StorageClass').text, 'STANDARD')
-            self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
-            self.assertTrue(o.find('Owner/DisplayName').text,
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)

     def test_get_bucket_with_prefix(self):
         bucket = 'bucket'
@@ -306,16 +306,16 @@ class TestS3ApiBucket(S3ApiBase):
         self.assertEqual(len(list(resp_objects)), len(expect_objects))
         for i, o in enumerate(resp_objects):
             self.assertEqual(o.find('Key').text, expect_objects[i])
-            self.assertTrue(o.find('LastModified').text is not None)
+            self.assertIsNotNone(o.find('LastModified').text)
             self.assertRegexpMatches(
                 o.find('LastModified').text,
                 r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
-            self.assertTrue(o.find('ETag').text is not None)
-            self.assertTrue(o.find('Size').text is not None)
+            self.assertIsNotNone(o.find('ETag').text)
+            self.assertIsNotNone(o.find('Size').text)
             self.assertEqual(o.find('StorageClass').text, 'STANDARD')
-            self.assertTrue(o.find('Owner/ID').text, self.conn.user_id)
-            self.assertTrue(o.find('Owner/DisplayName').text,
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)

     def test_get_bucket_v2_with_start_after(self):
         bucket = 'bucket'
@@ -312,8 +312,65 @@ class TestS3ApiMultiUpload(S3ApiBase):
                          elem.find('Location').text)
         self.assertEqual(elem.find('Bucket').text, bucket)
         self.assertEqual(elem.find('Key').text, key)
-        # TODO: confirm completed etag value
-        self.assertTrue(elem.find('ETag').text is not None)
+        concatted_etags = ''.join(etag.strip('"') for etag in etags)
+        exp_etag = '"%s-%s"' % (
+            md5(concatted_etags.decode('hex')).hexdigest(), len(etags))
+        etag = elem.find('ETag').text
+        self.assertEqual(etag, exp_etag)
+
+        exp_size = self.min_segment_size * len(etags)
+        swift_etag = '"%s"' % md5(concatted_etags).hexdigest()
+        # TODO: GET via swift api, check against swift_etag
+
+        # Check object
+        def check_obj(req_headers, exp_status):
+            status, headers, body = \
+                self.conn.make_request('HEAD', bucket, key, req_headers)
+            self.assertEqual(status, exp_status)
+            self.assertCommonResponseHeaders(headers)
+            self.assertIn('content-length', headers)
+            if exp_status == 412:
+                self.assertNotIn('etag', headers)
+                self.assertEqual(headers['content-length'], '0')
+            else:
+                self.assertIn('etag', headers)
+                self.assertEqual(headers['etag'], exp_etag)
+                if exp_status == 304:
+                    self.assertEqual(headers['content-length'], '0')
+                else:
+                    self.assertEqual(headers['content-length'], str(exp_size))
+
+        check_obj({}, 200)
+
+        # Sanity check conditionals
+        check_obj({'If-Match': 'some other thing'}, 412)
+        check_obj({'If-None-Match': 'some other thing'}, 200)
+
+        # More interesting conditional cases
+        check_obj({'If-Match': exp_etag}, 200)
+        check_obj({'If-Match': swift_etag}, 412)
+        check_obj({'If-None-Match': swift_etag}, 200)
+        check_obj({'If-None-Match': exp_etag}, 304)
+
+        # Check listings
+        status, headers, body = self.conn.make_request('GET', bucket)
+        self.assertEqual(status, 200)
+
+        elem = fromstring(body, 'ListBucketResult')
+        resp_objects = elem.findall('./Contents')
+        self.assertEqual(len(list(resp_objects)), 1)
+        for o in resp_objects:
+            self.assertEqual(o.find('Key').text, key)
+            self.assertIsNotNone(o.find('LastModified').text)
+            self.assertRegexpMatches(
+                o.find('LastModified').text,
+                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$')
+            self.assertEqual(o.find('ETag').text, exp_etag)
+            self.assertEqual(o.find('Size').text, str(exp_size))
+            self.assertIsNotNone(o.find('StorageClass').text is not None)
+            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
+            self.assertEqual(o.find('Owner/DisplayName').text,
+                             self.conn.user_id)

     def test_initiate_multi_upload_error(self):
         bucket = 'bucket'
@@ -20,7 +20,7 @@ import time

 from swift.common import swob

-from swift.common.middleware.s3api.s3api import S3ApiMiddleware
+from swift.common.middleware.s3api.s3api import filter_factory
 from helpers import FakeSwift
 from swift.common.middleware.s3api.etree import fromstring
 from swift.common.middleware.s3api.utils import Config
@@ -78,7 +78,7 @@ class S3ApiTestCase(unittest.TestCase):

         self.app = FakeApp()
         self.swift = self.app.swift
-        self.s3api = S3ApiMiddleware(self.app, self.conf)
+        self.s3api = filter_factory({}, **self.conf)(self.app)

         self.swift.register('HEAD', '/v1/AUTH_test',
                             swob.HTTPOk, {}, None)
@@ -92,9 +92,9 @@ class S3ApiTestCase(unittest.TestCase):
                             swob.HTTPNoContent, {}, None)

         self.swift.register('GET', '/v1/AUTH_test/bucket/object',
-                            swob.HTTPOk, {}, "")
+                            swob.HTTPOk, {'etag': 'object etag'}, "")
         self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
-                            swob.HTTPCreated, {}, None)
+                            swob.HTTPCreated, {'etag': 'object etag'}, None)
         self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                             swob.HTTPNoContent, {}, None)

@@ -36,49 +36,52 @@ class TestS3ApiBucket(S3ApiTestCase):
         self.objects = (('rose', '2011-01-05T02:19:14.275290', 0, 303),
                         ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                         ('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
+                        ('mu', '2011-01-05T02:19:14.275290',
+                         'md5-of-the-manifest; s3_etag=0', '3909'),
                         ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                         ('with%20space', '2011-01-05T02:19:14.275290', 0, 390))

-        objects = map(
-            lambda item: {'name': str(item[0]), 'last_modified': str(item[1]),
-                          'hash': str(item[2]), 'bytes': str(item[3])},
-            list(self.objects))
+        objects = [
+            {'name': str(item[0]), 'last_modified': str(item[1]),
+             'hash': str(item[2]), 'bytes': str(item[3])}
+            for item in self.objects]
         object_list = json.dumps(objects)

         self.prefixes = ['rose', 'viola', 'lily']
-        object_list_subdir = []
-        for p in self.prefixes:
-            object_list_subdir.append({"subdir": p})
+        object_list_subdir = [{"subdir": p} for p in self.prefixes]

         self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
                             swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/rose',
-                            swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/viola',
-                            swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/lily',
-                            swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with'
-                            ' space', swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/with%20'
-                            'space', swob.HTTPNoContent, {}, json.dumps([]))
-        self.swift.register('GET', '/v1/AUTH_test/bucket+segments?format=json'
-                            '&marker=with%2520space', swob.HTTPOk, {},
-                            json.dumps([]))
-        self.swift.register('GET', '/v1/AUTH_test/bucket+segments?format=json'
-                            '&marker=', swob.HTTPOk, {}, object_list)
-        self.swift.register('HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent,
-                            {}, None)
-        self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
-                            {}, None)
-        self.swift.register('GET', '/v1/AUTH_test/junk', swob.HTTPOk, {},
-                            object_list)
+        for name, _, _, _ in self.objects:
+            self.swift.register(
+                'DELETE', '/v1/AUTH_test/bucket+segments/' + name,
+                swob.HTTPNoContent, {}, json.dumps([]))
+        self.swift.register(
+            'GET',
+            '/v1/AUTH_test/bucket+segments?format=json&marker=with%2520space',
+            swob.HTTPOk,
+            {'Content-Type': 'application/json; charset=utf-8'},
+            json.dumps([]))
+        self.swift.register(
+            'GET', '/v1/AUTH_test/bucket+segments?format=json&marker=',
+            swob.HTTPOk, {'Content-Type': 'application/json'}, object_list)
+        self.swift.register(
+            'HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent, {}, None)
+        self.swift.register(
+            'HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound, {}, None)
+        self.swift.register(
+            'GET', '/v1/AUTH_test/junk', swob.HTTPOk,
+            {'Content-Type': 'application/json'}, object_list)
         self.swift.register(
             'GET',
             '/v1/AUTH_test/junk?delimiter=a&format=json&limit=3&marker=viola',
-            swob.HTTPOk, {}, json.dumps(objects[2:]))
-        self.swift.register('GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
-                            {}, json.dumps(object_list_subdir))
+            swob.HTTPOk,
+            {'Content-Type': 'application/json; charset=utf-8'},
+            json.dumps(objects[2:]))
+        self.swift.register(
+            'GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
+            {'Content-Type': 'application/json; charset=utf-8'},
+            json.dumps(object_list_subdir))
         self.swift.register(
             'GET',
             '/v1/AUTH_test/subdirs?delimiter=/&format=json&limit=3',
@@ -183,18 +186,20 @@ class TestS3ApiBucket(S3ApiTestCase):
     def test_bucket_GET_is_truncated(self):
        bucket_name = 'junk'

-        req = Request.blank('/%s?max-keys=5' % bucket_name,
-                            environ={'REQUEST_METHOD': 'GET'},
-                            headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+        req = Request.blank(
+            '/%s?max-keys=%d' % (bucket_name, len(self.objects)),
+            environ={'REQUEST_METHOD': 'GET'},
+            headers={'Authorization': 'AWS test:tester:hmac',
+                     'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         elem = fromstring(body, 'ListBucketResult')
         self.assertEqual(elem.find('./IsTruncated').text, 'false')

-        req = Request.blank('/%s?max-keys=4' % bucket_name,
-                            environ={'REQUEST_METHOD': 'GET'},
-                            headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+        req = Request.blank(
+            '/%s?max-keys=%d' % (bucket_name, len(self.objects) - 1),
+            environ={'REQUEST_METHOD': 'GET'},
+            headers={'Authorization': 'AWS test:tester:hmac',
+                     'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         elem = fromstring(body, 'ListBucketResult')
         self.assertEqual(elem.find('./IsTruncated').text, 'true')
@@ -211,23 +216,27 @@ class TestS3ApiBucket(S3ApiTestCase):
     def test_bucket_GET_v2_is_truncated(self):
         bucket_name = 'junk'

-        req = Request.blank('/%s?list-type=2&max-keys=5' % bucket_name,
-                            environ={'REQUEST_METHOD': 'GET'},
-                            headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+        req = Request.blank(
+            '/%s?list-type=2&max-keys=%d' % (bucket_name, len(self.objects)),
+            environ={'REQUEST_METHOD': 'GET'},
+            headers={'Authorization': 'AWS test:tester:hmac',
+                     'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         elem = fromstring(body, 'ListBucketResult')
-        self.assertEqual(elem.find('./KeyCount').text, '5')
+        self.assertEqual(elem.find('./KeyCount').text, str(len(self.objects)))
         self.assertEqual(elem.find('./IsTruncated').text, 'false')

-        req = Request.blank('/%s?list-type=2&max-keys=4' % bucket_name,
-                            environ={'REQUEST_METHOD': 'GET'},
-                            headers={'Authorization': 'AWS test:tester:hmac',
-                                     'Date': self.get_date_header()})
+        req = Request.blank(
+            '/%s?list-type=2&max-keys=%d' % (bucket_name,
+                                             len(self.objects) - 1),
+            environ={'REQUEST_METHOD': 'GET'},
+            headers={'Authorization': 'AWS test:tester:hmac',
+                     'Date': self.get_date_header()})
         status, headers, body = self.call_s3api(req)
         elem = fromstring(body, 'ListBucketResult')
         self.assertIsNotNone(elem.find('./NextContinuationToken'))
-        self.assertEqual(elem.find('./KeyCount').text, '4')
+        self.assertEqual(elem.find('./KeyCount').text,
+                         str(len(self.objects) - 1))
         self.assertEqual(elem.find('./IsTruncated').text, 'true')

         req = Request.blank('/subdirs?list-type=2&delimiter=/&max-keys=2',
@@ -14,7 +14,7 @@
 # limitations under the License.

 import base64
-from hashlib import md5
+import hashlib
 from mock import patch
 import os
 import time
@@ -28,8 +28,8 @@ from swift.common.utils import json
 from test.unit.common.middleware.s3api import S3ApiTestCase
 from test.unit.common.middleware.s3api.helpers import UnreadableInput
 from swift.common.middleware.s3api.etree import fromstring, tostring
-from swift.common.middleware.s3api.subresource import Owner, Grant, User, ACL, \
-    encode_acl, decode_acl, ACLPublicRead
+from swift.common.middleware.s3api.subresource import Owner, Grant, User, \
+    ACL, encode_acl, decode_acl, ACLPublicRead
 from test.unit.common.middleware.s3api.test_s3_acl import s3acl
 from swift.common.middleware.s3api.utils import sysmeta_header, mktime, \
     S3Timestamp
@@ -40,31 +40,36 @@ from swift.common.middleware.s3api.controllers.multi_upload import \
 xml = '<CompleteMultipartUpload>' \
     '<Part>' \
     '<PartNumber>1</PartNumber>' \
-    '<ETag>HASH</ETag>' \
+    '<ETag>0123456789abcdef</ETag>' \
     '</Part>' \
     '<Part>' \
     '<PartNumber>2</PartNumber>' \
-    '<ETag>"HASH"</ETag>' \
+    '<ETag>"fedcba9876543210"</ETag>' \
     '</Part>' \
     '</CompleteMultipartUpload>'

 objects_template = \
-    (('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 100),
-     ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 200))
+    (('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 100),
+     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 200))

 multiparts_template = \
     (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1),
-     ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11),
-     ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21),
+     ('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 11),
+     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 21),
      ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2),
-     ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12),
-     ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22),
+     ('object/Y/1', '2014-05-07T19:47:54.592270', '0123456789abcdef', 12),
+     ('object/Y/2', '2014-05-07T19:47:55.592270', 'fedcba9876543210', 22),
      ('object/Z', '2014-05-07T19:47:56.592270', 'HASH', 3),
-     ('object/Z/1', '2014-05-07T19:47:57.592270', 'HASH', 13),
-     ('object/Z/2', '2014-05-07T19:47:58.592270', 'HASH', 23),
+     ('object/Z/1', '2014-05-07T19:47:57.592270', '0123456789abcdef', 13),
+     ('object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', 23),
      ('subdir/object/Z', '2014-05-07T19:47:58.592270', 'HASH', 4),
-     ('subdir/object/Z/1', '2014-05-07T19:47:58.592270', 'HASH', 41),
-     ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'HASH', 41))
+     ('subdir/object/Z/1', '2014-05-07T19:47:58.592270', '0123456789abcdef',
+      41),
+     ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210',
+      41))

+s3_etag = '"%s-2"' % hashlib.md5(
+    '0123456789abcdeffedcba9876543210'.decode('hex')).hexdigest()
+

 class TestS3ApiMultiUpload(S3ApiTestCase):
@@ -664,12 +669,32 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
                      'Date': self.get_date_header(), },
             body=xml)
         status, headers, body = self.call_s3api(req)
-        fromstring(body, 'CompleteMultipartUploadResult')
+        elem = fromstring(body, 'CompleteMultipartUploadResult')
+        self.assertNotIn('Etag', headers)
+        self.assertEqual(elem.find('ETag').text, s3_etag)
         self.assertEqual(status.split()[0], '200')

+        self.assertEqual(self.swift.calls, [
+            # Bucket exists
+            ('HEAD', '/v1/AUTH_test/bucket'),
+            # Segment container exists
+            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
+            # Get the currently-uploaded segments
+            ('GET', '/v1/AUTH_test/bucket+segments?delimiter=/'
+                    '&format=json&prefix=object/X/'),
+            # Create the SLO
+            ('PUT', '/v1/AUTH_test/bucket/object?multipart-manifest=put'),
+            # Delete the in-progress-upload marker
+            ('DELETE', '/v1/AUTH_test/bucket+segments/object/X')
+        ])
+
         _, _, headers = self.swift.calls_with_headers[-2]
         self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
         self.assertEqual(headers.get('Content-Type'), 'baz/quux')
+        # SLO will provide a base value
+        override_etag = '; s3_etag=%s' % s3_etag.strip('"')
+        h = 'X-Object-Sysmeta-Container-Update-Override-Etag'
+        self.assertEqual(headers.get(h), override_etag)

     def test_object_multipart_upload_complete_404_on_marker_delete(self):
         segment_bucket = '/v1/AUTH_test/bucket+segments'
@@ -882,12 +907,12 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         object_list = [{
             'name': 'object/X/1',
             'last_modified': self.last_modified,
-            'hash': 'some hash',
+            'hash': '0123456789abcdef0123456789abcdef',
             'bytes': '100',
         }, {
             'name': 'object/X/2',
             'last_modified': self.last_modified,
-            'hash': 'some other hash',
+            'hash': 'fedcba9876543210fedcba9876543210',
             'bytes': '1',
         }, {
             'name': 'object/X/3',
@@ -909,11 +934,11 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         xml = '<CompleteMultipartUpload>' \
             '<Part>' \
             '<PartNumber>1</PartNumber>' \
-            '<ETag>some hash</ETag>' \
+            '<ETag>0123456789abcdef0123456789abcdef</ETag>' \
             '</Part>' \
             '<Part>' \
             '<PartNumber>2</PartNumber>' \
-            '<ETag>some other hash</ETag>' \
+            '<ETag>fedcba9876543210fedcba9876543210</ETag>' \
             '</Part>' \
             '<Part>' \
             '<PartNumber>3</PartNumber>' \
@@ -928,6 +953,11 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
             body=xml)
         status, headers, body = self.call_s3api(req)
         self.assertEqual(status.split()[0], '200')
+        elem = fromstring(body, 'CompleteMultipartUploadResult')
+        self.assertNotIn('Etag', headers)
+        expected_etag = '"%s-3"' % hashlib.md5(''.join(
+            x['hash'] for x in object_list).decode('hex')).hexdigest()
+        self.assertEqual(elem.find('ETag').text, expected_etag)

         self.assertEqual(self.swift.calls, [
             ('HEAD', '/v1/AUTH_test/bucket'),
@@ -938,6 +968,12 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
             ('DELETE', '/v1/AUTH_test/bucket+segments/object/X'),
         ])

+        _, _, headers = self.swift.calls_with_headers[-2]
+        # SLO will provide a base value
+        override_etag = '; s3_etag=%s' % expected_etag.strip('"')
+        h = 'X-Object-Sysmeta-Container-Update-Override-Etag'
+        self.assertEqual(headers.get(h), override_etag)
+
     @s3acl(s3acl_only=True)
     def test_object_multipart_upload_complete_s3acl(self):
         acl_headers = encode_acl('object', ACLPublicRead(Owner('test:tester',
@@ -1107,8 +1143,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         for p in elem.findall('Part'):
             partnum = int(p.find('PartNumber').text)
             self.assertEqual(p.find('LastModified').text,
-                             objects_template[partnum - 1][1][:-3]
-                             + 'Z')
+                             objects_template[partnum - 1][1][:-3] + 'Z')
             self.assertEqual(p.find('ETag').text.strip(),
                              '"%s"' % objects_template[partnum - 1][2])
             self.assertEqual(p.find('Size').text,
@@ -1197,8 +1232,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
         for p in elem.findall('Part'):
             partnum = int(p.find('PartNumber').text)
             self.assertEqual(p.find('LastModified').text,
-                             objects_template[partnum - 1][1][:-3]
-                             + 'Z')
+                             objects_template[partnum - 1][1][:-3] + 'Z')
             self.assertEqual(p.find('ETag').text,
                              '"%s"' % objects_template[partnum - 1][2])
             self.assertEqual(p.find('Size').text,
@@ -1694,7 +1728,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase):

     def _test_no_body(self, use_content_length=False,
                       use_transfer_encoding=False, string_to_md5=''):
-        content_md5 = md5(string_to_md5).digest().encode('base64').strip()
+        raw_md5 = hashlib.md5(string_to_md5).digest()
+        content_md5 = raw_md5.encode('base64').strip()
         with UnreadableInput(self) as fake_input:
             req = Request.blank(
                 '/bucket/object?uploadId=X',
@@ -1738,5 +1773,6 @@ class TestS3ApiMultiUploadNonUTC(TestS3ApiMultiUpload):
         os.environ['TZ'] = self.orig_tz
         time.tzset()

+
 if __name__ == '__main__':
     unittest.main()
|
|||||||
from swift.common.middleware.s3api.s3token import S3Token
|
from swift.common.middleware.s3api.s3token import S3Token
|
||||||
|
|
||||||
|
|
||||||
|
class TestListingMiddleware(S3ApiTestCase):
|
||||||
|
def test_s3_etag_in_json(self):
|
||||||
|
# This translation happens all the time, even on normal swift requests
|
||||||
|
body_data = json.dumps([
|
||||||
|
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
|
||||||
|
{'subdir': 'path/'},
|
||||||
|
]).encode('ascii')
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/a/c', swob.HTTPOk,
|
||||||
|
{'Content-Type': 'application/json; charset=UTF-8'},
|
||||||
|
body_data)
|
||||||
|
|
||||||
|
req = Request.blank('/v1/a/c')
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(json.loads(body.decode('ascii')), [
|
||||||
|
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag', 's3_etag': '"mu-etag"'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
|
||||||
|
{'subdir': 'path/'},
|
||||||
|
])
|
||||||
|
|
||||||
|
def test_s3_etag_non_json(self):
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/a/c', swob.HTTPOk,
|
||||||
|
{'Content-Type': 'application/json; charset=UTF-8'},
|
||||||
|
b'Not actually JSON')
|
||||||
|
req = Request.blank('/v1/a/c')
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(body, b'Not actually JSON')
|
||||||
|
|
||||||
|
# Yes JSON, but wrong content-type
|
||||||
|
body_data = json.dumps([
|
||||||
|
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
|
||||||
|
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
|
||||||
|
{'subdir': 'path/'},
|
||||||
|
]).encode('ascii')
|
||||||
|
self.swift.register(
|
||||||
|
'GET', '/v1/a/c', swob.HTTPOk,
|
||||||
|
{'Content-Type': 'text/plain; charset=UTF-8'},
|
||||||
|
body_data)
|
||||||
|
req = Request.blank('/v1/a/c')
|
||||||
|
status, headers, body = self.call_s3api(req)
|
||||||
|
self.assertEqual(body, body_data)
|
||||||
|
|
||||||
|
|
||||||
class TestS3ApiMiddleware(S3ApiTestCase):
|
class TestS3ApiMiddleware(S3ApiTestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(TestS3ApiMiddleware, self).setUp()
|
super(TestS3ApiMiddleware, self).setUp()
|
||||||
|
@ -26,9 +26,11 @@ class TestResponse(unittest.TestCase):
|
|||||||
for expected, header_vals in \
|
for expected, header_vals in \
|
||||||
((True, ('true', '1')), (False, ('false', 'ugahhh', None))):
|
((True, ('true', '1')), (False, ('false', 'ugahhh', None))):
|
||||||
for val in header_vals:
|
for val in header_vals:
|
||||||
resp = Response(headers={'X-Static-Large-Object': val})
|
resp = Response(headers={'X-Static-Large-Object': val,
|
||||||
|
'Etag': 'theetag'})
|
||||||
s3resp = S3Response.from_swift_resp(resp)
|
s3resp = S3Response.from_swift_resp(resp)
|
||||||
self.assertEqual(expected, s3resp.is_slo)
|
self.assertEqual(expected, s3resp.is_slo)
|
||||||
|
self.assertEqual('"theetag"', s3resp.headers['ETag'])
|
||||||
|
|
||||||
def test_response_s3api_sysmeta_headers(self):
|
def test_response_s3api_sysmeta_headers(self):
|
||||||
for _server_type in ('object', 'container'):
|
for _server_type in ('object', 'container'):
|
||||||
|
Loading…
Reference in New Issue
Block a user