Add support for Sigv4-streaming
This update implements Sigv4-streaming (chunked upload) as described in the
Amazon S3 documentation:
https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html

Closes-Bug: #1810026
Co-Authored-By: Tim Burke <tim.burke@gmail.com>
Co-Authored-By: Alistair Coles <alistairncoles@gmail.com>
Co-Authored-By: ashnair <ashnair@nvidia.com>
Change-Id: I7be1ce9eb5dba7b17bdf3e53b0d05d25ac0a05b0
parent 6212869399
commit f9ac22971f
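For orientation, here is a minimal sketch (not part of the patch; all values are illustrative and mirror the unit tests later in this diff) of the aws-chunked wire format that this change teaches s3api to decode: each chunk is "<hex-size>[;chunk-signature=<sig>]\r\n<data>\r\n", the stream ends with a zero-length chunk, and any declared trailers follow it.

# Sketch of a STREAMING-UNSIGNED-PAYLOAD-TRAILER upload body; values are
# illustrative only and borrowed from the tests added by this change.
payload = b'abcdefghij' + b'klmnopqrst' + b'uvwxyz\n'   # 27 decoded bytes

body = (
    b'a\r\n' b'abcdefghij' b'\r\n'       # 0xa == 10-byte chunk
    b'a\r\n' b'klmnopqrst' b'\r\n'
    b'7\r\n' b'uvwxyz\n' b'\r\n'
    b'0\r\n'                             # zero-length chunk ends the payload
    b'x-amz-checksum-sha256:foo\r\n'     # declared trailer
)

# Headers that would accompany such a body (again, illustrative):
headers = {
    'X-Amz-Content-SHA256': 'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
    'Content-Encoding': 'aws-chunked',
    'X-Amz-Decoded-Content-Length': str(len(payload)),  # what gets stored
    'X-Amz-Trailer': 'x-amz-checksum-sha256',
    'Content-Length': str(len(body)),                   # bytes on the wire
}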
@@ -30,3 +30,52 @@ class InvalidSubresource(S3Exception):
    def __init__(self, resource, cause):
        self.resource = resource
        self.cause = cause


class S3InputError(BaseException):
    """
    There was an error with the client input detected on read().

    Inherit from BaseException (rather than Exception) so it cuts from the
    proxy-server app (which will presumably be the one reading the input)
    through all the layers of the pipeline back to s3api. It should never
    escape the s3api middleware.
    """


class S3InputIncomplete(S3InputError):
    pass


class S3InputSizeError(S3InputError):
    def __init__(self, expected, provided):
        self.expected = expected
        self.provided = provided


class S3InputChunkTooSmall(S3InputError):
    def __init__(self, bad_chunk_size, chunk_number):
        self.bad_chunk_size = bad_chunk_size
        self.chunk_number = chunk_number


class S3InputMalformedTrailer(S3InputError):
    pass


class S3InputChunkSignatureMismatch(S3InputError):
    """
    Client provided a chunk-signature, but it doesn't match the data.

    This should result in a 403 going back to the client.
    """


class S3InputMissingSecret(S3InputError):
    """
    Client provided per-chunk signatures, but we have no secret with which to
    verify them.

    This happens if the auth middleware responsible for the user never called
    the provided ``check_signature`` callback.
    """
@@ -26,7 +26,7 @@ from urllib.parse import quote, unquote, parse_qsl
import string

from swift.common.utils import split_path, json, md5, streq_const_time, \
    get_policy_index, InputProxy
    close_if_possible, InputProxy, get_policy_index, list_from_csv
from swift.common.registry import get_swift_info
from swift.common import swob
from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \
@@ -57,8 +57,12 @@ from swift.common.middleware.s3api.s3response import AccessDenied, \
    MalformedXML, InvalidRequest, RequestTimeout, InvalidBucketName, \
    BadDigest, AuthorizationHeaderMalformed, SlowDown, \
    AuthorizationQueryParametersError, ServiceUnavailable, BrokenMPU, \
    InvalidPartNumber, InvalidPartArgument, XAmzContentSHA256Mismatch
from swift.common.middleware.s3api.exception import NotS3Request
    XAmzContentSHA256Mismatch, IncompleteBody, InvalidChunkSizeError, \
    InvalidPartNumber, InvalidPartArgument, MalformedTrailerError
from swift.common.middleware.s3api.exception import NotS3Request, \
    S3InputError, S3InputSizeError, S3InputIncomplete, \
    S3InputChunkSignatureMismatch, S3InputChunkTooSmall, \
    S3InputMalformedTrailer, S3InputMissingSecret
from swift.common.middleware.s3api.utils import utf8encode, \
    S3Timestamp, mktime, MULTIUPLOAD_SUFFIX
from swift.common.middleware.s3api.subresource import decode_acl, encode_acl
@@ -83,9 +87,20 @@ ALLOWED_SUB_RESOURCES = sorted([
MAX_32BIT_INT = 2147483647
SIGV2_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S'
SIGV4_X_AMZ_DATE_FORMAT = '%Y%m%dT%H%M%SZ'
SIGV4_CHUNK_MIN_SIZE = 8192
SERVICE = 's3'  # useful for mocking out in tests


def _is_streaming(aws_sha256):
    return aws_sha256 in (
        'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
        'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
        'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
        'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD',
        'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER',
    )


def _header_strip(value):
    # S3 seems to strip *all* control characters
    if value is None:
@@ -172,6 +187,238 @@ class HashingInput(InputProxy):
        return chunk


class ChunkReader(InputProxy):
    """
    wsgi.input wrapper to read a single chunk from a chunked input and validate
    its signature.

    :param wsgi_input: a wsgi input.
    :param chunk_size: number of bytes to read.
    :param validator: function to call to validate the chunk's content.
    :param chunk_params: string of params from the chunk's header.
    """
    def __init__(self, wsgi_input, chunk_size, validator, chunk_params):
        super().__init__(wsgi_input)
        self.chunk_size = chunk_size
        self._validator = validator
        if self._validator is None:
            self._signature = None
        else:
            self._signature = self._parse_chunk_signature(chunk_params)
        self._sha256 = sha256()

    def _parse_chunk_signature(self, chunk_params):
        if not chunk_params:
            raise S3InputIncomplete
        start, _, chunk_sig = chunk_params.partition('=')
        if start.strip() != 'chunk-signature':
            # Call the validator to update the string to sign
            self._validator('', '')
            raise S3InputChunkSignatureMismatch
        if ';' in chunk_sig:
            raise S3InputIncomplete
        chunk_sig = chunk_sig.strip()
        if not chunk_sig:
            raise S3InputIncomplete
        return chunk_sig

    @property
    def to_read(self):
        return self.chunk_size - self.bytes_received

    def read(self, size=None, *args, **kwargs):
        if size is None or size < 0 or size > self.to_read:
            size = self.to_read
        return super().read(size)

    def readline(self, size=None, *args, **kwargs):
        if size is None or size < 0 or size > self.to_read:
            size = self.to_read
        return super().readline(size)

    def chunk_update(self, chunk, eof, *args, **kwargs):
        self._sha256.update(chunk)
        if self.bytes_received == self.chunk_size:
            if self._validator and not self._validator(
                    self._sha256.hexdigest(), self._signature):
                self.close()
                raise S3InputChunkSignatureMismatch
        return chunk


class StreamingInput:
    """
    wsgi.input wrapper to read a chunked input, verifying each chunk as it's
    read. Once all chunks have been read, any trailers are read.

    :param input: a wsgi input.
    :param decoded_content_length: the number of payload bytes expected to be
        extracted from chunks.
    :param expected_trailers: the set of trailer names expected.
    :param sig_checker: an instance of SigCheckerV4 that will be called to
        verify each chunk's signature.
    """
    def __init__(self, input, decoded_content_length,
                 expected_trailers, sig_checker):
        self._input = input
        self._decoded_content_length = decoded_content_length
        self._expected_trailers = expected_trailers
        self._sig_checker = sig_checker
        # Length of the payload remaining; i.e., number of bytes a caller
        # still expects to be able to read. Once exhausted, we should be
        # exactly at the trailers (if present)
        self._to_read = decoded_content_length
        # Reader for the current chunk that's in progress
        self._chunk_reader = None
        # Track the chunk number, for error messages
        self._chunk_number = 0
        # Track the size of the most recently read chunk. AWS enforces an 8k
        # min chunk size (except the final chunk)
        self._last_chunk_size = None
        # When True, we've read the payload, but not necessarily the trailers
        self._completed_payload = False
        # When True, we've read the trailers
        self._completed_trailers = False
        # Any trailers present after the payload (not available until after
        # caller has read full payload; i.e., until after _to_read is 0)
        self.trailers = {}

    def _read_chunk_header(self):
        """
        Read a chunk header, reading at most one line from the raw input.

        Parse out the next chunk size and any other params.

        :returns: a tuple of (chunk_size, chunk_params). chunk_size is an int,
            chunk_params is string.
        """
        self._chunk_number += 1
        chunk_header = swob.bytes_to_wsgi(self._input.readline())
        if chunk_header[-2:] != '\r\n':
            raise S3InputIncomplete('invalid chunk header: %s' % chunk_header)
        chunk_size, _, chunk_params = chunk_header[:-2].partition(';')

        try:
            chunk_size = int(chunk_size, 16)
            if chunk_size < 0:
                raise ValueError
        except ValueError:
            raise S3InputIncomplete('invalid chunk header: %s' % chunk_header)

        if self._last_chunk_size is not None and \
                self._last_chunk_size < SIGV4_CHUNK_MIN_SIZE and \
                chunk_size != 0:
            raise S3InputChunkTooSmall(self._last_chunk_size,
                                       self._chunk_number)
        self._last_chunk_size = chunk_size

        if chunk_size > self._to_read:
            raise S3InputSizeError(
                self._decoded_content_length,
                self._decoded_content_length - self._to_read + chunk_size)
        return chunk_size, chunk_params

    def _read_payload(self, size, readline=False):
        bufs = []
        bytes_read = 0
        while not self._completed_payload and (
                bytes_read < size
                # Make sure we read the trailing zero-byte chunk at the end
                or self._to_read == 0):
            if self._chunk_reader is None:
                # OK, we're at the start of a new chunk
                chunk_size, chunk_params = self._read_chunk_header()
                self._chunk_reader = ChunkReader(
                    self._input,
                    chunk_size,
                    self._sig_checker and
                    self._sig_checker.check_chunk_signature,
                    chunk_params)
            if readline:
                buf = self._chunk_reader.readline(size - bytes_read)
            else:
                buf = self._chunk_reader.read(size - bytes_read)
            bufs.append(buf)
            if self._chunk_reader.to_read == 0:
                # If it's the final chunk, we're in (possibly empty) trailers
                # Otherwise, there's a CRLF chunk-separator
                if self._chunk_reader.chunk_size == 0:
                    self._completed_payload = True
                elif self._input.read(2) != b'\r\n':
                    raise S3InputIncomplete
                self._chunk_reader = None
            bytes_read += len(buf)
            self._to_read -= len(buf)
            if readline and buf[-1:] == b'\n':
                break
        return b''.join(bufs)

    def _read_trailers(self):
        if self._expected_trailers:
            for line in iter(self._input.readline, b''):
                if not line.endswith(b'\r\n'):
                    raise S3InputIncomplete
                if line == b'\r\n':
                    break
                key, _, value = swob.bytes_to_wsgi(line).partition(':')
                if key.lower() not in self._expected_trailers:
                    raise S3InputMalformedTrailer
                self.trailers[key.strip()] = value.strip()
            if 'x-amz-trailer-signature' in self._expected_trailers \
                    and 'x-amz-trailer-signature' not in self.trailers:
                raise S3InputIncomplete
            if set(self.trailers.keys()) != self._expected_trailers:
                raise S3InputMalformedTrailer
            if 'x-amz-trailer-signature' in self._expected_trailers \
                    and self._sig_checker is not None:
                if not self._sig_checker.check_trailer_signature(
                        self.trailers):
                    raise S3InputChunkSignatureMismatch
                if len(self.trailers) == 1:
                    raise S3InputIncomplete
            # Now that we've read them, we expect no more
            self._expected_trailers = set()
        elif self._input.read(2) not in (b'', b'\r\n'):
            raise S3InputIncomplete

        self._completed_trailers = True

    def _read(self, size, readline=False):
        data = self._read_payload(size, readline)
        if self._completed_payload:
            if not self._completed_trailers:
                # read trailers, if present
                self._read_trailers()
            # At this point, we should have read everything; if we haven't,
            # that's an error
            if self._to_read:
                raise S3InputSizeError(
                    self._decoded_content_length,
                    self._decoded_content_length - self._to_read)
        return data

    def read(self, size=None):
        if size is None or size < 0 or size > self._to_read:
            size = self._to_read
        try:
            return self._read(size)
        except S3InputError:
            self.close()
            raise

    def readline(self, size=None):
        if size is None or size < 0 or size > self._to_read:
            size = self._to_read
        try:
            return self._read(size, True)
        except S3InputError:
            self.close()
            raise

    def close(self):
        close_if_possible(self._input)


class BaseSigChecker:
    def __init__(self, req):
        self.req = req
@@ -275,10 +522,113 @@ class SigCheckerV4(BaseSigChecker):
        return derived_secret

    def _check_signature(self):
        if self._secret is None:
            raise S3InputMissingSecret
        valid_signature = hmac.new(
            self._secret, self.string_to_sign, sha256).hexdigest()
        return streq_const_time(self.signature, valid_signature)

    def _chunk_string_to_sign(self, data_sha256):
        """
        Create 'ChunkStringToSign' value in Amazon terminology for v4.
        """
        return b'\n'.join([
            b'AWS4-HMAC-SHA256-PAYLOAD',
            self.req.timestamp.amz_date_format.encode('ascii'),
            '/'.join(self.req.scope.values()).encode('utf8'),
            self.signature.encode('utf8'),
            sha256(b'').hexdigest().encode('utf8'),
            data_sha256.encode('utf8')
        ])

    def check_chunk_signature(self, chunk_sha256, signature):
        """
        Check the validity of a chunk's signature.

        This method verifies the signature of a given chunk using its SHA-256
        hash. It updates the string to sign and the current signature, then
        checks if the signature is valid. If any chunk signature is invalid,
        it returns False.

        :param chunk_sha256: (str) The SHA-256 hash of the chunk.
        :param signature: (str) The signature to be verified.
        :returns: True if all chunk signatures are valid, False otherwise.
        """
        if not self._all_chunk_signatures_valid:
            return False
        # NB: string_to_sign is calculated using the previous signature
        self.string_to_sign = self._chunk_string_to_sign(chunk_sha256)
        # So we have to update the signature to compare against *after*
        # the string-to-sign
        self.signature = signature
        self._all_chunk_signatures_valid &= self._check_signature()
        return self._all_chunk_signatures_valid

    def _trailer_string_to_sign(self, trailers):
        """
        Create 'TrailerChunkStringToSign' value in Amazon terminology for v4.
        """
        canonical_trailers = swob.wsgi_to_bytes(''.join(
            f'{key}:{value}\n'
            for key, value in sorted(
                trailers.items(),
                key=lambda kvp: swob.wsgi_to_bytes(kvp[0]).lower(),
            )
            if key != 'x-amz-trailer-signature'
        ))
        if not canonical_trailers:
            canonical_trailers = b'\n'
        return b'\n'.join([
            b'AWS4-HMAC-SHA256-TRAILER',
            self.req.timestamp.amz_date_format.encode('ascii'),
            '/'.join(self.req.scope.values()).encode('utf8'),
            self.signature.encode('utf8'),
            sha256(canonical_trailers).hexdigest().encode('utf8'),
        ])

    def check_trailer_signature(self, trailers):
        """
        Check the validity of the trailer signature.

        This method verifies the trailers received after the main payload.

        :param trailers: (dict[str, str]) The trailers received.
        :returns: True if x-amz-trailer-signature is valid, False otherwise.
        """
        if not self._all_chunk_signatures_valid:
            # if there was a breakdown earlier, this can't be right
            return False
        # NB: string_to_sign is calculated using the previous signature
        self.string_to_sign = self._trailer_string_to_sign(trailers)
        # So we have to update the signature to compare against *after*
        # the string-to-sign
        self.signature = trailers['x-amz-trailer-signature']
        self._all_chunk_signatures_valid &= self._check_signature()
        return self._all_chunk_signatures_valid


def _parse_credential(credential_string):
    """
    Parse an AWS credential string into its components.

    This function splits the given credential string into its constituent
    parts: access key ID, date, AWS region, AWS service, and terminal
    identifier. The credential string must follow the format:
    <access-key-id>/<date>/<AWS-region>/<AWS-service>/aws4_request.

    :param credential_string: (str) The AWS credential string to be parsed.
    :raises AccessDenied: If the credential string is invalid or does not
        follow the required format.
    :returns: A dict containing the parsed components of the credential string.
    """
    parts = credential_string.split("/")
    # credential must be in following format:
    # <access-key-id>/<date>/<AWS-region>/<AWS-service>/aws4_request
    if not parts[0] or len(parts) != 5:
        raise AccessDenied(reason='invalid_credential')
    return dict(zip(['access', 'date', 'region', 'service', 'terminal'],
                    parts))


class SigV4Mixin(object):
    """
@@ -289,6 +639,10 @@ class SigV4Mixin(object):
    def _is_query_auth(self):
        return 'X-Amz-Credential' in self.params

    @property
    def _is_x_amz_content_sha256_required(self):
        return not self._is_query_auth

    @property
    def timestamp(self):
        """
@@ -357,37 +711,6 @@ class SigV4Mixin(object):
        if int(self.timestamp) + expires < S3Timestamp.now():
            raise AccessDenied('Request has expired', reason='expired')

    def _validate_sha256(self):
        aws_sha256 = self.headers.get('x-amz-content-sha256')
        looks_like_sha256 = (
            aws_sha256 and len(aws_sha256) == 64 and
            all(c in '0123456789abcdef' for c in aws_sha256.lower()))
        if not aws_sha256:
            if 'X-Amz-Credential' in self.params:
                pass  # pre-signed URL; not required
            else:
                msg = 'Missing required header for this request: ' \
                      'x-amz-content-sha256'
                raise InvalidRequest(msg)
        elif aws_sha256 == 'UNSIGNED-PAYLOAD':
            pass
        elif not looks_like_sha256 and 'X-Amz-Credential' not in self.params:
            raise InvalidArgument(
                'x-amz-content-sha256',
                aws_sha256,
                'x-amz-content-sha256 must be UNSIGNED-PAYLOAD, or '
                'a valid sha256 value.')
        return aws_sha256

    def _parse_credential(self, credential_string):
        parts = credential_string.split("/")
        # credential must be in following format:
        # <access-key-id>/<date>/<AWS-region>/<AWS-service>/aws4_request
        if not parts[0] or len(parts) != 5:
            raise AccessDenied(reason='invalid_credential')
        return dict(zip(['access', 'date', 'region', 'service', 'terminal'],
                        parts))

    def _parse_query_authentication(self):
        """
        Parse v4 query authentication
@@ -400,7 +723,7 @@ class SigV4Mixin(object):
            raise InvalidArgument('X-Amz-Algorithm',
                                  self.params.get('X-Amz-Algorithm'))
        try:
            cred_param = self._parse_credential(
            cred_param = _parse_credential(
                swob.wsgi_to_str(self.params['X-Amz-Credential']))
            sig = swob.wsgi_to_str(self.params['X-Amz-Signature'])
            if not sig:
@@ -454,7 +777,7 @@ class SigV4Mixin(object):
        """

        auth_str = swob.wsgi_to_str(self.headers['Authorization'])
        cred_param = self._parse_credential(auth_str.partition(
        cred_param = _parse_credential(auth_str.partition(
            "Credential=")[2].split(',')[0])
        sig = auth_str.partition("Signature=")[2].split(',')[0]
        if not sig:
@@ -660,6 +983,14 @@ class S3Request(swob.Request):
            self.sig_checker = SigCheckerV4(self)
        else:
            self.sig_checker = SigCheckerV2(self)
        aws_sha256 = self.headers.get('x-amz-content-sha256')
        if self.method in ('PUT', 'POST'):
            if _is_streaming(aws_sha256):
                self._install_streaming_input_wrapper(aws_sha256)
            else:
                self._install_non_streaming_input_wrapper(aws_sha256)

        # Lock in string-to-sign now, before we start messing with query params
        self.environ['s3api.auth_details'] = {
            'access_key': self.access_key,
            'signature': self.signature,
@@ -769,6 +1100,10 @@ class S3Request(swob.Request):
    def _is_query_auth(self):
        return 'AWSAccessKeyId' in self.params

    @property
    def _is_x_amz_content_sha256_required(self):
        return False

    def _parse_host(self):
        if not self.conf.storage_domains:
            return None
@@ -906,7 +1241,110 @@ class S3Request(swob.Request):
            raise RequestTimeTooSkewed()

    def _validate_sha256(self):
        return self.headers.get('x-amz-content-sha256')
        aws_sha256 = self.headers.get('x-amz-content-sha256')
        if not aws_sha256:
            if self._is_x_amz_content_sha256_required:
                msg = 'Missing required header for this request: ' \
                      'x-amz-content-sha256'
                raise InvalidRequest(msg)
            else:
                return

        looks_like_sha256 = (
            aws_sha256 and len(aws_sha256) == 64 and
            all(c in '0123456789abcdef' for c in aws_sha256.lower()))
        if aws_sha256 == 'UNSIGNED-PAYLOAD':
            pass
        elif _is_streaming(aws_sha256):
            decoded_content_length = self.headers.get(
                'x-amz-decoded-content-length')
            try:
                decoded_content_length = int(decoded_content_length)
            except (ValueError, TypeError):
                raise MissingContentLength
            if decoded_content_length < 0:
                raise InvalidArgument('x-amz-decoded-content-length',
                                      decoded_content_length)

            if not isinstance(self, SigV4Mixin) or self._is_query_auth:
                if decoded_content_length < (self.content_length or 0):
                    raise IncompleteBody(
                        number_bytes_expected=decoded_content_length,
                        number_bytes_provided=self.content_length,
                    )
                body = self.body_file.read()
                raise XAmzContentSHA256Mismatch(
                    client_computed_content_s_h_a256=aws_sha256,
                    s3_computed_content_s_h_a256=sha256(body).hexdigest(),
                )
            elif aws_sha256 in (
                    'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD',
                    'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER',
            ):
                raise S3NotImplemented(
                    "Don't know how to validate %s streams"
                    % aws_sha256)

        elif not looks_like_sha256 and self._is_x_amz_content_sha256_required:
            raise InvalidArgument(
                'x-amz-content-sha256',
                aws_sha256,
                'x-amz-content-sha256 must be UNSIGNED-PAYLOAD, '
                'STREAMING-UNSIGNED-PAYLOAD-TRAILER, '
                'STREAMING-AWS4-HMAC-SHA256-PAYLOAD, '
                'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER or '
                'a valid sha256 value.')

        return aws_sha256

    def _cleanup_content_encoding(self):
        if 'aws-chunked' in self.headers.get('Content-Encoding', ''):
            new_enc = ', '.join(
                enc for enc in list_from_csv(
                    self.headers.pop('Content-Encoding'))
                # TODO: test what's stored w/ 'aws-chunked, aws-chunked'
                if enc != 'aws-chunked')
            if new_enc:
                # used to be, AWS would store '', but not any more
                self.headers['Content-Encoding'] = new_enc

    def _install_streaming_input_wrapper(self, aws_sha256):
        self._cleanup_content_encoding()
        self.content_length = int(self.headers.get(
            'x-amz-decoded-content-length'))
        expected_trailers = set()
        if aws_sha256 == 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER':
            expected_trailers.add('x-amz-trailer-signature')
        trailer = self.headers.get('x-amz-trailer', '')
        trailer_list = [
            v.strip() for v in trailer.rstrip(',').split(',')
        ] if trailer.strip() else []
        if len(trailer_list) > 1:
            raise InvalidRequest(
                'Expecting a single x-amz-checksum- header. Multiple '
                'checksum Types are not allowed.')
        else:
            expected_trailers.update(trailer_list)
        streaming_input = StreamingInput(
            self.environ['wsgi.input'],
            self.content_length,
            expected_trailers,
            None if aws_sha256 == 'STREAMING-UNSIGNED-PAYLOAD-TRAILER'
            else self.sig_checker)
        self.environ['wsgi.input'] = streaming_input
        return streaming_input

    def _install_non_streaming_input_wrapper(self, aws_sha256):
        if (aws_sha256 not in (None, 'UNSIGNED-PAYLOAD') and
                self.content_length is not None):
            self.environ['wsgi.input'] = HashingInput(
                self.environ['wsgi.input'],
                self.content_length,
                aws_sha256)
        # If no content-length, either client's trying to do a HTTP chunked
        # transfer, or a HTTP/1.0-style transfer (in which case swift will
        # reject with length-required and we'll translate back to
        # MissingContentLength)

    def _validate_headers(self):
        if 'CONTENT_LENGTH' in self.environ:
@@ -963,21 +1401,7 @@ class S3Request(swob.Request):
        if 'x-amz-website-redirect-location' in self.headers:
            raise S3NotImplemented('Website redirection is not supported.')

        aws_sha256 = self._validate_sha256()
        if (aws_sha256
                and aws_sha256 != 'UNSIGNED-PAYLOAD'
                and self.content_length is not None):
            # Even if client-provided SHA doesn't look like a SHA, wrap the
            # input anyway so we'll send the SHA of what the client sent in
            # the eventual error
            self.environ['wsgi.input'] = HashingInput(
                self.environ['wsgi.input'],
                self.content_length,
                aws_sha256)
        # If no content-length, either client's trying to do a HTTP chunked
        # transfer, or a HTTP/1.0-style transfer (in which case swift will
        # reject with length-required and we'll translate back to
        # MissingContentLength)
        self._validate_sha256()

        value = _header_strip(self.headers.get('Content-MD5'))
        if value is not None:
@@ -994,15 +1418,6 @@ class S3Request(swob.Request):
            if len(self.headers['ETag']) != 32:
                raise InvalidDigest(content_md5=value)

        # https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
        # describes some of what would be required to support this
        if any(['aws-chunked' in self.headers.get('content-encoding', ''),
                'STREAMING-AWS4-HMAC-SHA256-PAYLOAD' == self.headers.get(
                    'x-amz-content-sha256', ''),
                'x-amz-decoded-content-length' in self.headers]):
            raise S3NotImplemented('Transfering payloads in multiple chunks '
                                   'using aws-chunked is not supported.')

        if 'x-amz-tagging' in self.headers:
            raise S3NotImplemented('Object tagging is not supported.')

@@ -1466,6 +1881,8 @@ class S3Request(swob.Request):
    def translate_read_errors(self):
        try:
            yield
        except S3InputIncomplete:
            raise IncompleteBody('The request body terminated unexpectedly')
        except S3InputSHA256Mismatch as err:
            # hopefully by now any modifications to the path (e.g. tenant to
            # account translation) will have been made by auth middleware
@@ -1473,6 +1890,31 @@ class S3Request(swob.Request):
                client_computed_content_s_h_a256=err.expected,
                s3_computed_content_s_h_a256=err.computed,
            )
        except S3InputChunkSignatureMismatch:
            raise SignatureDoesNotMatch(
                **self.signature_does_not_match_kwargs())
        except S3InputSizeError as e:
            raise IncompleteBody(
                number_bytes_expected=e.expected,
                number_bytes_provided=e.provided,
            )
        except S3InputChunkTooSmall as e:
            raise InvalidChunkSizeError(
                chunk=e.chunk_number,
                bad_chunk_size=e.bad_chunk_size,
            )
        except S3InputMalformedTrailer:
            raise MalformedTrailerError
        except S3InputMissingSecret:
            # XXX: We should really log something here. The poor user can't do
            # anything about this; we need to notify the operator to notify the
            # auth middleware developer
            raise S3NotImplemented('Transferring payloads in multiple chunks '
                                   'using aws-chunked is not supported.')
        except S3InputError:
            # All cases should be covered above, but belt & braces
            # NB: general exception handler in s3api.py will log traceback
            raise InternalError

    def _get_response(self, app, method, container, obj,
                      headers=None, body=None, query=None):
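A brief aside on how the per-chunk signatures above chain together (a sketch by the editor, not part of the diff; the key, scope and seed values below are placeholders): each chunk's string-to-sign embeds the previous signature, starting from the seed signature in the Authorization header, so a single bad chunk invalidates every signature after it.

import hashlib
import hmac

def chunk_signature(signing_key, scope, amz_date, prev_signature, chunk_data):
    # Mirrors SigCheckerV4._chunk_string_to_sign() above: the previous
    # signature is folded into each chunk's string-to-sign, so signatures
    # chain from the seed signature in the Authorization header.
    string_to_sign = b'\n'.join([
        b'AWS4-HMAC-SHA256-PAYLOAD',
        amz_date.encode('ascii'),
        scope.encode('utf8'),
        prev_signature.encode('utf8'),
        hashlib.sha256(b'').hexdigest().encode('utf8'),
        hashlib.sha256(chunk_data).hexdigest().encode('utf8'),
    ])
    return hmac.new(signing_key, string_to_sign, hashlib.sha256).hexdigest()

# Placeholder values, just to show the chaining:
key = b'derived-signing-key'
sig = 'seed-signature-from-authorization-header'
for chunk in (b'abcdefghij', b'klmnopqrst', b''):   # final chunk is empty
    sig = chunk_signature(key, '20220330/us-east-1/s3/aws4_request',
                          '20220330T095351Z', sig, chunk)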
@@ -408,7 +408,7 @@ class IllegalVersioningConfigurationException(ErrorResponse):
class IncompleteBody(ErrorResponse):
    _status = '400 Bad Request'
    _msg = 'You did not provide the number of bytes specified by the ' \
        'Content-Length HTTP header.'
        'Content-Length HTTP header'


class IncorrectNumberOfFilesInPostRequest(ErrorResponse):
@@ -457,6 +457,11 @@ class InvalidBucketState(ErrorResponse):
    _msg = 'The request is not valid with the current state of the bucket.'


class InvalidChunkSizeError(ErrorResponse):
    _status = '403 Forbidden'
    _msg = 'Only the last chunk is allowed to have a size less than 8192 bytes'


class InvalidDigest(ErrorResponse):
    _status = '400 Bad Request'
    _msg = 'The Content-MD5 you specified was invalid.'
@@ -578,6 +583,12 @@ class MalformedPOSTRequest(ErrorResponse):
        'multipart/form-data.'


class MalformedTrailerError(ErrorResponse):
    _status = '400 Bad Request'
    _msg = 'The request contained trailing data that was not well-formed ' \
        'or did not conform to our published schema.'


class MalformedXML(ErrorResponse):
    _status = '400 Bad Request'
    _msg = 'The XML you provided was not well-formed or did not validate ' \
@@ -13,7 +13,6 @@
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import calendar
@@ -29,7 +28,7 @@ from swift.common.middleware.s3api.utils import S3Timestamp
from swift.common.utils import md5, quote

from test.functional.s3api import S3ApiBase, SigV4Mixin, \
    skip_boto2_sort_header_bug
    skip_boto2_sort_header_bug, S3ApiBaseBoto3, get_boto3_conn
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code, calculate_md5, \
    get_error_msg
@@ -45,6 +44,43 @@ def tearDownModule():
    tf.teardown_package()


class TestS3ApiObjectBoto3(S3ApiBaseBoto3):
    def setUp(self):
        super().setUp()
        self.conn = get_boto3_conn(tf.config['s3_access_key'],
                                   tf.config['s3_secret_key'])
        self.bucket = 'test-bucket'
        resp = self.conn.create_bucket(Bucket=self.bucket)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])

    def test_put(self):
        body = b'abcd' * 8192
        resp = self.conn.put_object(Bucket=self.bucket, Key='obj', Body=body)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        resp = self.conn.get_object(Bucket=self.bucket, Key='obj')
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(body, resp['Body'].read())

    def test_put_chunked(self):
        body = b'abcd' * 8192
        resp = self.conn.put_object(Bucket=self.bucket, Key='obj', Body=body,
                                    ContentEncoding='aws-chunked')
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        resp = self.conn.get_object(Bucket=self.bucket, Key='obj')
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(body, resp['Body'].read())

    def test_put_chunked_sha256(self):
        body = b'abcd' * 8192
        resp = self.conn.put_object(Bucket=self.bucket, Key='obj', Body=body,
                                    ContentEncoding='aws-chunked',
                                    ChecksumAlgorithm='SHA256')
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        resp = self.conn.get_object(Bucket=self.bucket, Key='obj')
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(body, resp['Body'].read())


class TestS3ApiObject(S3ApiBase):
    def setUp(self):
        super(TestS3ApiObject, self).setUp()
File diff suppressed because it is too large
@@ -793,7 +793,7 @@ class TestMultiPartUpload(BaseMultiPartUploadTestCase):

        with self.assertRaises(ClientError) as cm:
            self.get_part(key_name, 3)
        self.assertEqual(416, status_from_error(cm.exception))
        self.assertEqual(416, status_from_error(cm.exception), cm.exception)
        self.assertEqual('InvalidPartNumber', code_from_error(cm.exception))

    def test_create_upload_complete_misordered_parts(self):
@@ -1136,6 +1136,16 @@ class TestS3ApiMultiUpload(BaseS3ApiMultiUpload, S3ApiTestCase):
             'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache)

    def test_object_multipart_upload_initiate_with_checksum_algorithm(self):
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 204}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204}
        self._test_object_multipart_upload_initiate(
            {'X-Amz-Checksum-Algorithm': 'CRC32',
             'X-Amz-Checksum-Type': 'COMPOSITE'}, fake_memcache)

    def test_object_mpu_initiate_with_segment_bucket_mixed_policy(self):
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
@@ -919,23 +919,6 @@ class TestS3ApiMiddleware(S3ApiTestCase):
    def test_website_redirect_location(self):
        self._test_unsupported_header('x-amz-website-redirect-location')

    def test_aws_chunked(self):
        self._test_unsupported_header('content-encoding', 'aws-chunked')
        # https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
        # has a multi-encoding example:
        #
        # > Amazon S3 supports multiple content encodings. For example:
        # >
        # > Content-Encoding : aws-chunked,gzip
        # > That is, you can specify your custom content-encoding when using
        # > Signature Version 4 streaming API.
        self._test_unsupported_header('Content-Encoding', 'aws-chunked,gzip')
        # Some clients skip the content-encoding,
        # such as minio-go and aws-sdk-java
        self._test_unsupported_header('x-amz-content-sha256',
                                      'STREAMING-AWS4-HMAC-SHA256-PAYLOAD')
        self._test_unsupported_header('x-amz-decoded-content-length')

    def test_object_tagging(self):
        self._test_unsupported_header('x-amz-tagging')

@ -17,11 +17,12 @@ from datetime import timedelta
|
||||
import hashlib
|
||||
from unittest.mock import patch, MagicMock
|
||||
import unittest
|
||||
import unittest.mock as mock
|
||||
|
||||
from io import BytesIO
|
||||
|
||||
from swift.common import swob
|
||||
from swift.common.middleware.s3api import s3response, controllers
|
||||
from swift.common.middleware.s3api import s3request, s3response, controllers
|
||||
from swift.common.swob import Request, HTTPNoContent
|
||||
from swift.common.middleware.s3api.utils import mktime, Config
|
||||
from swift.common.middleware.s3api.acl_handlers import get_acl_handler
|
||||
@ -30,7 +31,8 @@ from swift.common.middleware.s3api.subresource import ACL, User, Owner, \
|
||||
from test.unit.common.middleware.s3api.test_s3api import S3ApiTestCase
|
||||
from swift.common.middleware.s3api.s3request import S3Request, \
|
||||
S3AclRequest, SigV4Request, SIGV4_X_AMZ_DATE_FORMAT, HashingInput, \
|
||||
S3InputSHA256Mismatch
|
||||
ChunkReader, StreamingInput, S3InputSHA256Mismatch, \
|
||||
S3InputChunkSignatureMismatch
|
||||
from swift.common.middleware.s3api.s3response import InvalidArgument, \
|
||||
NoSuchBucket, InternalError, ServiceUnavailable, \
|
||||
AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed, \
|
||||
@ -97,6 +99,7 @@ class TestRequest(S3ApiTestCase):
|
||||
def setUp(self):
|
||||
super(TestRequest, self).setUp()
|
||||
self.s3api.conf.s3_acl = True
|
||||
s3request.SIGV4_CHUNK_MIN_SIZE = 2
|
||||
|
||||
@patch('swift.common.middleware.s3api.acl_handlers.ACL_MAP', Fake_ACL_MAP)
|
||||
@patch('swift.common.middleware.s3api.s3request.S3AclRequest.authenticate',
|
||||
@ -1039,7 +1042,7 @@ class TestRequest(S3ApiTestCase):
|
||||
caught.exception.body)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_v4_req_xmz_content_sha256_missing(self):
|
||||
def test_v4_req_amz_content_sha256_missing(self):
|
||||
# Virtual hosted-style
|
||||
self.s3api.conf.storage_domains = ['s3.test.com']
|
||||
environ = {
|
||||
@ -1183,6 +1186,542 @@ class TestRequest(S3ApiTestCase):
|
||||
self.assertIn(b'Cannot specify both Range header and partNumber query '
|
||||
b'parameter', cm.exception.body)
|
||||
|
||||
@mock.patch('swift.common.middleware.s3api.subresource.ACL.check_owner')
|
||||
def test_sigv2_content_sha256_ok(self, mock_check_owner):
|
||||
good_sha_256 = hashlib.sha256(b'body').hexdigest()
|
||||
req = Request.blank('/bucket/object',
|
||||
method='PUT',
|
||||
body=b'body',
|
||||
headers={'content-encoding': 'aws-chunked',
|
||||
'x-amz-content-sha256': good_sha_256,
|
||||
'Content-Length': '4',
|
||||
'Authorization': 'AWS test:tester:hmac',
|
||||
'Date': self.get_date_header()})
|
||||
|
||||
status, headers, body = self.call_s3api(req)
|
||||
self.assertEqual(status, '200 OK')
|
||||
|
||||
@mock.patch('swift.common.middleware.s3api.subresource.ACL.check_owner')
|
||||
def test_sigv2_content_sha256_bad_value(self, mock_check_owner):
|
||||
good_sha_256 = hashlib.sha256(b'body').hexdigest()
|
||||
bad_sha_256 = hashlib.sha256(b'not body').hexdigest()
|
||||
req = Request.blank('/bucket/object',
|
||||
method='PUT',
|
||||
body=b'body',
|
||||
headers={'content-encoding': 'aws-chunked',
|
||||
'x-amz-content-sha256':
|
||||
bad_sha_256,
|
||||
'Content-Length': '4',
|
||||
'Authorization': 'AWS test:tester:hmac',
|
||||
'Date': self.get_date_header()})
|
||||
|
||||
status, headers, body = self.call_s3api(req)
|
||||
self.assertEqual(status, '400 Bad Request')
|
||||
self.assertIn(f'<ClientComputedContentSHA256>{bad_sha_256}'
|
||||
'</ClientComputedContentSHA256>',
|
||||
body.decode('utf8'))
|
||||
self.assertIn(f'<S3ComputedContentSHA256>{good_sha_256}'
|
||||
'</S3ComputedContentSHA256>',
|
||||
body.decode('utf8'))
|
||||
|
||||
@mock.patch('swift.common.middleware.s3api.subresource.ACL.check_owner')
|
||||
def test_sigv2_content_encoding_aws_chunked_is_ignored(
|
||||
self, mock_check_owner):
|
||||
req = Request.blank('/bucket/object',
|
||||
method='PUT',
|
||||
headers={'content-encoding': 'aws-chunked',
|
||||
'Authorization': 'AWS test:tester:hmac',
|
||||
'Date': self.get_date_header()})
|
||||
|
||||
status, _, body = self.call_s3api(req)
|
||||
self.assertEqual(status, '200 OK')
|
||||
|
||||
def test_sigv2_content_sha256_streaming_is_bad_request(self):
|
||||
def do_test(sha256):
|
||||
req = Request.blank(
|
||||
'/bucket/object',
|
||||
method='PUT',
|
||||
headers={'content-encoding': 'aws-chunked',
|
||||
'x-amz-content-sha256': sha256,
|
||||
'Content-Length': '0',
|
||||
'x-amz-decoded-content-length': '0',
|
||||
'Authorization': 'AWS test:tester:hmac',
|
||||
'Date': self.get_date_header()})
|
||||
status, _, body = self.call_s3api(req)
|
||||
# sig v2 wants that to actually be the SHA!
|
||||
self.assertEqual(status, '400 Bad Request', body)
|
||||
self.assertEqual(self._get_error_code(body),
|
||||
'XAmzContentSHA256Mismatch')
|
||||
self.assertIn(f'<ClientComputedContentSHA256>{sha256}'
|
||||
'</ClientComputedContentSHA256>',
|
||||
body.decode('utf8'))
|
||||
|
||||
do_test('STREAMING-UNSIGNED-PAYLOAD-TRAILER')
|
||||
do_test('STREAMING-AWS4-HMAC-SHA256-PAYLOAD')
|
||||
do_test('STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER')
|
||||
do_test('STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD')
|
||||
do_test('STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER')
|
||||
|
||||
def test_sigv2_content_sha256_streaming_no_decoded_content_length(self):
|
||||
# MissingContentLength trumps XAmzContentSHA256Mismatch
|
||||
def do_test(sha256):
|
||||
req = Request.blank(
|
||||
'/bucket/object',
|
||||
method='PUT',
|
||||
headers={'content-encoding': 'aws-chunked',
|
||||
'x-amz-content-sha256': sha256,
|
||||
'Content-Length': '0',
|
||||
'Authorization': 'AWS test:tester:hmac',
|
||||
'Date': self.get_date_header()})
|
||||
status, _, body = self.call_s3api(req)
|
||||
self.assertEqual(status, '411 Length Required', body)
|
||||
self.assertEqual(self._get_error_code(body),
|
||||
'MissingContentLength')
|
||||
|
||||
do_test('STREAMING-UNSIGNED-PAYLOAD-TRAILER')
|
||||
do_test('STREAMING-AWS4-HMAC-SHA256-PAYLOAD')
|
||||
do_test('STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER')
|
||||
do_test('STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD')
|
||||
do_test('STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER')
|
||||
|
||||
def _make_sig_v4_unsigned_payload_req(self, body=None, extra_headers=None):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-length;host;x-amz-content-sha256;'
|
||||
'x-amz-date,'
|
||||
'Signature=d14bba0da2bba545c8275cb75c99b326cbdfdad015465dbaeca'
|
||||
'e18c7647c73da',
|
||||
'Content-Length': '27',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256': 'UNSIGNED-PAYLOAD',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
}
|
||||
if extra_headers:
|
||||
headers.update(extra_headers)
|
||||
return Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def _test_sig_v4_unsigned_payload(self, body=None, extra_headers=None):
|
||||
req = self._make_sig_v4_unsigned_payload_req(
|
||||
body=body, extra_headers=extra_headers)
|
||||
sigv4_req = SigV4Request(req.environ)
|
||||
# Verify header signature
|
||||
self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
|
||||
return sigv4_req
|
||||
|
||||
def test_sig_v4_unsgnd_pyld_no_crc_ok(self):
|
||||
body = b'abcdefghijklmnopqrstuvwxyz\n'
|
||||
sigv4_req = self._test_sig_v4_unsigned_payload(body=body)
|
||||
resp_body = sigv4_req.environ['wsgi.input'].read()
|
||||
self.assertEqual(body, resp_body)
|
||||
|
||||
def _make_valid_v4_streaming_hmac_sha256_payload_request(self):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-encoding;content-length;host;x-amz-con'
|
||||
'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
|
||||
'Signature=aa1b67fc5bc4503d05a636e6e740dcb757d3aa2352f32e7493f'
|
||||
'261f71acbe1d5',
|
||||
'Content-Encoding': 'aws-chunked',
|
||||
'Content-Length': '369',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
'X-Amz-Decoded-Content-Length': '25'}
|
||||
body = 'a;chunk-signature=4a397f01db2cd700402dc38931b462e789ae49911d' \
|
||||
'c229d93c9f9c46fd3e0b21\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=49177768ee3e9b77c6353ab9f3b9747d188adc11d4' \
|
||||
'5b38be94a130616e6d64dc\r\nklmnopqrst\r\n' \
|
||||
'5;chunk-signature=c884ebbca35b923cf864854e2a906aa8f5895a7140' \
|
||||
'6c73cc6d4ee057527a8c23\r\nuvwz\n\r\n' \
|
||||
'0;chunk-signature=50f7c470d6bf6c59126eecc2cb020d532a69c92322' \
|
||||
'ddfbbd21811de45491022c\r\n\r\n'
|
||||
|
||||
req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body.encode('utf8'))
|
||||
return SigV4Request(req.environ)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_check_signature_v4_hmac_sha256_payload_chunk_valid(self):
|
||||
s3req = self._make_valid_v4_streaming_hmac_sha256_payload_request()
|
||||
# Verify header signature
|
||||
self.assertTrue(s3req.sig_checker.check_signature('secret'))
|
||||
|
||||
self.assertEqual(b'abcdefghij', s3req.environ['wsgi.input'].read(10))
|
||||
self.assertEqual(b'klmnopqrst', s3req.environ['wsgi.input'].read(10))
|
||||
self.assertEqual(b'uvwz\n', s3req.environ['wsgi.input'].read(10))
|
||||
self.assertEqual(b'', s3req.environ['wsgi.input'].read(10))
|
||||
self.assertTrue(s3req.sig_checker._all_chunk_signatures_valid)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_check_signature_v4_hmac_sha256_payload_no_secret(self):
|
||||
# verify S3InputError if auth middleware does NOT call check_signature
|
||||
# before the stream is read
|
||||
s3req = self._make_valid_v4_streaming_hmac_sha256_payload_request()
|
||||
with self.assertRaises(s3request.S3InputMissingSecret) as cm:
|
||||
s3req.environ['wsgi.input'].read(10)
|
||||
|
||||
# ...which in context gets translated to a 501 response
|
||||
s3req = self._make_valid_v4_streaming_hmac_sha256_payload_request()
|
||||
with self.assertRaises(s3response.S3NotImplemented) as cm, \
|
||||
s3req.translate_read_errors():
|
||||
s3req.environ['wsgi.input'].read(10)
|
||||
self.assertIn(
|
||||
'Transferring payloads in multiple chunks using aws-chunked is '
|
||||
'not supported.', str(cm.exception.body))
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_check_signature_v4_hmac_sha256_payload_chunk_invalid(self):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-encoding;content-length;host;x-amz-con'
|
||||
'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
|
||||
'Signature=aa1b67fc5bc4503d05a636e6e740dcb757d3aa2352f32e7493f'
|
||||
'261f71acbe1d5',
|
||||
'Content-Encoding': 'aws-chunked',
|
||||
'Content-Length': '369',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
'X-Amz-Decoded-Content-Length': '25'}
|
||||
# second chunk signature is incorrect, should be
|
||||
# 49177768ee3e9b77c6353ab9f3b9747d188adc11d45b38be94a130616e6d64dc
|
||||
body = 'a;chunk-signature=4a397f01db2cd700402dc38931b462e789ae49911d' \
|
||||
'c229d93c9f9c46fd3e0b21\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=49177768ee3e9b77c6353ab0f3b9747d188adc11d4' \
|
||||
'5b38be94a130616e6d64dc\r\nklmnopqrst\r\n' \
|
||||
'5;chunk-signature=c884ebbca35b923cf864854e2a906aa8f5895a7140' \
|
||||
'6c73cc6d4ee057527a8c23\r\nuvwz\n\r\n' \
|
||||
'0;chunk-signature=50f7c470d6bf6c59126eecc2cb020d532a69c92322' \
|
||||
'ddfbbd21811de45491022c\r\n\r\n'
|
||||
|
||||
req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body.encode('utf8'))
|
||||
sigv4_req = SigV4Request(req.environ)
|
||||
# Verify header signature
|
||||
self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
|
||||
|
||||
self.assertEqual(b'abcdefghij', req.environ['wsgi.input'].read(10))
|
||||
with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
|
||||
req.environ['wsgi.input'].read(10)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_check_signature_v4_hmac_sha256_payload_chunk_wrong_size(self):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-encoding;content-length;host;x-amz-con'
|
||||
'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
|
||||
'Signature=aa1b67fc5bc4503d05a636e6e740dcb757d3aa2352f32e7493f'
|
||||
'261f71acbe1d5',
|
||||
'Content-Encoding': 'aws-chunked',
|
||||
'Content-Length': '369',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
'X-Amz-Decoded-Content-Length': '25'}
|
||||
# 2nd chunk contains an incorrect chunk size (9 should be a)...
|
||||
body = 'a;chunk-signature=4a397f01db2cd700402dc38931b462e789ae49911d' \
|
||||
'c229d93c9f9c46fd3e0b21\r\nabcdefghij\r\n' \
|
||||
'9;chunk-signature=49177768ee3e9b77c6353ab9f3b9747d188adc11d4' \
|
||||
'5b38be94a130616e6d64dc\r\nklmnopqrst\r\n' \
|
||||
'5;chunk-signature=c884ebbca35b923cf864854e2a906aa8f5895a7140' \
|
||||
'6c73cc6d4ee057527a8c23\r\nuvwz\n\r\n' \
|
||||
'0;chunk-signature=50f7c470d6bf6c59126eecc2cb020d532a69c92322' \
|
||||
'ddfbbd21811de45491022c\r\n\r\n'
|
||||
|
||||
req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body.encode('utf8'))
|
||||
sigv4_req = SigV4Request(req.environ)
|
||||
# Verify header signature
|
||||
self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
|
||||
|
||||
self.assertEqual(b'abcdefghij', req.environ['wsgi.input'].read(10))
|
||||
with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
|
||||
req.environ['wsgi.input'].read(10)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def test_check_signature_v4_hmac_sha256_payload_chunk_no_last_chunk(self):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-encoding;content-length;host;x-amz-con'
|
||||
'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
|
||||
'Signature=99759fb2823febb695950e6b75a7a1396b164742da9d204f71f'
|
||||
'db3a3a52216aa',
|
||||
'Content-Encoding': 'aws-chunked',
|
||||
'Content-Length': '283',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
'X-Amz-Decoded-Content-Length': '25'}
|
||||
body = 'a;chunk-signature=9c35d0203ce923cb7837b5e4a2984f2c107b05ac45' \
|
||||
'80bafce7541c4b142b9712\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=f514382beed5f287a5181b8293399fe006fd9398ee' \
|
||||
'4b8aed910238092a4d5ec7\r\nklmnopqrst\r\n' \
|
||||
'5;chunk-signature=ed6a54f035b920e7daa378ab2d255518c082573c98' \
|
||||
'60127c80d43697375324f4\r\nuvwz\n\r\n'
|
||||
|
||||
req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body.encode('utf8'))
|
||||
sigv4_req = SigV4Request(req.environ)
|
||||
# Verify header signature
|
||||
self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
|
||||
self.assertEqual(b'abcdefghij', req.environ['wsgi.input'].read(10))
|
||||
self.assertEqual(b'klmnopqrst', req.environ['wsgi.input'].read(10))
|
||||
with self.assertRaises(s3request.S3InputIncomplete):
|
||||
req.environ['wsgi.input'].read(5)
|
||||
|
||||
@patch.object(S3Request, '_validate_dates', lambda *a: None)
|
||||
def _test_sig_v4_streaming_aws_hmac_sha256_payload_trailer(
|
||||
self, body):
|
||||
environ = {
|
||||
'HTTP_HOST': 's3.test.com',
|
||||
'REQUEST_METHOD': 'PUT',
|
||||
'RAW_PATH_INFO': '/test/file'}
|
||||
headers = {
|
||||
'Authorization':
|
||||
'AWS4-HMAC-SHA256 '
|
||||
'Credential=test/20220330/us-east-1/s3/aws4_request,'
|
||||
'SignedHeaders=content-encoding;content-length;host;x-amz-con'
|
||||
'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
|
||||
'Signature=bee7ad4f1a4f16c22f3b24155ab749b2aca0773065ccf08bc41'
|
||||
'a1e8e84748311',
|
||||
'Content-Encoding': 'aws-chunked',
|
||||
'Content-Length': '369',
|
||||
'Host': 's3.test.com',
|
||||
'X-Amz-Content-SHA256':
|
||||
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
|
||||
'X-Amz-Date': '20220330T095351Z',
|
||||
'X-Amz-Decoded-Content-Length': '27',
|
||||
'X-Amz-Trailer': 'x-amz-checksum-sha256',
|
||||
}
|
||||
req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
|
||||
headers=headers, body=body.encode('utf8'))
|
||||
sigv4_req = SigV4Request(req.environ)
|
||||
# Verify header signature
|
||||
self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
|
||||
return req
|
||||
|
||||
def test_check_sig_v4_streaming_aws_hmac_sha256_payload_trailer_ok(self):
|
||||
body = 'a;chunk-signature=c9dd07703599d3d0bd51c96193110756d4f7091d5a' \
|
||||
'4408314a53a802e635b1ad\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=662dc18fb1a3ddad6abc2ce9ebb0748bedacd219eb' \
|
||||
'223a5e80721c2637d30240\r\nklmnopqrst\r\n' \
|
||||
'7;chunk-signature=b63f141c2012de9ac60b961795ef31ad3202b125aa' \
|
||||
'873b4142cf9d815360abc0\r\nuvwxyz\n\r\n' \
|
||||
'0;chunk-signature=b1ff1f86dccfbe9bcc80011e2b87b72e43e0c7f543' \
|
||||
'bb93612c06f9808ccb772e\r\n' \
|
||||
'x-amz-checksum-sha256:foo\r\n' \
|
||||
'x-amz-trailer-signature:347dd27b77f240eee9904e9aaaa10acb955a' \
|
||||
'd1bd0d6dd2e2c64794195eb5535b\r\n'
|
||||
req = self._test_sig_v4_streaming_aws_hmac_sha256_payload_trailer(body)
|
||||
self.assertEqual(b'abcdefghijklmnopqrstuvwxyz\n',
|
||||
req.environ['wsgi.input'].read())
|
||||
|
||||
def test_check_sig_v4_streaming_aws_hmac_sha256_missing_trailer_sig(self):
|
||||
body = 'a;chunk-signature=c9dd07703599d3d0bd51c96193110756d4f7091d5a' \
|
||||
'4408314a53a802e635b1ad\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=662dc18fb1a3ddad6abc2ce9ebb0748bedacd219eb' \
|
||||
'223a5e80721c2637d30240\r\nklmnopqrst\r\n' \
|
||||
'7;chunk-signature=b63f141c2012de9ac60b961795ef31ad3202b125aa' \
|
||||
'873b4142cf9d815360abc0\r\nuvwxyz\n\r\n' \
|
||||
'0;chunk-signature=b1ff1f86dccfbe9bcc80011e2b87b72e43e0c7f543' \
|
||||
'bb93612c06f9808ccb772e\r\n' \
|
||||
'x-amz-checksum-sha256:foo\r\n'
|
||||
req = self._test_sig_v4_streaming_aws_hmac_sha256_payload_trailer(body)
|
||||
with self.assertRaises(s3request.S3InputIncomplete):
|
||||
req.environ['wsgi.input'].read()
|
||||
|
||||
def test_check_sig_v4_streaming_aws_hmac_sha256_payload_trailer_bad(self):
|
||||
body = 'a;chunk-signature=c9dd07703599d3d0bd51c96193110756d4f7091d5a' \
|
||||
'4408314a53a802e635b1ad\r\nabcdefghij\r\n' \
|
||||
'a;chunk-signature=000000000000000000000000000000000000000000' \
|
||||
'0000000000000000000000\r\nklmnopqrst\r\n' \
|
||||
'7;chunk-signature=b63f141c2012de9ac60b961795ef31ad3202b125aa' \
|
||||
'873b4142cf9d815360abc0\r\nuvwxyz\n\r\n' \
|
||||
'0;chunk-signature=b1ff1f86dccfbe9bcc80011e2b87b72e43e0c7f543' \
|
||||
'bb93612c06f9808ccb772e\r\n' \
|
||||
'x-amz-checksum-sha256:foo\r\n'
|
||||
req = self._test_sig_v4_streaming_aws_hmac_sha256_payload_trailer(body)
|
||||
self.assertEqual(b'abcdefghij', req.environ['wsgi.input'].read(10))
|
||||
with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
|
||||
req.environ['wsgi.input'].read(10)
|
||||
|
||||
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def _test_sig_v4_streaming_unsigned_payload_trailer(
            self, body, x_amz_trailer='x-amz-checksum-sha256'):
        environ = {
            'HTTP_HOST': 's3.test.com',
            'REQUEST_METHOD': 'PUT',
            'RAW_PATH_INFO': '/test/file'}
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20220330/us-east-1/s3/aws4_request,'
                'SignedHeaders=content-encoding;content-length;host;x-amz-con'
                'tent-sha256;x-amz-date;x-amz-decoded-content-length,'
                'Signature=43727fcfa7765e97cd3cbfc112fed5fedc31e2b7930588ddbca'
                '3feaa1205a7f2',
            'Content-Encoding': 'aws-chunked',
            'Content-Length': '369',
            'Host': 's3.test.com',
            'X-Amz-Content-SHA256': 'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
            'X-Amz-Date': '20220330T095351Z',
            'X-Amz-Decoded-Content-Length': '27',
        }
        if x_amz_trailer is not None:
            headers['X-Amz-Trailer'] = x_amz_trailer
        req = Request.blank(environ['RAW_PATH_INFO'], environ=environ,
                            headers=headers, body=body.encode('utf8'))
        sigv4_req = SigV4Request(req.environ)
        # Verify header signature
        self.assertTrue(sigv4_req.sig_checker.check_signature('secret'))
        return req

    def test_check_sig_v4_streaming_unsigned_payload_trailer_ok(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-sha256:foo\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrstuvwxyz\n',
                         req.environ['wsgi.input'].read())

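For STREAMING-UNSIGNED-PAYLOAD-TRAILER there are no per-chunk signatures: the body is plain aws-chunked framing (hex chunk size, CRLF, chunk bytes, CRLF, repeated), terminated by a zero-size chunk and any trailer lines. A sketch of a builder that should reproduce the body used above; the function name is illustrative and not part of the patch:

def encode_unsigned_chunks(payload, chunk_size, trailers=None):
    # frame payload bytes as aws-chunked, with optional trailer headers
    out = []
    for i in range(0, len(payload), chunk_size):
        chunk = payload[i:i + chunk_size]
        out.append(b'%x\r\n%s\r\n' % (len(chunk), chunk))
    out.append(b'0\r\n')
    for name, value in (trailers or {}).items():
        out.append(name.encode('ascii') + b':' + value.encode('ascii')
                   + b'\r\n')
    return b''.join(out)

For example, encode_unsigned_chunks(b'abcdefghijklmnopqrstuvwxyz\n', 10, {'x-amz-checksum-sha256': 'foo'}) matches the body in the test above, and omitting the trailers mapping matches the trailer-less body accepted by the next test.
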
    def test_check_sig_v4_streaming_unsigned_payload_trailer_none_ok(self):
        # verify it's ok to not send any trailer
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(
            body, x_amz_trailer=None)
        self.assertEqual(b'abcdefghijklmnopqrstuvwxyz\n',
                         req.environ['wsgi.input'].read())

    def test_check_sig_v4_streaming_unsigned_payload_trailer_undeclared(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-sha256:undeclared\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(
            body, x_amz_trailer=None)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        with self.assertRaises(s3request.S3InputIncomplete):
            req.environ['wsgi.input'].read()

    def test_check_sig_v4_streaming_unsigned_payload_trailer_multiple(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-sha256:undeclared\r\n'
        with self.assertRaises(s3request.InvalidRequest):
            self._test_sig_v4_streaming_unsigned_payload_trailer(
                body,
                x_amz_trailer='x-amz-checksum-sha256,x-amz-checksum-crc32')

    def test_check_sig_v4_streaming_unsigned_payload_trailer_mismatch(self):
        # the unexpected footer is detected before the incomplete line
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-not-sha256:foo\r\n' \
               'x-'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        # trailers are read with penultimate chunk??
        with self.assertRaises(s3request.S3InputMalformedTrailer):
            req.environ['wsgi.input'].read()

    def test_check_sig_v4_streaming_unsigned_payload_trailer_missing(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               '\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        # trailers are read with penultimate chunk??
        with self.assertRaises(s3request.S3InputMalformedTrailer):
            req.environ['wsgi.input'].read()

    def test_check_sig_v4_streaming_unsigned_payload_trailer_extra(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-crc32:foo\r\n' \
               'x-amz-checksum-sha32:foo\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        # trailers are read with penultimate chunk??
        with self.assertRaises(s3request.S3InputMalformedTrailer):
            req.environ['wsgi.input'].read()

    def test_check_sig_v4_streaming_unsigned_payload_trailer_duplicate(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-sha256:foo\r\n' \
               'x-amz-checksum-sha256:bar\r\n'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        # Reading the rest succeeds! AWS would complain about the checksum,
        # but we aren't looking at it (yet)
        req.environ['wsgi.input'].read()

    def test_check_sig_v4_streaming_unsigned_payload_trailer_short(self):
        body = 'a\r\nabcdefghij\r\n' \
               'a\r\nklmnopqrst\r\n' \
               '7\r\nuvwxyz\n\r\n' \
               '0\r\n' \
               'x-amz-checksum-sha256'
        req = self._test_sig_v4_streaming_unsigned_payload_trailer(body)
        self.assertEqual(b'abcdefghijklmnopqrst',
                         req.environ['wsgi.input'].read(20))
        # trailers are read with penultimate chunk??
        with self.assertRaises(s3request.S3InputIncomplete):
            req.environ['wsgi.input'].read()


class TestSigV4Request(S3ApiTestCase):
    def setUp(self):
@ -1551,5 +2090,403 @@ class TestHashingInput(S3ApiTestCase):
        self.assertEqual(b'6789', wrapped.readline())


class TestChunkReader(unittest.TestCase):
    def test_read_sig_checker_ok(self):
        raw = '123456789\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')

        mock_validator = MagicMock(return_value=True)
        bytes_input = BytesIO(raw)
        reader = ChunkReader(
            bytes_input, 9, mock_validator, 'chunk-signature=signature')
        self.assertEqual(9, reader.to_read)
        self.assertEqual(b'123456789', reader.read())
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456789').hexdigest(), 'signature')],
            mock_validator.call_args_list)
        self.assertFalse(bytes_input.closed)

        mock_validator = MagicMock(return_value=True)
        reader = ChunkReader(
            BytesIO(raw), 9, mock_validator, 'chunk-signature=signature')
        self.assertEqual(9, reader.to_read)
        self.assertEqual(b'12345678', reader.read(8))
        self.assertEqual(1, reader.to_read)
        self.assertEqual(b'9', reader.read(8))
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456789').hexdigest(), 'signature')],
            mock_validator.call_args_list)

        mock_validator = MagicMock(return_value=True)
        reader = ChunkReader(
            BytesIO(raw), 9, mock_validator, 'chunk-signature=signature')
        self.assertEqual(9, reader.to_read)
        self.assertEqual(b'123456789', reader.read(10))
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456789').hexdigest(), 'signature')],
            mock_validator.call_args_list)

        mock_validator = MagicMock(return_value=True)
        reader = ChunkReader(
            BytesIO(raw), 9, mock_validator, 'chunk-signature=signature')
        self.assertEqual(9, reader.to_read)
        self.assertEqual(b'123456789', reader.read(-1))
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456789').hexdigest(), 'signature')],
            mock_validator.call_args_list)

    def test_read_sig_checker_bad(self):
        raw = '123456789\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        mock_validator = MagicMock(return_value=False)
        bytes_input = BytesIO(raw)
        reader = ChunkReader(
            bytes_input, 9, mock_validator, 'chunk-signature=signature')
        reader.read(8)
        self.assertEqual(1, reader.to_read)
        with self.assertRaises(S3InputChunkSignatureMismatch):
            reader.read(1)
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456789').hexdigest(), 'signature')],
            mock_validator.call_args_list)
        self.assertTrue(bytes_input.closed)

    def test_read_no_sig_checker(self):
        raw = '123456789\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        bytes_input = BytesIO(raw)
        reader = ChunkReader(bytes_input, 9, None, None)
        self.assertEqual(9, reader.to_read)
        self.assertEqual(b'123456789', reader.read())
        self.assertEqual(0, reader.to_read)
        self.assertFalse(bytes_input.closed)

    def test_readline_sig_checker_ok_newline_is_midway_through_chunk(self):
        raw = '123456\n7\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        mock_validator = MagicMock(return_value=True)
        bytes_input = BytesIO(raw)
        reader = ChunkReader(
            bytes_input, 8, mock_validator, 'chunk-signature=signature')
        self.assertEqual(8, reader.to_read)
        self.assertEqual(b'123456\n', reader.readline())
        self.assertEqual(1, reader.to_read)
        self.assertEqual(b'7', reader.readline())
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'123456\n7').hexdigest(), 'signature')],
            mock_validator.call_args_list)
        self.assertFalse(bytes_input.closed)

    def test_readline_sig_checker_ok_newline_is_end_of_chunk(self):
        raw = '1234567\n\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        mock_validator = MagicMock(return_value=True)
        bytes_input = BytesIO(raw)
        reader = ChunkReader(
            bytes_input, 8, mock_validator, 'chunk-signature=signature')
        self.assertEqual(8, reader.to_read)
        self.assertEqual(b'1234567\n', reader.readline())
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'1234567\n').hexdigest(), 'signature')],
            mock_validator.call_args_list)
        self.assertFalse(bytes_input.closed)

    def test_readline_sig_checker_ok_partial_line_read(self):
        raw = '1234567\n\r\n0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        mock_validator = MagicMock(return_value=True)
        bytes_input = BytesIO(raw)
        reader = ChunkReader(
            bytes_input, 8, mock_validator, 'chunk-signature=signature')
        self.assertEqual(8, reader.to_read)
        self.assertEqual(b'12345', reader.readline(5))
        self.assertEqual(3, reader.to_read)
        self.assertEqual(b'67', reader.readline(2))
        self.assertEqual(1, reader.to_read)
        self.assertEqual(b'\n', reader.readline())
        self.assertEqual(0, reader.to_read)
        self.assertEqual(
            [mock.call(hashlib.sha256(b'1234567\n').hexdigest(), 'signature')],
            mock_validator.call_args_list)
        self.assertFalse(bytes_input.closed)


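In the ChunkReader tests above, mock_validator stands in for a callable that the reader invokes with the SHA-256 hex digest of the chunk body and the signature the client sent; returning False is what makes the reader raise S3InputChunkSignatureMismatch and close its input. A sketch of a non-mock callback under the same contract, chaining signatures as in the earlier sketch; make_chunk_validator is an illustrative name, not the middleware's own checker:

import hashlib
import hmac


def make_chunk_validator(signing_key, timestamp, scope, header_signature):
    # each chunk chains the signature of the chunk before it; the
    # Authorization header signature seeds the chain
    state = {'prev': header_signature}

    def validate(chunk_sha256_hex, client_signature):
        string_to_sign = '\n'.join([
            'AWS4-HMAC-SHA256-PAYLOAD', timestamp, scope, state['prev'],
            hashlib.sha256(b'').hexdigest(), chunk_sha256_hex])
        expected = hmac.new(signing_key, string_to_sign.encode('utf8'),
                            hashlib.sha256).hexdigest()
        state['prev'] = expected
        return hmac.compare_digest(expected, client_signature)

    return validate

Such a callable could be passed to ChunkReader in place of the MagicMock used in these tests.
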
class TestStreamingInput(S3ApiTestCase):
    def setUp(self):
        super(TestStreamingInput, self).setUp()
        # Override chunk min size
        s3request.SIGV4_CHUNK_MIN_SIZE = 2
        self.fake_sig_checker = MagicMock()
        self.fake_sig_checker.check_chunk_signature = \
            lambda chunk, signature: signature == 'ok'

    def test_read(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'123456789', wrapped.read())
        self.assertFalse(wrapped._input.closed)
        wrapped.close()
        self.assertTrue(wrapped._input.closed)

    def test_read_with_size(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'56', wrapped.read(2))
        # trying to read past the end gets us whatever's left
        self.assertEqual(b'789', wrapped.read(4))
        # can continue trying to read -- but it'll be empty
        self.assertEqual(b'', wrapped.read(2))

        self.assertFalse(wrapped._input.closed)
        wrapped.close()
        self.assertTrue(wrapped._input.closed)

    def test_read_multiple_chunks(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '7;chunk-signature=ok\r\nabc\ndef\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 16, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'123456789abc\ndef', wrapped.read())
        self.assertEqual(b'', wrapped.read(2))

    def test_read_multiple_chunks_with_size(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '7;chunk-signature=ok\r\nabc\ndef\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 16, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'123456789a', wrapped.read(10))
        self.assertEqual(b'bc\n', wrapped.read(3))
        self.assertEqual(b'def', wrapped.read(4))
        self.assertEqual(b'', wrapped.read(2))

    def test_readline_newline_in_middle_and_at_end(self):
        raw = 'a;chunk-signature=ok\r\n123456\n789\r\n' \
              '4;chunk-signature=ok\r\nabc\n\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 14, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'123456\n', wrapped.readline())
        self.assertEqual(b'789abc\n', wrapped.readline())
        self.assertEqual(b'', wrapped.readline())

    def test_readline_newline_in_middle_not_at_end(self):
        raw = 'a;chunk-signature=ok\r\n123456\n789\r\n' \
              '3;chunk-signature=ok\r\nabc\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 13, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'123456\n', wrapped.readline())
        self.assertEqual(b'789abc', wrapped.readline())
        self.assertEqual(b'', wrapped.readline())

    def test_readline_no_newline(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '3;chunk-signature=ok\r\nabc\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 12, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'123456789abc', wrapped.readline())
        self.assertEqual(b'', wrapped.readline())

    def test_readline_line_spans_chunks(self):
        raw = '9;chunk-signature=ok\r\nblah\nblah\r\n' \
              '9;chunk-signature=ok\r\n123456789\r\n' \
              '7;chunk-signature=ok\r\nabc\ndef\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 25, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'blah\n', wrapped.readline())
        self.assertEqual(b'blah123456789abc\n', wrapped.readline())
        self.assertEqual(b'def', wrapped.readline())

    def test_readline_with_size_line_spans_chunks(self):
        raw = '9;chunk-signature=ok\r\nblah\nblah\r\n' \
              '9;chunk-signature=ok\r\n123456789\r\n' \
              '7;chunk-signature=ok\r\nabc\ndef\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 25, set(),
                                 self.fake_sig_checker)
        self.assertEqual(b'blah\n', wrapped.readline(8))
        self.assertEqual(b'blah123456789a', wrapped.readline(14))
        self.assertEqual(b'bc\n', wrapped.readline(99))
        self.assertEqual(b'def', wrapped.readline(99))

    def test_chunk_separator_missing(self):
        raw = '9;chunk-signature=ok\r\n123456789' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        with self.assertRaises(s3request.S3InputIncomplete):
            wrapped.read()
        self.assertTrue(wrapped._input.closed)

    def test_final_newline_missing(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        with self.assertRaises(s3request.S3InputIncomplete):
            wrapped.read()
        self.assertTrue(wrapped._input.closed)

    def test_trailing_garbage_ok(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\ngarbage'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'123456789', wrapped.read())

    def test_good_with_trailers(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n' \
              'x-amz-checksum-crc32: AAAAAA==\r\n'.encode('utf8')
        wrapped = StreamingInput(
            BytesIO(raw), 9, {'x-amz-checksum-crc32'}, self.fake_sig_checker)
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'56', wrapped.read(2))
        # not at end, trailers haven't been read
        self.assertEqual({}, wrapped.trailers)
        # if we get exactly to the end, we go ahead and read the trailers
        self.assertEqual(b'789', wrapped.read(3))
        self.assertEqual({'x-amz-checksum-crc32': 'AAAAAA=='},
                         wrapped.trailers)
        # can continue trying to read -- but it'll be empty
        self.assertEqual(b'', wrapped.read(2))
        self.assertEqual({'x-amz-checksum-crc32': 'AAAAAA=='},
                         wrapped.trailers)

        self.assertFalse(wrapped._input.closed)
        wrapped.close()
        self.assertTrue(wrapped._input.closed)

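The 'AAAAAA==' trailer value above is only a placeholder. Our reading of the AWS checksum format (an assumption, not something this patch verifies) is that a real x-amz-checksum-crc32 trailer carries the base64 of the big-endian CRC32 of the decoded payload; a sketch of how a later change might check wrapped.trailers once the body has been drained:

import base64
import zlib


def crc32_trailer_value(payload):
    # base64 of the 4-byte big-endian CRC32, as assumed for the
    # x-amz-checksum-crc32 header/trailer
    return base64.b64encode(
        zlib.crc32(payload).to_bytes(4, 'big')).decode('ascii')

A checker would compare crc32_trailer_value(b'123456789') against wrapped.trailers['x-amz-checksum-crc32'] after the final read.
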
    def test_unexpected_trailers(self):
        def do_test(raw):
            wrapped = StreamingInput(
                BytesIO(raw), 9, {'x-amz-checksum-crc32'},
                self.fake_sig_checker)
            with self.assertRaises(s3request.S3InputMalformedTrailer):
                wrapped.read()
            self.assertTrue(wrapped._input.closed)

        do_test('9;chunk-signature=ok\r\n123456789\r\n'
                '0;chunk-signature=ok\r\n'
                'x-amz-checksum-sha256: value\r\n'.encode('utf8'))
        do_test('9;chunk-signature=ok\r\n123456789\r\n'
                '0;chunk-signature=ok\r\n'
                'x-amz-checksum-crc32=value\r\n'.encode('utf8'))
        do_test('9;chunk-signature=ok\r\n123456789\r\n'
                '0;chunk-signature=ok\r\n'
                'x-amz-checksum-crc32\r\n'.encode('utf8'))

    def test_wrong_signature_first_chunk(self):
        raw = '9;chunk-signature=ko\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        # Can read while in the chunk...
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'5678', wrapped.read(4))
        # But once we hit the end, bomb out
        with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
            wrapped.read(4)
        self.assertTrue(wrapped._input.closed)

    def test_wrong_signature_middle_chunk(self):
        raw = '2;chunk-signature=ok\r\n12\r\n' \
              '2;chunk-signature=ok\r\n34\r\n' \
              '2;chunk-signature=ko\r\n56\r\n' \
              '2;chunk-signature=ok\r\n78\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'1234', wrapped.read(4))
        with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
            wrapped.read(4)
        self.assertTrue(wrapped._input.closed)

    def test_wrong_signature_last_chunk(self):
        raw = '2;chunk-signature=ok\r\n12\r\n' \
              '2;chunk-signature=ok\r\n34\r\n' \
              '2;chunk-signature=ok\r\n56\r\n' \
              '2;chunk-signature=ok\r\n78\r\n' \
              '0;chunk-signature=ko\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'12345678', wrapped.read(8))
        with self.assertRaises(s3request.S3InputChunkSignatureMismatch):
            wrapped.read(4)
        self.assertTrue(wrapped._input.closed)

    def test_not_enough_content(self):
        raw = '9;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(
            BytesIO(raw), 33, set(), self.fake_sig_checker)
        with self.assertRaises(s3request.S3InputSizeError) as cm:
            wrapped.read()
        self.assertEqual(33, cm.exception.expected)
        self.assertEqual(9, cm.exception.provided)
        self.assertTrue(wrapped._input.closed)

    def test_wrong_chunk_size(self):
        # first chunk should be size 9 not a
        raw = 'a;chunk-signature=ok\r\n123456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        with self.assertRaises(s3request.S3InputSizeError) as cm:
            wrapped.read(4)
        self.assertEqual(9, cm.exception.expected)
        self.assertEqual(10, cm.exception.provided)
        self.assertTrue(wrapped._input.closed)

    def test_small_first_chunk_size(self):
        raw = '1;chunk-signature=ok\r\n1\r\n' \
              '8;chunk-signature=ok\r\n23456789\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        with self.assertRaises(s3request.S3InputChunkTooSmall) as cm:
            wrapped.read(4)
        # note: the chunk number is the one *after* the short chunk
        self.assertEqual(2, cm.exception.chunk_number)
        self.assertEqual(1, cm.exception.bad_chunk_size)
        self.assertTrue(wrapped._input.closed)

    def test_small_final_chunk_size_ok(self):
        raw = '8;chunk-signature=ok\r\n12345678\r\n' \
              '1;chunk-signature=ok\r\n9\r\n' \
              '0;chunk-signature=ok\r\n\r\n'.encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), self.fake_sig_checker)
        self.assertEqual(b'123456789', wrapped.read())

    def test_invalid_chunk_size(self):
        # the actual chunk data doesn't need to match the length in the
        # chunk header for the test
        raw = ('-1;chunk-signature=ok\r\n123456789\r\n'
               '0;chunk-signature=ok\r\n\r\n').encode('utf8')
        wrapped = StreamingInput(BytesIO(raw), 9, set(), None)
        with self.assertRaises(s3request.S3InputIncomplete) as cm:
            wrapped.read(4)
        self.assertIn('invalid chunk header', str(cm.exception))
        self.assertTrue(wrapped._input.closed)

    def test_invalid_chunk_params(self):
        def do_test(params, exp_exception):
            raw = ('9;%s\r\n123456789\r\n'
                   '0;chunk-signature=ok\r\n\r\n' % params).encode('utf8')
            wrapped = StreamingInput(BytesIO(raw), 9, set(), MagicMock())
            with self.assertRaises(exp_exception):
                wrapped.read(4)
            self.assertTrue(wrapped._input.closed)

        do_test('chunk-signature=', s3request.S3InputIncomplete)
        do_test('chunk-signature=ok;not-ok', s3request.S3InputIncomplete)
        do_test('chunk-signature=ok;chunk-signature=ok',
                s3request.S3InputIncomplete)
        do_test('chunk-signature', s3request.S3InputIncomplete)
        # note: underscore not hyphen...
        do_test('chunk_signature=ok', s3request.S3InputChunkSignatureMismatch)
        do_test('skunk-cignature=ok', s3request.S3InputChunkSignatureMismatch)


if __name__ == '__main__':
    unittest.main()