Fix s3api functional tests

The Connection class was hard-coded to use the SAIO config

Change-Id: I9c11162de89fa3aa2a78aea093b187d0309860f5
Signed-off-by: Thiago da Silva <thiago@redhat.com>
This commit is contained in:
Thiago da Silva 2018-05-29 16:57:57 -04:00 committed by Tim Burke
parent 08db36a295
commit f4bc951508
10 changed files with 61 additions and 42 deletions

View File

@ -587,6 +587,7 @@ def in_process_setup(the_object_server=object_server):
# Below are values used by the functional test framework, as well as
# by the various in-process swift servers
'auth_uri': 'http://127.0.0.1:%d/auth/v1.0/' % prolis.getsockname()[1],
's3_storage_url': 'http://%s:%d/' % prolis.getsockname(),
# Primary functional test account (needs admin access to the
# account)
'account': 'test',
@ -868,6 +869,8 @@ def setup_package():
443 if parsed.scheme == 'https' else 80),
'auth_prefix': parsed.path,
})
config.setdefault('s3_storage_url',
urlunsplit(parsed[:2] + ('', None, None)))
elif 'auth_host' in config:
scheme = 'http'
if config_true_value(config.get('auth_ssl', 'no')):
@ -880,6 +883,8 @@ def setup_package():
auth_prefix += 'v1.0'
config['auth_uri'] = swift_test_auth = urlunsplit(
(scheme, netloc, auth_prefix, None, None))
config.setdefault('s3_storage_url', urlunsplit(
(scheme, netloc, '', None, None)))
# else, neither auth_uri nor auth_host; swift_test_auth will be unset
# and we'll skip everything later

View File

@ -37,7 +37,11 @@ class S3ApiBase(unittest.TestCase):
if 's3api' not in tf.cluster_info:
raise tf.SkipTest('s3api middleware is not enabled')
try:
self.conn = Connection()
self.conn = Connection(
tf.config['s3_access_key'], tf.config['s3_secret_key'],
user_id='%s:%s' % (tf.config['account'],
tf.config['username']))
self.conn.reset()
except Exception:
message = '%s got an error during initialize process.\n\n%s' % \
@ -67,7 +71,8 @@ class S3ApiBaseBoto3(S3ApiBase):
if 's3api' not in tf.cluster_info:
raise tf.SkipTest('s3api middleware is not enabled')
try:
self.conn = get_boto3_conn()
self.conn = get_boto3_conn(
tf.config['s3_access_key'], tf.config['s3_secret_key'])
self.endpoint_url = self.conn._endpoint.host
self.access_key = self.conn._request_signer._credentials.access_key
self.region = self.conn._client_config.region_name

View File

@ -15,6 +15,7 @@
import logging
import os
from six.moves.urllib.parse import urlparse
import test.functional as tf
import boto3
from botocore.exceptions import ClientError
@ -46,9 +47,9 @@ class Connection(object):
"""
Connection class used for S3 functional testing.
"""
def __init__(self, aws_access_key='test:tester',
aws_secret_key='testing',
user_id='test:tester'):
def __init__(self, aws_access_key,
aws_secret_key,
user_id=None):
"""
Initialize method.
@ -64,15 +65,16 @@ class Connection(object):
"""
self.aws_access_key = aws_access_key
self.aws_secret_key = aws_secret_key
self.user_id = user_id
# NOTE: auth_host and auth_port can be different from storage location
self.host = tf.config['auth_host']
self.port = int(tf.config['auth_port'])
self.user_id = user_id or aws_access_key
parsed = urlparse(tf.config['s3_storage_url'])
self.host = parsed.hostname
self.port = parsed.port
self.conn = \
S3Connection(aws_access_key, aws_secret_key, is_secure=False,
S3Connection(aws_access_key, aws_secret_key,
is_secure=(parsed.scheme == 'https'),
host=self.host, port=self.port,
calling_format=OrdinaryCallingFormat())
self.conn.auth_region_name = 'us-east-1'
self.conn.auth_region_name = tf.config.get('s3_region', 'us-east-1')
def reset(self):
"""
@ -140,22 +142,26 @@ class Connection(object):
url = self.conn.generate_url(expires_in, method, bucket, obj)
if os.environ.get('S3_USE_SIGV4') == "True":
# V4 signatures are known-broken in boto, but we can work around it
if url.startswith('https://'):
if url.startswith('https://') and not tf.config[
's3_storage_url'].startswith('https://'):
url = 'http://' + url[8:]
if self.port is None:
return url, {}
else:
return url, {'Host': '%(host)s:%(port)d:%(port)d' % {
'host': self.host, 'port': self.port}}
return url, {}
def get_boto3_conn(aws_access_key, aws_secret_key):
    """
    Build a boto3 S3 client for functional testing.

    The endpoint is taken from the ``s3_storage_url`` test-config value
    rather than being hard-coded to the SAIO auth host/port, so the tests
    can run against any cluster.

    :param aws_access_key: S3 access key id for the test account
    :param aws_secret_key: S3 secret key for the test account
    :returns: a ``boto3`` S3 client pointed at ``s3_storage_url``
    """
    endpoint_url = tf.config['s3_storage_url']
    # Path-style addressing: the s3api middleware serves buckets under the
    # path, not as virtual-hosted subdomains.
    config = boto3.session.Config(s3={'addressing_style': 'path'})
    return boto3.client(
        's3', aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key,
        config=config, region_name=tf.config.get('s3_region', 'us-east-1'),
        # Match the scheme of the configured endpoint; presumably http for
        # in-process tests, https for real clusters.
        use_ssl=endpoint_url.startswith('https:'),
        endpoint_url=endpoint_url)
def tear_down_s3(conn):

View File

@ -93,7 +93,7 @@ class TestS3Acl(S3ApiBase):
def test_put_bucket_acl_error(self):
req_headers = {'x-amz-acl': 'public-read'}
aws_error_conn = Connection(aws_secret_key='invalid')
aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('PUT', self.bucket,
headers=req_headers, query='acl')
@ -110,7 +110,7 @@ class TestS3Acl(S3ApiBase):
self.assertEqual(get_error_code(body), 'AccessDenied')
def test_get_bucket_acl_error(self):
aws_error_conn = Connection(aws_secret_key='invalid')
aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('GET', self.bucket, query='acl')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -126,7 +126,7 @@ class TestS3Acl(S3ApiBase):
def test_get_object_acl_error(self):
self.conn.make_request('PUT', self.bucket, self.obj)
aws_error_conn = Connection(aws_secret_key='invalid')
aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
aws_error_conn.make_request('GET', self.bucket, self.obj,
query='acl')

View File

@ -128,7 +128,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.create_bucket(Bucket='bucket')
self.assertEqual(
@ -201,7 +201,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.list_objects(Bucket='bucket')
self.assertEqual(
@ -388,7 +388,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
ctx.exception.response[
'ResponseMetadata']['HTTPHeaders']['content-length'], '0')
auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.head_bucket(Bucket='bucket')
self.assertEqual(
@ -419,7 +419,7 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
self.assertEqual(
ctx.exception.response['Error']['Code'], 'InvalidBucketName')
auth_error_conn = get_boto3_conn(aws_secret_key='invalid')
auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
with self.assertRaises(botocore.exceptions.ClientError) as ctx:
auth_error_conn.delete_bucket(Bucket='bucket')
self.assertEqual(

View File

@ -134,7 +134,7 @@ class TestS3ApiMultiDelete(S3ApiBase):
content_md5 = calculate_md5(xml)
query = 'delete'
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, body=xml,
headers={

View File

@ -304,9 +304,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertTrue(lines[0].startswith(b'<?xml'), body)
self.assertTrue(lines[0].endswith(b'?>'), body)
elem = fromstring(body, 'CompleteMultipartUploadResult')
# TODO: use tf.config value
self.assertEqual(
'http://%s:%s/bucket/obj1' % (self.conn.host, self.conn.port),
'%s/bucket/obj1' % tf.config['s3_storage_url'].rstrip('/'),
elem.find('Location').text)
self.assertEqual(elem.find('Bucket').text, bucket)
self.assertEqual(elem.find('Key').text, key)
@ -428,7 +427,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.conn.make_request('PUT', bucket)
query = 'uploads'
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -442,7 +441,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.conn.make_request('PUT', bucket)
query = 'uploads'
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', bucket, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -462,7 +461,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -500,7 +499,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', bucket, key,
headers={
@ -541,7 +540,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
upload_id = elem.find('UploadId').text
query = 'uploadId=%s' % upload_id
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', bucket, key, query=query)
@ -568,7 +567,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self._upload_part(bucket, key, upload_id)
query = 'uploadId=%s' % upload_id
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('DELETE', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -612,7 +611,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertEqual(get_error_code(body), 'EntityTooSmall')
# invalid credentials
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
query=query)

View File

@ -147,7 +147,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertCommonResponseHeaders(headers)
def test_put_object_error(self):
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', self.bucket, 'object')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -166,7 +166,7 @@ class TestS3ApiObject(S3ApiBase):
dst_obj = 'dst_object'
headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -197,7 +197,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('GET', self.bucket, obj)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
@ -216,7 +216,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('HEAD', self.bucket, obj)
self.assertEqual(status, 403)
@ -239,7 +239,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('DELETE', self.bucket, obj)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')

View File

@ -69,7 +69,7 @@ class TestS3ApiService(S3ApiBase):
self.assertTrue(b.find('CreationDate') is not None)
def test_service_error_signature_not_match(self):
auth_error_conn = Connection(aws_secret_key='invalid')
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = auth_error_conn.make_request('GET')
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
self.assertEqual(headers['content-type'], 'application/xml')

View File

@ -24,6 +24,10 @@ auth_uri = http://127.0.0.1:8080/auth/v1.0
#auth_version = 3
#auth_uri = http://localhost:5000/v3/
# Used by s3api functional tests, which don't contact auth directly
#s3_storage_url = http://127.0.0.1:8080/
#s3_region = us-east-1
# Primary functional test account (needs admin access to the account)
account = test
username = tester