func test improvements

Not all v1 auth systems use an acct:user format; s3api tests should not require it.

Be a little more tolerant of listing consistency issues when resetting.

Tolerate s3api /info results returning strings instead of ints.

Related-Change: I4a46bd650a53f88c642d402e697869df28bd2fd3
Change-Id: I8f2f247dd113ad637b17d241133b14c35cadecae
This commit is contained in:
Tim Burke 2021-09-14 21:49:03 -07:00
parent 5d52afbe4c
commit b4e532a46f
4 changed files with 30 additions and 9 deletions

View File

@@ -46,11 +46,14 @@ class S3ApiBase(unittest.TestCase):
def setUp(self):
if 's3api' not in tf.cluster_info:
raise tf.SkipTest('s3api middleware is not enabled')
if tf.config.get('account'):
user_id = '%s:%s' % (tf.config['account'], tf.config['username'])
else:
user_id = tf.config['username']
try:
self.conn = Connection(
tf.config['s3_access_key'], tf.config['s3_secret_key'],
user_id='%s:%s' % (tf.config['account'],
tf.config['username']))
user_id=user_id)
self.conn.reset()
except Exception:

View File

@@ -101,7 +101,13 @@ class Connection(object):
bucket.delete_key(
obj.name, version_id=obj.version_id)
self.conn.delete_bucket(bucket.name)
try:
self.conn.delete_bucket(bucket.name)
except ClientError as e:
err_code = e.response.get('Error', {}).get('Code')
if err_code != 'BucketNotEmpty':
raise
# else, listing consistency issue; try again
except S3ResponseError as e:
# 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
if e.status != 404:
@@ -187,7 +193,13 @@ def tear_down_s3(conn):
resp = conn.list_objects(Bucket=bucket)
for obj in resp.get('Contents', []):
conn.delete_object(Bucket=bucket, Key=obj['Key'])
conn.delete_bucket(Bucket=bucket)
try:
conn.delete_bucket(Bucket=bucket)
except ClientError as e:
err_code = e.response.get('Error', {}).get('Code')
if err_code != 'BucketNotEmpty':
raise
# else, listing consistency issue; try again
except ClientError as e:
# 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
if e.response['ResponseMetadata']['HTTPStatusCode'] != 404:

View File

@@ -55,8 +55,8 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
def test_bucket(self):
bucket = 'bucket'
max_bucket_listing = tf.cluster_info['s3api'].get(
'max_bucket_listing', 1000)
max_bucket_listing = int(tf.cluster_info['s3api'].get(
'max_bucket_listing', 1000))
# PUT Bucket
resp = self.conn.create_bucket(Bucket=bucket)
@@ -241,7 +241,13 @@ class TestS3ApiBucket(S3ApiBaseBoto3):
ctx.exception.response['Error']['Code'], 'NoSuchBucket')
def _prepare_test_get_bucket(self, bucket, objects):
    """Create *bucket* (if needed) and populate it with empty objects.

    :param bucket: name of the bucket to create
    :param objects: iterable of object keys; each is PUT with an empty body
    """
    # NOTE: the rendered diff duplicated the pre-change create_bucket call
    # outside the try; only the guarded call belongs in the post-image.
    try:
        self.conn.create_bucket(Bucket=bucket)
    except botocore.exceptions.ClientError as e:
        err_code = e.response.get('Error', {}).get('Code')
        # A bucket left over from an earlier run is fine; any other
        # error is a real failure and must propagate.
        if err_code != 'BucketAlreadyOwnedByYou':
            raise
    for obj in objects:
        self.conn.put_object(Bucket=bucket, Key=obj, Body=b'')

View File

@@ -167,8 +167,8 @@ class TestS3ApiMultiDelete(S3ApiBase):
query=query)
self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')
max_deletes = tf.cluster_info.get('s3api', {}).get(
'max_multi_delete_objects', 1000)
max_deletes = int(tf.cluster_info.get('s3api', {}).get(
'max_multi_delete_objects', 1000))
# specified number of objects are over max_multi_delete_objects
# (Default 1000), but xml size is relatively small
req_objects = ['obj%s' for var in range(max_deletes + 1)]