s3api: actually execute check_pipeline in real world

Previously, S3ApiMiddleware.check_pipeline would always exit early
because the __file__ attribute of the Config instance passed to
check_pipeline was never set. The __file__ key is typically passed to
the S3ApiMiddleware constructor in the wsgi config dict, so this dict
is now passed to check_pipeline() for it to test for the existence of
__file__.

Also, the use of a Config object is replaced with a dict where it
mimics the wsgi conf object in the unit tests setup.

UpgradeImpact
=============

The bug prevented the pipeline order checks described in
proxy-server.conf-sample from being made on the proxy-server pipeline
when s3api middleware was included. With this change, these checks will
now be made and an invalid pipeline configuration will result in a
ValueError being raised during proxy-server startup.

A valid pipeline has another middleware (presumed to be an auth
middleware) between s3api and the proxy-server app. If keystoneauth is
found, then a further check is made that s3token is configured after
s3api and before keystoneauth.

The pipeline order checks can be disabled by setting the s3api
auth_pipeline_check option to False in proxy-server.conf. This
mitigation is recommended if previously operating with what will now
be considered an invalid pipeline.

The bug also prevented a check for the slo middleware being in the
pipeline between s3api and the proxy-server app from being made. If the
slo middleware is not found then multipart uploads will now not be
supported, regardless of the value of the allow_multipart_uploads
option described in proxy-server.conf-sample. In this case a warning
will be logged during startup but no exception is raised.

Closes-Bug: #1912391
Change-Id: I357537492733b97e5afab4a7b8e6a5c527c650e4
This commit is contained in:
Alistair Coles
2021-01-19 14:25:09 +00:00
parent 4ee05c5ddc
commit 6896f1f54b
6 changed files with 74 additions and 51 deletions

View File

@@ -583,7 +583,9 @@ use = egg:swift#s3api
# Enable multi-part uploads. (default: true)
# This is required to store files larger than Swift's max_file_size (by
# default, 5GiB). Note that has performance implications when deleting objects,
# as we now have to check for whether there are also segments to delete.
# as we now have to check for whether there are also segments to delete. The
# SLO middleware must be in the pipeline after s3api for this option to have
# effect.
# allow_multipart_uploads = True
#
# Set the maximum number of parts for Upload Part operation.(default: 1000)

View File

@@ -242,45 +242,45 @@ class ListingEtagMiddleware(object):
class S3ApiMiddleware(object):
"""S3Api: S3 compatibility middleware"""
def __init__(self, app, conf, *args, **kwargs):
def __init__(self, app, wsgi_conf, *args, **kwargs):
self.app = app
self.conf = Config()
# Set default values if they are not configured
self.conf.allow_no_owner = config_true_value(
conf.get('allow_no_owner', False))
self.conf.location = conf.get('location', 'us-east-1')
wsgi_conf.get('allow_no_owner', False))
self.conf.location = wsgi_conf.get('location', 'us-east-1')
self.conf.dns_compliant_bucket_names = config_true_value(
conf.get('dns_compliant_bucket_names', True))
wsgi_conf.get('dns_compliant_bucket_names', True))
self.conf.max_bucket_listing = config_positive_int_value(
conf.get('max_bucket_listing', 1000))
wsgi_conf.get('max_bucket_listing', 1000))
self.conf.max_parts_listing = config_positive_int_value(
conf.get('max_parts_listing', 1000))
wsgi_conf.get('max_parts_listing', 1000))
self.conf.max_multi_delete_objects = config_positive_int_value(
conf.get('max_multi_delete_objects', 1000))
wsgi_conf.get('max_multi_delete_objects', 1000))
self.conf.multi_delete_concurrency = config_positive_int_value(
conf.get('multi_delete_concurrency', 2))
wsgi_conf.get('multi_delete_concurrency', 2))
self.conf.s3_acl = config_true_value(
conf.get('s3_acl', False))
self.conf.storage_domain = conf.get('storage_domain', '')
wsgi_conf.get('s3_acl', False))
self.conf.storage_domain = wsgi_conf.get('storage_domain', '')
self.conf.auth_pipeline_check = config_true_value(
conf.get('auth_pipeline_check', True))
wsgi_conf.get('auth_pipeline_check', True))
self.conf.max_upload_part_num = config_positive_int_value(
conf.get('max_upload_part_num', 1000))
wsgi_conf.get('max_upload_part_num', 1000))
self.conf.check_bucket_owner = config_true_value(
conf.get('check_bucket_owner', False))
wsgi_conf.get('check_bucket_owner', False))
self.conf.force_swift_request_proxy_log = config_true_value(
conf.get('force_swift_request_proxy_log', False))
wsgi_conf.get('force_swift_request_proxy_log', False))
self.conf.allow_multipart_uploads = config_true_value(
conf.get('allow_multipart_uploads', True))
wsgi_conf.get('allow_multipart_uploads', True))
self.conf.min_segment_size = config_positive_int_value(
conf.get('min_segment_size', 5242880))
wsgi_conf.get('min_segment_size', 5242880))
self.conf.allowable_clock_skew = config_positive_int_value(
conf.get('allowable_clock_skew', 15 * 60))
wsgi_conf.get('allowable_clock_skew', 15 * 60))
self.logger = get_logger(
conf, log_route=conf.get('log_name', 's3api'))
self.check_pipeline(self.conf)
wsgi_conf, log_route=wsgi_conf.get('log_name', 's3api'))
self.check_pipeline(wsgi_conf)
def __call__(self, env, start_response):
try:
@@ -331,14 +331,14 @@ class S3ApiMiddleware(object):
return res
def check_pipeline(self, conf):
def check_pipeline(self, wsgi_conf):
"""
Check that proxy-server.conf has an appropriate pipeline for s3api.
"""
if conf.get('__file__', None) is None:
if wsgi_conf.get('__file__', None) is None:
return
ctx = loadcontext(loadwsgi.APP, conf.__file__)
ctx = loadcontext(loadwsgi.APP, wsgi_conf['__file__'])
pipeline = str(PipelineWrapper(ctx)).split(' ')
# Add compatible with 3rd party middleware.
@@ -354,7 +354,7 @@ class S3ApiMiddleware(object):
'to support multi-part upload, please add it '
'in pipeline')
if not conf.auth_pipeline_check:
if not self.conf.auth_pipeline_check:
self.logger.debug('Skip pipeline auth check.')
return

View File

@@ -23,7 +23,6 @@ from swift.common import swob
from swift.common.middleware.s3api.s3api import filter_factory
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import Config
from test.unit import debug_logger
from test.unit.common.middleware.s3api.helpers import FakeSwift
@@ -70,8 +69,8 @@ class S3ApiTestCase(unittest.TestCase):
unittest.TestCase.__init__(self, name)
def setUp(self):
# setup default config
self.conf = Config({
# setup default config dict
self.conf = {
'allow_no_owner': False,
'location': 'us-east-1',
'dns_compliant_bucket_names': True,
@@ -86,12 +85,13 @@ class S3ApiTestCase(unittest.TestCase):
'force_swift_request_proxy_log': False,
'allow_multipart_uploads': True,
'min_segment_size': 5242880,
})
# those 2 settings has existed the original test setup
self.conf.log_level = 'debug'
'log_level': 'debug'
}
self.app = FakeApp()
self.swift = self.app.swift
# note: self.conf has no __file__ key so check_pipeline will be skipped
# when constructing self.s3api
self.s3api = filter_factory({}, **self.conf)(self.app)
self.logger = self.s3api.logger = self.swift.logger = debug_logger()

View File

@@ -389,7 +389,7 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
@s3acl
def test_object_multi_DELETE_lots_of_keys(self):
elem = Element('Delete')
for i in range(self.conf.max_multi_delete_objects):
for i in range(self.s3api.conf.max_multi_delete_objects):
status = swob.HTTPOk if i % 2 else swob.HTTPNotFound
name = 'x' * 1000 + str(i)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/%s' % name,
@@ -413,12 +413,12 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
elem = fromstring(body)
self.assertEqual(len(elem.findall('Deleted')),
self.conf.max_multi_delete_objects)
self.s3api.conf.max_multi_delete_objects)
@s3acl
def test_object_multi_DELETE_too_many_keys(self):
elem = Element('Delete')
for i in range(self.conf.max_multi_delete_objects + 1):
for i in range(self.s3api.conf.max_multi_delete_objects + 1):
obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'x' * 1000 + str(i)
body = tostring(elem, use_s3ns=False)

View File

@@ -22,6 +22,7 @@ import mock
import requests
import json
import six
from paste.deploy import loadwsgi
from six.moves.urllib.parse import unquote, quote
import swift.common.middleware.s3api
@@ -162,6 +163,23 @@ class TestS3ApiMiddleware(S3ApiTestCase):
check_bad_positive_ints(multi_delete_concurrency=-100)
check_bad_positive_ints(multi_delete_concurrency=0)
def test_init_passes_wsgi_conf_file_to_check_pipeline(self):
# verify that check_pipeline is called during init: add __file__ attr
# to test config to make it more representative of middleware being
# init'd by wgsi
context = mock.Mock()
with patch("swift.common.middleware.s3api.s3api.loadcontext",
return_value=context) as loader, \
patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
as pipeline:
conf = dict(self.conf,
auth_pipeline_check=True,
__file__='proxy-conf-file')
pipeline.return_value = 's3api tempauth proxy-server'
self.s3api = S3ApiMiddleware(None, conf)
loader.assert_called_with(loadwsgi.APP, 'proxy-conf-file')
pipeline.assert_called_with(context)
def test_non_s3_request_passthrough(self):
req = Request.blank('/something')
status, headers, body = self.call_s3api(req)
@@ -823,20 +841,22 @@ class TestS3ApiMiddleware(S3ApiTestCase):
swift_info = utils.get_swift_info()
self.assertTrue('s3api' in swift_info)
self.assertEqual(swift_info['s3api'].get('max_bucket_listing'),
self.conf.max_bucket_listing)
self.conf['max_bucket_listing'])
self.assertEqual(swift_info['s3api'].get('max_parts_listing'),
self.conf.max_parts_listing)
self.conf['max_parts_listing'])
self.assertEqual(swift_info['s3api'].get('max_upload_part_num'),
self.conf.max_upload_part_num)
self.conf['max_upload_part_num'])
self.assertEqual(swift_info['s3api'].get('max_multi_delete_objects'),
self.conf.max_multi_delete_objects)
self.conf['max_multi_delete_objects'])
def test_check_pipeline(self):
with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
as pipeline:
self.conf.auth_pipeline_check = True
self.conf.__file__ = ''
# cause check_pipeline to not return early...
self.conf['__file__'] = ''
# ...and enable pipeline auth checking
self.s3api.conf.auth_pipeline_check = True
pipeline.return_value = 's3api tempauth proxy-server'
self.s3api.check_pipeline(self.conf)
@@ -876,9 +896,10 @@ class TestS3ApiMiddleware(S3ApiTestCase):
with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
as pipeline:
# Disable pipeline check
self.conf.auth_pipeline_check = False
self.conf.__file__ = ''
# cause check_pipeline to not return early...
self.conf['__file__'] = ''
# ...but disable pipeline auth checking
self.s3api.conf.auth_pipeline_check = False
pipeline.return_value = 's3api tempauth proxy-server'
self.s3api.check_pipeline(self.conf)
@@ -1049,7 +1070,7 @@ class TestS3ApiMiddleware(S3ApiTestCase):
'S3Request._validate_headers'), \
patch('swift.common.middleware.s3api.utils.time.time',
return_value=fake_time):
req = SigV4Request(env, self.conf, app=None)
req = SigV4Request(env, self.s3api.conf, app=None)
return req
def canonical_string(path, environ):
@@ -1059,7 +1080,7 @@ class TestS3ApiMiddleware(S3ApiTestCase):
# See http://docs.aws.amazon.com/general/latest/gr
# /signature-v4-test-suite.html for where location, service, and
# signing key came from
with patch.object(self.conf, 'location', 'us-east-1'), \
with patch.object(self.s3api.conf, 'location', 'us-east-1'), \
patch.object(swift.common.middleware.s3api.s3request,
'SERVICE', 'host'):
req = _get_req(path, environ)

View File

@@ -108,7 +108,7 @@ class TestRequest(S3ApiTestCase):
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
s3_req = req_klass(req.environ, self.conf, app=None)
s3_req = req_klass(req.environ, self.s3api.conf, app=None)
s3_req.set_acl_handler(
get_acl_handler(s3_req.controller_name)(s3_req, DebugLogger()))
with patch('swift.common.middleware.s3api.s3request.S3Request.'
@@ -116,7 +116,7 @@ class TestRequest(S3ApiTestCase):
patch('swift.common.middleware.s3api.subresource.ACL.'
'check_permission') as m_check_permission:
mock_get_resp.return_value = fake_swift_resp \
or FakeResponse(self.conf.s3_acl)
or FakeResponse(self.s3api.conf.s3_acl)
return mock_get_resp, m_check_permission,\
s3_req.get_response(self.s3api)
@@ -237,7 +237,7 @@ class TestRequest(S3ApiTestCase):
patch.object(Request, 'remote_user', 'authorized'):
m_swift_resp.return_value = FakeSwiftResponse()
s3_req = S3AclRequest(req.environ, self.conf, None)
s3_req = S3AclRequest(req.environ, self.s3api.conf, None)
self.assertNotIn('s3api.auth_details', s3_req.environ)
def test_to_swift_req_Authorization_not_exist_in_swreq(self):
@@ -699,7 +699,7 @@ class TestRequest(S3ApiTestCase):
'X-Amz-Date': x_amz_date}
# Virtual hosted-style
self.conf.storage_domain = 's3.test.com'
self.s3api.conf.storage_domain = 's3.test.com'
req = Request.blank('/', environ=environ, headers=headers)
sigv4_req = SigV4Request(req.environ, Config())
uri = sigv4_req._canonical_uri()
@@ -719,7 +719,7 @@ class TestRequest(S3ApiTestCase):
'REQUEST_METHOD': 'GET'}
# Path-style
self.conf.storage_domain = ''
self.s3api.conf.storage_domain = ''
req = Request.blank('/', environ=environ, headers=headers)
sigv4_req = SigV4Request(req.environ, Config())
uri = sigv4_req._canonical_uri()
@@ -856,7 +856,7 @@ class TestRequest(S3ApiTestCase):
'X-Amz-Date': '20210104T102623Z'}
# Virtual hosted-style
self.conf.storage_domain = 's3.test.com'
self.s3api.conf.storage_domain = 's3.test.com'
req = Request.blank('/', environ=environ, headers=headers)
sigv4_req = SigV4Request(req.environ, Config())
self.assertTrue(
@@ -866,7 +866,7 @@ class TestRequest(S3ApiTestCase):
@patch.object(S3Request, '_validate_dates', lambda *a: None)
def test_check_sigv4_req_zero_content_length_sha256(self):
# Virtual hosted-style
self.conf.storage_domain = 's3.test.com'
self.s3api.conf.storage_domain = 's3.test.com'
# bad sha256
environ = {