Tolerate absolute-form request targets

We've seen S3 clients expecting to be able to send request lines like

    GET https://cluster.domain/bucket/key HTTP/1.1

instead of the expected

    GET /bucket/key HTTP/1.1

Testing against other, independent servers with something like

    ( echo -n $'GET https://www.google.com/ HTTP/1.1\r\nHost: www.google.com\r\nConnection: close\r\n\r\n' ; sleep 1 ) | openssl s_client -connect www.google.com:443

suggests that it may be reasonable to accept them; the RFC even goes so
far as to say

> To allow for transition to the absolute-form for all requests in some
> future version of HTTP, a server MUST accept the absolute-form in
> requests, even though HTTP/1.1 clients will only send them in
> requests to proxies.

(See https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2)

Fix it at the protocol level, so everywhere else we can mostly continue
to assume that PATH_INFO starts with a / like we always have.
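
As a rough standalone sketch (with a hypothetical helper name; the real
change is the SwiftHttpProtocol hunk below), the normalization amounts to:

    def normalize_request_target(path):
        # Only http(s) absolute-form targets are rewritten; anything
        # else passes through untouched and fails later as it always has
        if path.startswith(('http://', 'https://')):
            host, sep, rest = path.partition('//')[2].partition('/')
            if sep:
                path = '/' + rest
        return path

    assert normalize_request_target(
        'https://cluster.domain/bucket/key') == '/bucket/key'
    assert normalize_request_target('/bucket/key') == '/bucket/key'
    # no path component, or a non-http(s) scheme: leave the target alone
    assert normalize_request_target(
        'https://cluster.domain') == 'https://cluster.domain'
    assert normalize_request_target('ftp://host/key') == 'ftp://host/key'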

Co-Authored-By: Clay Gerrard <clay.gerrard@gmail.com>
Change-Id: I04012e523f01e910f41d5a41cdd86d3d2a1b9c59
Author: Tim Burke
Date: 2022-09-07 12:20:16 -07:00
Parent: 2d7c1dc6dd
Commit: f6ac7d4491
5 changed files with 164 additions and 0 deletions


@@ -132,6 +132,12 @@ class SwiftHttpProtocol(wsgi.HttpProtocol):
                    400,
                    "Bad HTTP/0.9 request type (%r)" % command)
                return False
        if path.startswith(('http://', 'https://')):
            # absolute-form request-target: drop the scheme and authority
            # so downstream code still sees an origin-form path
            host, sep, rest = path.partition('//')[2].partition('/')
            if sep:
                path = '/' + rest
        self.command, self.path = command, path
        # Examine the headers and look for a Connection directive.


@@ -17,6 +17,7 @@ import os
import requests
from swift.common.bufferedhttp import http_connect_raw
from swift.common.middleware.s3api.etree import fromstring
import test.functional as tf
@@ -223,6 +224,35 @@ class TestS3ApiPresignedUrls(S3ApiBase):
        status, _junk, _junk = self.conn.make_request('DELETE', bucket)
        self.assertEqual(status, 204)

    def test_absolute_form_request(self):
        bucket = 'test-bucket'
        put_url, headers = self.conn.generate_url_and_headers(
            'PUT', bucket)
        resp = http_connect_raw(
            self.conn.host,
            self.conn.port,
            'PUT',
            put_url,  # whole URL, not just the path/query!
            headers=headers,
            ssl=put_url.startswith('https:'),
        ).getresponse()
        self.assertEqual(resp.status, 200,
                         'Got %d %s' % (resp.status, resp.read()))

        delete_url, headers = self.conn.generate_url_and_headers(
            'DELETE', bucket)
        resp = http_connect_raw(
            self.conn.host,
            self.conn.port,
            'DELETE',
            delete_url,  # whole URL, not just the path/query!
            headers=headers,
            ssl=delete_url.startswith('https:'),
        ).getresponse()
        self.assertEqual(resp.status, 204,
                         'Got %d %s' % (resp.status, resp.read()))


class TestS3ApiPresignedUrlsSigV4(TestS3ApiPresignedUrls):
    @classmethod


@@ -0,0 +1,56 @@
# Copyright (c) 2022 Nvidia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import SkipTest

from test.s3api import BaseS3TestCase


class AlwaysAbsoluteURLProxyConfig(object):
    # Minimal stand-in for botocore's proxy configuration: it claims every
    # request is forwarded through a "proxy" whose URL is the request URL
    # itself, so the client ends up sending absolute-form request targets.

    def __init__(self):
        self.settings = {'proxy_use_forwarding_for_https': True}

    def proxy_url_for(self, request_url):
        return request_url

    def proxy_headers_for(self, proxy_url):
        return {}

class TestRequestTargetStyle(BaseS3TestCase):

    def setUp(self):
        self.client = self.get_s3_client(1)
        if not self.client._endpoint.host.startswith('https:'):
            raise SkipTest('Absolute URL test requires https')

        self.bucket_name = self.create_name('test-address-style')
        resp = self.client.create_bucket(Bucket=self.bucket_name)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])

    def tearDown(self):
        self.clear_bucket(self.client, self.bucket_name)
        super(TestRequestTargetStyle, self).tearDown()

    def test_absolute_url(self):
        sess = self.client._endpoint.http_session
        sess._proxy_config = AlwaysAbsoluteURLProxyConfig()
        self.assertEqual({'use_forwarding_for_https': True},
                         sess._proxies_kwargs())
        resp = self.client.list_buckets()
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertIn(self.bucket_name, {
            info['Name'] for info in resp['Buckets']})


@@ -180,6 +180,48 @@ class TestSwiftHttpProtocolSomeMore(ProtocolTest):
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'/oh\xffboy%what$now%E2%80%bd')

    def test_absolute_target(self):
        bytes_out = self._run_bytes_through_protocol((
            b"GET https://cluster.domain/bucket/key HTTP/1.0\r\n"
            b"\r\n"
        ))
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'/bucket/key')

        bytes_out = self._run_bytes_through_protocol((
            b"GET http://cluster.domain/v1/acct/cont/obj HTTP/1.0\r\n"
            b"\r\n"
        ))
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'/v1/acct/cont/obj')

        # clients talking nonsense
        bytes_out = self._run_bytes_through_protocol((
            b"GET ftp://cluster.domain/bucket/key HTTP/1.0\r\n"
            b"\r\n"
        ))
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'ftp://cluster.domain/bucket/key')

        bytes_out = self._run_bytes_through_protocol((
            b"GET https://cluster.domain HTTP/1.0\r\n"
            b"\r\n"
        ))
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'https://cluster.domain')

        bytes_out = self._run_bytes_through_protocol((
            b"GET http:omg//wtf/bbq HTTP/1.0\r\n"
            b"\r\n"
        ))
        lines = [l for l in bytes_out.split(b"\r\n") if l]
        self.assertEqual(lines[-1], b'http:omg//wtf/bbq')

    def test_bad_request(self):
        bytes_out = self._run_bytes_through_protocol((
            b"ONLY-METHOD\r\n"


@@ -2851,6 +2851,36 @@ class TestReplicatedObjectController(
        self.assertIn(b'X-Object-Meta-\xf0\x9f\x8c\xb4: \xf0\x9f\x91\x8d',
                      headers.split(b'\r\n'))

    @unpatch_policies
    def test_HEAD_absolute_uri(self):
        prolis = _test_sockets[0]
        sock = connect_tcp(('localhost', prolis.getsockname()[1]))
        fd = sock.makefile('rwb')

        # sanity, this resource is created in setup
        path = b'/v1/a'
        fd.write(b'HEAD %s HTTP/1.1\r\n'
                 b'Host: localhost\r\n'
                 b'Connection: keep-alive\r\n'
                 b'X-Storage-Token: t\r\n'
                 b'\r\n' % (path,))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = b'HTTP/1.1 204'
        self.assertEqual(headers[:len(exp)], exp)

        # RFC says we should accept this, too
        abs_path = b'http://saio.example.com:8080/v1/a'
        fd.write(b'HEAD %s HTTP/1.1\r\n'
                 b'Host: localhost\r\n'
                 b'Connection: keep-alive\r\n'
                 b'X-Storage-Token: t\r\n'
                 b'\r\n' % (abs_path,))
        fd.flush()
        headers = readuntil2crlfs(fd)
        exp = b'HTTP/1.1 204'
        self.assertEqual(headers[:len(exp)], exp)

    @unpatch_policies
    def test_GET_short_read(self):
        prolis = _test_sockets[0]