
Custom SSL handling was introduced because disabling SSL-layer compression provided an approximately five-fold performance increase in some cases. Without SSL-layer compression disabled, the image transfer would be CPU bound -- with the CPU performing the DEFLATE algorithm. This would typically limit image transfers to < 20 MB/s. When --no-ssl-compression was specified, the client would not negotiate any compression algorithm during the SSL handshake with the server, which would remove the CPU bottleneck, and transfers could approach wire speed. In order to support '--no-ssl-compression', two totally separate code paths exist depending on whether this is True or False. When SSL compression is disabled, rather than using the standard 'requests' library, we enter some custom code based on pyopenssl and httplib in order to disable compression. This patch/spec proposes removing the custom code because: * It is a burden to maintain. E.g., adding new code such as keystone session support is more complicated. * It can introduce additional failure modes. We have seen some bugs related to the 'custom' certificate checking. * Newer operating systems disable SSL compression for us. E.g., while Debian 7 defaulted to compression 'on', Debian 8 has compression 'off'. This makes both servers and clients less likely to have compression enabled. * Newer combinations of 'requests' and 'python' do this for us. Requests disables compression when backed by a version of python which supports it (>= 2.7.9). This makes clients more likely to disable compression out-of-the-box. * It is (in principle) possible to do this on older versions too. If pyopenssl, ndg-httpsclient and pyasn1 are installed on older operating system/python combinations, the requests library should disable SSL compression on the client side. * Systems that have SSL compression enabled may be vulnerable to the CRIME (https://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2012-4929) attack.
Installations which are security-conscious should be running the Glance server with SSL compression disabled. Full Spec: https://review.openstack.org/#/c/187674 Blueprint: remove-custom-client-ssl-handling Change-Id: I7e7761fc91b0d6da03939374eeedd809534f6edf
345 lines
12 KiB
Python
345 lines
12 KiB
Python
# Copyright 2012 OpenStack Foundation
|
|
# All Rights Reserved.
|
|
#
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
# not use this file except in compliance with the License. You may obtain
|
|
# a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
# License for the specific language governing permissions and limitations
|
|
# under the License.
|
|
|
|
import copy
|
|
import logging
|
|
import socket
|
|
|
|
from keystoneclient import adapter
|
|
from keystoneclient import exceptions as ksc_exc
|
|
from oslo_utils import importutils
|
|
from oslo_utils import netutils
|
|
import requests
|
|
try:
|
|
ProtocolError = requests.packages.urllib3.exceptions.ProtocolError
|
|
except ImportError:
|
|
ProtocolError = requests.exceptions.ConnectionError
|
|
import six
|
|
from six.moves.urllib import parse
|
|
import warnings
|
|
|
|
try:
|
|
import json
|
|
except ImportError:
|
|
import simplejson as json
|
|
|
|
# Python 2.5 compat fix
|
|
if not hasattr(parse, 'parse_qsl'):
|
|
import cgi
|
|
parse.parse_qsl = cgi.parse_qsl
|
|
|
|
from oslo_utils import encodeutils
|
|
|
|
from glanceclient.common import utils
|
|
from glanceclient import exc
|
|
|
|
osprofiler_web = importutils.try_import("osprofiler.web")
|
|
|
|
LOG = logging.getLogger(__name__)
|
|
USER_AGENT = 'python-glanceclient'
|
|
CHUNKSIZE = 1024 * 64 # 64kB
|
|
|
|
|
|
class _BaseHTTPClient(object):
    """Request/response plumbing shared by HTTPClient and SessionClient."""

    @staticmethod
    def _chunk_body(body):
        """Yield successive CHUNKSIZE pieces read from file-like ``body``.

        :param body: an object with a ``read(size)`` method returning
            either ``str`` or ``bytes``
        """
        chunk = body
        while chunk:
            chunk = body.read(CHUNKSIZE)
            # NOTE: use truthiness rather than ``chunk == ''`` so that an
            # empty *bytes* read (b'') also terminates the loop here;
            # comparing against '' let a spurious empty chunk be yielded
            # for binary bodies before the while-condition stopped it.
            if not chunk:
                break
            yield chunk

    def _set_common_request_kwargs(self, headers, kwargs):
        """Handle the common parameters used to send the request.

        Mutates ``headers`` (sets Content-Type) and ``kwargs`` (pops
        ``data``, sets ``stream``) and returns the request body to send.
        """

        # Default Content-Type is octet-stream
        content_type = headers.get('Content-Type', 'application/octet-stream')

        # NOTE(jamielennox): remove this later. Managers should pass json= if
        # they want to send json data.
        data = kwargs.pop("data", None)
        if data is not None and not isinstance(data, six.string_types):
            try:
                data = json.dumps(data)
                content_type = 'application/json'
            except TypeError:
                # Here we assume it's a file-like object
                # and we'll chunk it
                data = self._chunk_body(data)

        headers['Content-Type'] = content_type
        # Stream the response only when transferring raw image data.
        kwargs['stream'] = content_type == 'application/octet-stream'

        return data

    def _handle_response(self, resp):
        """Map error statuses to exceptions and decode the body.

        :param resp: a ``requests.Response``
        :returns: tuple of (resp, body) where body is decoded JSON, a
            StringIO of text, an iterator over image chunks, or None
        :raises: a ``glanceclient.exc`` error built from the response
        """
        if not resp.ok:
            # Lazy %-style args: the message is only formatted if DEBUG
            # logging is actually enabled.
            LOG.debug("Request returned failure status %s.", resp.status_code)
            raise exc.from_response(resp, resp.content)
        elif resp.status_code == requests.codes.MULTIPLE_CHOICES:
            raise exc.from_response(resp)

        content_type = resp.headers.get('Content-Type')

        # Read body into string if it isn't obviously image data
        if content_type == 'application/octet-stream':
            # Do not read all response in memory when downloading an image.
            body_iter = _close_after_stream(resp, CHUNKSIZE)
        else:
            content = resp.text
            if content_type and content_type.startswith('application/json'):
                # Let's use requests json method, it should take care of
                # response encoding
                body_iter = resp.json()
            else:
                # Fall back to trying to parse the text as JSON anyway;
                # if that fails the body is treated as absent.
                body_iter = six.StringIO(content)
                try:
                    body_iter = json.loads(''.join([c for c in body_iter]))
                except ValueError:
                    body_iter = None

        return resp, body_iter
|
|
|
|
|
|
class HTTPClient(_BaseHTTPClient):
    """Client that talks to a Glance endpoint over a plain requests.Session.

    Used when the caller supplies an endpoint URL (and optionally a
    pre-obtained auth token) instead of a keystone session.
    """

    def __init__(self, endpoint, **kwargs):
        self.endpoint = endpoint
        self.identity_headers = kwargs.get('identity_headers')
        self.auth_token = kwargs.get('token')
        if self.identity_headers:
            # An explicit X-Auth-Token among the identity headers wins
            # over the 'token' kwarg; remove it so it is not sent twice.
            if self.identity_headers.get('X-Auth-Token'):
                self.auth_token = self.identity_headers.get('X-Auth-Token')
                del self.identity_headers['X-Auth-Token']

        self.session = requests.Session()
        self.session.headers["User-Agent"] = USER_AGENT

        if self.auth_token:
            self.session.headers["X-Auth-Token"] = self.auth_token

        # NOTE(review): self.timeout is stored but never passed to
        # session.request() below -- callers appear to supply timeouts via
        # request kwargs instead; confirm before relying on it.
        self.timeout = float(kwargs.get('timeout', 600))

        if self.endpoint.startswith("https"):
            compression = kwargs.get('ssl_compression', True)

            if compression is False:
                # Note: This is not seen by default. (python must be
                # run with -Wd)
                warnings.warn('The "ssl_compression" argument has been '
                              'deprecated.', DeprecationWarning)

            if kwargs.get('insecure', False) is True:
                self.session.verify = False
            else:
                # NOTE: equality (!=), not identity ('is not'), against the
                # string literal -- the original identity check depended on
                # string interning and raises SyntaxWarning on Python 3.8+.
                if kwargs.get('cacert', None) != '':
                    self.session.verify = kwargs.get('cacert', True)

            self.session.cert = (kwargs.get('cert_file'),
                                 kwargs.get('key_file'))

    @staticmethod
    def parse_endpoint(endpoint):
        """Split ``endpoint`` into its URL components."""
        return netutils.urlsplit(endpoint)

    def log_curl_request(self, method, url, headers, data, kwargs):
        """Log an equivalent curl command line for the outgoing request."""
        curl = ['curl -g -i -X %s' % method]

        headers = copy.deepcopy(headers)
        headers.update(self.session.headers)

        for (key, value) in six.iteritems(headers):
            # safe_header masks sensitive values (e.g. tokens) before logging.
            header = '-H \'%s: %s\'' % utils.safe_header(key, value)
            curl.append(header)

        if not self.session.verify:
            curl.append('-k')
        else:
            if isinstance(self.session.verify, six.string_types):
                curl.append(' --cacert %s' % self.session.verify)

        if self.session.cert:
            curl.append(' --cert %s --key %s' % self.session.cert)

        if data and isinstance(data, six.string_types):
            curl.append('-d \'%s\'' % data)

        curl.append(url)

        msg = ' '.join([encodeutils.safe_decode(item, errors='ignore')
                        for item in curl])
        LOG.debug(msg)

    @staticmethod
    def log_http_response(resp):
        """Log the status line, headers and (non-image) body of ``resp``."""
        status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        headers = resp.headers.items()
        dump.extend(['%s: %s' % utils.safe_header(k, v) for k, v in headers])
        dump.append('')
        content_type = resp.headers.get('Content-Type')

        # Never dump raw image data into the log.
        if content_type != 'application/octet-stream':
            dump.extend([resp.text, ''])
        LOG.debug('\n'.join([encodeutils.safe_decode(x, errors='ignore')
                             for x in dump]))

    @staticmethod
    def encode_headers(headers):
        """Encodes headers.

        Note: This should be used right before
        sending anything out.

        :param headers: Headers to encode
        :returns: Dictionary with encoded headers'
            names and values
        """
        return dict((encodeutils.safe_encode(h), encodeutils.safe_encode(v))
                    for h, v in six.iteritems(headers) if v is not None)

    def _request(self, method, url, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around httplib.HTTP(S)Connection.request to handle tasks such
        as setting headers and error handling.

        :raises exc.InvalidEndpoint: on timeouts and address errors
        :raises exc.CommunicationError: on connection-level failures
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        headers = copy.deepcopy(kwargs.pop('headers', {}))

        if self.identity_headers:
            for k, v in six.iteritems(self.identity_headers):
                headers.setdefault(k, v)

        data = self._set_common_request_kwargs(headers, kwargs)

        if osprofiler_web:
            headers.update(osprofiler_web.get_trace_id_headers())

        # Note(flaper87): Before letting headers / url fly,
        # they should be encoded otherwise httplib will
        # complain.
        headers = self.encode_headers(headers)

        if self.endpoint.endswith("/") or url.startswith("/"):
            conn_url = "%s%s" % (self.endpoint, url)
        else:
            conn_url = "%s/%s" % (self.endpoint, url)
        self.log_curl_request(method, conn_url, headers, data, kwargs)

        try:
            resp = self.session.request(method,
                                        conn_url,
                                        data=data,
                                        headers=headers,
                                        **kwargs)
        except requests.exceptions.Timeout as e:
            message = ("Error communicating with %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.InvalidEndpoint(message=message)
        except (requests.exceptions.ConnectionError, ProtocolError) as e:
            message = ("Error finding address for %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.CommunicationError(message=message)
        except socket.gaierror as e:
            # NOTE: the previous code referenced self.endpoint_hostname,
            # which is never set in this class, so this handler raised
            # AttributeError instead of InvalidEndpoint. Derive the host
            # from the configured endpoint instead.
            message = "Error finding address for %s: %s" % (
                self.parse_endpoint(self.endpoint).netloc, e)
            raise exc.InvalidEndpoint(message=message)
        except (socket.error, socket.timeout) as e:
            endpoint = self.endpoint
            message = ("Error communicating with %(endpoint)s %(e)s" %
                       {'endpoint': endpoint, 'e': e})
            raise exc.CommunicationError(message=message)

        resp, body_iter = self._handle_response(resp)
        self.log_http_response(resp)
        return resp, body_iter

    def head(self, url, **kwargs):
        return self._request('HEAD', url, **kwargs)

    def get(self, url, **kwargs):
        return self._request('GET', url, **kwargs)

    def post(self, url, **kwargs):
        return self._request('POST', url, **kwargs)

    def put(self, url, **kwargs):
        return self._request('PUT', url, **kwargs)

    def patch(self, url, **kwargs):
        return self._request('PATCH', url, **kwargs)

    def delete(self, url, **kwargs):
        return self._request('DELETE', url, **kwargs)
|
|
|
|
|
|
def _close_after_stream(response, chunk_size):
|
|
"""Iterate over the content and ensure the response is closed after."""
|
|
# Yield each chunk in the response body
|
|
for chunk in response.iter_content(chunk_size=chunk_size):
|
|
yield chunk
|
|
# Once we're done streaming the body, ensure everything is closed.
|
|
# This will return the connection to the HTTPConnectionPool in urllib3
|
|
# and ideally reduce the number of HTTPConnectionPool full warnings.
|
|
response.close()
|
|
|
|
|
|
class SessionClient(adapter.Adapter, _BaseHTTPClient):
    """HTTP client backed by a keystoneclient session/adapter."""

    def __init__(self, session, **kwargs):
        kwargs.setdefault('user_agent', USER_AGENT)
        kwargs.setdefault('service_type', 'image')
        super(SessionClient, self).__init__(session, **kwargs)

    def _error_url(self, url, auth):
        # Both exception handlers in request() previously duplicated this
        # URL construction verbatim; build the full request URL for error
        # messages in one place.
        endpoint = self.get_endpoint(auth=auth)
        return "%s/%s" % (endpoint.rstrip('/'), url.lstrip('/'))

    def request(self, url, method, **kwargs):
        """Send a request through the keystone adapter.

        :param url: path relative to the service endpoint
        :param method: HTTP method name
        :returns: tuple of (resp, body) as produced by _handle_response
        :raises exc.InvalidEndpoint: when the request times out
        :raises exc.CommunicationError: when the connection is refused
        """
        headers = kwargs.pop('headers', {})
        # Let glanceclient map error statuses itself rather than having
        # keystoneclient raise on them.
        kwargs['raise_exc'] = False
        data = self._set_common_request_kwargs(headers, kwargs)

        try:
            resp = super(SessionClient, self).request(url,
                                                      method,
                                                      headers=headers,
                                                      data=data,
                                                      **kwargs)
        except ksc_exc.RequestTimeout as e:
            conn_url = self._error_url(url, kwargs.get('auth'))
            message = ("Error communicating with %(url)s %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.InvalidEndpoint(message=message)
        except ksc_exc.ConnectionRefused as e:
            conn_url = self._error_url(url, kwargs.get('auth'))
            message = ("Error finding address for %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.CommunicationError(message=message)

        return self._handle_response(resp)
|
|
|
|
|
|
def get_http_client(endpoint=None, session=None, **kwargs):
    """Build the appropriate client for the supplied credentials.

    A keystone ``session`` takes precedence and yields a SessionClient;
    otherwise an ``endpoint`` yields a standalone HTTPClient. Supplying
    neither is an error.
    """
    if session:
        return SessionClient(session, **kwargs)
    if endpoint:
        return HTTPClient(endpoint, **kwargs)
    raise AttributeError('Constructing a client must contain either an '
                         'endpoint or a session')
|