2012-08-02 14:22:27 -07:00
|
|
|
# Copyright 2012 OpenStack LLC.
|
|
|
|
# All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License. You may obtain
|
|
|
|
# a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
import copy
|
2013-07-08 21:18:16 +02:00
|
|
|
import errno
|
|
|
|
import hashlib
|
2012-07-10 20:51:00 -07:00
|
|
|
import httplib
|
2012-03-26 22:48:48 -07:00
|
|
|
import logging
|
2013-09-25 10:39:36 -05:00
|
|
|
import posixpath
|
2012-08-02 14:16:13 -07:00
|
|
|
import socket
|
2012-07-29 22:12:37 -07:00
|
|
|
import StringIO
|
2012-09-21 14:18:22 +00:00
|
|
|
import struct
|
2012-03-26 22:48:48 -07:00
|
|
|
import urlparse
|
|
|
|
|
|
|
|
try:
|
|
|
|
import json
|
|
|
|
except ImportError:
|
|
|
|
import simplejson as json
|
|
|
|
|
|
|
|
# Python 2.5 compat fix
|
|
|
|
if not hasattr(urlparse, 'parse_qsl'):
|
|
|
|
import cgi
|
|
|
|
urlparse.parse_qsl = cgi.parse_qsl
|
|
|
|
|
2012-09-21 14:18:22 +00:00
|
|
|
import OpenSSL
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2012-07-12 18:30:54 -07:00
|
|
|
from glanceclient import exc
|
2013-01-30 15:18:44 +01:00
|
|
|
from glanceclient.common import utils
|
2013-05-22 11:31:25 +02:00
|
|
|
from glanceclient.openstack.common import strutils
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2013-03-20 18:00:39 +00:00
|
|
|
try:
|
|
|
|
from eventlet import patcher
|
|
|
|
# Handle case where we are running in a monkey patched environment
|
|
|
|
if patcher.is_monkey_patched('socket'):
|
|
|
|
from eventlet.green.httplib import HTTPSConnection
|
|
|
|
from eventlet.green.OpenSSL.SSL import GreenConnection as Connection
|
|
|
|
from eventlet.greenio import GreenSocket
|
|
|
|
# TODO(mclaren): A getsockopt workaround: see 'getsockopt' doc string
|
|
|
|
GreenSocket.getsockopt = utils.getsockopt
|
|
|
|
else:
|
|
|
|
raise ImportError
|
|
|
|
except ImportError:
|
|
|
|
from httplib import HTTPSConnection
|
|
|
|
from OpenSSL.SSL import Connection as Connection
|
|
|
|
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2012-07-29 22:12:37 -07:00
|
|
|
# Module-level logger for request/response debug output.
LOG = logging.getLogger(__name__)

# Default User-Agent header attached to every outgoing request.
USER_AGENT = 'python-glanceclient'
# Read/write granularity for streamed (chunked) bodies.
CHUNKSIZE = 1024 * 64  # 64kB
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
|
2012-07-10 20:51:00 -07:00
|
|
|
class HTTPClient(object):
    """HTTP(S) client for a Glance API endpoint.

    Wraps httplib.HTTP(S)Connection to add token authentication,
    identity-header pass-through, curl-style debug logging, redirect
    following and chunked request bodies.
    """

    def __init__(self, endpoint, **kwargs):
        """Initialize the client for a given endpoint.

        :param endpoint: base URL of the service, e.g. http://host:9292
        :param kwargs: optional settings: ``token``, ``identity_headers``,
                       ``timeout`` and, for https endpoints, ``cacert``,
                       ``cert_file``, ``key_file``, ``insecure``,
                       ``ssl_compression``.
        """
        self.endpoint = endpoint
        endpoint_parts = self.parse_endpoint(self.endpoint)
        self.endpoint_scheme = endpoint_parts.scheme
        self.endpoint_hostname = endpoint_parts.hostname
        self.endpoint_port = endpoint_parts.port
        self.endpoint_path = endpoint_parts.path

        self.connection_class = self.get_connection_class(self.endpoint_scheme)
        self.connection_kwargs = self.get_connection_kwargs(
            self.endpoint_scheme, **kwargs)

        self.identity_headers = kwargs.get('identity_headers')
        self.auth_token = kwargs.get('token')
        if self.identity_headers:
            # An explicit X-Auth-Token among the identity headers takes
            # precedence over the 'token' kwarg; remove it so the header
            # is not sent twice.
            if self.identity_headers.get('X-Auth-Token'):
                self.auth_token = self.identity_headers.get('X-Auth-Token')
                del self.identity_headers['X-Auth-Token']

    @staticmethod
    def parse_endpoint(endpoint):
        """Return the urlparse result for the endpoint URL."""
        return urlparse.urlparse(endpoint)

    @staticmethod
    def get_connection_class(scheme):
        """Return the connection class matching the URL scheme."""
        if scheme == 'https':
            return VerifiedHTTPSConnection
        else:
            return httplib.HTTPConnection

    @staticmethod
    def get_connection_kwargs(scheme, **kwargs):
        """Extract the kwargs the connection class accepts.

        Always includes 'timeout' (default 600 seconds); https adds the
        SSL-related options.
        """
        _kwargs = {'timeout': float(kwargs.get('timeout', 600))}

        if scheme == 'https':
            _kwargs['cacert'] = kwargs.get('cacert', None)
            _kwargs['cert_file'] = kwargs.get('cert_file', None)
            _kwargs['key_file'] = kwargs.get('key_file', None)
            _kwargs['insecure'] = kwargs.get('insecure', False)
            _kwargs['ssl_compression'] = kwargs.get('ssl_compression', True)

        return _kwargs

    def get_connection(self):
        """Create a new connection to the endpoint.

        :raises exc.InvalidEndpoint: if the host/port are malformed.
        """
        _class = self.connection_class
        try:
            return _class(self.endpoint_hostname, self.endpoint_port,
                          **self.connection_kwargs)
        except httplib.InvalidURL:
            raise exc.InvalidEndpoint()

    def log_curl_request(self, method, url, kwargs):
        """Log the request as an equivalent curl command line."""
        curl = ['curl -i -X %s' % method]

        for (key, value) in kwargs['headers'].items():
            header = '-H \'%s: %s\'' % (key, value)
            curl.append(header)

        conn_params_fmt = [
            ('key_file', '--key %s'),
            ('cert_file', '--cert %s'),
            ('cacert', '--cacert %s'),
        ]
        for (key, fmt) in conn_params_fmt:
            value = self.connection_kwargs.get(key)
            if value:
                curl.append(fmt % value)

        if self.connection_kwargs.get('insecure'):
            curl.append('-k')

        if kwargs.get('body') is not None:
            curl.append('-d \'%s\'' % kwargs['body'])

        curl.append('%s%s' % (self.endpoint, url))
        LOG.debug(strutils.safe_encode(' '.join(curl)))

    @staticmethod
    def log_http_response(resp, body=None):
        """Log an HTTP response status line, headers and optional body."""
        status = (resp.version / 10.0, resp.status, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        dump.extend(['%s: %s' % (k, v) for k, v in resp.getheaders()])
        dump.append('')
        if body:
            dump.extend([body, ''])
        LOG.debug(strutils.safe_encode('\n'.join(dump)))

    @staticmethod
    def encode_headers(headers):
        """Encodes headers.

        Note: This should be used right before
        sending anything out.

        :param headers: Headers to encode
        :returns: Dictionary with encoded headers'
                  names and values
        """
        to_str = strutils.safe_encode
        return dict([(to_str(h), to_str(v)) for h, v in headers.iteritems()])

    def _http_request(self, url, method, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around httplib.HTTP(S)Connection.request to handle tasks such
        as setting headers and error handling.

        :raises exc.InvalidEndpoint: when the hostname cannot be resolved.
        :raises exc.CommunicationError: on socket-level failures.
        :returns: (response, body iterator) tuple.
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
        kwargs['headers'].setdefault('User-Agent', USER_AGENT)
        if self.auth_token:
            kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)

        if self.identity_headers:
            for k, v in self.identity_headers.iteritems():
                kwargs['headers'].setdefault(k, v)

        self.log_curl_request(method, url, kwargs)
        conn = self.get_connection()

        # Note(flaper87): Before letting headers / url fly,
        # they should be encoded otherwise httplib will
        # complain. If we decide to rely on python-request
        # this wont be necessary anymore.
        kwargs['headers'] = self.encode_headers(kwargs['headers'])

        try:
            if self.endpoint_path:
                # NOTE(yuyangbj): this method _http_request could either be
                # called by API layer, or be called recursively with
                # redirection. For example, url would be '/v1/images/detail'
                # from API layer, but url would be 'https://example.com:92/
                # v1/images/detail' from recursion.
                # See bug #1230032 and bug #1208618.
                if url is not None:
                    all_parts = urlparse.urlparse(url)
                    if not (all_parts.scheme and all_parts.netloc):
                        norm_parse = posixpath.normpath
                        url = norm_parse('/'.join([self.endpoint_path, url]))
                else:
                    url = self.endpoint_path

            conn_url = urlparse.urlsplit(url).geturl()
            # Note(flaper87): Ditto, headers / url
            # encoding to make httplib happy.
            conn_url = strutils.safe_encode(conn_url)
            if kwargs['headers'].get('Transfer-Encoding') == 'chunked':
                # Stream the body using HTTP/1.1 chunked transfer coding.
                conn.putrequest(method, conn_url)
                for header, value in kwargs['headers'].items():
                    conn.putheader(header, value)
                conn.endheaders()
                chunk = kwargs['body'].read(CHUNKSIZE)
                # Chunk it, baby...
                while chunk:
                    conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                    chunk = kwargs['body'].read(CHUNKSIZE)
                conn.send('0\r\n\r\n')
            else:
                conn.request(method, conn_url, **kwargs)
            resp = conn.getresponse()
        except socket.gaierror as e:
            message = "Error finding address for %s: %s" % (
                self.endpoint_hostname, e)
            raise exc.InvalidEndpoint(message=message)
        except (socket.error, socket.timeout) as e:
            endpoint = self.endpoint
            message = "Error communicating with %(endpoint)s %(e)s" % locals()
            raise exc.CommunicationError(message=message)

        body_iter = ResponseBodyIterator(resp)

        # BUGFIX: body_str must be defined even when the body is left as an
        # iterator (octet-stream responses); previously a failure status on
        # such a response raised NameError instead of the intended exception.
        body_str = None

        # Read body into string if it isn't obviously image data
        if resp.getheader('content-type', None) != 'application/octet-stream':
            body_str = ''.join([chunk for chunk in body_iter])
            self.log_http_response(resp, body_str)
            body_iter = StringIO.StringIO(body_str)
        else:
            self.log_http_response(resp)

        if 400 <= resp.status < 600:
            LOG.error("Request returned failure status.")
            raise exc.from_response(resp, body_str)
        elif resp.status in (301, 302, 305):
            # Redirected. Reissue the request to the new location.
            return self._http_request(resp.getheader('location', None), method,
                                      **kwargs)
        elif resp.status == 300:
            raise exc.from_response(resp)

        return resp, body_iter

    def json_request(self, method, url, **kwargs):
        """Issue a request with a JSON body and decode a JSON response.

        :returns: (response, decoded body or None) tuple.
        """
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type', 'application/json')

        if 'body' in kwargs:
            kwargs['body'] = json.dumps(kwargs['body'])

        resp, body_iter = self._http_request(url, method, **kwargs)

        if 'application/json' in resp.getheader('content-type', ''):
            body = ''.join([chunk for chunk in body_iter])
            try:
                body = json.loads(body)
            except ValueError:
                LOG.error('Could not decode response body as JSON')
        else:
            body = None

        return resp, body

    def raw_request(self, method, url, **kwargs):
        """Issue a request with a raw (octet-stream) body.

        File-like bodies on POST/PUT are streamed with chunked
        transfer encoding since their size may not be known.
        """
        kwargs.setdefault('headers', {})
        kwargs['headers'].setdefault('Content-Type',
                                     'application/octet-stream')
        if 'body' in kwargs:
            if (hasattr(kwargs['body'], 'read')
                    and method.lower() in ('post', 'put')):
                # We use 'Transfer-Encoding: chunked' because
                # body size may not always be known in advance.
                kwargs['headers']['Transfer-Encoding'] = 'chunked'
        return self._http_request(url, method, **kwargs)
|
2012-07-11 19:34:28 -07:00
|
|
|
|
|
|
|
|
2012-09-21 14:18:22 +00:00
|
|
|
class OpenSSLConnectionDelegator(object):
    """Wrapper that forwards to an OpenSSL.SSL.Connection instance.

    httplib expects its socket-like object to provide a 'makefile'
    method, which OpenSSL.SSL.Connection does not supply; this wrapper
    adds one. Delegation (rather than subclassing) is used because it
    is not possible to inherit from OpenSSL.SSL.Connection.
    """
    def __init__(self, *args, **kwargs):
        # Hold the real SSL connection; anything not defined on this
        # class is forwarded to it via __getattr__.
        self.connection = Connection(*args, **kwargs)

    def __getattr__(self, name):
        target = self.connection
        return getattr(target, name)

    def makefile(self, *args, **kwargs):
        # Ensure the socket is closed when this file object is closed:
        # VerifiedHTTPSConnection.close() intentionally drops its socket
        # reference without closing it, so the response's file must do
        # the closing instead.
        kwargs['close'] = True
        return socket._fileobject(self.connection, *args, **kwargs)
|
|
|
|
|
|
|
|
|
2013-03-20 18:00:39 +00:00
|
|
|
class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Extended HTTPSConnection which uses the OpenSSL library
    for enhanced SSL support.
    Note: Much of this functionality can eventually be replaced
    with native Python 3.3 code.
    """
    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 cacert=None, timeout=None, insecure=False,
                 ssl_compression=True):
        """Store SSL parameters and build the OpenSSL context.

        :param insecure: if True, skip server certificate verification.
        :param ssl_compression: if False, disable TLS compression.
        """
        HTTPSConnection.__init__(self, host, port,
                                 key_file=key_file,
                                 cert_file=cert_file)
        self.key_file = key_file
        self.cert_file = cert_file
        self.timeout = timeout
        self.insecure = insecure
        self.ssl_compression = ssl_compression
        self.cacert = cacert
        self.setcontext()

    @staticmethod
    def host_matches_cert(host, x509):
        """
        Verify that the the x509 certificate we have received
        from 'host' correctly identifies the server we are
        connecting to, ie that the certificate's Common Name
        or a Subject Alternative Name matches 'host'.

        :returns: True on a match.
        :raises exc.SSLCertificateError: when no name matches.
        """
        common_name = x509.get_subject().commonName

        # First see if we can match the CN
        if common_name == host:
            return True

        # Support single wildcard matching
        if common_name.startswith('*.') and host.find('.') > 0:
            if common_name[2:] == host.split('.', 1)[1]:
                return True

        # Also try Subject Alternative Names for a match
        san_list = None
        for i in xrange(x509.get_extension_count()):
            ext = x509.get_extension(i)
            if ext.get_short_name() == 'subjectAltName':
                san_list = str(ext)
                for san in ''.join(san_list.split()).split(','):
                    if san == "DNS:%s" % host:
                        return True

        # Server certificate does not match host
        msg = ('Host "%s" does not match x509 certificate contents: '
               'CommonName "%s"' % (host, common_name))
        if san_list is not None:
            msg = msg + ', subjectAltName "%s"' % san_list
        raise exc.SSLCertificateError(msg)

    def verify_callback(self, connection, x509, errnum,
                        depth, preverify_ok):
        """OpenSSL verify callback invoked for each certificate in the chain.

        :raises exc.SSLCertificateError: for expired or mismatched certs.
        """
        # NOTE(leaman): preverify_ok may be a non-boolean type
        preverify_ok = bool(preverify_ok)
        if x509.has_expired():
            msg = "SSL Certificate expired on '%s'" % x509.get_notAfter()
            raise exc.SSLCertificateError(msg)

        if depth == 0 and preverify_ok:
            # We verify that the host matches against the last
            # certificate in the chain
            return self.host_matches_cert(self.host, x509)
        else:
            # Pass through OpenSSL's default result
            return preverify_ok

    def setcontext(self):
        """
        Set up the OpenSSL context.

        :raises exc.SSLConfigurationError: when cert/key/CA files
            cannot be loaded.
        """
        self.context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)

        if self.ssl_compression is False:
            self.context.set_options(0x20000)  # SSL_OP_NO_COMPRESSION

        if self.insecure is not True:
            self.context.set_verify(OpenSSL.SSL.VERIFY_PEER,
                                    self.verify_callback)
        else:
            self.context.set_verify(OpenSSL.SSL.VERIFY_NONE,
                                    lambda *args: True)

        if self.cert_file:
            try:
                self.context.use_certificate_file(self.cert_file)
            except Exception as e:
                msg = 'Unable to load cert from "%s" %s' % (self.cert_file, e)
                raise exc.SSLConfigurationError(msg)
            if self.key_file is None:
                # We support having key and cert in same file
                try:
                    self.context.use_privatekey_file(self.cert_file)
                except Exception as e:
                    msg = ('No key file specified and unable to load key '
                           'from "%s" %s' % (self.cert_file, e))
                    raise exc.SSLConfigurationError(msg)

        if self.key_file:
            try:
                self.context.use_privatekey_file(self.key_file)
            except Exception as e:
                msg = 'Unable to load key from "%s" %s' % (self.key_file, e)
                raise exc.SSLConfigurationError(msg)

        if self.cacert:
            try:
                self.context.load_verify_locations(self.cacert)
            except Exception as e:
                # BUGFIX: the format string previously had a single '%s'
                # placeholder but was applied to the two-item tuple
                # (self.cacert, e), which raised TypeError and masked the
                # real configuration error.
                msg = 'Unable to load CA from "%s" %s' % (self.cacert, e)
                raise exc.SSLConfigurationError(msg)
        else:
            self.context.set_default_verify_paths()

    def connect(self):
        """
        Connect to an SSL port using the OpenSSL library and apply
        per-connection parameters.
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if self.timeout is not None:
            # '0' microseconds
            # NOTE(review): 'fL' packs a float + unsigned long, while a
            # struct timeval is conventionally two longs -- confirm this
            # works on all target platforms.
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO,
                            struct.pack('fL', self.timeout, 0))
        self.sock = OpenSSLConnectionDelegator(self.context, sock)
        self.sock.connect((self.host, self.port))

    def close(self):
        if self.sock:
            # Removing reference to socket but don't close it yet.
            # Response close will close both socket and associated
            # file. Closing socket too soon will cause response
            # reads to fail with socket IO error 'Bad file descriptor'.
            self.sock = None

        # Calling close on HTTPConnection to continue doing that cleanup.
        HTTPSConnection.close(self)
|
|
|
|
|
2012-08-02 14:16:13 -07:00
|
|
|
|
2012-07-11 19:34:28 -07:00
|
|
|
class ResponseBodyIterator(object):
    """Iterator over the body of an HTTP response.

    While the body is being consumed it is fed through an MD5
    accumulator; once the body is exhausted, the accumulated digest is
    compared against a checksum registered via `set_checksum` (if any)
    and a mismatch raises IOError. With no registered checksum, no
    integrity check is performed.
    """

    def __init__(self, resp):
        # Advertised size comes from the Content-Length header (0 if absent).
        self._size = int(resp.getheader('content-length', 0))
        self._resp = resp
        self._end_reached = False
        self._checksum = None

    def set_checksum(self, checksum):
        """Register the checksum to verify while iterating this instance.

        :raise: AttributeError if iterator is already consumed.
        """
        if self._end_reached:
            msg = "Can't set checksum for an already consumed iterator"
            raise AttributeError(msg)
        self._checksum = checksum

    def __len__(self):
        return int(self._size)

    def __iter__(self):
        digest = hashlib.md5()
        while True:
            try:
                piece = self.next()
            except StopIteration:
                self._end_reached = True
                # NOTE(mouad): Check image integrity when the end of response
                # body is reached.
                actual = digest.hexdigest()
                if self._checksum is not None and actual != self._checksum:
                    msg = ('Corrupted image. Checksum was %s '
                           'expected %s' % (actual, self._checksum))
                    raise IOError(errno.EPIPE, msg)
                raise
            yield piece
            digest.update(piece)

    def next(self):
        data = self._resp.read(CHUNKSIZE)
        if not data:
            raise StopIteration()
        return data
|