Replace old httpclient with requests
This review implements blueprint python-request and replaces the old HTTP client implementation in favor of a new one based on python-requests.

Major changes:

* raw_request and json_request were removed; every request is now handled by a single method, _request.
* New methods matching the HTTP verbs were added:
  - get
  - put
  - post
  - head
  - patch
  - delete
* Content-Type is now inferred from the data being sent:
  - a file-like object is sent as a chunked request
  - a Python object that is not an instance of basestring is serialized to JSON
  - in every other case the incoming Content-Type is kept and the data is sent as-is
* glanceclient's HTTPSConnection implementation is used when the no-compression flag is set to True.

Co-Author: Flavio Percoco <flaper87@gmail.com>
Change-Id: I09f70eee3e2777f52ce040296015d41649c2586a
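To make the Content-Type bullet concrete, here is a rough sketch of the inference and chunking behaviour, simplified from the _request logic in the diff below; prepare_data and _chunk_body are illustrative helper names, not part of the client's API:

import json

CHUNKSIZE = 1024 * 64  # the client's 64 kB chunk size


def _chunk_body(body):
    # Stream a file-like object in fixed-size pieces so requests can
    # send it with Transfer-Encoding: chunked.
    chunk = body.read(CHUNKSIZE)
    while chunk:
        yield chunk
        chunk = body.read(CHUNKSIZE)


def prepare_data(data, headers):
    # Default to octet-stream, as the new _request method does.
    content_type = headers.get('Content-Type', 'application/octet-stream')
    if data is not None and not isinstance(data, str):
        try:
            # Plain dicts/lists are serialized and sent as JSON.
            data = json.dumps(data)
            content_type = 'application/json'
        except TypeError:
            # Anything json.dumps cannot handle is assumed to be
            # file-like and is chunked instead.
            data = _chunk_body(data)
    headers['Content-Type'] = content_type
    return data, headers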
Parent: 1db17aaad9
Commit: dbb242b776
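Before the diff itself, a short usage sketch of the new verb-style interface; the endpoint, token, image id and file name below are placeholders:

from glanceclient.common import http

client = http.HTTPClient('http://example.com:9292', token='abc123')

# GET returns the parsed JSON body when the server replies with
# application/json.
resp, body = client.get('/v1/images/detail')

# Dict bodies are serialized by _request and sent as application/json.
resp, body = client.post('/v2/images', data={'name': 'cirros'})

# File-like objects are streamed in 64 kB chunks.
with open('image.qcow2', 'rb') as image_data:
    client.put('/v2/images/some-image-id/file',
               headers={'Content-Type': 'application/octet-stream'},
               data=image_data)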
@ -14,16 +14,11 @@
# under the License.

import copy
import errno
import hashlib
import logging
import posixpath
import socket
import ssl
import struct

import requests
import six
from six.moves import http_client
from six.moves.urllib import parse

try:
@ -36,9 +31,7 @@ if not hasattr(parse, 'parse_qsl'):
    import cgi
    parse.parse_qsl = cgi.parse_qsl

import OpenSSL

from glanceclient.common import utils
from glanceclient.common import https
from glanceclient import exc
from glanceclient.openstack.common import importutils
from glanceclient.openstack.common import network_utils
@ -46,48 +39,15 @@ from glanceclient.openstack.common import strutils

osprofiler_web = importutils.try_import("osprofiler.web")

try:
    from eventlet import patcher
    # Handle case where we are running in a monkey patched environment
    if patcher.is_monkey_patched('socket'):
        from eventlet.green.httplib import HTTPSConnection
        from eventlet.green.OpenSSL.SSL import GreenConnection as Connection
        from eventlet.greenio import GreenSocket
        # TODO(mclaren): A getsockopt workaround: see 'getsockopt' doc string
        GreenSocket.getsockopt = utils.getsockopt
    else:
        raise ImportError
except ImportError:
    HTTPSConnection = http_client.HTTPSConnection
    from OpenSSL.SSL import Connection as Connection


LOG = logging.getLogger(__name__)
USER_AGENT = 'python-glanceclient'
CHUNKSIZE = 1024 * 64  # 64kB


def to_bytes(s):
    if isinstance(s, six.string_types):
        return six.b(s)
    else:
        return s


class HTTPClient(object):

    def __init__(self, endpoint, **kwargs):
        self.endpoint = endpoint
        endpoint_parts = self.parse_endpoint(self.endpoint)
        self.endpoint_scheme = endpoint_parts.scheme
        self.endpoint_hostname = endpoint_parts.hostname
        self.endpoint_port = endpoint_parts.port
        self.endpoint_path = endpoint_parts.path

        self.connection_class = self.get_connection_class(self.endpoint_scheme)
        self.connection_kwargs = self.get_connection_kwargs(
            self.endpoint_scheme, **kwargs)

        self.identity_headers = kwargs.get('identity_headers')
        self.auth_token = kwargs.get('token')
        if self.identity_headers:
@ -95,71 +55,58 @@ class HTTPClient(object):
|
||||
self.auth_token = self.identity_headers.get('X-Auth-Token')
|
||||
del self.identity_headers['X-Auth-Token']
|
||||
|
||||
self.session = requests.Session()
|
||||
self.session.headers["User-Agent"] = USER_AGENT
|
||||
self.session.headers["X-Auth-Token"] = self.auth_token
|
||||
|
||||
self.timeout = float(kwargs.get('timeout', 600))
|
||||
|
||||
if self.endpoint.startswith("https"):
|
||||
compression = kwargs.get('ssl_compression', True)
|
||||
|
||||
if not compression:
|
||||
self.session.mount("https://", https.HTTPSAdapter())
|
||||
|
||||
self.session.verify = kwargs.get('cacert',
|
||||
not kwargs.get('insecure', True))
|
||||
self.session.cert = (kwargs.get('cert_file'),
|
||||
kwargs.get('key_file'))
|
||||
|
||||
@staticmethod
|
||||
def parse_endpoint(endpoint):
|
||||
return network_utils.urlsplit(endpoint)
|
||||
|
||||
@staticmethod
|
||||
def get_connection_class(scheme):
|
||||
if scheme == 'https':
|
||||
return VerifiedHTTPSConnection
|
||||
else:
|
||||
return http_client.HTTPConnection
|
||||
|
||||
@staticmethod
|
||||
def get_connection_kwargs(scheme, **kwargs):
|
||||
_kwargs = {'timeout': float(kwargs.get('timeout', 600))}
|
||||
|
||||
if scheme == 'https':
|
||||
_kwargs['cacert'] = kwargs.get('cacert', None)
|
||||
_kwargs['cert_file'] = kwargs.get('cert_file', None)
|
||||
_kwargs['key_file'] = kwargs.get('key_file', None)
|
||||
_kwargs['insecure'] = kwargs.get('insecure', False)
|
||||
_kwargs['ssl_compression'] = kwargs.get('ssl_compression', True)
|
||||
|
||||
return _kwargs
|
||||
|
||||
def get_connection(self):
|
||||
_class = self.connection_class
|
||||
try:
|
||||
return _class(self.endpoint_hostname, self.endpoint_port,
|
||||
**self.connection_kwargs)
|
||||
except http_client.InvalidURL:
|
||||
raise exc.InvalidEndpoint()
|
||||
|
||||
def log_curl_request(self, method, url, kwargs):
|
||||
def log_curl_request(self, method, url, headers, data, kwargs):
|
||||
curl = ['curl -i -X %s' % method]
|
||||
|
||||
for (key, value) in kwargs['headers'].items():
|
||||
for (key, value) in self.session.headers.items():
|
||||
if key.lower() == 'x-auth-token':
|
||||
value = '*' * 3
|
||||
header = '-H \'%s: %s\'' % (key, value)
|
||||
curl.append(header)
|
||||
curl.append(strutils.safe_encode(header))
|
||||
|
||||
conn_params_fmt = [
|
||||
('key_file', '--key %s'),
|
||||
('cert_file', '--cert %s'),
|
||||
('cacert', '--cacert %s'),
|
||||
]
|
||||
for (key, fmt) in conn_params_fmt:
|
||||
value = self.connection_kwargs.get(key)
|
||||
if value:
|
||||
curl.append(fmt % value)
|
||||
|
||||
if self.connection_kwargs.get('insecure'):
|
||||
if not self.session.verify:
|
||||
curl.append('-k')
|
||||
else:
|
||||
if isinstance(self.session.verify, six.string_types):
|
||||
curl.append(' --cacert %s' % self.session.verify)
|
||||
|
||||
if kwargs.get('body') is not None:
|
||||
curl.append('-d \'%s\'' % kwargs['body'])
|
||||
if self.session.cert:
|
||||
curl.append(' --cert %s --key %s' % self.session.cert)
|
||||
|
||||
curl.append('%s%s' % (self.endpoint, url))
|
||||
if data and isinstance(data, six.string_types):
|
||||
curl.append('-d \'%s\'' % data)
|
||||
|
||||
if "//:" not in url:
|
||||
url = '%s%s' % (self.endpoint, url)
|
||||
curl.append(url)
|
||||
LOG.debug(strutils.safe_encode(' '.join(curl), errors='ignore'))
|
||||
|
||||
@staticmethod
|
||||
def log_http_response(resp, body=None):
|
||||
status = (resp.version / 10.0, resp.status, resp.reason)
|
||||
status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
|
||||
dump = ['\nHTTP/%.1f %s %s' % status]
|
||||
headers = resp.getheaders()
|
||||
headers = resp.headers.items()
|
||||
if 'X-Auth-Token' in headers:
|
||||
headers['X-Auth-Token'] = '*' * 3
|
||||
dump.extend(['%s: %s' % (k, v) for k, v in headers])
|
||||
@ -183,69 +130,59 @@ class HTTPClient(object):
|
||||
return dict((strutils.safe_encode(h), strutils.safe_encode(v))
|
||||
for h, v in six.iteritems(headers))
|
||||
|
||||
def _http_request(self, url, method, **kwargs):
|
||||
def _request(self, method, url, **kwargs):
|
||||
"""Send an http request with the specified characteristics.
|
||||
|
||||
Wrapper around httplib.HTTP(S)Connection.request to handle tasks such
|
||||
as setting headers and error handling.
|
||||
"""
|
||||
# Copy the kwargs so we can reuse the original in case of redirects
|
||||
kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
|
||||
kwargs['headers'].setdefault('User-Agent', USER_AGENT)
|
||||
headers = kwargs.pop("headers", {})
|
||||
headers = headers and copy.deepcopy(headers) or {}
|
||||
|
||||
if osprofiler_web:
|
||||
kwargs['headers'].update(osprofiler_web.get_trace_id_headers())
|
||||
# Default Content-Type is octet-stream
|
||||
content_type = headers.get('Content-Type', 'application/octet-stream')
|
||||
|
||||
if self.auth_token:
|
||||
kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)
|
||||
def chunk_body(body):
|
||||
chunk = body
|
||||
while chunk:
|
||||
chunk = body.read(CHUNKSIZE)
|
||||
yield chunk
|
||||
|
||||
if self.identity_headers:
|
||||
for k, v in six.iteritems(self.identity_headers):
|
||||
kwargs['headers'].setdefault(k, v)
|
||||
data = kwargs.pop("data", None)
|
||||
if data is not None and not isinstance(data, six.string_types):
|
||||
try:
|
||||
data = json.dumps(data)
|
||||
content_type = 'application/json'
|
||||
except TypeError:
|
||||
# Here we assume it's
|
||||
# a file-like object
|
||||
# and we'll chunk it
|
||||
data = chunk_body(data)
|
||||
|
||||
self.log_curl_request(method, url, kwargs)
|
||||
conn = self.get_connection()
|
||||
headers['Content-Type'] = content_type
|
||||
|
||||
# Note(flaper87): Before letting headers / url fly,
|
||||
# they should be encoded otherwise httplib will
|
||||
# complain. If we decide to rely on python-request
|
||||
# this wont be necessary anymore.
|
||||
kwargs['headers'] = self.encode_headers(kwargs['headers'])
|
||||
# complain.
|
||||
headers = self.encode_headers(headers)
|
||||
|
||||
try:
|
||||
if self.endpoint_path:
|
||||
# NOTE(yuyangbj): this method _http_request could either be
|
||||
# called by API layer, or be called recursively with
|
||||
# redirection. For example, url would be '/v1/images/detail'
|
||||
# from API layer, but url would be 'https://example.com:92/
|
||||
# v1/images/detail' from recursion.
|
||||
# See bug #1230032 and bug #1208618.
|
||||
if url is not None:
|
||||
all_parts = parse.urlparse(url)
|
||||
if not (all_parts.scheme and all_parts.netloc):
|
||||
norm_parse = posixpath.normpath
|
||||
url = norm_parse('/'.join([self.endpoint_path, url]))
|
||||
else:
|
||||
url = self.endpoint_path
|
||||
|
||||
conn_url = parse.urlsplit(url).geturl()
|
||||
# Note(flaper87): Ditto, headers / url
|
||||
# encoding to make httplib happy.
|
||||
conn_url = strutils.safe_encode(conn_url)
|
||||
if kwargs['headers'].get('Transfer-Encoding') == 'chunked':
|
||||
conn.putrequest(method, conn_url)
|
||||
for header, value in kwargs['headers'].items():
|
||||
conn.putheader(header, value)
|
||||
conn.endheaders()
|
||||
chunk = kwargs['body'].read(CHUNKSIZE)
|
||||
# Chunk it, baby...
|
||||
while chunk:
|
||||
conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
|
||||
chunk = kwargs['body'].read(CHUNKSIZE)
|
||||
conn.send('0\r\n\r\n')
|
||||
else:
|
||||
conn.request(method, conn_url, **kwargs)
|
||||
resp = conn.getresponse()
|
||||
conn_url = "%s/%s" % (self.endpoint, url)
|
||||
self.log_curl_request(method, conn_url, headers, data, kwargs)
|
||||
resp = self.session.request(method,
|
||||
conn_url,
|
||||
data=data,
|
||||
stream=True,
|
||||
headers=headers,
|
||||
**kwargs)
|
||||
except requests.exceptions.Timeout as e:
|
||||
message = ("Error communicating with %(endpoint)s %(e)s" %
|
||||
dict(url=conn_url, e=e))
|
||||
raise exc.InvalidEndpoint(message=message)
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
message = ("Error finding address for %(url)s: %(e)s" %
|
||||
dict(url=conn_url, e=e))
|
||||
raise exc.CommunicationError(message=message)
|
||||
except socket.gaierror as e:
|
||||
message = "Error finding address for %s: %s" % (
|
||||
self.endpoint_hostname, e)
|
||||
@ -256,357 +193,46 @@ class HTTPClient(object):
|
||||
{'endpoint': endpoint, 'e': e})
|
||||
raise exc.CommunicationError(message=message)
|
||||
|
||||
body_iter = ResponseBodyIterator(resp)
|
||||
|
||||
# Read body into string if it isn't obviously image data
|
||||
if resp.getheader('content-type', None) != 'application/octet-stream':
|
||||
body_str = b''.join([to_bytes(chunk) for chunk in body_iter])
|
||||
self.log_http_response(resp, body_str)
|
||||
body_iter = six.BytesIO(body_str)
|
||||
else:
|
||||
self.log_http_response(resp)
|
||||
|
||||
if 400 <= resp.status < 600:
|
||||
LOG.debug("Request returned failure status: %d" % resp.status)
|
||||
raise exc.from_response(resp, body_str)
|
||||
elif resp.status in (301, 302, 305):
|
||||
# Redirected. Reissue the request to the new location.
|
||||
return self._http_request(resp.getheader('location', None), method,
|
||||
**kwargs)
|
||||
elif resp.status == 300:
|
||||
if not resp.ok:
|
||||
LOG.error("Request returned failure status %s." % resp.status_code)
|
||||
raise exc.from_response(resp, resp.content)
|
||||
elif resp.status_code == requests.codes.MULTIPLE_CHOICES:
|
||||
raise exc.from_response(resp)
|
||||
|
||||
content_type = resp.headers.get('Content-Type')
|
||||
|
||||
# Read body into string if it isn't obviously image data
|
||||
if content_type == 'application/octet-stream':
|
||||
# Do not read all response in memory when
|
||||
# downloading an image.
|
||||
body_iter = resp.iter_content(chunk_size=CHUNKSIZE)
|
||||
self.log_http_response(resp)
|
||||
else:
|
||||
content = resp.content
|
||||
self.log_http_response(resp, content)
|
||||
if content_type and content_type.startswith('application/json'):
|
||||
# Let's use requests json method,
|
||||
# it should take care of response
|
||||
# encoding
|
||||
body_iter = resp.json()
|
||||
else:
|
||||
body_iter = six.StringIO(content)
|
||||
return resp, body_iter
|
||||
|
||||
def json_request(self, method, url, **kwargs):
|
||||
kwargs.setdefault('headers', {})
|
||||
kwargs['headers'].setdefault('Content-Type', 'application/json')
|
||||
|
||||
if 'body' in kwargs:
|
||||
kwargs['body'] = json.dumps(kwargs['body'])
|
||||
|
||||
resp, body_iter = self._http_request(url, method, **kwargs)
|
||||
|
||||
if 'application/json' in resp.getheader('content-type', ''):
|
||||
body = ''.join([chunk for chunk in body_iter])
|
||||
try:
|
||||
body = json.loads(body)
|
||||
except ValueError:
|
||||
LOG.error('Could not decode response body as JSON')
|
||||
else:
|
||||
body = None
|
||||
|
||||
return resp, body
|
||||
|
||||
def raw_request(self, method, url, **kwargs):
|
||||
kwargs.setdefault('headers', {})
|
||||
kwargs['headers'].setdefault('Content-Type',
|
||||
'application/octet-stream')
|
||||
|
||||
if 'content_length' in kwargs:
|
||||
content_length = kwargs.pop('content_length')
|
||||
else:
|
||||
content_length = None
|
||||
|
||||
if (('body' in kwargs) and (hasattr(kwargs['body'], 'read') and
|
||||
method.lower() in ('post', 'put'))):
|
||||
|
||||
# NOTE(dosaboy): only use chunked transfer if not setting a
|
||||
# content length since setting it will implicitly disable
|
||||
# chunking.
|
||||
|
||||
file_content_length = utils.get_file_size(kwargs['body'])
|
||||
if content_length is None:
|
||||
content_length = file_content_length
|
||||
elif (file_content_length and
|
||||
(content_length != file_content_length)):
|
||||
errmsg = ("supplied content-length (%s) does not match "
|
||||
"length of supplied data (%s)" %
|
||||
(content_length, file_content_length))
|
||||
raise AttributeError(errmsg)
|
||||
|
||||
if content_length is None:
|
||||
# We use 'Transfer-Encoding: chunked' because
|
||||
# body size may not always be known in advance.
|
||||
kwargs['headers']['Transfer-Encoding'] = 'chunked'
|
||||
else:
|
||||
kwargs['headers']['Content-Length'] = str(content_length)
|
||||
|
||||
return self._http_request(url, method, **kwargs)
|
||||
|
||||
def client_request(self, method, url, **kwargs):
|
||||
# NOTE(akurilin): this method provides compatibility with methods which
|
||||
# expects requests.Response object(for example - methods of
|
||||
# class Managers from common code).
|
||||
if 'json' in kwargs and 'body' not in kwargs:
|
||||
kwargs['body'] = kwargs.pop('json')
|
||||
resp, body = self.json_request(method, url, **kwargs)
|
||||
resp.json = lambda: body
|
||||
resp.content = bool(body)
|
||||
resp.status_code = resp.status
|
||||
return resp
|
||||
|
||||
def head(self, url, **kwargs):
|
||||
return self.client_request("HEAD", url, **kwargs)
|
||||
return self._request('HEAD', url, **kwargs)
|
||||
|
||||
def get(self, url, **kwargs):
|
||||
return self.client_request("GET", url, **kwargs)
|
||||
return self._request('GET', url, **kwargs)
|
||||
|
||||
def post(self, url, **kwargs):
|
||||
return self.client_request("POST", url, **kwargs)
|
||||
return self._request('POST', url, **kwargs)
|
||||
|
||||
def put(self, url, **kwargs):
|
||||
return self.client_request("PUT", url, **kwargs)
|
||||
|
||||
def delete(self, url, **kwargs):
|
||||
return self.raw_request("DELETE", url, **kwargs)
|
||||
return self._request('PUT', url, **kwargs)
|
||||
|
||||
def patch(self, url, **kwargs):
|
||||
return self.client_request("PATCH", url, **kwargs)
|
||||
return self._request('PATCH', url, **kwargs)
|
||||
|
||||
|
||||
class OpenSSLConnectionDelegator(object):
|
||||
"""
|
||||
An OpenSSL.SSL.Connection delegator.
|
||||
|
||||
Supplies an additional 'makefile' method which httplib requires
|
||||
and is not present in OpenSSL.SSL.Connection.
|
||||
|
||||
Note: Since it is not possible to inherit from OpenSSL.SSL.Connection
|
||||
a delegator must be used.
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.connection = Connection(*args, **kwargs)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.connection, name)
|
||||
|
||||
def makefile(self, *args, **kwargs):
|
||||
# Making sure socket is closed when this file is closed
|
||||
# since we now avoid closing socket on connection close
|
||||
# see new close method under VerifiedHTTPSConnection
|
||||
kwargs['close'] = True
|
||||
|
||||
return socket._fileobject(self.connection, *args, **kwargs)
|
||||
|
||||
|
||||
class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
"""
|
||||
Extended HTTPSConnection which uses the OpenSSL library
|
||||
for enhanced SSL support.
|
||||
Note: Much of this functionality can eventually be replaced
|
||||
with native Python 3.3 code.
|
||||
"""
|
||||
def __init__(self, host, port=None, key_file=None, cert_file=None,
|
||||
cacert=None, timeout=None, insecure=False,
|
||||
ssl_compression=True):
|
||||
# List of exceptions reported by Python3 instead of
|
||||
# SSLConfigurationError
|
||||
if six.PY3:
|
||||
excp_lst = (TypeError, FileNotFoundError, ssl.SSLError)
|
||||
else:
|
||||
excp_lst = ()
|
||||
try:
|
||||
HTTPSConnection.__init__(self, host, port,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file)
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.timeout = timeout
|
||||
self.insecure = insecure
|
||||
self.ssl_compression = ssl_compression
|
||||
self.cacert = None if cacert is None else str(cacert)
|
||||
self.setcontext()
|
||||
# ssl exceptions are reported in various form in Python 3
|
||||
# so to be compatible, we report the same kind as under
|
||||
# Python2
|
||||
except excp_lst as e:
|
||||
raise exc.SSLConfigurationError(str(e))
|
||||
|
||||
@staticmethod
|
||||
def host_matches_cert(host, x509):
|
||||
"""
|
||||
Verify that the x509 certificate we have received
|
||||
from 'host' correctly identifies the server we are
|
||||
connecting to, i.e. that the certificate's Common Name
|
||||
or a Subject Alternative Name matches 'host'.
|
||||
"""
|
||||
def check_match(name):
|
||||
# Directly match the name
|
||||
if name == host:
|
||||
return True
|
||||
|
||||
# Support single wildcard matching
|
||||
if name.startswith('*.') and host.find('.') > 0:
|
||||
if name[2:] == host.split('.', 1)[1]:
|
||||
return True
|
||||
|
||||
common_name = x509.get_subject().commonName
|
||||
|
||||
# First see if we can match the CN
|
||||
if check_match(common_name):
|
||||
return True
|
||||
|
||||
# Also try Subject Alternative Names for a match
|
||||
san_list = None
|
||||
for i in range(x509.get_extension_count()):
|
||||
ext = x509.get_extension(i)
|
||||
if ext.get_short_name() == b'subjectAltName':
|
||||
san_list = str(ext)
|
||||
for san in ''.join(san_list.split()).split(','):
|
||||
if san.startswith('DNS:'):
|
||||
if check_match(san.split(':', 1)[1]):
|
||||
return True
|
||||
|
||||
# Server certificate does not match host
|
||||
msg = ('Host "%s" does not match x509 certificate contents: '
|
||||
'CommonName "%s"' % (host, common_name))
|
||||
if san_list is not None:
|
||||
msg = msg + ', subjectAltName "%s"' % san_list
|
||||
raise exc.SSLCertificateError(msg)
|
||||
|
||||
def verify_callback(self, connection, x509, errnum,
|
||||
depth, preverify_ok):
|
||||
# NOTE(leaman): preverify_ok may be a non-boolean type
|
||||
preverify_ok = bool(preverify_ok)
|
||||
if x509.has_expired():
|
||||
msg = "SSL Certificate expired on '%s'" % x509.get_notAfter()
|
||||
raise exc.SSLCertificateError(msg)
|
||||
|
||||
if depth == 0 and preverify_ok:
|
||||
# We verify that the host matches against the last
|
||||
# certificate in the chain
|
||||
return self.host_matches_cert(self.host, x509)
|
||||
else:
|
||||
# Pass through OpenSSL's default result
|
||||
return preverify_ok
|
||||
|
||||
def setcontext(self):
|
||||
"""
|
||||
Set up the OpenSSL context.
|
||||
"""
|
||||
self.context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
|
||||
|
||||
if self.ssl_compression is False:
|
||||
self.context.set_options(0x20000) # SSL_OP_NO_COMPRESSION
|
||||
|
||||
if self.insecure is not True:
|
||||
self.context.set_verify(OpenSSL.SSL.VERIFY_PEER,
|
||||
self.verify_callback)
|
||||
else:
|
||||
self.context.set_verify(OpenSSL.SSL.VERIFY_NONE,
|
||||
lambda *args: True)
|
||||
|
||||
if self.cert_file:
|
||||
try:
|
||||
self.context.use_certificate_file(self.cert_file)
|
||||
except Exception as e:
|
||||
msg = 'Unable to load cert from "%s" %s' % (self.cert_file, e)
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
if self.key_file is None:
|
||||
# We support having key and cert in same file
|
||||
try:
|
||||
self.context.use_privatekey_file(self.cert_file)
|
||||
except Exception as e:
|
||||
msg = ('No key file specified and unable to load key '
|
||||
'from "%s" %s' % (self.cert_file, e))
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
|
||||
if self.key_file:
|
||||
try:
|
||||
self.context.use_privatekey_file(self.key_file)
|
||||
except Exception as e:
|
||||
msg = 'Unable to load key from "%s" %s' % (self.key_file, e)
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
|
||||
if self.cacert:
|
||||
try:
|
||||
self.context.load_verify_locations(to_bytes(self.cacert))
|
||||
except Exception as e:
|
||||
msg = ('Unable to load CA from "%(cacert)s" %(exc)s' %
|
||||
dict(cacert=self.cacert, exc=e))
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
else:
|
||||
self.context.set_default_verify_paths()
|
||||
|
||||
def connect(self):
|
||||
"""
|
||||
Connect to an SSL port using the OpenSSL library and apply
|
||||
per-connection parameters.
|
||||
"""
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if self.timeout is not None:
|
||||
# '0' microseconds
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO,
|
||||
struct.pack('fL', self.timeout, 0))
|
||||
self.sock = OpenSSLConnectionDelegator(self.context, sock)
|
||||
self.sock.connect((self.host, self.port))
|
||||
|
||||
def close(self):
|
||||
if self.sock:
|
||||
# Removing reference to socket but don't close it yet.
|
||||
# Response close will close both socket and associated
|
||||
# file. Closing socket too soon will cause response
|
||||
# reads to fail with socket IO error 'Bad file descriptor'.
|
||||
self.sock = None
|
||||
|
||||
# Calling close on HTTPConnection to continue doing that cleanup.
|
||||
HTTPSConnection.close(self)
|
||||
|
||||
|
||||
class ResponseBodyIterator(object):
|
||||
"""
|
||||
A class that acts as an iterator over an HTTP response.
|
||||
|
||||
This class will also check response body integrity when iterating over
|
||||
the instance and if a checksum was supplied using `set_checksum` method,
|
||||
else by default the class will not do any integrity check.
|
||||
"""
|
||||
|
||||
def __init__(self, resp):
|
||||
self._resp = resp
|
||||
self._checksum = None
|
||||
self._size = int(resp.getheader('content-length', 0))
|
||||
self._end_reached = False
|
||||
|
||||
def set_checksum(self, checksum):
|
||||
"""
|
||||
Set checksum to check against when iterating over this instance.
|
||||
|
||||
:raise: AttributeError if iterator is already consumed.
|
||||
"""
|
||||
if self._end_reached:
|
||||
raise AttributeError("Can't set checksum for an already consumed"
|
||||
" iterator")
|
||||
self._checksum = checksum
|
||||
|
||||
def __len__(self):
|
||||
return int(self._size)
|
||||
|
||||
def __iter__(self):
|
||||
md5sum = hashlib.md5()
|
||||
while True:
|
||||
try:
|
||||
chunk = self.next()
|
||||
except StopIteration:
|
||||
self._end_reached = True
|
||||
# NOTE(mouad): Check image integrity when the end of response
|
||||
# body is reached.
|
||||
md5sum = md5sum.hexdigest()
|
||||
if self._checksum is not None and md5sum != self._checksum:
|
||||
raise IOError(errno.EPIPE,
|
||||
'Corrupted image. Checksum was %s '
|
||||
'expected %s' % (md5sum, self._checksum))
|
||||
raise
|
||||
else:
|
||||
yield chunk
|
||||
if isinstance(chunk, six.string_types):
|
||||
chunk = six.b(chunk)
|
||||
md5sum.update(chunk)
|
||||
|
||||
def next(self):
|
||||
chunk = self._resp.read(CHUNKSIZE)
|
||||
if chunk:
|
||||
return chunk
|
||||
else:
|
||||
raise StopIteration()
|
||||
def delete(self, url, **kwargs):
|
||||
return self._request('DELETE', url, **kwargs)
|
||||
glanceclient/common/https.py (new file, 274 lines)
@ -0,0 +1,274 @@
|
||||
# Copyright 2014 Red Hat, Inc
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import socket
|
||||
import struct
|
||||
|
||||
import OpenSSL
|
||||
from requests import adapters
|
||||
try:
|
||||
from requests.packages.urllib3 import connectionpool
|
||||
from requests.packages.urllib3 import poolmanager
|
||||
except ImportError:
|
||||
from urllib3 import connectionpool
|
||||
from urllib3 import poolmanager
|
||||
|
||||
import six
|
||||
import ssl
|
||||
|
||||
from glanceclient.common import utils
|
||||
|
||||
try:
|
||||
from eventlet import patcher
|
||||
# Handle case where we are running in a monkey patched environment
|
||||
if patcher.is_monkey_patched('socket'):
|
||||
from eventlet.green.httplib import HTTPSConnection
|
||||
from eventlet.green.OpenSSL.SSL import GreenConnection as Connection
|
||||
from eventlet.greenio import GreenSocket
|
||||
# TODO(mclaren): A getsockopt workaround: see 'getsockopt' doc string
|
||||
GreenSocket.getsockopt = utils.getsockopt
|
||||
else:
|
||||
raise ImportError
|
||||
except ImportError:
|
||||
try:
|
||||
from httplib import HTTPSConnection
|
||||
except ImportError:
|
||||
from http.client import HTTPSConnection
|
||||
from OpenSSL.SSL import Connection as Connection
|
||||
|
||||
|
||||
from glanceclient import exc
|
||||
|
||||
|
||||
def to_bytes(s):
|
||||
if isinstance(s, six.string_types):
|
||||
return six.b(s)
|
||||
else:
|
||||
return s
|
||||
|
||||
|
||||
class HTTPSAdapter(adapters.HTTPAdapter):
|
||||
"""
|
||||
This adapter will be used just when
|
||||
ssl compression should be disabled.
|
||||
|
||||
The init method overwrites the default
|
||||
https pool by setting glanceclient's
|
||||
one.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# NOTE(flaper87): This line forces poolmanager to use
|
||||
# glanceclient HTTPSConnection
|
||||
poolmanager.pool_classes_by_scheme["https"] = HTTPSConnectionPool
|
||||
super(HTTPSAdapter, self).__init__(*args, **kwargs)
|
||||
|
||||
def cert_verify(self, conn, url, verify, cert):
|
||||
super(HTTPSAdapter, self).cert_verify(conn, url, verify, cert)
|
||||
conn.insecure = not verify
|
||||
|
||||
|
||||
class HTTPSConnectionPool(connectionpool.HTTPSConnectionPool):
|
||||
"""
|
||||
HTTPSConnectionPool will be instantiated when a new
|
||||
connection is requested to the HTTPSAdapter.This
|
||||
implementation overwrites the _new_conn method and
|
||||
returns an instances of glanceclient's VerifiedHTTPSConnection
|
||||
which handles no compression.
|
||||
|
||||
ssl_compression is hard-coded to False because this will
|
||||
be used just when the user sets --no-ssl-compression.
|
||||
"""
|
||||
|
||||
scheme = 'https'
|
||||
|
||||
def _new_conn(self):
|
||||
self.num_connections += 1
|
||||
return VerifiedHTTPSConnection(host=self.host,
|
||||
port=self.port,
|
||||
key_file=self.key_file,
|
||||
cert_file=self.cert_file,
|
||||
cacert=self.ca_certs,
|
||||
insecure=self.insecure,
|
||||
ssl_compression=False)
|
||||
|
||||
|
||||
class OpenSSLConnectionDelegator(object):
|
||||
"""
|
||||
An OpenSSL.SSL.Connection delegator.
|
||||
|
||||
Supplies an additional 'makefile' method which httplib requires
|
||||
and is not present in OpenSSL.SSL.Connection.
|
||||
|
||||
Note: Since it is not possible to inherit from OpenSSL.SSL.Connection
|
||||
a delegator must be used.
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.connection = Connection(*args, **kwargs)
|
||||
|
||||
def __getattr__(self, name):
|
||||
return getattr(self.connection, name)
|
||||
|
||||
def makefile(self, *args, **kwargs):
|
||||
return socket._fileobject(self.connection, *args, **kwargs)
|
||||
|
||||
|
||||
class VerifiedHTTPSConnection(HTTPSConnection):
|
||||
"""
|
||||
Extended HTTPSConnection which uses the OpenSSL library
|
||||
for enhanced SSL support.
|
||||
Note: Much of this functionality can eventually be replaced
|
||||
with native Python 3.3 code.
|
||||
"""
|
||||
def __init__(self, host, port=None, key_file=None, cert_file=None,
|
||||
cacert=None, timeout=None, insecure=False,
|
||||
ssl_compression=True):
|
||||
# List of exceptions reported by Python3 instead of
|
||||
# SSLConfigurationError
|
||||
if six.PY3:
|
||||
excp_lst = (TypeError, FileNotFoundError, ssl.SSLError)
|
||||
else:
|
||||
excp_lst = ()
|
||||
try:
|
||||
HTTPSConnection.__init__(self, host, port,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file)
|
||||
self.key_file = key_file
|
||||
self.cert_file = cert_file
|
||||
self.timeout = timeout
|
||||
self.insecure = insecure
|
||||
self.ssl_compression = ssl_compression
|
||||
self.cacert = None if cacert is None else str(cacert)
|
||||
self.set_context()
|
||||
# ssl exceptions are reported in various form in Python 3
|
||||
# so to be compatible, we report the same kind as under
|
||||
# Python2
|
||||
except excp_lst as e:
|
||||
raise exc.SSLConfigurationError(str(e))
|
||||
|
||||
@staticmethod
|
||||
def host_matches_cert(host, x509):
|
||||
"""
|
||||
Verify that the x509 certificate we have received
|
||||
from 'host' correctly identifies the server we are
|
||||
connecting to, ie that the certificate's Common Name
|
||||
or a Subject Alternative Name matches 'host'.
|
||||
"""
|
||||
def check_match(name):
|
||||
# Directly match the name
|
||||
if name == host:
|
||||
return True
|
||||
|
||||
# Support single wildcard matching
|
||||
if name.startswith('*.') and host.find('.') > 0:
|
||||
if name[2:] == host.split('.', 1)[1]:
|
||||
return True
|
||||
|
||||
common_name = x509.get_subject().commonName
|
||||
|
||||
# First see if we can match the CN
|
||||
if check_match(common_name):
|
||||
return True
|
||||
# Also try Subject Alternative Names for a match
|
||||
san_list = None
|
||||
for i in range(x509.get_extension_count()):
|
||||
ext = x509.get_extension(i)
|
||||
if ext.get_short_name() == b'subjectAltName':
|
||||
san_list = str(ext)
|
||||
for san in ''.join(san_list.split()).split(','):
|
||||
if san.startswith('DNS:'):
|
||||
if check_match(san.split(':', 1)[1]):
|
||||
return True
|
||||
|
||||
# Server certificate does not match host
|
||||
msg = ('Host "%s" does not match x509 certificate contents: '
|
||||
'CommonName "%s"' % (host, common_name))
|
||||
if san_list is not None:
|
||||
msg = msg + ', subjectAltName "%s"' % san_list
|
||||
raise exc.SSLCertificateError(msg)
|
||||
|
||||
def verify_callback(self, connection, x509, errnum,
|
||||
depth, preverify_ok):
|
||||
if x509.has_expired():
|
||||
msg = "SSL Certificate expired on '%s'" % x509.get_notAfter()
|
||||
raise exc.SSLCertificateError(msg)
|
||||
|
||||
if depth == 0 and preverify_ok:
|
||||
# We verify that the host matches against the last
|
||||
# certificate in the chain
|
||||
return self.host_matches_cert(self.host, x509)
|
||||
else:
|
||||
# Pass through OpenSSL's default result
|
||||
return preverify_ok
|
||||
|
||||
def set_context(self):
|
||||
"""
|
||||
Set up the OpenSSL context.
|
||||
"""
|
||||
self.context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
|
||||
|
||||
if self.ssl_compression is False:
|
||||
self.context.set_options(0x20000) # SSL_OP_NO_COMPRESSION
|
||||
|
||||
if self.insecure is not True:
|
||||
self.context.set_verify(OpenSSL.SSL.VERIFY_PEER,
|
||||
self.verify_callback)
|
||||
else:
|
||||
self.context.set_verify(OpenSSL.SSL.VERIFY_NONE,
|
||||
lambda *args: True)
|
||||
|
||||
if self.cert_file:
|
||||
try:
|
||||
self.context.use_certificate_file(self.cert_file)
|
||||
except Exception as e:
|
||||
msg = 'Unable to load cert from "%s" %s' % (self.cert_file, e)
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
if self.key_file is None:
|
||||
# We support having key and cert in same file
|
||||
try:
|
||||
self.context.use_privatekey_file(self.cert_file)
|
||||
except Exception as e:
|
||||
msg = ('No key file specified and unable to load key '
|
||||
'from "%s" %s' % (self.cert_file, e))
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
|
||||
if self.key_file:
|
||||
try:
|
||||
self.context.use_privatekey_file(self.key_file)
|
||||
except Exception as e:
|
||||
msg = 'Unable to load key from "%s" %s' % (self.key_file, e)
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
|
||||
if self.cacert:
|
||||
try:
|
||||
self.context.load_verify_locations(to_bytes(self.cacert))
|
||||
except Exception as e:
|
||||
msg = 'Unable to load CA from "%s" %s' % (self.cacert, e)
|
||||
raise exc.SSLConfigurationError(msg)
|
||||
else:
|
||||
self.context.set_default_verify_paths()
|
||||
|
||||
def connect(self):
|
||||
"""
|
||||
Connect to an SSL port using the OpenSSL library and apply
|
||||
per-connection parameters.
|
||||
"""
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
if self.timeout is not None:
|
||||
# '0' microseconds
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO,
|
||||
struct.pack('LL', self.timeout, 0))
|
||||
self.sock = OpenSSLConnectionDelegator(self.context, sock)
|
||||
self.sock.connect((self.host, self.port))
|
@ -16,6 +16,7 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import errno
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@ -335,3 +336,22 @@ def print_image(image_obj, max_col_width=None):
|
||||
print_dict(image, max_column_width=max_col_width)
|
||||
else:
|
||||
print_dict(image)
|
||||
|
||||
|
||||
def integrity_iter(iter, checksum):
|
||||
"""
|
||||
Check image data integrity.
|
||||
|
||||
:raises: IOError
|
||||
"""
|
||||
md5sum = hashlib.md5()
|
||||
for chunk in iter:
|
||||
yield chunk
|
||||
if isinstance(chunk, six.string_types):
|
||||
chunk = six.b(chunk)
|
||||
md5sum.update(chunk)
|
||||
md5sum = md5sum.hexdigest()
|
||||
if md5sum != checksum:
|
||||
raise IOError(errno.EPIPE,
|
||||
'Corrupt image download. Checksum was %s expected %s' %
|
||||
(md5sum, checksum))
|
||||
|
@ -152,7 +152,7 @@ for obj_name in dir(sys.modules[__name__]):

def from_response(response, body=None):
    """Return an instance of an HTTPException based on httplib response."""
    cls = _code_map.get(response.status, HTTPException)
    cls = _code_map.get(response.status_code, HTTPException)
    if body:
        details = body.replace('\n\n', '\n')
        return cls(details=details)
@ -13,10 +13,10 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from glanceclient.common import http
|
||||
from glanceclient.common.http import HTTPClient
|
||||
from glanceclient.common import utils
|
||||
from glanceclient.v1 import image_members
|
||||
from glanceclient.v1 import images
|
||||
from glanceclient.v1.image_members import ImageMemberManager
|
||||
from glanceclient.v1.images import ImageManager
|
||||
|
||||
|
||||
class Client(object):
|
||||
@ -31,7 +31,7 @@ class Client(object):
|
||||
|
||||
def __init__(self, endpoint, *args, **kwargs):
|
||||
"""Initialize a new client for the Images v1 API."""
|
||||
self.http_client = http.HTTPClient(utils.strip_version(endpoint),
|
||||
*args, **kwargs)
|
||||
self.images = images.ImageManager(self.http_client)
|
||||
self.image_members = image_members.ImageMemberManager(self.http_client)
|
||||
self.http_client = HTTPClient(utils.strip_version(endpoint),
|
||||
*args, **kwargs)
|
||||
self.images = ImageManager(self.http_client)
|
||||
self.image_members = ImageMemberManager(self.http_client)
|
||||
|
@ -34,7 +34,7 @@ class ImageMemberManager(base.ManagerWithFind):
|
||||
def get(self, image, member_id):
|
||||
image_id = base.getid(image)
|
||||
url = '/v1/images/%s/members/%s' % (image_id, member_id)
|
||||
resp, body = self.client.json_request('GET', url)
|
||||
resp, body = self.client.get(url)
|
||||
member = body['member']
|
||||
member['image_id'] = image_id
|
||||
return ImageMember(self, member, loaded=True)
|
||||
@ -60,7 +60,7 @@ class ImageMemberManager(base.ManagerWithFind):
|
||||
def _list_by_image(self, image):
|
||||
image_id = base.getid(image)
|
||||
url = '/v1/images/%s/members' % image_id
|
||||
resp, body = self.client.json_request('GET', url)
|
||||
resp, body = self.client.get(url)
|
||||
out = []
|
||||
for member in body['members']:
|
||||
member['image_id'] = image_id
|
||||
@ -70,7 +70,7 @@ class ImageMemberManager(base.ManagerWithFind):
|
||||
def _list_by_member(self, member):
|
||||
member_id = base.getid(member)
|
||||
url = '/v1/shared-images/%s' % member_id
|
||||
resp, body = self.client.json_request('GET', url)
|
||||
resp, body = self.client.get(url)
|
||||
out = []
|
||||
for member in body['shared_images']:
|
||||
member['member_id'] = member_id
|
||||
@ -84,7 +84,7 @@ class ImageMemberManager(base.ManagerWithFind):
|
||||
"""Creates an image."""
|
||||
url = '/v1/images/%s/members/%s' % (base.getid(image), member_id)
|
||||
body = {'member': {'can_share': can_share}}
|
||||
self._put(url, json=body)
|
||||
self.client.put(url, data=body)
|
||||
|
||||
def replace(self, image, members):
|
||||
memberships = []
|
||||
@ -100,4 +100,4 @@ class ImageMemberManager(base.ManagerWithFind):
|
||||
obj['can_share'] = member['can_share']
|
||||
memberships.append(obj)
|
||||
url = '/v1/images/%s/members' % base.getid(image)
|
||||
self.client.json_request('PUT', url, {}, {'memberships': memberships})
|
||||
self.client.put(url, data={'memberships': memberships})
|
||||
|
@ -14,10 +14,9 @@
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from glanceclient.common import utils
|
||||
from glanceclient.openstack.common.apiclient import base
|
||||
@ -60,12 +59,12 @@ class ImageManager(base.ManagerWithFind):
|
||||
resource_class = Image
|
||||
|
||||
def _list(self, url, response_key, obj_class=None, body=None):
|
||||
resp = self.client.get(url)
|
||||
resp, body = self.client.get(url)
|
||||
|
||||
if obj_class is None:
|
||||
obj_class = self.resource_class
|
||||
|
||||
data = resp.json()[response_key]
|
||||
data = body[response_key]
|
||||
return ([obj_class(self, res, loaded=True) for res in data if res],
|
||||
resp)
|
||||
|
||||
@ -123,13 +122,12 @@ class ImageManager(base.ManagerWithFind):
|
||||
:rtype: :class:`Image`
|
||||
"""
|
||||
image_id = base.getid(image)
|
||||
resp, body = self.client.raw_request(
|
||||
'HEAD', '/v1/images/%s' % parse.quote(str(image_id)))
|
||||
meta = self._image_meta_from_headers(dict(resp.getheaders()))
|
||||
resp, body = self.client.head('/v1/images/%s'
|
||||
% urlparse.quote(str(image_id)))
|
||||
meta = self._image_meta_from_headers(resp.headers)
|
||||
return_request_id = kwargs.get('return_req_id', None)
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
return Image(self, meta)
|
||||
|
||||
def data(self, image, do_checksum=True, **kwargs):
|
||||
@ -140,14 +138,14 @@ class ImageManager(base.ManagerWithFind):
|
||||
:rtype: iterable containing image data
|
||||
"""
|
||||
image_id = base.getid(image)
|
||||
resp, body = self.client.raw_request(
|
||||
'GET', '/v1/images/%s' % parse.quote(str(image_id)))
|
||||
checksum = resp.getheader('x-image-meta-checksum', None)
|
||||
resp, body = self.client.get('/v1/images/%s'
|
||||
% urlparse.quote(str(image_id)))
|
||||
checksum = resp.headers.get('x-image-meta-checksum', None)
|
||||
if do_checksum and checksum is not None:
|
||||
body.set_checksum(checksum)
|
||||
return utils.integrity_iter(body, checksum)
|
||||
return_request_id = kwargs.get('return_req_id', None)
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
|
||||
return body
|
||||
|
||||
@ -194,11 +192,11 @@ class ImageManager(base.ManagerWithFind):
|
||||
# trying to encode them
|
||||
qp[param] = strutils.safe_encode(value)
|
||||
|
||||
url = '/v1/images/detail?%s' % parse.urlencode(qp)
|
||||
url = '/v1/images/detail?%s' % urlparse.urlencode(qp)
|
||||
images, resp = self._list(url, "images")
|
||||
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
|
||||
for image in images:
|
||||
if filter_owner(owner, image):
|
||||
@ -253,10 +251,11 @@ class ImageManager(base.ManagerWithFind):
|
||||
|
||||
def delete(self, image, **kwargs):
|
||||
"""Delete an image."""
|
||||
resp = self._delete("/v1/images/%s" % base.getid(image))[0]
|
||||
url = "/v1/images/%s" % base.getid(image)
|
||||
resp, body = self.client.delete(url)
|
||||
return_request_id = kwargs.get('return_req_id', None)
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
|
||||
def create(self, **kwargs):
|
||||
"""Create an image
|
||||
@ -284,12 +283,12 @@ class ImageManager(base.ManagerWithFind):
|
||||
if copy_from is not None:
|
||||
hdrs['x-glance-api-copy-from'] = copy_from
|
||||
|
||||
resp, body_iter = self.client.raw_request(
|
||||
'POST', '/v1/images', headers=hdrs, body=image_data)
|
||||
body = json.loads(''.join([c for c in body_iter]))
|
||||
resp, body = self.client.post('/v1/images',
|
||||
headers=hdrs,
|
||||
data=image_data)
|
||||
return_request_id = kwargs.get('return_req_id', None)
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
|
||||
return Image(self, self._format_image_meta_for_user(body['image']))
|
||||
|
||||
@ -327,11 +326,9 @@ class ImageManager(base.ManagerWithFind):
|
||||
hdrs['x-glance-api-copy-from'] = copy_from
|
||||
|
||||
url = '/v1/images/%s' % base.getid(image)
|
||||
resp, body_iter = self.client.raw_request(
|
||||
'PUT', url, headers=hdrs, body=image_data)
|
||||
body = json.loads(''.join([c for c in body_iter]))
|
||||
resp, body = self.client.put(url, headers=hdrs, data=image_data)
|
||||
return_request_id = kwargs.get('return_req_id', None)
|
||||
if return_request_id is not None:
|
||||
return_request_id.append(resp.getheader(OS_REQ_ID_HDR, None))
|
||||
return_request_id.append(resp.headers.get(OS_REQ_ID_HDR, None))
|
||||
|
||||
return Image(self, self._format_image_meta_for_user(body['image']))
|
||||
|
@ -21,25 +21,22 @@ class Controller(object):
|
||||
|
||||
def list(self, image_id):
|
||||
url = '/v2/images/%s/members' % image_id
|
||||
resp, body = self.http_client.json_request('GET', url)
|
||||
resp, body = self.http_client.get(url)
|
||||
for member in body['members']:
|
||||
yield self.model(member)
|
||||
|
||||
def delete(self, image_id, member_id):
|
||||
self.http_client.json_request('DELETE',
|
||||
'/v2/images/%s/members/%s' %
|
||||
(image_id, member_id))
|
||||
self.http_client.delete('/v2/images/%s/members/%s' %
|
||||
(image_id, member_id))
|
||||
|
||||
def update(self, image_id, member_id, member_status):
|
||||
url = '/v2/images/%s/members/%s' % (image_id, member_id)
|
||||
body = {'status': member_status}
|
||||
resp, updated_member = self.http_client.json_request('PUT', url,
|
||||
body=body)
|
||||
resp, updated_member = self.http_client.put(url, data=body)
|
||||
return self.model(updated_member)
|
||||
|
||||
def create(self, image_id, member_id):
|
||||
url = '/v2/images/%s/members' % image_id
|
||||
body = {'member': member_id}
|
||||
resp, created_member = self.http_client.json_request('POST', url,
|
||||
body=body)
|
||||
resp, created_member = self.http_client.post(url, data=body)
|
||||
return self.model(created_member)
|
||||
|
@ -27,7 +27,7 @@ class Controller(object):
|
||||
:param tag_value: value of the tag.
|
||||
"""
|
||||
url = '/v2/images/%s/tags/%s' % (image_id, tag_value)
|
||||
self.http_client.json_request('PUT', url)
|
||||
self.http_client.put(url)
|
||||
|
||||
def delete(self, image_id, tag_value):
|
||||
"""
|
||||
@ -37,4 +37,4 @@ class Controller(object):
|
||||
:param tag_value: tag value to be deleted.
|
||||
"""
|
||||
url = '/v2/images/%s/tags/%s' % (image_id, tag_value)
|
||||
self.http_client.json_request('DELETE', url)
|
||||
self.http_client.delete(url)
|
||||
|
@ -16,7 +16,6 @@
|
||||
import json
|
||||
import six
|
||||
from six.moves.urllib import parse
|
||||
|
||||
import warlock
|
||||
|
||||
from glanceclient.common import utils
|
||||
@ -42,7 +41,7 @@ class Controller(object):
|
||||
empty_fun = lambda *args, **kwargs: None
|
||||
|
||||
def paginate(url):
|
||||
resp, body = self.http_client.json_request('GET', url)
|
||||
resp, body = self.http_client.get(url)
|
||||
for image in body['images']:
|
||||
# NOTE(bcwaldon): remove 'self' for now until we have
|
||||
# an elegant way to pass it into the model constructor
|
||||
@ -94,7 +93,7 @@ class Controller(object):
|
||||
|
||||
def get(self, image_id):
|
||||
url = '/v2/images/%s' % image_id
|
||||
resp, body = self.http_client.json_request('GET', url)
|
||||
resp, body = self.http_client.get(url)
|
||||
#NOTE(bcwaldon): remove 'self' for now until we have an elegant
|
||||
# way to pass it into the model constructor without conflict
|
||||
body.pop('self', None)
|
||||
@ -108,11 +107,12 @@ class Controller(object):
|
||||
:param do_checksum: Enable/disable checksum validation.
|
||||
"""
|
||||
url = '/v2/images/%s/file' % image_id
|
||||
resp, body = self.http_client.raw_request('GET', url)
|
||||
checksum = resp.getheader('content-md5', None)
|
||||
resp, body = self.http_client.get(url)
|
||||
checksum = resp.headers.get('content-md5', None)
|
||||
if do_checksum and checksum is not None:
|
||||
body.set_checksum(checksum)
|
||||
return body
|
||||
return utils.integrity_iter(body, checksum)
|
||||
else:
|
||||
return body
|
||||
|
||||
def upload(self, image_id, image_data, image_size=None):
|
||||
"""
|
||||
@ -124,14 +124,17 @@ class Controller(object):
|
||||
"""
|
||||
url = '/v2/images/%s/file' % image_id
|
||||
hdrs = {'Content-Type': 'application/octet-stream'}
|
||||
self.http_client.raw_request('PUT', url,
|
||||
headers=hdrs,
|
||||
body=image_data,
|
||||
content_length=image_size)
|
||||
if image_size:
|
||||
body = {'image_data': image_data,
|
||||
'image_size': image_size}
|
||||
else:
|
||||
body = image_data
|
||||
self.http_client.put(url, headers=hdrs, data=body)
|
||||
|
||||
def delete(self, image_id):
|
||||
"""Delete an image."""
|
||||
self.http_client.json_request('DELETE', '/v2/images/%s' % image_id)
|
||||
url = '/v2/images/%s' % image_id
|
||||
self.http_client.delete(url)
|
||||
|
||||
def create(self, **kwargs):
|
||||
"""Create an image."""
|
||||
@ -144,7 +147,7 @@ class Controller(object):
|
||||
except warlock.InvalidOperation as e:
|
||||
raise TypeError(utils.exception_to_str(e))
|
||||
|
||||
resp, body = self.http_client.json_request('POST', url, body=image)
|
||||
resp, body = self.http_client.post(url, data=image)
|
||||
#NOTE(esheffield): remove 'self' for now until we have an elegant
|
||||
# way to pass it into the model constructor without conflict
|
||||
body.pop('self', None)
|
||||
@ -178,9 +181,7 @@ class Controller(object):
|
||||
|
||||
url = '/v2/images/%s' % image_id
|
||||
hdrs = {'Content-Type': 'application/openstack-images-v2.1-json-patch'}
|
||||
self.http_client.raw_request('PATCH', url,
|
||||
headers=hdrs,
|
||||
body=image.patch)
|
||||
self.http_client.patch(url, headers=hdrs, data=image.patch)
|
||||
|
||||
#NOTE(bcwaldon): calling image.patch doesn't clear the changes, so
|
||||
# we need to fetch the image again to get a clean history. This is
|
||||
@ -197,9 +198,7 @@ class Controller(object):
|
||||
def _send_image_update_request(self, image_id, patch_body):
|
||||
url = '/v2/images/%s' % image_id
|
||||
hdrs = {'Content-Type': 'application/openstack-images-v2.1-json-patch'}
|
||||
self.http_client.raw_request('PATCH', url,
|
||||
headers=hdrs,
|
||||
body=json.dumps(patch_body))
|
||||
self.http_client.patch(url, headers=hdrs, data=json.dumps(patch_body))
|
||||
|
||||
def add_location(self, image_id, url, metadata):
|
||||
"""Add a new location entry to an image's list of locations.
|
||||
|
@ -81,5 +81,5 @@ class Controller(object):
|
||||
|
||||
def get(self, schema_name):
|
||||
uri = '/v2/schemas/%s' % schema_name
|
||||
_, raw_schema = self.http_client.json_request('GET', uri)
|
||||
_, raw_schema = self.http_client.get(uri)
|
||||
return Schema(raw_schema)
|
||||
|
@ -4,5 +4,6 @@ argparse
PrettyTable>=0.7,<0.8
python-keystoneclient>=0.9.0
pyOpenSSL>=0.11
requests>=1.1
warlock>=1.0.1,<2
six>=1.7.0
@ -12,18 +12,16 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from glanceclient import exc
|
||||
|
||||
|
||||
FakeResponse = collections.namedtuple('HTTPResponse', ['status'])
|
||||
|
||||
|
||||
class TestHTTPExceptions(testtools.TestCase):
|
||||
def test_from_response(self):
|
||||
"""exc.from_response should return instance of an HTTP exception."""
|
||||
out = exc.from_response(FakeResponse(400))
|
||||
mock_resp = mock.Mock()
|
||||
mock_resp.status_code = 400
|
||||
out = exc.from_response(mock_resp)
|
||||
self.assertIsInstance(out, exc.HTTPBadRequest)
|
||||
|
@ -12,21 +12,18 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import json
|
||||
|
||||
import errno
|
||||
import socket
|
||||
|
||||
import mock
|
||||
from mox3 import mox
|
||||
import requests
|
||||
import six
|
||||
from six.moves import http_client
|
||||
from six.moves.urllib import parse
|
||||
import tempfile
|
||||
import testtools
|
||||
import types
|
||||
|
||||
import glanceclient
|
||||
from glanceclient.common import http
|
||||
from glanceclient.common import utils as client_utils
|
||||
from glanceclient.common import https
|
||||
from glanceclient import exc
|
||||
from tests import utils
|
||||
|
||||
@ -36,8 +33,7 @@ class TestClient(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestClient, self).setUp()
|
||||
self.mock = mox.Mox()
|
||||
self.mock.StubOutWithMock(http_client.HTTPConnection, 'request')
|
||||
self.mock.StubOutWithMock(http_client.HTTPConnection, 'getresponse')
|
||||
self.mock.StubOutWithMock(requests.Session, 'request')
|
||||
|
||||
self.endpoint = 'http://example.com:9292'
|
||||
self.client = http.HTTPClient(self.endpoint, token=u'abc123')
|
||||
@ -85,14 +81,16 @@ class TestClient(testtools.TestCase):
|
||||
And the error should list the host and port that refused the
|
||||
connection
|
||||
"""
|
||||
http_client.HTTPConnection.request(
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
data=mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg(),
|
||||
).AndRaise(socket.error())
|
||||
stream=mox.IgnoreArg(),
|
||||
).AndRaise(requests.exceptions.ConnectionError())
|
||||
self.mock.ReplayAll()
|
||||
try:
|
||||
self.client.json_request('GET', '/v1/images/detail?limit=20')
|
||||
self.client.get('/v1/images/detail?limit=20')
|
||||
#NOTE(alaski) We expect exc.CommunicationError to be raised
|
||||
# so we should never reach this point. try/except is used here
|
||||
# rather than assertRaises() so that we can check the body of
|
||||
@ -103,47 +101,23 @@ class TestClient(testtools.TestCase):
|
||||
(comm_err.message, self.endpoint))
|
||||
self.assertTrue(self.endpoint in comm_err.message, fail_msg)
|
||||
|
||||
def test_request_redirected(self):
|
||||
resp = utils.FakeResponse({'location': 'http://www.example.com'},
|
||||
status=302, body=six.BytesIO())
|
||||
http_client.HTTPConnection.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg(),
|
||||
)
|
||||
http_client.HTTPConnection.getresponse().AndReturn(resp)
|
||||
|
||||
# The second request should be to the redirected location
|
||||
expected_response = b'Ok'
|
||||
resp2 = utils.FakeResponse({}, six.BytesIO(expected_response))
|
||||
http_client.HTTPConnection.request(
|
||||
'GET',
|
||||
'http://www.example.com',
|
||||
headers=mox.IgnoreArg(),
|
||||
)
|
||||
http_client.HTTPConnection.getresponse().AndReturn(resp2)
|
||||
|
||||
self.mock.ReplayAll()
|
||||
|
||||
self.client.json_request('GET', '/v1/images/detail')
|
||||
|
||||
def test_http_encoding(self):
|
||||
http_client.HTTPConnection.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg())
|
||||
|
||||
# Lets fake the response
|
||||
# returned by httplib
|
||||
expected_response = b'Ok'
|
||||
fake = utils.FakeResponse({}, six.BytesIO(expected_response))
|
||||
http_client.HTTPConnection.getresponse().AndReturn(fake)
|
||||
# returned by requests
|
||||
response = 'Ok'
|
||||
headers = {"Content-Type": "text/plain"}
|
||||
fake = utils.FakeResponse(headers, six.StringIO(response))
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
data=mox.IgnoreArg(),
|
||||
stream=mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()).AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
|
||||
headers = {"test": u'ni\xf1o'}
|
||||
resp, body = self.client.raw_request('GET', '/v1/images/detail',
|
||||
headers=headers)
|
||||
self.assertEqual(fake, resp)
|
||||
resp, body = self.client.get('/v1/images/detail', headers=headers)
|
||||
self.assertEqual(resp, fake)
|
||||
|
||||
def test_headers_encoding(self):
|
||||
value = u'ni\xf1o'
|
||||
@ -156,153 +130,19 @@ class TestClient(testtools.TestCase):
|
||||
|
||||
def test_raw_request(self):
|
||||
" Verify the path being used for HTTP requests reflects accurately. "
|
||||
|
||||
def check_request(method, path, **kwargs):
|
||||
self.assertEqual('GET', method)
|
||||
# NOTE(kmcdonald): See bug #1179984 for more details.
|
||||
self.assertEqual('/v1/images/detail', path)
|
||||
|
||||
http_client.HTTPConnection.request(
|
||||
headers = {"Content-Type": "text/plain"}
|
||||
response = 'Ok'
|
||||
fake = utils.FakeResponse({}, six.StringIO(response))
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()).WithSideEffects(check_request)
|
||||
|
||||
# fake the response returned by httplib
|
||||
fake = utils.FakeResponse({}, six.BytesIO(b'Ok'))
|
||||
http_client.HTTPConnection.getresponse().AndReturn(fake)
|
||||
data=mox.IgnoreArg(),
|
||||
stream=mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()).AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
|
||||
resp, body = self.client.raw_request('GET', '/v1/images/detail')
|
||||
self.assertEqual(fake, resp)
|
||||
|
||||
def test_customized_path_raw_request(self):
|
||||
"""
|
||||
Verify the customized path being used for HTTP requests
|
||||
reflects accurately
|
||||
"""
|
||||
|
||||
def check_request(method, path, **kwargs):
|
||||
self.assertEqual('GET', method)
|
||||
self.assertEqual('/customized-path/v1/images/detail', path)
|
||||
|
||||
# NOTE(yuyangbj): see bug 1230032 to get more info
|
||||
endpoint = 'http://example.com:9292/customized-path'
|
||||
client = http.HTTPClient(endpoint, token=u'abc123')
|
||||
self.assertEqual('/customized-path', client.endpoint_path)
|
||||
|
||||
http_client.HTTPConnection.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()).WithSideEffects(check_request)
|
||||
|
||||
# fake the response returned by httplib
|
||||
fake = utils.FakeResponse({}, six.BytesIO(b'Ok'))
|
||||
http_client.HTTPConnection.getresponse().AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
|
||||
resp, body = client.raw_request('GET', '/v1/images/detail')
|
||||
self.assertEqual(fake, resp)
|
||||
|
||||
def test_raw_request_no_content_length(self):
|
||||
with tempfile.NamedTemporaryFile() as test_file:
|
||||
test_file.write(b'abcd')
|
||||
test_file.seek(0)
|
||||
data_length = 4
|
||||
self.assertEqual(data_length,
|
||||
client_utils.get_file_size(test_file))
|
||||
|
||||
exp_resp = {'body': test_file}
|
||||
exp_resp['headers'] = {'Content-Length': str(data_length),
|
||||
'Content-Type': 'application/octet-stream'}
|
||||
|
||||
def mock_request(url, method, **kwargs):
|
||||
return kwargs
|
||||
|
||||
rq_kwargs = {'body': test_file, 'content_length': None}
|
||||
|
||||
with mock.patch.object(self.client, '_http_request') as mock_rq:
|
||||
mock_rq.side_effect = mock_request
|
||||
resp = self.client.raw_request('PUT', '/v1/images/detail',
|
||||
**rq_kwargs)
|
||||
|
||||
rq_kwargs.pop('content_length')
|
||||
headers = {'Content-Length': str(data_length),
|
||||
'Content-Type': 'application/octet-stream'}
|
||||
rq_kwargs['headers'] = headers
|
||||
|
||||
mock_rq.assert_called_once_with('/v1/images/detail', 'PUT',
|
||||
**rq_kwargs)
|
||||
|
||||
self.assertEqual(exp_resp, resp)
|
||||
|
||||
def test_raw_request_w_content_length(self):
|
||||
with tempfile.NamedTemporaryFile() as test_file:
|
||||
test_file.write(b'abcd')
|
||||
test_file.seek(0)
|
||||
data_length = 4
|
||||
self.assertEqual(data_length,
|
||||
client_utils.get_file_size(test_file))
|
||||
|
||||
exp_resp = {'body': test_file}
|
||||
# NOTE: we expect the actual file size to be overridden by the
|
||||
# supplied content length.
|
||||
exp_resp['headers'] = {'Content-Length': '4',
|
||||
'Content-Type': 'application/octet-stream'}
|
||||
|
||||
def mock_request(url, method, **kwargs):
|
||||
return kwargs
|
||||
|
||||
rq_kwargs = {'body': test_file, 'content_length': data_length}
|
||||
|
||||
with mock.patch.object(self.client, '_http_request') as mock_rq:
|
||||
mock_rq.side_effect = mock_request
|
||||
resp = self.client.raw_request('PUT', '/v1/images/detail',
|
||||
**rq_kwargs)
|
||||
|
||||
rq_kwargs.pop('content_length')
|
||||
headers = {'Content-Length': str(data_length),
|
||||
'Content-Type': 'application/octet-stream'}
|
||||
rq_kwargs['headers'] = headers
|
||||
|
||||
mock_rq.assert_called_once_with('/v1/images/detail', 'PUT',
|
||||
**rq_kwargs)
|
||||
|
||||
self.assertEqual(exp_resp, resp)
|
||||
|
||||
def test_raw_request_w_bad_content_length(self):
|
||||
with tempfile.NamedTemporaryFile() as test_file:
|
||||
test_file.write(b'abcd')
|
||||
test_file.seek(0)
|
||||
self.assertEqual(4, client_utils.get_file_size(test_file))
|
||||
|
||||
def mock_request(url, method, **kwargs):
|
||||
return kwargs
|
||||
|
||||
with mock.patch.object(self.client, '_http_request', mock_request):
|
||||
self.assertRaises(AttributeError, self.client.raw_request,
|
||||
'PUT', '/v1/images/detail', body=test_file,
|
||||
content_length=32)
|
||||
|
||||
def test_connection_refused_raw_request(self):
|
||||
"""
|
||||
Should receive a CommunicationError if connection refused.
|
||||
And the error should list the host and port that refused the
|
||||
connection
|
||||
"""
|
||||
endpoint = 'http://example.com:9292'
|
||||
client = http.HTTPClient(endpoint, token=u'abc123')
|
||||
http_client.HTTPConnection.request(mox.IgnoreArg(), mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()
|
||||
).AndRaise(socket.error())
|
||||
self.mock.ReplayAll()
|
||||
try:
|
||||
client.raw_request('GET', '/v1/images/detail?limit=20')
|
||||
|
||||
self.fail('An exception should have bypassed this line.')
|
||||
except exc.CommunicationError as comm_err:
|
||||
fail_msg = ("Exception message '%s' should contain '%s'" %
|
||||
(comm_err.message, endpoint))
|
||||
self.assertTrue(endpoint in comm_err.message, fail_msg)
|
||||
resp, body = self.client.get('/v1/images/detail', headers=headers)
|
||||
self.assertEqual(resp, fake)
|
||||
|
||||
def test_parse_endpoint(self):
|
||||
endpoint = 'http://example.com:9292'
|
||||
@ -313,81 +153,84 @@ class TestClient(testtools.TestCase):
|
||||
query='', fragment='')
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_get_connection_class(self):
|
||||
endpoint = 'http://example.com:9292'
|
||||
test_client = http.HTTPClient(endpoint, token=u'adc123')
|
||||
actual = (test_client.get_connection_class('https'))
|
||||
self.assertEqual(http.VerifiedHTTPSConnection, actual)
|
||||
|
||||
def test_get_connections_kwargs_http(self):
|
||||
endpoint = 'http://example.com:9292'
|
||||
test_client = http.HTTPClient(endpoint, token=u'adc123')
|
||||
actual = test_client.get_connection_kwargs('http', insecure=True)
|
||||
self.assertEqual({'timeout': 600.0}, actual)
|
||||
self.assertEqual(test_client.timeout, 600.0)
|
||||
|
||||
def test_get_connections_kwargs_https(self):
|
||||
endpoint = 'http://example.com:9292'
|
||||
test_client = http.HTTPClient(endpoint, token=u'adc123')
|
||||
actual = test_client.get_connection_kwargs('https', insecure=True)
|
||||
expected = {'cacert': None,
|
||||
'cert_file': None,
|
||||
'insecure': True,
|
||||
'key_file': None,
|
||||
'ssl_compression': True,
|
||||
'timeout': 600.0}
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_log_curl_request_with_non_ascii_char(self):
|
||||
try:
|
||||
headers = {'header1': 'value1\xa5\xa6'}
|
||||
http_client_object = http.HTTPClient(self.endpoint)
|
||||
http_client_object.log_curl_request('GET',
|
||||
'http://www.example.com/\xa5',
|
||||
{'headers': headers})
|
||||
except UnicodeDecodeError as e:
|
||||
self.fail("Unexpected UnicodeDecodeError exception '%s'" % e)
|
||||
|
||||
|
||||
class TestHostResolutionError(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestHostResolutionError, self).setUp()
|
||||
self.mock = mox.Mox()
|
||||
self.invalid_host = "example.com.incorrect_top_level_domain"
|
||||
|
||||
def test_incorrect_domain_error(self):
|
||||
"""
|
||||
Make sure that using a domain which does not resolve causes an
|
||||
exception which mentions that specific hostname as a reason for
|
||||
failure.
|
||||
"""
|
||||
class FailingConnectionClass(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def putrequest(self, *args, **kwargs):
|
||||
raise socket.gaierror(-2, "Name or service not known")
|
||||
|
||||
def request(self, *args, **kwargs):
|
||||
raise socket.gaierror(-2, "Name or service not known")
|
||||
|
||||
self.endpoint = 'http://%s:9292' % (self.invalid_host,)
|
||||
self.client = http.HTTPClient(self.endpoint, token=u'abc123')
|
||||
|
||||
self.mock.StubOutWithMock(self.client, 'get_connection')
|
||||
self.client.get_connection().AndReturn(FailingConnectionClass())
|
||||
def test_http_chunked_request(self):
|
||||
# Lets fake the response
|
||||
# returned by requests
|
||||
response = "Ok"
|
||||
data = six.StringIO(response)
|
||||
fake = utils.FakeResponse({}, data)
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
stream=mox.IgnoreArg(),
|
||||
data=mox.IsA(types.GeneratorType),
|
||||
headers=mox.IgnoreArg()).AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
|
||||
try:
|
||||
self.client.raw_request('GET', '/example/path')
|
||||
self.fail("gaierror should be raised")
|
||||
except exc.InvalidEndpoint as e:
|
||||
self.assertTrue(self.invalid_host in str(e),
|
||||
"exception should contain the hostname")
|
||||
headers = {"test": u'chunked_request'}
|
||||
resp, body = self.client.post('/v1/images/',
|
||||
headers=headers, data=data)
|
||||
self.assertEqual(resp, fake)
|
||||
|
||||
def tearDown(self):
|
||||
super(TestHostResolutionError, self).tearDown()
|
||||
self.mock.UnsetStubs()
|
||||
def test_http_json(self):
|
||||
data = {"test": "json_request"}
|
||||
fake = utils.FakeResponse({}, "OK")
|
||||
|
||||
def test_json(passed_data):
|
||||
"""
|
||||
This function tests whether the data
|
||||
being passed to request's method is
|
||||
a valid json or not.
|
||||
|
||||
This function will be called by pymox
|
||||
|
||||
:params passed_data: The data being
|
||||
passed to requests.Session.request.
|
||||
"""
|
||||
if not isinstance(passed_data, six.string_types):
|
||||
return False
|
||||
|
||||
try:
|
||||
passed_data = json.loads(passed_data)
|
||||
return data == passed_data
|
||||
except (TypeError, ValueError):
|
||||
return False
|
||||
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
stream=mox.IgnoreArg(),
|
||||
data=mox.Func(test_json),
|
||||
headers=mox.IgnoreArg()).AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
|
||||
headers = {"test": u'chunked_request'}
|
||||
resp, body = self.client.post('/v1/images/',
|
||||
headers=headers,
|
||||
data=data)
|
||||
self.assertEqual(resp, fake)
|
||||
|
||||
def test_http_chunked_response(self):
|
||||
headers = {"Content-Type": "application/octet-stream"}
|
||||
data = "TEST"
|
||||
fake = utils.FakeResponse(headers, six.StringIO(data))
|
||||
|
||||
requests.Session.request(
|
||||
mox.IgnoreArg(),
|
||||
mox.IgnoreArg(),
|
||||
stream=mox.IgnoreArg(),
|
||||
data=mox.IgnoreArg(),
|
||||
headers=mox.IgnoreArg()).AndReturn(fake)
|
||||
self.mock.ReplayAll()
|
||||
headers = {"test": u'chunked_request'}
|
||||
resp, body = self.client.get('/v1/images/')
|
||||
self.assertTrue(isinstance(body, types.GeneratorType))
|
||||
self.assertEqual([data], list(body))
|
||||
|
||||
|
||||
class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
@ -396,7 +239,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
def test_setcontext_unable_to_load_cacert(self):
|
||||
"""Add this UT case with Bug#1265730."""
|
||||
self.assertRaises(exc.SSLConfigurationError,
|
||||
http.VerifiedHTTPSConnection,
|
||||
https.VerifiedHTTPSConnection,
|
||||
"127.0.0.1",
|
||||
None,
|
||||
None,
|
||||
@ -405,45 +248,3 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
None,
|
||||
False,
|
||||
True)
|
||||
|
||||
|
||||
class TestResponseBodyIterator(testtools.TestCase):
|
||||
|
||||
def test_iter_default_chunk_size_64k(self):
|
||||
resp = utils.FakeResponse({}, six.BytesIO(b'X' * 98304))
|
||||
iterator = http.ResponseBodyIterator(resp)
|
||||
chunks = list(iterator)
|
||||
self.assertEqual([b'X' * 65536, b'X' * 32768], chunks)
|
||||
|
||||
def test_integrity_check_with_correct_checksum(self):
|
||||
resp = utils.FakeResponse({}, six.BytesIO(b'CCC'))
|
||||
body = http.ResponseBodyIterator(resp)
|
||||
body.set_checksum('defb99e69a9f1f6e06f15006b1f166ae')
|
||||
list(body)
|
||||
|
||||
def test_integrity_check_with_wrong_checksum(self):
|
||||
resp = utils.FakeResponse({}, six.BytesIO(b'BB'))
|
||||
body = http.ResponseBodyIterator(resp)
|
||||
body.set_checksum('wrong')
|
||||
try:
|
||||
list(body)
|
||||
self.fail('integrity checked passed with wrong checksum')
|
||||
except IOError as e:
|
||||
self.assertEqual(errno.EPIPE, e.errno)
|
||||
|
||||
def test_set_checksum_in_consumed_iterator(self):
|
||||
resp = utils.FakeResponse({}, six.BytesIO(b'CCC'))
|
||||
body = http.ResponseBodyIterator(resp)
|
||||
list(body)
|
||||
# Setting checksum for an already consumed iterator should raise an
|
||||
# AttributeError.
|
||||
self.assertRaises(
|
||||
AttributeError, body.set_checksum,
|
||||
'defb99e69a9f1f6e06f15006b1f166ae')
|
||||
|
||||
def test_body_size(self):
|
||||
size = 1000000007
|
||||
resp = utils.FakeResponse(
|
||||
{'content-length': str(size)}, six.BytesIO(b'BB'))
|
||||
body = http.ResponseBodyIterator(resp)
|
||||
self.assertEqual(size, len(body))
|
||||
|
@ -105,13 +105,11 @@ class ShellCacheSchemaTest(utils.TestCase):
|
||||
super(ShellCacheSchemaTest, self).setUp()
|
||||
self._mock_client_setup()
|
||||
self._mock_shell_setup()
|
||||
os.path.exists = mock.MagicMock()
|
||||
self.cache_dir = '/dir_for_cached_schema'
|
||||
self.cache_file = self.cache_dir + '/image_schema.json'
|
||||
|
||||
def tearDown(self):
|
||||
super(ShellCacheSchemaTest, self).tearDown()
|
||||
os.path.exists.reset_mock()
|
||||
|
||||
def _mock_client_setup(self):
|
||||
self.schema_dict = {
|
||||
@ -137,27 +135,8 @@ class ShellCacheSchemaTest(utils.TestCase):
|
||||
return Args(args)
|
||||
|
||||
@mock.patch('six.moves.builtins.open', new=mock.mock_open(), create=True)
|
||||
def test_cache_schema_gets_when_not_exists(self):
|
||||
mocked_path_exists_result_lst = [True, False]
|
||||
os.path.exists.side_effect = \
|
||||
lambda *args: mocked_path_exists_result_lst.pop(0)
|
||||
|
||||
options = {
|
||||
'get_schema': False
|
||||
}
|
||||
|
||||
self.shell._cache_schema(self._make_args(options),
|
||||
home_dir=self.cache_dir)
|
||||
|
||||
self.assertEqual(4, open.mock_calls.__len__())
|
||||
self.assertEqual(mock.call(self.cache_file, 'w'), open.mock_calls[0])
|
||||
self.assertEqual(mock.call().write(json.dumps(self.schema_dict)),
|
||||
open.mock_calls[2])
|
||||
|
||||
@mock.patch('six.moves.builtins.open', new=mock.mock_open(), create=True)
|
||||
def test_cache_schema_gets_when_forced(self):
|
||||
os.path.exists.return_value = True
|
||||
|
||||
@mock.patch('os.path.exists', return_value=True)
|
||||
def test_cache_schema_gets_when_forced(self, exists_mock):
|
||||
options = {
|
||||
'get_schema': True
|
||||
}
|
||||
@ -171,9 +150,23 @@ class ShellCacheSchemaTest(utils.TestCase):
|
||||
open.mock_calls[2])
|
||||
|
||||
@mock.patch('six.moves.builtins.open', new=mock.mock_open(), create=True)
|
||||
def test_cache_schema_leaves_when_present_not_forced(self):
|
||||
os.path.exists.return_value = True
|
||||
@mock.patch('os.path.exists', side_effect=[True, False])
|
||||
def test_cache_schema_gets_when_not_exists(self, exists_mock):
|
||||
options = {
|
||||
'get_schema': False
|
||||
}
|
||||
|
||||
self.shell._cache_schema(self._make_args(options),
|
||||
home_dir=self.cache_dir)
|
||||
|
||||
self.assertEqual(4, open.mock_calls.__len__())
|
||||
self.assertEqual(mock.call(self.cache_file, 'w'), open.mock_calls[0])
|
||||
self.assertEqual(mock.call().write(json.dumps(self.schema_dict)),
|
||||
open.mock_calls[2])
|
||||
|
||||
@mock.patch('six.moves.builtins.open', new=mock.mock_open(), create=True)
|
||||
@mock.patch('os.path.exists', return_value=True)
|
||||
def test_cache_schema_leaves_when_present_not_forced(self, exists_mock):
|
||||
options = {
|
||||
'get_schema': False
|
||||
}
|
||||
@ -183,5 +176,5 @@ class ShellCacheSchemaTest(utils.TestCase):
|
||||
|
||||
os.path.exists.assert_any_call(self.cache_dir)
|
||||
os.path.exists.assert_any_call(self.cache_file)
|
||||
self.assertEqual(2, os.path.exists.call_count)
|
||||
self.assertEqual(2, exists_mock.call_count)
|
||||
self.assertEqual(0, open.mock_calls.__len__())
|
||||
|
@ -16,9 +16,11 @@
|
||||
import os
|
||||
|
||||
from OpenSSL import crypto
|
||||
from requests.packages.urllib3 import poolmanager
|
||||
import testtools
|
||||
|
||||
from glanceclient.common import http
|
||||
from glanceclient.common import https
|
||||
from glanceclient import exc
|
||||
|
||||
|
||||
@ -26,6 +28,26 @@ TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
'var'))
|
||||
|
||||
|
||||
class TestRequestsIntegration(testtools.TestCase):
|
||||
|
||||
def test_pool_patch(self):
|
||||
client = http.HTTPClient("https://localhost",
|
||||
ssl_compression=True)
|
||||
self.assertNotEqual(https.HTTPSConnectionPool,
|
||||
poolmanager.pool_classes_by_scheme["https"])
|
||||
|
||||
adapter = client.session.adapters.get("https://")
|
||||
self.assertFalse(isinstance(adapter, https.HTTPSAdapter))
|
||||
|
||||
client = http.HTTPClient("https://localhost",
|
||||
ssl_compression=False)
|
||||
self.assertEqual(https.HTTPSConnectionPool,
|
||||
poolmanager.pool_classes_by_scheme["https"])
|
||||
|
||||
adapter = client.session.adapters.get("https://")
|
||||
self.assertTrue(isinstance(adapter, https.HTTPSAdapter))
|
||||
|
||||
|
||||
class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
def test_ssl_init_ok(self):
|
||||
"""
|
||||
@ -35,10 +57,10 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
except exc.SSLConfigurationError:
|
||||
self.fail('Failed to init VerifiedHTTPSConnection.')
|
||||
|
||||
@ -49,9 +71,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
self.fail('Failed to raise assertion.')
|
||||
except exc.SSLConfigurationError:
|
||||
pass
|
||||
@ -63,9 +85,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
key_file = os.path.join(TEST_VAR_DIR, 'privatekey.key')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cacert=cacert)
|
||||
except exc.SSLConfigurationError:
|
||||
pass
|
||||
except Exception:
|
||||
@ -78,9 +100,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
self.fail('Failed to raise assertion.')
|
||||
except exc.SSLConfigurationError:
|
||||
pass
|
||||
@ -92,9 +114,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'badcert.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
self.fail('Failed to raise assertion.')
|
||||
except exc.SSLConfigurationError:
|
||||
pass
|
||||
@ -106,9 +128,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'badca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
self.fail('Failed to raise assertion.')
|
||||
except exc.SSLConfigurationError:
|
||||
pass
|
||||
@ -123,7 +145,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
# The expected cert should have CN=0.0.0.0
|
||||
self.assertEqual('0.0.0.0', cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('0.0.0.0', 0)
|
||||
conn = https.VerifiedHTTPSConnection('0.0.0.0', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
@ -138,7 +160,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
# The expected cert should have CN=*.pong.example.com
|
||||
self.assertEqual('*.pong.example.com', cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('ping.pong.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('ping.pong.example.com', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
@ -153,13 +175,13 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
# The expected cert should have CN=0.0.0.0
|
||||
self.assertEqual('0.0.0.0', cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('alt1.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('alt1.example.com', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('alt2.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('alt2.example.com', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
@ -174,19 +196,19 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
# The expected cert should have CN=0.0.0.0
|
||||
self.assertEqual('0.0.0.0', cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('alt1.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('alt1.example.com', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('alt2.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('alt2.example.com', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
except Exception:
|
||||
self.fail('Unexpected exception.')
|
||||
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('alt3.example.net', 0)
|
||||
conn = https.VerifiedHTTPSConnection('alt3.example.net', 0)
|
||||
conn.verify_callback(None, cert, 0, 0, 1)
|
||||
self.fail('Failed to raise assertion.')
|
||||
except exc.SSLCertificateError:
|
||||
@ -202,7 +224,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
# The expected cert should have CN=0.0.0.0
|
||||
self.assertEqual('0.0.0.0', cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('mismatch.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('mismatch.example.com', 0)
|
||||
except Exception:
|
||||
self.fail('Failed to init VerifiedHTTPSConnection.')
|
||||
|
||||
@ -220,10 +242,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
self.assertEqual('openstack.example.com',
|
||||
cert.get_subject().commonName)
|
||||
try:
|
||||
conn = http.VerifiedHTTPSConnection('openstack.example.com', 0)
|
||||
conn = https.VerifiedHTTPSConnection('openstack.example.com', 0)
|
||||
except Exception:
|
||||
self.fail('Failed to init VerifiedHTTPSConnection.')
|
||||
|
||||
self.assertRaises(exc.SSLCertificateError,
|
||||
conn.verify_callback, None, cert, 0, 0, 1)
|
||||
|
||||
@ -236,7 +257,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
key_file = 'fake.key'
|
||||
self.assertRaises(
|
||||
exc.SSLConfigurationError,
|
||||
http.VerifiedHTTPSConnection, '127.0.0.1',
|
||||
https.VerifiedHTTPSConnection, '127.0.0.1',
|
||||
0, key_file=key_file,
|
||||
cert_file=cert_file, cacert=cacert)
|
||||
|
||||
@ -248,7 +269,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection(
|
||||
https.VerifiedHTTPSConnection(
|
||||
'127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
@ -264,7 +285,7 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
|
||||
cacert = os.path.join(TEST_VAR_DIR, 'ca.crt')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection(
|
||||
https.VerifiedHTTPSConnection(
|
||||
'127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
@ -286,9 +307,9 @@ class TestVerifiedHTTPSConnection(testtools.TestCase):
|
||||
cert_file = cert_file.encode('ascii', 'strict').decode('utf-8')
|
||||
cacert = cacert.encode('ascii', 'strict').decode('utf-8')
|
||||
try:
|
||||
http.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
https.VerifiedHTTPSConnection('127.0.0.1', 0,
|
||||
key_file=key_file,
|
||||
cert_file=cert_file,
|
||||
cacert=cacert)
|
||||
except exc.SSLConfigurationError:
|
||||
self.fail('Failed to init VerifiedHTTPSConnection.')
|
||||
|
126
tests/utils.py
126
tests/utils.py
@ -14,64 +14,53 @@
|
||||
# under the License.
|
||||
|
||||
import copy
|
||||
import requests
|
||||
import json
|
||||
import six
|
||||
import testtools
|
||||
|
||||
from glanceclient.common import http
|
||||
|
||||
|
||||
class FakeAPI(object):
|
||||
def __init__(self, fixtures):
|
||||
self.fixtures = fixtures
|
||||
self.calls = []
|
||||
|
||||
def _request(self, method, url, headers=None, body=None,
|
||||
def _request(self, method, url, headers=None, data=None,
|
||||
content_length=None):
|
||||
call = (method, url, headers or {}, body)
|
||||
call = (method, url, headers or {}, data)
|
||||
if content_length is not None:
|
||||
call = tuple(list(call) + [content_length])
|
||||
self.calls.append(call)
|
||||
return self.fixtures[url][method]
|
||||
fixture = self.fixtures[url][method]
|
||||
|
||||
def raw_request(self, *args, **kwargs):
|
||||
fixture = self._request(*args, **kwargs)
|
||||
resp = FakeResponse(fixture[0], six.StringIO(fixture[1]))
|
||||
body_iter = http.ResponseBodyIterator(resp)
|
||||
return resp, body_iter
|
||||
data = fixture[1]
|
||||
if isinstance(fixture[1], six.string_types):
|
||||
try:
|
||||
data = json.loads(fixture[1])
|
||||
except ValueError:
|
||||
data = six.StringIO(fixture[1])
|
||||
|
||||
def json_request(self, *args, **kwargs):
|
||||
fixture = self._request(*args, **kwargs)
|
||||
return FakeResponse(fixture[0]), fixture[1]
|
||||
return FakeResponse(fixture[0], fixture[1]), data
|
||||
|
||||
def client_request(self, method, url, **kwargs):
|
||||
if 'json' in kwargs and 'body' not in kwargs:
|
||||
kwargs['body'] = kwargs.pop('json')
|
||||
resp, body = self.json_request(method, url, **kwargs)
|
||||
resp.json = lambda: body
|
||||
resp.content = bool(body)
|
||||
return resp
|
||||
def get(self, *args, **kwargs):
|
||||
return self._request('GET', *args, **kwargs)
|
||||
|
||||
def head(self, url, **kwargs):
|
||||
return self.client_request("HEAD", url, **kwargs)
|
||||
def post(self, *args, **kwargs):
|
||||
return self._request('POST', *args, **kwargs)
|
||||
|
||||
def get(self, url, **kwargs):
|
||||
return self.client_request("GET", url, **kwargs)
|
||||
def put(self, *args, **kwargs):
|
||||
return self._request('PUT', *args, **kwargs)
|
||||
|
||||
def post(self, url, **kwargs):
|
||||
return self.client_request("POST", url, **kwargs)
|
||||
def patch(self, *args, **kwargs):
|
||||
return self._request('PATCH', *args, **kwargs)
|
||||
|
||||
def put(self, url, **kwargs):
|
||||
return self.client_request("PUT", url, **kwargs)
|
||||
def delete(self, *args, **kwargs):
|
||||
return self._request('DELETE', *args, **kwargs)
|
||||
|
||||
def delete(self, url, **kwargs):
|
||||
return self.raw_request("DELETE", url, **kwargs)
|
||||
|
||||
def patch(self, url, **kwargs):
|
||||
return self.client_request("PATCH", url, **kwargs)
|
||||
def head(self, *args, **kwargs):
|
||||
return self._request('HEAD', *args, **kwargs)
|
||||
|
||||
|
||||
class FakeResponse(object):
|
||||
class RawRequest(object):
|
||||
def __init__(self, headers, body=None,
|
||||
version=1.0, status=200, reason="Ok"):
|
||||
"""
|
||||
@ -97,36 +86,55 @@ class FakeResponse(object):
|
||||
return self.body.read(amt)
|
||||
|
||||
|
||||
class FakeResponse(object):
|
||||
def __init__(self, headers=None, body=None,
|
||||
version=1.0, status_code=200, reason="Ok"):
|
||||
"""
|
||||
:param headers: dict representing HTTP response headers
|
||||
:param body: file-like object
|
||||
:param version: HTTP Version
|
||||
:param status: Response status code
|
||||
:param reason: Status code related message.
|
||||
"""
|
||||
self.body = body
|
||||
self.reason = reason
|
||||
self.version = version
|
||||
self.headers = headers
|
||||
self.status_code = status_code
|
||||
self.raw = RawRequest(headers, body=body, reason=reason,
|
||||
version=version, status=status_code)
|
||||
|
||||
@property
|
||||
def ok(self):
|
||||
return (self.status_code < 400 or
|
||||
self.status_code >= 600)
|
||||
|
||||
def read(self, amt):
|
||||
return self.body.read(amt)
|
||||
|
||||
@property
|
||||
def content(self):
|
||||
if hasattr(self.body, "read"):
|
||||
return self.body.read()
|
||||
return self.body
|
||||
|
||||
def json(self, **kwargs):
|
||||
return self.body and json.loads(self.content) or ""
|
||||
|
||||
def iter_content(self, chunk_size=1, decode_unicode=False):
|
||||
while True:
|
||||
chunk = self.raw.read(chunk_size)
|
||||
if not chunk:
|
||||
break
|
||||
yield chunk
|
||||
|
||||
|
||||
class TestCase(testtools.TestCase):
|
||||
TEST_REQUEST_BASE = {
|
||||
'config': {'danger_mode': False},
|
||||
'verify': True}
|
||||
|
||||
|
||||
class TestResponse(requests.Response):
|
||||
"""
|
||||
Class used to wrap requests.Response and provide some
|
||||
convenience to initialize with a dict
|
||||
"""
|
||||
def __init__(self, data):
|
||||
self._text = None
|
||||
super(TestResponse, self)
|
||||
if isinstance(data, dict):
|
||||
self.status_code = data.get('status_code', None)
|
||||
self.headers = data.get('headers', None)
|
||||
# Fake the text attribute to streamline Response creation
|
||||
self._text = data.get('text', None)
|
||||
else:
|
||||
self.status_code = data
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
@property
|
||||
def text(self):
|
||||
return self._text
|
||||
|
||||
|
||||
class FakeTTYStdout(six.StringIO):
|
||||
"""A Fake stdout that try to emulate a TTY device as much as possible."""
|
||||
|
||||
|
@ -932,7 +932,7 @@ class ParameterFakeAPI(utils.FakeAPI):
|
||||
},
|
||||
]}
|
||||
|
||||
def json_request(self, method, url, **kwargs):
|
||||
def get(self, url, **kwargs):
|
||||
self.url = url
|
||||
return utils.FakeResponse({}), ParameterFakeAPI.image_list
|
||||
|
||||
|
@ -257,7 +257,7 @@ class LegacyShellV1Test(testtools.TestCase):
|
||||
args = Image()
|
||||
gc = client.Client('1', 'http://is.invalid')
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint, test_shell.do_update, gc, args)
|
||||
exc.CommunicationError, test_shell.do_update, gc, args)
|
||||
|
||||
def test_do_update(self):
|
||||
class Image():
|
||||
|
@ -224,81 +224,81 @@ class ShellInvalidEndpointandParameterTest(utils.TestCase):
|
||||
|
||||
def test_image_list_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint, self.run_command, 'image-list')
|
||||
exc.CommunicationError, self.run_command, 'image-list')
|
||||
|
||||
def test_image_details_invalid_endpoint_legacy(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint, self.run_command, 'details')
|
||||
exc.CommunicationError, self.run_command, 'details')
|
||||
|
||||
def test_image_update_invalid_endpoint_legacy(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'update {"name":""test}')
|
||||
|
||||
def test_image_index_invalid_endpoint_legacy(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'index')
|
||||
|
||||
def test_image_create_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'image-create')
|
||||
|
||||
def test_image_delete_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'image-delete <fake>')
|
||||
|
||||
def test_image_download_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'image-download <fake>')
|
||||
|
||||
def test_image_members_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'image-members fake_id')
|
||||
|
||||
def test_members_list_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'member-list --image-id fake')
|
||||
|
||||
def test_member_replace_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'members-replace image_id member_id')
|
||||
|
||||
def test_image_show_invalid_endpoint_legacy(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint, self.run_command, 'show image')
|
||||
exc.CommunicationError, self.run_command, 'show image')
|
||||
|
||||
def test_image_show_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'image-show --human-readable <IMAGE_ID>')
|
||||
|
||||
def test_member_images_invalid_endpoint_legacy(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command, 'member-images member_id')
|
||||
|
||||
def test_member_create_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command,
|
||||
'member-create --can-share <IMAGE_ID> <TENANT_ID>')
|
||||
|
||||
def test_member_delete_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command,
|
||||
'member-delete <IMAGE_ID> <TENANT_ID>')
|
||||
|
||||
def test_member_add_invalid_endpoint(self):
|
||||
self.assertRaises(
|
||||
exc.InvalidEndpoint,
|
||||
exc.CommunicationError,
|
||||
self.run_command,
|
||||
'member-add <IMAGE_ID> <TENANT_ID>')
|
||||
|
||||
|
@ -514,9 +514,11 @@ class TestController(testtools.TestCase):
|
||||
image_data = 'CCC'
|
||||
image_id = '606b0e88-7c5a-4d54-b5bb-046105d4de6f'
|
||||
self.controller.upload(image_id, image_data, image_size=3)
|
||||
body = {'image_data': image_data,
|
||||
'image_size': 3}
|
||||
expect = [('PUT', '/v2/images/%s/file' % image_id,
|
||||
{'Content-Type': 'application/octet-stream'},
|
||||
image_data, 3)]
|
||||
body)]
|
||||
self.assertEqual(expect, self.api.calls)
|
||||
|
||||
def test_data_without_checksum(self):
|
||||
|
@ -13,17 +13,12 @@
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import mock
|
||||
import six
|
||||
import testtools
|
||||
|
||||
from glanceclient.common import http
|
||||
from glanceclient.common import progressbar
|
||||
from glanceclient.common import utils
|
||||
from glanceclient.v2 import shell as test_shell
|
||||
from tests import utils as test_utils
|
||||
|
||||
|
||||
class ShellV2Test(testtools.TestCase):
|
||||
@ -208,16 +203,18 @@ class ShellV2Test(testtools.TestCase):
|
||||
utils.print_dict.assert_called_once_with({
|
||||
'id': 'pass', 'name': 'IMG-01', 'disk_format': 'vhd'})
|
||||
|
||||
def test_do_location_add_update_with_invalid_json_metadata(self):
|
||||
args = self._make_args({'id': 'pass',
|
||||
'url': 'http://foo/bar',
|
||||
'metadata': '{1, 2, 3}'})
|
||||
self.assert_exits_with_msg(test_shell.do_location_add,
|
||||
args,
|
||||
'Metadata is not a valid JSON object.')
|
||||
self.assert_exits_with_msg(test_shell.do_location_update,
|
||||
args,
|
||||
'Metadata is not a valid JSON object.')
|
||||
def test_do_explain(self):
|
||||
input = {
|
||||
'page_size': 18,
|
||||
'id': 'pass',
|
||||
'schemas': 'test',
|
||||
'model': 'test',
|
||||
}
|
||||
args = self._make_args(input)
|
||||
with mock.patch.object(utils, 'print_list'):
|
||||
test_shell.do_explain(self.gc, args)
|
||||
|
||||
self.gc.schemas.get.assert_called_once_with('test')
|
||||
|
||||
def test_do_location_add(self):
|
||||
gc = self.gc
|
||||
@ -260,19 +257,6 @@ class ShellV2Test(testtools.TestCase):
|
||||
loc['metadata'])
|
||||
utils.print_dict.assert_called_once_with(expect_image)
|
||||
|
||||
def test_do_explain(self):
|
||||
input = {
|
||||
'page_size': 18,
|
||||
'id': 'pass',
|
||||
'schemas': 'test',
|
||||
'model': 'test',
|
||||
}
|
||||
args = self._make_args(input)
|
||||
with mock.patch.object(utils, 'print_list'):
|
||||
test_shell.do_explain(self.gc, args)
|
||||
|
||||
self.gc.schemas.get.assert_called_once_with('test')
|
||||
|
||||
def test_image_upload(self):
|
||||
args = self._make_args(
|
||||
{'id': 'IMG-01', 'file': 'test', 'size': 1024, 'progress': False})
|
||||
@ -283,46 +267,6 @@ class ShellV2Test(testtools.TestCase):
|
||||
test_shell.do_image_upload(self.gc, args)
|
||||
mocked_upload.assert_called_once_with('IMG-01', 'testfile', 1024)
|
||||
|
||||
def test_image_upload_with_progressbar(self):
|
||||
args = self._make_args(
|
||||
{'id': 'IMG-01', 'file': 'test', 'size': 1024, 'progress': True})
|
||||
|
||||
with mock.patch.object(self.gc.images, 'upload') as mocked_upload:
|
||||
utils.get_data_file = mock.Mock(return_value='testfile')
|
||||
utils.get_file_size = mock.Mock(return_value=8)
|
||||
mocked_upload.return_value = None
|
||||
test_shell.do_image_upload(self.gc, args)
|
||||
self.assertIsInstance(mocked_upload.call_args[0][1],
|
||||
progressbar.VerboseFileWrapper)
|
||||
|
||||
def test_image_download(self):
|
||||
args = self._make_args(
|
||||
{'id': 'pass', 'file': 'test', 'progress': False})
|
||||
|
||||
with mock.patch.object(self.gc.images, 'data') as mocked_data:
|
||||
resp = test_utils.FakeResponse({}, six.StringIO('CCC'))
|
||||
ret = mocked_data.return_value = http.ResponseBodyIterator(resp)
|
||||
test_shell.do_image_download(self.gc, args)
|
||||
|
||||
mocked_data.assert_called_once_with('pass')
|
||||
utils.save_image.assert_called_once_with(ret, 'test')
|
||||
|
||||
def test_image_download_with_progressbar(self):
|
||||
args = self._make_args(
|
||||
{'id': 'pass', 'file': 'test', 'progress': True})
|
||||
|
||||
with mock.patch.object(self.gc.images, 'data') as mocked_data:
|
||||
resp = test_utils.FakeResponse({}, six.StringIO('CCC'))
|
||||
mocked_data.return_value = http.ResponseBodyIterator(resp)
|
||||
test_shell.do_image_download(self.gc, args)
|
||||
|
||||
mocked_data.assert_called_once_with('pass')
|
||||
utils.save_image.assert_called_once_with(mock.ANY, 'test')
|
||||
self.assertIsInstance(
|
||||
utils.save_image.call_args[0][0],
|
||||
progressbar.VerboseIteratorWrapper
|
||||
)
|
||||
|
||||
def test_do_image_delete(self):
|
||||
args = self._make_args({'id': 'pass', 'file': 'test'})
|
||||
with mock.patch.object(self.gc.images, 'delete') as mocked_delete:
|
||||
|
Loading…
x
Reference in New Issue
Block a user