2013-09-20 04:05:51 +08:00
|
|
|
# Copyright 2012 OpenStack Foundation
|
2012-08-02 14:22:27 -07:00
|
|
|
# All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License. You may obtain
|
|
|
|
# a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
import copy
|
|
|
|
import logging
|
2012-08-02 14:16:13 -07:00
|
|
|
import socket
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2014-07-01 14:45:12 +05:30
|
|
|
import requests
|
2014-01-10 17:16:09 +01:00
|
|
|
import six
|
2014-01-15 13:34:09 -06:00
|
|
|
from six.moves.urllib import parse
|
2013-10-15 14:47:30 -04:00
|
|
|
|
2012-03-26 22:48:48 -07:00
|
|
|
try:
|
|
|
|
import json
|
|
|
|
except ImportError:
|
|
|
|
import simplejson as json
|
|
|
|
|
|
|
|
# Python 2.5 compat fix
# urllib's parse module only gained parse_qsl in Python 2.6; on older
# interpreters the same function lives in the cgi module, so alias it
# onto ``parse`` to give the rest of this file a single call site.
if not hasattr(parse, 'parse_qsl'):
    import cgi
    parse.parse_qsl = cgi.parse_qsl
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2014-07-01 14:45:12 +05:30
|
|
|
from glanceclient.common import https
|
2013-12-16 15:28:05 +01:00
|
|
|
from glanceclient import exc
|
2014-06-28 22:09:26 +04:00
|
|
|
from glanceclient.openstack.common import importutils
|
2014-03-27 20:57:09 +00:00
|
|
|
from glanceclient.openstack.common import network_utils
|
2013-05-22 11:31:25 +02:00
|
|
|
from glanceclient.openstack.common import strutils
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2014-06-28 22:09:26 +04:00
|
|
|
# osprofiler is an optional dependency: try_import returns None when the
# package is not installed, so users of ``osprofiler_web`` must check for
# None before calling into it.
osprofiler_web = importutils.try_import("osprofiler.web")

# Module-level logger, named after this module.
LOG = logging.getLogger(__name__)

# User-Agent header value sent with every request.
USER_AGENT = 'python-glanceclient'

# Chunk size used when streaming request and response bodies.
CHUNKSIZE = 1024 * 64  # 64kB
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
|
2012-07-10 20:51:00 -07:00
|
|
|
class HTTPClient(object):
    """HTTP(S) client for the OpenStack Images (glance) API.

    Thin wrapper around a ``requests.Session`` that handles endpoint
    joining, auth-token headers, curl-style debug logging, chunked
    uploads/downloads and translation of transport errors into
    glanceclient exceptions.
    """

    def __init__(self, endpoint, **kwargs):
        """Create a client bound to *endpoint*.

        :param endpoint: base URL of the glance service.
        :param kwargs: optional settings -- ``token``,
            ``identity_headers``, ``timeout`` (seconds, default 600),
            ``cacert``, ``insecure``, ``cert_file``, ``key_file`` and
            ``ssl_compression``.
        """
        self.endpoint = endpoint
        self.identity_headers = kwargs.get('identity_headers')
        self.auth_token = kwargs.get('token')
        # An explicit X-Auth-Token inside the identity headers takes
        # precedence over the ``token`` kwarg; drop it from the dict so
        # it is not sent twice.
        if self.identity_headers:
            if self.identity_headers.get('X-Auth-Token'):
                self.auth_token = self.identity_headers.get('X-Auth-Token')
                del self.identity_headers['X-Auth-Token']

        self.session = requests.Session()
        self.session.headers["User-Agent"] = USER_AGENT
        self.session.headers["X-Auth-Token"] = self.auth_token

        self.timeout = float(kwargs.get('timeout', 600))

        if self.endpoint.startswith("https"):
            compression = kwargs.get('ssl_compression', True)

            if not compression:
                # Mount a custom adapter that disables TLS compression.
                self.session.mount("https://", https.HTTPSAdapter())

            # ``verify`` is either a CA bundle path (``cacert``) or a
            # boolean derived from the ``insecure`` flag.
            self.session.verify = kwargs.get('cacert',
                                             not kwargs.get('insecure',
                                                            True))
            self.session.cert = (kwargs.get('cert_file'),
                                 kwargs.get('key_file'))

    @staticmethod
    def parse_endpoint(endpoint):
        """Return *endpoint* split into its URL components."""
        return network_utils.urlsplit(endpoint)

    def log_curl_request(self, method, url, headers, data, kwargs):
        """Log the request as an equivalent ``curl`` command line.

        The auth token value is masked.  ``headers`` and ``kwargs`` are
        accepted for interface compatibility; the session headers are
        what is actually logged.
        """
        curl = ['curl -i -X %s' % method]

        for (key, value) in self.session.headers.items():
            # Never write the real token to the logs.
            if key.lower() == 'x-auth-token':
                value = '*' * 3
            header = '-H \'%s: %s\'' % (key, value)
            curl.append(strutils.safe_encode(header))

        if not self.session.verify:
            curl.append('-k')
        else:
            if isinstance(self.session.verify, six.string_types):
                curl.append(' --cacert %s' % self.session.verify)

        if self.session.cert:
            curl.append(' --cert %s --key %s' % self.session.cert)

        if data and isinstance(data, six.string_types):
            curl.append('-d \'%s\'' % data)

        curl.append(url)
        # errors='ignore' keeps non-ascii payloads from raising
        # UnicodeDecodeError while the log record is formatted
        # (bug 1320655).
        LOG.debug(strutils.safe_encode(' '.join(curl), errors='ignore'))

    @staticmethod
    def log_http_response(resp, body=None):
        """Dump a response (status line, headers, optional body) to the
        debug log with the auth token masked."""
        status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        # NOTE: the original code kept ``resp.headers.items()`` (a list
        # of tuples), so the membership test below never matched and the
        # keyed assignment would have failed; convert to a dict so the
        # token actually gets masked.
        headers = dict(resp.headers)
        if 'X-Auth-Token' in headers:
            headers['X-Auth-Token'] = '*' * 3
        dump.extend(['%s: %s' % (k, v) for k, v in headers.items()])
        dump.append('')
        if body:
            body = strutils.safe_decode(body)
            dump.extend([body, ''])
        LOG.debug('\n'.join([strutils.safe_encode(x) for x in dump]))

    @staticmethod
    def encode_headers(headers):
        """Encodes headers.

        Note: This should be used right before
        sending anything out.

        :param headers: Headers to encode
        :returns: Dictionary with encoded headers'
                  names and values
        """
        return dict((strutils.safe_encode(h), strutils.safe_encode(v))
                    for h, v in six.iteritems(headers))

    def _request(self, method, url, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around ``requests.Session.request`` to handle tasks such
        as setting headers, chunked uploads and error handling.

        :returns: tuple of (response, body) where body is decoded JSON,
            an iterator over raw image data, or None.
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        headers = kwargs.pop("headers", {})
        headers = headers and copy.deepcopy(headers) or {}

        # Default Content-Type is octet-stream
        content_type = headers.get('Content-Type', 'application/octet-stream')

        def chunk_body(body):
            # Stream a file-like object in CHUNKSIZE pieces, stopping at
            # EOF instead of yielding a final empty chunk like the
            # original implementation did.
            while True:
                chunk = body.read(CHUNKSIZE)
                if not chunk:
                    break
                yield chunk

        data = kwargs.pop("data", None)
        if data is not None and not isinstance(data, six.string_types):
            try:
                data = json.dumps(data)
                content_type = 'application/json'
            except TypeError:
                # Not JSON-serializable; assume it's a file-like object
                # and upload it in chunks.
                data = chunk_body(data)

        headers['Content-Type'] = content_type
        stream = content_type == 'application/octet-stream'

        # Note(flaper87): Before letting headers / url fly,
        # they should be encoded otherwise httplib will
        # complain.
        headers = self.encode_headers(headers)

        # Honour the timeout configured at construction time (it was
        # previously computed but never applied) unless the caller
        # supplied an explicit one.
        kwargs.setdefault('timeout', self.timeout)

        try:
            if self.endpoint.endswith("/") or url.startswith("/"):
                conn_url = "%s%s" % (self.endpoint, url)
            else:
                conn_url = "%s/%s" % (self.endpoint, url)
            self.log_curl_request(method, conn_url, headers, data, kwargs)
            resp = self.session.request(method,
                                        conn_url,
                                        data=data,
                                        stream=stream,
                                        headers=headers,
                                        **kwargs)
        except requests.exceptions.Timeout as e:
            # The mapping keys must match the template: the original
            # passed ``url=`` for ``%(endpoint)s``, raising KeyError
            # instead of the intended InvalidEndpoint.
            message = ("Error communicating with %(endpoint)s %(e)s" %
                       dict(endpoint=conn_url, e=e))
            raise exc.InvalidEndpoint(message=message)
        except requests.exceptions.ConnectionError as e:
            message = ("Error finding address for %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.CommunicationError(message=message)
        except socket.gaierror as e:
            # ``self.endpoint_hostname`` was never assigned anywhere in
            # this class; use the endpoint itself so this handler cannot
            # itself raise AttributeError.
            message = "Error finding address for %s: %s" % (
                self.endpoint, e)
            raise exc.InvalidEndpoint(message=message)
        except (socket.error, socket.timeout) as e:
            endpoint = self.endpoint
            message = ("Error communicating with %(endpoint)s %(e)s" %
                       {'endpoint': endpoint, 'e': e})
            raise exc.CommunicationError(message=message)

        if not resp.ok:
            LOG.error("Request returned failure status %s.",
                      resp.status_code)
            raise exc.from_response(resp, resp.content)
        elif resp.status_code == requests.codes.MULTIPLE_CHOICES:
            raise exc.from_response(resp)

        content_type = resp.headers.get('Content-Type')

        # Read body into string if it isn't obviously image data
        if content_type == 'application/octet-stream':
            # Do not read all response in memory when
            # downloading an image.
            body_iter = resp.iter_content(chunk_size=CHUNKSIZE)
            self.log_http_response(resp)
        else:
            content = resp.content
            self.log_http_response(resp, content)
            if content_type and content_type.startswith('application/json'):
                # Let's use requests json method,
                # it should take care of response
                # encoding
                body_iter = resp.json()
            else:
                body_iter = six.StringIO(content)
                try:
                    body_iter = json.loads(''.join([c for c in body_iter]))
                except ValueError:
                    body_iter = None
        return resp, body_iter

    def head(self, url, **kwargs):
        """Issue a HEAD request against *url*."""
        return self._request('HEAD', url, **kwargs)

    def get(self, url, **kwargs):
        """Issue a GET request against *url*."""
        return self._request('GET', url, **kwargs)

    def post(self, url, **kwargs):
        """Issue a POST request against *url*."""
        return self._request('POST', url, **kwargs)

    def put(self, url, **kwargs):
        """Issue a PUT request against *url*."""
        return self._request('PUT', url, **kwargs)

    def patch(self, url, **kwargs):
        """Issue a PATCH request against *url*."""
        return self._request('PATCH', url, **kwargs)

    def delete(self, url, **kwargs):
        """Issue a DELETE request against *url*."""
        return self._request('DELETE', url, **kwargs)
|