2013-09-20 04:05:51 +08:00
|
|
|
# Copyright 2012 OpenStack Foundation
|
2012-08-02 14:22:27 -07:00
|
|
|
# All Rights Reserved.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License. You may obtain
|
|
|
|
# a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations
|
|
|
|
# under the License.
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
import copy
|
|
|
|
import logging
|
2012-08-02 14:16:13 -07:00
|
|
|
import socket
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2016-06-14 06:57:02 +04:00
|
|
|
from keystoneauth1 import adapter
|
|
|
|
from keystoneauth1 import exceptions as ksa_exc
|
2017-06-23 13:43:29 -04:00
|
|
|
import OpenSSL
|
2015-02-05 22:27:16 +00:00
|
|
|
from oslo_utils import importutils
|
|
|
|
from oslo_utils import netutils
|
2014-07-01 14:45:12 +05:30
|
|
|
import requests
|
2014-01-10 17:16:09 +01:00
|
|
|
import six
|
2018-05-15 16:52:58 -04:00
|
|
|
import six.moves.urllib.parse as urlparse
|
2013-10-15 14:47:30 -04:00
|
|
|
|
2012-03-26 22:48:48 -07:00
|
|
|
try:
|
|
|
|
import json
|
|
|
|
except ImportError:
|
|
|
|
import simplejson as json
|
|
|
|
|
2015-02-05 22:27:16 +00:00
|
|
|
from oslo_utils import encodeutils
|
2015-01-06 14:32:05 +00:00
|
|
|
|
2015-05-19 19:59:06 -05:00
|
|
|
from glanceclient.common import utils
|
2013-12-16 15:28:05 +01:00
|
|
|
from glanceclient import exc
|
2012-03-26 22:48:48 -07:00
|
|
|
|
2014-06-28 22:09:26 +04:00
|
|
|
# osprofiler is an optional dependency; try_import returns None when it is
# not installed, so users of osprofiler_web must check it before calling.
osprofiler_web = importutils.try_import("osprofiler.web")


LOG = logging.getLogger(__name__)
# Identifies this client in the User-Agent header of outgoing requests.
USER_AGENT = 'python-glanceclient'
# Chunk size used when streaming request/response bodies.
CHUNKSIZE = 1024 * 64  # 64kB
# Header used to propagate a caller-supplied global request id.
REQ_ID_HEADER = 'X-OpenStack-Request-ID'
# Headers whose values are tokens; '+' must not be percent-quoted in them
# (see encode_headers).
TOKEN_HEADERS = ['X-Auth-Token', 'X-Service-Token']
|
2012-03-26 22:48:48 -07:00
|
|
|
|
|
|
|
|
2016-05-13 15:03:41 +03:00
|
|
|
def encode_headers(headers):
    """Percent-encode header names/values and return them as ASCII bytes.

    Note: This should be used right before sending anything out.

    :param headers: Headers to encode
    :returns: Dictionary with encoded headers' names and values
    """
    # NOTE(rosmaita): This function's rejection of any header name without a
    # corresponding value is arguably justified by RFC 7230. In any case, that
    # behavior was already here and there is an existing unit test for it.

    # Bug #1766235: According to RFC 8187, headers must be encoded as ASCII.
    # So we first %-encode them to get them into range < 128 and then turn
    # them into ASCII.
    def _quote(item, safe):
        if six.PY2:
            # incoming items may be unicode, so get them into something
            # the py2 version of urllib can handle before percent encoding
            item = encodeutils.safe_encode(item)
        return urlparse.quote(item, safe)

    percent_encoded = {}
    for name, value in headers.items():
        if value is None:
            # Drop headers that carry no value (see NOTE above).
            continue
        # If the item is a token, do not quote '+' as well.
        # NOTE(imacdonn): urlparse.quote() is intended for quoting the
        # path part of a URL, but headers like x-image-meta-location
        # include an entire URL. We should avoid encoding the colon in
        # this case (bug #1788942)
        safe_chars = '=+/' if name in TOKEN_HEADERS else '/:'
        percent_encoded[_quote(name, safe_chars)] = _quote(value, safe_chars)

    return {encodeutils.safe_encode(k, encoding='ascii'):
            encodeutils.safe_encode(v, encoding='ascii')
            for k, v in percent_encoded.items()}
|
2016-05-13 15:03:41 +03:00
|
|
|
|
|
|
|
|
2014-11-25 13:25:12 +10:00
|
|
|
class _BaseHTTPClient(object):
    """Request/response plumbing shared by HTTPClient and SessionClient."""

    @staticmethod
    def _chunk_body(body):
        """Yield CHUNKSIZE-sized pieces read from a file-like object."""
        # Prime with the body object itself so a falsy body yields nothing.
        piece = body
        while piece:
            piece = body.read(CHUNKSIZE)
            if piece:
                yield piece
            else:
                break

    def _set_common_request_kwargs(self, headers, kwargs):
        """Handle the common parameters used to send the request."""
        # Default Content-Type is octet-stream
        content_type = headers.get('Content-Type', 'application/octet-stream')

        # NOTE(jamielennox): remove this later. Managers should pass json= if
        # they want to send json data.
        payload = kwargs.pop("data", None)
        if payload is not None and not isinstance(payload, six.string_types):
            try:
                payload = json.dumps(payload)
                content_type = 'application/json'
            except TypeError:
                # Here we assume it's a file-like object
                # and we'll chunk it
                payload = self._chunk_body(payload)

        headers['Content-Type'] = content_type
        kwargs['stream'] = (content_type == 'application/octet-stream')

        return payload

    def _handle_response(self, resp):
        """Raise on failure; otherwise return (resp, parsed-or-streamed body)."""
        if not resp.ok:
            LOG.debug("Request returned failure status %s.", resp.status_code)
            raise exc.from_response(resp, resp.content)
        if (resp.status_code == requests.codes.MULTIPLE_CHOICES and
                resp.request.path_url != '/versions'):
            # NOTE(flaper87): Eventually, we'll remove the check on `versions`
            # which is a bug (1491350) on the server.
            raise exc.from_response(resp)

        content_type = resp.headers.get('Content-Type')

        # Read body into string if it isn't obviously image data
        if content_type == 'application/octet-stream':
            # Do not read all response in memory when downloading an image.
            body = _close_after_stream(resp, CHUNKSIZE)
        else:
            text = resp.text
            if content_type and content_type.startswith('application/json'):
                # Let's use requests json method, it should take care of
                # response encoding
                body = resp.json()
            else:
                # Best-effort JSON parse of the text; None if it isn't JSON.
                try:
                    body = json.loads(text)
                except ValueError:
                    body = None

        return resp, body
|
|
|
|
|
|
|
|
|
|
|
|
class HTTPClient(_BaseHTTPClient):
    """HTTP client that talks to a Glance endpoint directly.

    Used when no keystoneauth session is available; it manages its own
    ``requests.Session``, TLS verification options and auth-token header.
    """

    def __init__(self, endpoint, **kwargs):
        """Create a client for ``endpoint``.

        :param endpoint: base URL of the image service
        :param kwargs: recognized keys are ``token``, ``identity_headers``,
            ``language_header``, ``global_request_id``, ``timeout``,
            ``insecure``, ``cacert``, ``cert_file`` and ``key_file``.
        """
        self.endpoint = endpoint
        # Bug fix: the socket.gaierror handler in _request() formats
        # self.endpoint_hostname into its message, but the attribute was
        # never assigned, so a DNS failure surfaced as an AttributeError
        # instead of InvalidEndpoint.
        self.endpoint_hostname = self.parse_endpoint(endpoint).hostname
        self.identity_headers = kwargs.get('identity_headers')
        self.auth_token = kwargs.get('token')
        self.language_header = kwargs.get('language_header')
        self.global_request_id = kwargs.get('global_request_id')
        if self.identity_headers:
            # An explicit X-Auth-Token identity header wins over 'token'.
            self.auth_token = self.identity_headers.pop('X-Auth-Token',
                                                        self.auth_token)

        self.session = requests.Session()
        self.session.headers["User-Agent"] = USER_AGENT

        if self.language_header:
            self.session.headers["Accept-Language"] = self.language_header

        self.timeout = float(kwargs.get('timeout', 600))

        if self.endpoint.startswith("https"):
            if kwargs.get('insecure', False) is True:
                self.session.verify = False
            else:
                # Bug fix: this previously used "is not ''" -- identity
                # comparison with a str literal depends on CPython interning
                # and raises SyntaxWarning on Python >= 3.8; '!=' is the
                # intended test. An empty cacert leaves verification at its
                # default (True).
                if kwargs.get('cacert', None) != '':
                    self.session.verify = kwargs.get('cacert', True)

            self.session.cert = (kwargs.get('cert_file'),
                                 kwargs.get('key_file'))

    @staticmethod
    def parse_endpoint(endpoint):
        """Return the endpoint URL split into its components."""
        return netutils.urlsplit(endpoint)

    def log_curl_request(self, method, url, headers, data, kwargs):
        """Log the outgoing request as an equivalent curl command line."""
        curl = ['curl -g -i -X %s' % method]

        headers = copy.deepcopy(headers)
        headers.update(self.session.headers)

        for (key, value) in headers.items():
            # safe_header masks sensitive values (e.g. tokens) for logging.
            header = '-H \'%s: %s\'' % utils.safe_header(key, value)
            curl.append(header)

        if not self.session.verify:
            curl.append('-k')
        else:
            if isinstance(self.session.verify, six.string_types):
                curl.append(' --cacert %s' % self.session.verify)

        if self.session.cert:
            curl.append(' --cert %s --key %s' % self.session.cert)

        if data and isinstance(data, six.string_types):
            curl.append('-d \'%s\'' % data)

        curl.append(url)

        msg = ' '.join([encodeutils.safe_decode(item, errors='ignore')
                        for item in curl])
        LOG.debug(msg)

    @staticmethod
    def log_http_response(resp):
        """Log status line, headers and (non-binary) body of a response."""
        status = (resp.raw.version / 10.0, resp.status_code, resp.reason)
        dump = ['\nHTTP/%.1f %s %s' % status]
        headers = resp.headers.items()
        dump.extend(['%s: %s' % utils.safe_header(k, v) for k, v in headers])
        dump.append('')
        content_type = resp.headers.get('Content-Type')

        # Don't dump raw image payloads into the log.
        if content_type != 'application/octet-stream':
            dump.extend([resp.text, ''])
        LOG.debug('\n'.join([encodeutils.safe_decode(x, errors='ignore')
                             for x in dump]))

    def _request(self, method, url, **kwargs):
        """Send an http request with the specified characteristics.

        Wrapper around httplib.HTTP(S)Connection.request to handle tasks such
        as setting headers and error handling.

        :returns: tuple of (response, parsed-or-streamed body)
        :raises exc.InvalidEndpoint: on timeouts and address lookup failures
        :raises exc.CommunicationError: on connection/SSL/socket errors
        """
        # Copy the kwargs so we can reuse the original in case of redirects
        headers = copy.deepcopy(kwargs.pop('headers', {}))

        if self.identity_headers:
            for k, v in self.identity_headers.items():
                headers.setdefault(k, v)

        data = self._set_common_request_kwargs(headers, kwargs)

        # add identity header to the request
        if not headers.get('X-Auth-Token'):
            headers['X-Auth-Token'] = self.auth_token

        if self.global_request_id:
            headers.setdefault(REQ_ID_HEADER, self.global_request_id)

        if osprofiler_web:
            headers.update(osprofiler_web.get_trace_id_headers())

        # Note(flaper87): Before letting headers / url fly,
        # they should be encoded otherwise httplib will
        # complain.
        headers = encode_headers(headers)

        if self.endpoint.endswith("/") or url.startswith("/"):
            conn_url = "%s%s" % (self.endpoint, url)
        else:
            conn_url = "%s/%s" % (self.endpoint, url)
        self.log_curl_request(method, conn_url, headers, data, kwargs)

        try:
            resp = self.session.request(method,
                                        conn_url,
                                        data=data,
                                        headers=headers,
                                        **kwargs)
        except requests.exceptions.Timeout as e:
            message = ("Error communicating with %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.InvalidEndpoint(message=message)
        except requests.exceptions.ConnectionError as e:
            message = ("Error finding address for %(url)s: %(e)s" %
                       dict(url=conn_url, e=e))
            raise exc.CommunicationError(message=message)
        except socket.gaierror as e:
            message = "Error finding address for %s: %s" % (
                self.endpoint_hostname, e)
            raise exc.InvalidEndpoint(message=message)
        except (socket.error, socket.timeout, IOError) as e:
            endpoint = self.endpoint
            message = ("Error communicating with %(endpoint)s %(e)s" %
                       {'endpoint': endpoint, 'e': e})
            raise exc.CommunicationError(message=message)
        except OpenSSL.SSL.Error as e:
            message = ("SSL Error communicating with %(url)s: %(e)s" %
                       {'url': conn_url, 'e': e})
            raise exc.CommunicationError(message=message)

        # log request-id for each api call
        request_id = resp.headers.get('x-openstack-request-id')
        if request_id:
            LOG.debug('%(method)s call to image for '
                      '%(url)s used request id '
                      '%(response_request_id)s',
                      {'method': resp.request.method,
                       'url': resp.url,
                       'response_request_id': request_id})

        resp, body_iter = self._handle_response(resp)
        self.log_http_response(resp)
        return resp, body_iter

    def head(self, url, **kwargs):
        return self._request('HEAD', url, **kwargs)

    def get(self, url, **kwargs):
        return self._request('GET', url, **kwargs)

    def post(self, url, **kwargs):
        return self._request('POST', url, **kwargs)

    def put(self, url, **kwargs):
        return self._request('PUT', url, **kwargs)

    def patch(self, url, **kwargs):
        return self._request('PATCH', url, **kwargs)

    def delete(self, url, **kwargs):
        return self._request('DELETE', url, **kwargs)
|
2015-01-15 16:09:23 -06:00
|
|
|
|
|
|
|
|
|
|
|
def _close_after_stream(response, chunk_size):
|
|
|
|
"""Iterate over the content and ensure the response is closed after."""
|
|
|
|
# Yield each chunk in the response body
|
|
|
|
for chunk in response.iter_content(chunk_size=chunk_size):
|
|
|
|
yield chunk
|
|
|
|
# Once we're done streaming the body, ensure everything is closed.
|
|
|
|
# This will return the connection to the HTTPConnectionPool in urllib3
|
|
|
|
# and ideally reduce the number of HTTPConnectionPool full warnings.
|
|
|
|
response.close()
|
2014-11-25 13:25:12 +10:00
|
|
|
|
|
|
|
|
|
|
|
class SessionClient(adapter.Adapter, _BaseHTTPClient):
    """Glance client that sends its requests through a keystoneauth session."""

    def __init__(self, session, **kwargs):
        kwargs.setdefault('user_agent', USER_AGENT)
        kwargs.setdefault('service_type', 'image')
        self.global_request_id = kwargs.pop('global_request_id', None)
        super(SessionClient, self).__init__(session, **kwargs)

    def _full_url(self, path, auth):
        # Join the session endpoint and the request path; used only to
        # build error messages.
        base = self.get_endpoint(auth=auth)
        return "%s/%s" % (base.rstrip('/'), path.lstrip('/'))

    def request(self, url, method, **kwargs):
        """Send a request via the adapter, translating connection errors."""
        headers = kwargs.pop('headers', {})
        if self.global_request_id:
            headers.setdefault(REQ_ID_HEADER, self.global_request_id)

        kwargs['raise_exc'] = False
        payload = self._set_common_request_kwargs(headers, kwargs)
        try:
            # NOTE(pumaranikar): To avoid bug #1641239, no modification of
            # headers should be allowed after encode_headers() is called.
            resp = super(SessionClient, self).request(
                url,
                method,
                headers=encode_headers(headers),
                data=payload,
                **kwargs)
        except ksa_exc.ConnectTimeout as e:
            message = ("Error communicating with %(url)s %(e)s" %
                       dict(url=self._full_url(url, kwargs.get('auth')), e=e))
            raise exc.InvalidEndpoint(message=message)
        except ksa_exc.ConnectFailure as e:
            message = ("Error finding address for %(url)s: %(e)s" %
                       dict(url=self._full_url(url, kwargs.get('auth')), e=e))
            raise exc.CommunicationError(message=message)

        return self._handle_response(resp)
|
|
|
|
|
|
|
|
|
|
|
|
def get_http_client(endpoint=None, session=None, **kwargs):
    """Return a client for the given connection information.

    A keystoneauth session takes precedence over a bare endpoint.

    :raises AttributeError: if neither ``session`` nor ``endpoint`` is given.
    """
    if session:
        return SessionClient(session, **kwargs)
    if endpoint:
        return HTTPClient(endpoint, **kwargs)
    raise AttributeError('Constructing a client must contain either an '
                         'endpoint or a session')
|