2012-05-08 11:17:04 +01:00
|
|
|
# Copyright (c) 2010-2012 OpenStack, LLC.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
|
|
# implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
"""
|
2013-11-01 11:40:44 -07:00
|
|
|
OpenStack Swift client library used internally
|
2012-05-08 11:17:04 +01:00
|
|
|
"""
|
|
|
|
import socket
|
2017-06-13 11:14:52 -07:00
|
|
|
import re
|
2014-01-24 17:40:16 +01:00
|
|
|
import requests
|
2012-05-09 15:50:17 +01:00
|
|
|
import logging
|
2013-03-05 16:56:02 +00:00
|
|
|
import warnings
|
2012-05-09 15:50:17 +01:00
|
|
|
|
2014-01-24 17:40:16 +01:00
|
|
|
from distutils.version import StrictVersion
|
|
|
|
from requests.exceptions import RequestException, SSLError
|
2015-08-18 19:22:24 +08:00
|
|
|
from six.moves import http_client
|
2015-05-18 08:05:02 -07:00
|
|
|
from six.moves.urllib.parse import quote as _quote, unquote
|
2017-06-13 10:51:33 +02:00
|
|
|
from six.moves.urllib.parse import urljoin, urlparse, urlunparse
|
2013-09-04 19:32:09 +01:00
|
|
|
from time import sleep, time
|
2014-03-24 18:16:51 +01:00
|
|
|
import six
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2014-03-28 15:45:37 -07:00
|
|
|
from swiftclient import version as swiftclient_version
|
2014-04-15 14:50:10 -04:00
|
|
|
from swiftclient.exceptions import ClientException
|
2015-05-14 17:09:43 -07:00
|
|
|
from swiftclient.utils import (
|
2017-03-07 13:38:22 +04:00
|
|
|
iter_wrapper, LengthWrapper, ReadableToIterable, parse_api_response,
|
|
|
|
get_body)
|
2013-08-15 18:46:33 -07:00
|
|
|
|
2015-12-29 16:54:05 -08:00
|
|
|
# Default is 100, increase to 256: Swift responses can carry many
# metadata headers, and httplib would otherwise reject them.
http_client._MAXHEADERS = 256
|
|
|
|
|
2017-06-13 11:14:52 -07:00
|
|
|
# Matches a trailing "v2"/"v2.0"/"v3"/"v3.0" path component of an auth URL;
# used to detect versionless Keystone endpoints.  NOTE: the pattern must be
# a raw string -- '\.' in a plain string is an invalid escape sequence and
# raises a DeprecationWarning/SyntaxWarning on modern Python.
VERSIONFUL_AUTH_PATH = re.compile(r'v[2-3](?:\.0)?$')
# Accepted spellings for each auth protocol version.
AUTH_VERSIONS_V1 = ('1.0', '1', 1)
AUTH_VERSIONS_V2 = ('2.0', '2', 2)
AUTH_VERSIONS_V3 = ('3.0', '3', 3)
# Header-name prefixes that mark user metadata (see encode_meta_headers).
USER_METADATA_TYPE = tuple('x-%s-meta-' % type_ for type_ in
                           ('container', 'account', 'object'))
|
2014-03-25 08:21:21 +00:00
|
|
|
|
2013-08-15 18:46:33 -07:00
|
|
|
try:
    # NullHandler ships with the stdlib logging module since Python 2.7.
    from logging import NullHandler
except ImportError:
    # Added in Python 2.7 -- provide a no-op fallback for older runtimes
    # so library users without logging configured see no warnings.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            # No lock needed: emit() does nothing, so there is no shared
            # state to protect.
            self.lock = None
|
|
|
|
|
2018-09-07 16:50:10 -07:00
|
|
|
# Keystone support is optional: these stay None when python-keystoneclient
# is not installed, and get_auth_keystone() raises a helpful ClientException
# instead of an ImportError.
ksexceptions = ksclient_v2 = ksclient_v3 = None
try:
    from keystoneclient import exceptions as ksexceptions
    # prevent keystoneclient warning us that it has no log handlers
    logging.getLogger('keystoneclient').addHandler(NullHandler())
    from keystoneclient.v2_0 import client as ksclient_v2
except ImportError:
    pass
try:
    # v3 client may be importable even when v2 is not (or vice versa).
    from keystoneclient.v3 import client as ksclient_v3
except ImportError:
    pass
|
|
|
|
|
2014-01-24 17:40:16 +01:00
|
|
|
# requests version 1.2.3 try to encode headers in ascii, preventing
# utf-8 encoded header to be 'prepared'.  Monkey-patch the header
# preparation step on old releases so UTF-8 headers survive.
if StrictVersion(requests.__version__) < StrictVersion('2.0.0'):
    from requests.structures import CaseInsensitiveDict

    def prepare_unicode_headers(self, headers):
        # Replacement for PreparedRequest.prepare_headers that skips the
        # ASCII re-encoding done by requests < 2.0.0.
        if headers:
            self.headers = CaseInsensitiveDict(headers)
        else:
            self.headers = CaseInsensitiveDict()
    # Applied at import time so every Session benefits.
    requests.models.PreparedRequest.prepare_headers = prepare_unicode_headers
|
|
|
|
|
2012-05-09 15:50:17 +01:00
|
|
|
# Module-level logger; a NullHandler is attached so applications that do
# not configure logging get no "no handlers could be found" warnings.
logger = logging.getLogger("swiftclient")
logger.addHandler(NullHandler())
|
2012-05-09 15:50:17 +01:00
|
|
|
|
2016-02-24 16:56:55 -08:00
|
|
|
#: Default behaviour is to redact header values known to contain secrets,
|
|
|
|
#: such as ``X-Auth-Key`` and ``X-Auth-Token``. Up to the first 16 chars
|
|
|
|
#: may be revealed.
|
|
|
|
#:
|
|
|
|
#: To disable, set the value of ``redact_sensitive_headers`` to ``False``.
|
|
|
|
#:
|
|
|
|
#: When header redaction is enabled, ``reveal_sensitive_prefix`` configures the
|
|
|
|
#: maximum length of any sensitive header data sent to the logs. If the header
|
|
|
|
#: is less than twice this length, only ``int(len(value)/2)`` chars will be
|
|
|
|
#: logged; if it is less than 15 chars long, even less will be logged.
|
2016-02-19 13:18:15 +00:00
|
|
|
# Runtime-tunable redaction settings consumed by scrub_headers()/safe_value().
logger_settings = {
    'redact_sensitive_headers': True,
    'reveal_sensitive_prefix': 16
}
#: A list of sensitive headers to redact in logs. Note that when extending this
#: list, the header names must be added in all lower case.
LOGGER_SENSITIVE_HEADERS = [
    'x-auth-token', 'x-auth-key', 'x-service-token', 'x-storage-token',
    'x-account-meta-temp-url-key', 'x-account-meta-temp-url-key-2',
    'x-container-meta-temp-url-key', 'x-container-meta-temp-url-key-2',
    'set-cookie'
]
|
|
|
|
|
|
|
|
|
|
|
|
def safe_value(name, value):
    """
    Only show up to logger_settings['reveal_sensitive_prefix'] characters
    from a sensitive header.

    :param name: Header name
    :param value: Header value
    :return: Safe header value
    """
    if name.lower() not in LOGGER_SENSITIVE_HEADERS:
        # Non-sensitive headers pass through untouched.
        return value
    configured_max = logger_settings.get('reveal_sensitive_prefix', 16)
    # Never reveal more than half the value; for short values the
    # quadratic term reduces the revealed prefix even further.
    allowed = int(min(configured_max,
                      (len(value) ** 2) / 32,
                      len(value) / 2))
    return value[:allowed] + '...'
|
|
|
|
|
|
|
|
|
|
|
|
def scrub_headers(headers):
    """
    Redact header values that can contain sensitive information that
    should not be logged.

    :param headers: Either a dict or an iterable of two-element tuples
    :return: Safe dictionary of headers with sensitive information removed
    """
    if isinstance(headers, dict):
        headers = headers.items()
    # Normalise every name/value pair to text first.
    parsed = [(parse_header_string(name), parse_header_string(val))
              for name, val in headers]
    if not logger_settings.get('redact_sensitive_headers', True):
        # Redaction disabled: return everything verbatim.
        return dict(parsed)
    if logger_settings.get('reveal_sensitive_prefix', 16) < 0:
        # Guard against nonsensical negative settings.
        logger_settings['reveal_sensitive_prefix'] = 16
    return dict((name, safe_value(name, val)) for name, val in parsed)
|
|
|
|
|
2012-05-09 15:50:17 +01:00
|
|
|
|
|
|
|
def http_log(args, kwargs, resp, body):
    """Log one request/response exchange.

    Builds an equivalent ``curl`` command line for the request and logs it
    along with the (scrubbed) response status, headers and body.  Successful
    responses are logged at DEBUG, errors at INFO.
    """
    if not logger.isEnabledFor(logging.INFO):
        return

    # create and log equivalent curl command
    parts = ['curl -i']
    for element in args:
        if element == 'HEAD':
            parts.append(' -I')
        elif element in ('GET', 'POST', 'PUT'):
            parts.append(' -X %s' % element)
        else:
            parts.append(' %s' % parse_header_string(element))
    if 'headers' in kwargs:
        safe = scrub_headers(kwargs['headers'])
        parts.extend(' -H "%s: %s"' % (name, safe[name]) for name in safe)

    # log response as debug if good, or info if error
    log_method = logger.debug if resp.status < 300 else logger.info

    log_method("REQ: %s", "".join(parts))
    log_method("RESP STATUS: %s %s", resp.status, resp.reason)
    log_method("RESP HEADERS: %s", scrub_headers(resp.getheaders()))
    if body:
        resp_headers = resp_header_dict(resp)
        log_method("RESP BODY: %s", get_body(resp_headers, body))
|
2012-05-09 15:50:17 +01:00
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2015-05-18 08:05:02 -07:00
|
|
|
def parse_header_string(data):
    """Best-effort conversion of a header name/value to native text.

    Percent-encoded UTF-8 is unquoted where possible; data that cannot be
    decoded is returned re-quoted or as-is rather than raising.
    """
    if not isinstance(data, (six.text_type, six.binary_type)):
        # Numbers etc. -- stringify before any decoding logic.
        data = str(data)
    if six.PY2:
        if isinstance(data, six.text_type):
            # Under Python2 requests only returns binary_type, but if we get
            # some stray text_type input, this should prevent unquote from
            # interpreting %-encoded data as raw code-points.
            data = data.encode('utf8')
        try:
            unquoted = unquote(data).decode('utf8')
        except UnicodeDecodeError:
            try:
                # Unquoting produced invalid UTF-8; try the raw bytes.
                return data.decode('utf8')
            except UnicodeDecodeError:
                # Not valid UTF-8 at all; return a safely quoted form.
                return quote(data).decode('utf8')
    else:
        if isinstance(data, six.binary_type):
            # Under Python3 requests only returns text_type and tosses (!) the
            # rest of the headers. If that ever changes, this should be a sane
            # approach.
            try:
                data = data.decode('ascii')
            except UnicodeDecodeError:
                data = quote(data)
        try:
            unquoted = unquote(data, errors='strict')
        except UnicodeDecodeError:
            # Unquoted bytes are not valid UTF-8; keep the quoted form.
            return data
    return unquoted
|
|
|
|
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def quote(value, safe='/'):
    """
    Patched version of urllib.quote that encodes utf8 strings before quoting.

    On Python 3, call directly urllib.parse.quote().
    """
    if not six.PY3:
        # Python 2: urllib.quote chokes on unicode, so encode first.
        value = encode_utf8(value)
    return _quote(value, safe=safe)
|
2012-10-11 15:04:00 +03:00
|
|
|
|
|
|
|
|
|
|
|
def encode_utf8(value):
    """Return text values as UTF-8 bytes; pass other values through.

    Exact numeric/bool types are first converted to their string form.
    """
    if type(value) in six.integer_types + (float, bool):
        # As of requests 2.11.0, headers must be byte- or unicode-strings.
        # Convert some known-good types as a convenience for developers.
        # Note that we *don't* convert subclasses, as they may have overriddden
        # __str__ or __repr__.
        # See https://github.com/kennethreitz/requests/pull/3366 for more info
        value = str(value)
    if isinstance(value, six.text_type):
        return value.encode('utf8')
    return value
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2014-11-06 14:10:13 +00:00
|
|
|
def encode_meta_headers(headers):
    """Only encode metadata headers keys"""
    def _normalize_key(name):
        # Lower-case every header name; only user-metadata keys
        # (x-*-meta-*) additionally get UTF-8 encoded.
        name = name.lower()
        if (isinstance(name, six.string_types)
                and name.startswith(USER_METADATA_TYPE)):
            name = encode_utf8(name)
        return name

    return {_normalize_key(header): encode_utf8(value)
            for header, value in headers.items()}
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2014-11-06 14:10:13 +00:00
|
|
|
|
2015-02-12 12:34:07 +00:00
|
|
|
class _ObjectBody(object):
|
|
|
|
"""
|
|
|
|
Readable and iterable object body response wrapper.
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __init__(self, resp, chunk_size):
|
|
|
|
"""
|
|
|
|
Wrap the underlying response
|
|
|
|
|
|
|
|
:param resp: the response to wrap
|
|
|
|
:param chunk_size: number of bytes to return each iteration/next call
|
|
|
|
"""
|
|
|
|
self.resp = resp
|
|
|
|
self.chunk_size = chunk_size
|
|
|
|
|
|
|
|
def read(self, length=None):
|
|
|
|
return self.resp.read(length)
|
|
|
|
|
|
|
|
def __iter__(self):
|
|
|
|
return self
|
|
|
|
|
|
|
|
def next(self):
|
2015-03-04 14:31:00 +00:00
|
|
|
buf = self.read(self.chunk_size)
|
2015-02-12 12:34:07 +00:00
|
|
|
if not buf:
|
|
|
|
raise StopIteration()
|
|
|
|
return buf
|
|
|
|
|
|
|
|
def __next__(self):
|
|
|
|
return self.next()
|
|
|
|
|
2018-01-22 18:22:04 -08:00
|
|
|
def close(self):
|
|
|
|
self.resp.close()
|
|
|
|
|
2015-02-12 12:34:07 +00:00
|
|
|
|
2015-03-04 14:31:00 +00:00
|
|
|
class _RetryBody(_ObjectBody):
    """
    Wrapper for object body response which triggers a retry
    (from offset) if the connection is dropped after partially
    downloading the object.
    """
    def __init__(self, resp, connection, container, obj,
                 resp_chunk_size=None, query_string=None, response_dict=None,
                 headers=None):
        """
        Wrap the underlying response

        :param resp: the response to wrap
        :param connection: Connection class instance
        :param container: the name of the container the object is in
        :param obj: the name of object we are downloading
        :param resp_chunk_size: if defined, chunk size of data to read
        :param query_string: if set will be appended with '?' to generated path
        :param response_dict: an optional dictionary into which to place
                        the response - status, reason and headers
        :param headers: an optional dictionary with additional headers to
                        include in the request
        """
        super(_RetryBody, self).__init__(resp, resp_chunk_size)
        # Total size promised by the server; used to detect short reads.
        self.expected_length = int(self.resp.getheader('Content-Length'))
        self.conn = connection
        self.container = container
        self.obj = obj
        self.query_string = query_string
        self.response_dict = response_dict
        # Copy so retry-specific headers (Range/If-Match) added below do
        # not leak into the caller's dict.
        self.headers = dict(headers) if headers is not None else {}
        # Running offset; the Range header on retry resumes from here.
        self.bytes_read = 0

    def read(self, length=None):
        """Read from the response, re-requesting from the current offset
        if the connection drops mid-download and retries remain."""
        buf = None
        try:
            buf = self.resp.read(length)
            self.bytes_read += len(buf)
        except (socket.error, RequestException):
            # Swallow the error only while retries remain; the retry
            # branch below will re-fetch from the recorded offset.
            if self.conn.attempts > self.conn.retries:
                raise
        if (not buf and self.bytes_read < self.expected_length and
                self.conn.attempts <= self.conn.retries):
            # Short read: re-issue the GET with a Range header so the
            # download resumes where it stopped; If-Match guards against
            # the object changing between attempts.
            self.headers['Range'] = 'bytes=%d-' % self.bytes_read
            self.headers['If-Match'] = self.resp.getheader('ETag')
            hdrs, body = self.conn._retry(None, get_object,
                                          self.container, self.obj,
                                          resp_chunk_size=self.chunk_size,
                                          query_string=self.query_string,
                                          response_dict=self.response_dict,
                                          headers=self.headers,
                                          attempts=self.conn.attempts)
            expected_range = 'bytes %d-%d/%d' % (
                self.bytes_read,
                self.expected_length - 1,
                self.expected_length)
            if 'content-range' not in hdrs:
                # Server didn't respond with partial content; manually seek
                logger.warning('Received 200 while retrying %s/%s; seeking...',
                               self.container, self.obj)
                to_read = self.bytes_read
                while to_read > 0:
                    buf = body.resp.read(min(to_read, self.chunk_size))
                    to_read -= len(buf)
            elif hdrs['content-range'] != expected_range:
                # Partial content from the wrong offset -- bail out rather
                # than return corrupted data.
                msg = ('Expected range "%s" while retrying %s/%s '
                       'but got "%s"' % (expected_range, self.container,
                                         self.obj, hdrs['content-range']))
                raise ClientException(msg)
            # Swap in the new response and continue reading from it.
            self.resp = body.resp
            buf = self.read(length)
        return buf
|
|
|
|
|
|
|
|
|
2014-11-06 14:10:13 +00:00
|
|
|
class HTTPConnection(object):
    """httplib-style connection built on top of a requests.Session."""

    def __init__(self, url, proxy=None, cacert=None, insecure=False,
                 cert=None, cert_key=None, ssl_compression=False,
                 default_user_agent=None, timeout=None):
        """
        Make an HTTPConnection or HTTPSConnection

        :param url: url to connect to
        :param proxy: proxy to connect through, if any; None by default; str
                      of the format 'http://127.0.0.1:8888' to set one
        :param cacert: A CA bundle file to use in verifying a TLS server
                       certificate.
        :param insecure: Allow to access servers without checking SSL certs.
                         The server's certificate will not be verified.
        :param cert: Client certificate file to connect on SSL server
                     requiring SSL client certificate.
        :param cert_key: Client certificate private key file.
        :param ssl_compression: SSL compression should be disabled by default
                                and this setting is not usable as of now. The
                                parameter is kept for backward compatibility.
        :param default_user_agent: Set the User-Agent header on every request.
                                   If set to None (default), the user agent
                                   will be "python-swiftclient-<version>". This
                                   may be overridden on a per-request basis by
                                   explicitly setting the user-agent header on
                                   a call to request().
        :param timeout: socket read timeout value, passed directly to
                        the requests library.
        :raises ClientException: Unable to handle protocol scheme
        """
        self.url = url
        self.parsed_url = urlparse(url)
        self.host = self.parsed_url.netloc
        self.port = self.parsed_url.port
        # Keyword arguments forwarded to every requests call.
        self.requests_args = {}
        self.request_session = requests.Session()
        # Don't use requests's default headers
        self.request_session.headers = None
        self.resp = None
        if self.parsed_url.scheme not in ('http', 'https'):
            raise ClientException('Unsupported scheme "%s" in url "%s"'
                                  % (self.parsed_url.scheme, url))
        self.requests_args['verify'] = not insecure
        if cacert and not insecure:
            # verify requests parameter is used to pass the CA_BUNDLE file
            # see: http://docs.python-requests.org/en/latest/user/advanced/
            self.requests_args['verify'] = cacert
        if cert:
            # NOTE(cbrandily): cert requests parameter is used to pass client
            # cert path or a tuple with client certificate/key paths.
            if cert_key:
                self.requests_args['cert'] = cert, cert_key
            else:
                self.requests_args['cert'] = cert

        if proxy:
            proxy_parsed = urlparse(proxy)
            if not proxy_parsed.scheme:
                raise ClientException("Proxy's missing scheme")
            self.requests_args['proxies'] = {
                proxy_parsed.scheme: '%s://%s' % (
                    proxy_parsed.scheme, proxy_parsed.netloc
                )
            }
        # Stream responses so large object bodies are not buffered whole.
        self.requests_args['stream'] = True
        if default_user_agent is None:
            default_user_agent = \
                'python-swiftclient-%s' % swiftclient_version.version_string
        self.default_user_agent = default_user_agent
        if timeout:
            self.requests_args['timeout'] = timeout

    def _request(self, *arg, **kwarg):
        """Final wrapper before requests call, to be patched in tests"""
        return self.request_session.request(*arg, **kwarg)

    def request(self, method, full_path, data=None, headers=None, files=None):
        """Encode url and header, then call requests.request"""
        if headers is None:
            headers = {}
        else:
            headers = encode_meta_headers(headers)

        # set a default User-Agent header if it wasn't passed in
        if 'user-agent' not in headers:
            headers['user-agent'] = self.default_user_agent
        url = "%s://%s%s" % (
            self.parsed_url.scheme,
            self.parsed_url.netloc,
            full_path)
        self.resp = self._request(method, url, headers=headers, data=data,
                                  files=files, **self.requests_args)
        return self.resp

    def putrequest(self, full_path, data=None, headers=None, files=None):
        """
        Use python-requests files upload

        :param data: Use data generator for chunked-transfer
        :param files: Use files for default transfer
        """
        return self.request('PUT', full_path, data, headers, files)

    def getresponse(self):
        """Adapt requests response to httplib interface"""
        self.resp.status = self.resp.status_code
        old_getheader = self.resp.raw.getheader

        def _decode_header(string):
            # On py3 header text was decoded as iso-8859-1; round-trip it
            # to recover UTF-8 data. No-op on py2 or for missing headers.
            if string is None or six.PY2:
                return string
            return string.encode('iso-8859-1').decode('utf-8')

        def _encode_header(string):
            # Inverse of _decode_header, for values we send back down.
            if string is None or six.PY2:
                return string
            return string.encode('utf-8').decode('iso-8859-1')

        def getheaders():
            # httplib-style list of (name, value) pairs.
            return [(_decode_header(k), _decode_header(v))
                    for k, v in self.resp.headers.items()]

        def getheader(k, v=None):
            # httplib-style single-header lookup with default.
            return _decode_header(old_getheader(
                _encode_header(k.lower()), _encode_header(v)))

        def releasing_read(*args, **kwargs):
            chunk = self.resp.raw.read(*args, **kwargs)
            if not chunk:
                # NOTE(sigmavirus24): Release the connection back to the
                # urllib3's connection pool. This will reduce the number of
                # log messages seen in bug #1341777. This does not actually
                # close a socket. It will also prevent people from being
                # mislead as to the cause of a bug as in bug #1424732.
                self.resp.close()
            return chunk

        # Graft the httplib-compatible accessors onto the requests response.
        self.resp.getheaders = getheaders
        self.resp.getheader = getheader
        self.resp.read = releasing_read

        return self.resp

    def close(self):
        """Close the current response (if any) and the underlying session."""
        if self.resp:
            self.resp.close()
        self.request_session.close()
|
|
|
|
|
2014-01-24 17:40:16 +01:00
|
|
|
|
|
|
|
def http_connection(*arg, **kwarg):
    """:returns: tuple of (parsed url, connection object)"""
    connection = HTTPConnection(*arg, **kwarg)
    return connection.parsed_url, connection
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2014-02-13 23:33:01 -08:00
|
|
|
def get_auth_1_0(url, user, key, snet, **kwargs):
    """Authenticate against a v1.0 (TempAuth/swauth-style) auth endpoint.

    :param url: auth endpoint URL
    :param user: user name (sent as X-Auth-User)
    :param key: auth key (sent as X-Auth-Key)
    :param snet: if true, rewrite the storage host to the 'snet-' prefixed
                 Rackspace ServiceNet variant
    :returns: tuple of (storage_url, token)
    :raises ClientException: on a non-2xx response or a suspicious body
    """
    cacert = kwargs.get('cacert', None)
    insecure = kwargs.get('insecure', False)
    cert = kwargs.get('cert')
    cert_key = kwargs.get('cert_key')
    timeout = kwargs.get('timeout', None)
    parsed, conn = http_connection(url, cacert=cacert, insecure=insecure,
                                   cert=cert, cert_key=cert_key,
                                   timeout=timeout)
    method = 'GET'
    headers = {'X-Auth-User': user, 'X-Auth-Key': key}
    conn.request(method, parsed.path, '', headers)
    resp = conn.getresponse()
    body = resp.read()
    # Headers were already captured; release the connection promptly.
    resp.close()
    conn.close()
    http_log((url, method,), headers, resp, body)
    url = resp.getheader('x-storage-url')

    # There is a side-effect on current Rackspace 1.0 server where a
    # bad URL would get you that document page and a 200. We error out
    # if we don't have a x-storage-url header and if we get a body.
    if resp.status < 200 or resp.status >= 300 or (body and not url):
        raise ClientException.from_response(resp, 'Auth GET failed', body)
    if snet:
        parsed = list(urlparse(url))
        # Second item in the list is the netloc
        netloc = parsed[1]
        parsed[1] = 'snet-' + netloc
        url = urlunparse(parsed)

    # Prefer x-storage-token; fall back to x-auth-token.
    token = resp.getheader('x-storage-token', resp.getheader('x-auth-token'))
    return url, token
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2012-12-05 13:18:27 +09:00
|
|
|
def get_keystoneclient_2_0(auth_url, user, key, os_options, **kwargs):
    # this function is only here to preserve the historic 'public'
    # interface of this module
    kwargs['auth_version'] = '2.0'
    return get_auth_keystone(auth_url, user, key, os_options, **kwargs)
|
|
|
|
|
|
|
|
|
2014-09-22 11:52:44 +01:00
|
|
|
def get_auth_keystone(auth_url, user, key, os_options, **kwargs):
    """
    Authenticate against a keystone server.

    We are using the keystoneclient library for authentication.

    :param auth_url: keystone endpoint (versioned or versionless)
    :param user: user name
    :param key: password
    :param os_options: dict of OpenStack identity options (tenant/project
                       names and ids, domains, region, service/endpoint type)
    :returns: tuple of (storage endpoint URL, auth token)
    :raises ClientException: when keystoneclient is missing, authentication
                             fails, or no matching endpoint is found
    """

    insecure = kwargs.get('insecure', False)
    timeout = kwargs.get('timeout', None)
    auth_version = kwargs.get('auth_version', None)
    debug = logger.isEnabledFor(logging.DEBUG)

    # Add the version suffix in case of versionless Keystone endpoints. If
    # auth_version is also unset it is likely that it is v3
    if not VERSIONFUL_AUTH_PATH.match(
            urlparse(auth_url).path.rstrip('/').rsplit('/', 1)[-1]):
        # Normalize auth_url to end in a slash because urljoin
        auth_url = auth_url.rstrip('/') + '/'
        if auth_version and auth_version in AUTH_VERSIONS_V2:
            auth_url = urljoin(auth_url, "v2.0")
        else:
            auth_url = urljoin(auth_url, "v3")
            auth_version = '3'
        logger.debug("Versionless auth_url - using %s as endpoint" % auth_url)

    # Legacy default if not set
    if auth_version is None:
        auth_version = '2'

    # Pick whichever keystoneclient module was importable at module load.
    ksclient = None
    if auth_version in AUTH_VERSIONS_V3:
        if ksclient_v3 is not None:
            ksclient = ksclient_v3
    else:
        if ksclient_v2 is not None:
            ksclient = ksclient_v2

    if ksclient is None:
        raise ClientException('''
Auth versions 2.0 and 3 require python-keystoneclient, install it or use Auth
version 1.0 which requires ST_AUTH, ST_USER, and ST_KEY environment
variables to be set or overridden with -A, -U, or -K.''')

    try:
        _ksclient = ksclient.Client(
            username=user,
            password=key,
            token=os_options.get('auth_token'),
            tenant_name=os_options.get('tenant_name'),
            tenant_id=os_options.get('tenant_id'),
            user_id=os_options.get('user_id'),
            user_domain_name=os_options.get('user_domain_name'),
            user_domain_id=os_options.get('user_domain_id'),
            project_name=os_options.get('project_name'),
            project_id=os_options.get('project_id'),
            project_domain_name=os_options.get('project_domain_name'),
            project_domain_id=os_options.get('project_domain_id'),
            debug=debug,
            cacert=kwargs.get('cacert'),
            cert=kwargs.get('cert'),
            key=kwargs.get('cert_key'),
            auth_url=auth_url, insecure=insecure, timeout=timeout)
    except ksexceptions.Unauthorized:
        msg = 'Unauthorized. Check username, password and tenant name/id.'
        if auth_version in AUTH_VERSIONS_V3:
            msg = ('Unauthorized. Check username/id, password, '
                   'tenant name/id and user/tenant domain name/id.')
        raise ClientException(msg)
    except ksexceptions.AuthorizationFailure as err:
        raise ClientException('Authorization Failure. %s' % err)
    service_type = os_options.get('service_type') or 'object-store'
    endpoint_type = os_options.get('endpoint_type') or 'publicURL'
    try:
        # Restrict the catalog lookup to a region only when one was given.
        filter_kwargs = {}
        if os_options.get('region_name'):
            filter_kwargs['attr'] = 'region'
            filter_kwargs['filter_value'] = os_options['region_name']
        endpoint = _ksclient.service_catalog.url_for(
            service_type=service_type,
            endpoint_type=endpoint_type,
            **filter_kwargs)
    except ksexceptions.EndpointNotFound:
        raise ClientException('Endpoint for %s not found - '
                              'have you specified a region?' % service_type)
    return endpoint, _ksclient.auth_token
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2012-07-04 21:46:02 +02:00
|
|
|
def get_auth(auth_url, user, key, **kwargs):
    """
    Get authentication/authorization credentials.

    :kwarg auth_version: the api version of the supplied auth params
    :kwarg os_options: a dict, the openstack identity service options

    :returns: a tuple, (storage_url, token)

    N.B. if the optional os_options parameter includes a non-empty
    'object_storage_url' key it will override the default storage url returned
    by the auth service.

    The snet parameter is used for Rackspace's ServiceNet internal network
    implementation. In this function, it simply adds *snet-* to the beginning
    of the host name for the returned storage URL. With Rackspace Cloud Files,
    use of this network path causes no bandwidth charges but requires the
    client to be running on Rackspace's ServiceNet network.
    """
    session = kwargs.get('session', None)
    auth_version = kwargs.get('auth_version', '1')
    os_options = kwargs.get('os_options', {})

    cacert = kwargs.get('cacert', None)
    insecure = kwargs.get('insecure', False)
    cert = kwargs.get('cert')
    cert_key = kwargs.get('cert_key')
    timeout = kwargs.get('timeout', None)

    if session:
        service_type = os_options.get('service_type', 'object-store')
        interface = os_options.get('endpoint_type', 'public')
        region_name = os_options.get('region_name')
        storage_url = session.get_endpoint(service_type=service_type,
                                           interface=interface,
                                           region_name=region_name)
        token = session.get_token()
    elif auth_version in AUTH_VERSIONS_V1:
        storage_url, token = get_auth_1_0(auth_url,
                                          user,
                                          key,
                                          kwargs.get('snet'),
                                          cacert=cacert,
                                          insecure=insecure,
                                          cert=cert,
                                          cert_key=cert_key,
                                          timeout=timeout)
    elif auth_version in AUTH_VERSIONS_V2 + AUTH_VERSIONS_V3:
        # We are handling a special use case here where the user argument
        # specifies both the user name and tenant name in the form
        # tenant:user. Split only on the first colon so user names that
        # themselves contain a colon do not raise a ValueError.
        if user and not kwargs.get('tenant_name') and ':' in user:
            os_options['tenant_name'], user = user.split(':', 1)

        # We are allowing to have a tenant_name argument in get_auth
        # directly without having os_options
        if kwargs.get('tenant_name'):
            os_options['tenant_name'] = kwargs['tenant_name']

        if not (os_options.get('tenant_name') or os_options.get('tenant_id')
                or os_options.get('project_name')
                or os_options.get('project_id')):
            if auth_version in AUTH_VERSIONS_V2:
                raise ClientException('No tenant specified')
            raise ClientException('No project name or project id specified.')

        storage_url, token = get_auth_keystone(auth_url, user,
                                               key, os_options,
                                               cacert=cacert,
                                               insecure=insecure,
                                               cert=cert,
                                               cert_key=cert_key,
                                               timeout=timeout,
                                               auth_version=auth_version)
    else:
        raise ClientException('Unknown auth_version %s specified and no '
                              'session found.' % auth_version)

    # Override storage url, if necessary
    if os_options.get('object_storage_url'):
        return os_options['object_storage_url'], token
    else:
        return storage_url, token
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2015-05-18 08:05:02 -07:00
|
|
|
def resp_header_dict(resp):
    """
    Return the response's headers as a dict with parsed, lower-cased names
    and parsed values.
    """
    return {
        parse_header_string(header).lower(): parse_header_string(value)
        for header, value in resp.getheaders()}
|
|
|
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def store_response(resp, response_dict):
    """
    store information about an operation into a dict

    :param resp: an http response object containing the response
                 headers
    :param response_dict: a dict into which are placed the
                          status, reason and a dict of lower-cased headers
    """
    # A None dict means the caller did not ask for the response details.
    if response_dict is None:
        return
    response_dict['status'] = resp.status
    response_dict['reason'] = resp.reason
    response_dict['headers'] = resp_header_dict(resp)
2013-06-12 12:02:02 +00:00
|
|
|
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def get_account(url, token, marker=None, limit=None, prefix=None,
                end_marker=None, http_conn=None, full_listing=False,
                service_token=None, headers=None, delimiter=None):
    """
    Get a listing of containers for the account.

    :param url: storage URL
    :param token: auth token
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param end_marker: end_marker query
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :param delimiter: delimiter query
    :returns: a tuple of (response headers, a list of containers) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    # Merge auth/service tokens with any caller-supplied headers.
    req_headers = {'X-Auth-Token': token, 'Accept-Encoding': 'gzip'}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)

    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        # Page through the listing, advancing the marker, until an empty
        # page comes back.
        rv = get_account(url, token, marker, limit, prefix, end_marker,
                         http_conn, headers=req_headers, delimiter=delimiter)
        page = rv[1]
        while page:
            marker = page[-1]['name']
            page = get_account(url, token, marker, limit, prefix,
                               end_marker, http_conn, headers=req_headers,
                               delimiter=delimiter)[1]
            if page:
                rv[1].extend(page)
        return rv
    parsed, conn = http_conn
    query_parts = ['format=json']
    if marker:
        query_parts.append('marker=%s' % quote(marker))
    if limit:
        query_parts.append('limit=%d' % limit)
    if prefix:
        query_parts.append('prefix=%s' % quote(prefix))
    if delimiter:
        query_parts.append('delimiter=%s' % quote(delimiter))
    if end_marker:
        query_parts.append('end_marker=%s' % quote(end_marker))
    qs = '&'.join(query_parts)
    full_path = '%s?%s' % (parsed.path, qs)
    method = 'GET'
    conn.request(method, full_path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(("%s?%s" % (url, qs), method,), {'headers': req_headers},
             resp, body)

    resp_headers = resp_header_dict(resp)
    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Account GET failed', body)
    if resp.status == 204:
        # 204 carries no body; an empty account listing.
        return resp_headers, []
    return resp_headers, parse_api_response(resp_headers, body)
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2016-09-19 23:18:18 +08:00
|
|
|
def head_account(url, token, http_conn=None, headers=None,
                 service_token=None):
    """
    Get account stats.

    :param url: storage URL
    :param token: auth token
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param headers: additional headers to include in the request
    :param service_token: service auth token
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    method = "HEAD"
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)

    conn.request(method, parsed.path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log((url, method,), {'headers': req_headers}, resp, body)
    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Account HEAD failed', body)
    return resp_header_dict(resp)
|
|
|
|
|
|
|
|
2015-05-13 09:48:41 +00:00
|
|
|
def post_account(url, token, headers, http_conn=None, response_dict=None,
                 service_token=None, query_string=None, data=None):
    """
    Update an account's metadata.

    :param url: storage URL
    :param token: auth token
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :param data: an optional message body for the request
    :raises ClientException: HTTP POST request failed
    :returns: resp_headers, body
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    method = 'POST'
    path = parsed.path
    if query_string:
        path = '%s?%s' % (path, query_string)
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    conn.request(method, path, data, req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log((url, method,), {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Account POST failed', body)
    # NOTE: header names are only lower-cased here; unlike the GET/HEAD
    # helpers they are not run through parse_header_string.
    resp_headers = {name.lower(): value
                    for name, value in resp.getheaders()}
    return resp_headers, body
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_container(url, token, container, marker=None, limit=None,
                  prefix=None, delimiter=None, end_marker=None,
                  path=None, http_conn=None,
                  full_listing=False, service_token=None, headers=None,
                  query_string=None):
    """
    Get a listing of objects for the container.

    :param url: storage URL
    :param token: auth token
    :param container: container name to get a listing for
    :param marker: marker query
    :param limit: limit query
    :param prefix: prefix query
    :param delimiter: string to delimit the queries on
    :param end_marker: marker query
    :param path: path query (equivalent: "delimiter=/" and "prefix=path/")
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param full_listing: if True, return a full listing, else returns a max
                         of 10000 listings
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :param query_string: if set will be appended with '?' to generated path
    :returns: a tuple of (response headers, a list of objects) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    if not http_conn:
        http_conn = http_connection(url)
    if full_listing:
        # Propagate query_string into the per-page requests; previously it
        # was silently dropped when full_listing was requested.
        rv = get_container(url, token, container, marker, limit, prefix,
                           delimiter, end_marker, path, http_conn,
                           service_token=service_token, headers=headers,
                           query_string=query_string)
        listing = rv[1]
        while listing:
            if not delimiter:
                marker = listing[-1]['name']
            else:
                # With a delimiter, the last entry may be a 'subdir' stub.
                marker = listing[-1].get('name', listing[-1].get('subdir'))
            listing = get_container(url, token, container, marker, limit,
                                    prefix, delimiter, end_marker, path,
                                    http_conn, service_token=service_token,
                                    headers=headers,
                                    query_string=query_string)[1]
            if listing:
                rv[1].extend(listing)
        return rv
    parsed, conn = http_conn
    cont_path = '%s/%s' % (parsed.path, quote(container))
    qs = 'format=json'
    if marker:
        qs += '&marker=%s' % quote(marker)
    if limit:
        qs += '&limit=%d' % limit
    if prefix:
        qs += '&prefix=%s' % quote(prefix)
    if delimiter:
        qs += '&delimiter=%s' % quote(delimiter)
    if end_marker:
        qs += '&end_marker=%s' % quote(end_marker)
    if path:
        qs += '&path=%s' % quote(path)
    if query_string:
        qs += '&%s' % query_string.lstrip('?')
    req_headers = {'X-Auth-Token': token, 'Accept-Encoding': 'gzip'}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    method = 'GET'
    conn.request(method, '%s?%s' % (cont_path, qs), '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%(url)s%(cont_path)s?%(qs)s' %
              {'url': url.replace(parsed.path, ''),
               'cont_path': cont_path,
               'qs': qs}, method,),
             {'headers': req_headers}, resp, body)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Container GET failed', body)
    resp_headers = resp_header_dict(resp)
    if resp.status == 204:
        # 204 carries no body; an empty container listing.
        return resp_headers, []
    return resp_headers, parse_api_response(resp_headers, body)
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2015-05-13 09:48:41 +00:00
|
|
|
def head_container(url, token, container, http_conn=None, headers=None,
                   service_token=None):
    """
    Get container stats.

    :param url: storage URL
    :param token: auth token
    :param container: container name to get stats for
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param headers: additional headers to include in the request
    :param service_token: service auth token
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'HEAD'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(
            resp, 'Container HEAD failed', body)
    return resp_header_dict(resp)
|
|
|
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def put_container(url, token, container, headers=None, http_conn=None,
                  response_dict=None, service_token=None, query_string=None):
    """
    Create a container

    :param url: storage URL
    :param token: auth token
    :param container: container name to create
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :raises ClientException: HTTP PUT request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'PUT'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    # Default Content-Length to 0 unless the caller supplied one.
    if not any(k.lower() == 'content-length' for k in req_headers):
        req_headers['Content-Length'] = '0'
    if query_string:
        path += '?' + query_string.lstrip('?')
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()

    store_response(resp, response_dict)

    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)
    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Container PUT failed', body)
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def post_container(url, token, container, headers, http_conn=None,
                   response_dict=None, service_token=None):
    """
    Update a container's metadata.

    :param url: storage URL
    :param token: auth token
    :param container: container name to update
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP POST request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    method = 'POST'
    req_headers = {'X-Auth-Token': token}
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if headers:
        req_headers.update(headers)
    # Check the merged request headers (not the raw ``headers`` argument,
    # which may be None) so a caller-supplied Content-Length is honored;
    # this matches put_container and avoids a TypeError on headers=None.
    if 'content-length' not in (k.lower() for k in req_headers):
        req_headers['Content-Length'] = '0'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(
            resp, 'Container POST failed', body)
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def delete_container(url, token, container, http_conn=None,
                     response_dict=None, service_token=None,
                     query_string=None, headers=None):
    """
    Delete a container

    :param url: storage URL
    :param token: auth token
    :param container: container name to delete
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :param query_string: if set will be appended with '?' to generated path
    :param headers: additional headers to include in the request
    :raises ClientException: HTTP DELETE request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    path = '%s/%s' % (parsed.path, quote(container))
    # Copy so the caller's dict is never mutated.
    req_headers = dict(headers) if headers else {}

    req_headers['X-Auth-Token'] = token
    if service_token:
        req_headers['X-Service-Token'] = service_token
    if query_string:
        path += '?' + query_string.lstrip('?')
    method = 'DELETE'
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)

    store_response(resp, response_dict)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(
            resp, 'Container DELETE failed', body)
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_object(url, token, container, name, http_conn=None,
               resp_chunk_size=None, query_string=None,
               response_dict=None, headers=None, service_token=None):
    """
    Get an object

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: object name to get
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param resp_chunk_size: if defined, chunk size of data to read. NOTE: If
                            you specify a resp_chunk_size you must fully read
                            the object's contents before making another
                            request.
    :param query_string: if set will be appended with '?' to generated path
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param headers: an optional dictionary with additional headers to include
                    in the request
    :param service_token: service auth token
    :returns: a tuple of (response headers, the object's contents) The response
              headers will be a dict and all header names will be lowercase.
    :raises ClientException: HTTP GET request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
    if query_string:
        path += '?' + query_string
    method = 'GET'
    # Copy so the caller's dict is never mutated.
    req_headers = dict(headers) if headers else {}
    req_headers['X-Auth-Token'] = token
    if service_token:
        req_headers['X-Service-Token'] = service_token
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()

    parsed_response = {}
    store_response(resp, parsed_response)
    if response_dict is not None:
        response_dict.update(parsed_response)

    if not 200 <= resp.status < 300:
        body = resp.read()
        http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
                 {'headers': req_headers}, resp, body)
        raise ClientException.from_response(resp, 'Object GET failed', body)
    if resp_chunk_size:
        # Stream the body lazily; caller must consume it fully before
        # reusing the connection.
        object_body = _ObjectBody(resp, resp_chunk_size)
    else:
        object_body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, None)

    return parsed_response['headers'], object_body
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2015-05-13 09:48:41 +00:00
|
|
|
def head_object(url, token, container, name, http_conn=None,
                service_token=None, headers=None, query_string=None):
    """
    Get object info

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: object name to get info for
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param service_token: service auth token
    :param headers: additional headers to include in the request
    :param query_string: if set will be appended with '?' to generated path
    :returns: a dict containing the response's headers (all header names will
              be lowercase)
    :raises ClientException: HTTP HEAD request failed
    """
    parsed, conn = http_conn if http_conn else http_connection(url)
    path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
    if query_string:
        path += '?' + query_string
    # Copy so the caller's dict is never mutated.
    req_headers = dict(headers) if headers else {}
    req_headers['X-Auth-Token'] = token
    method = 'HEAD'
    if service_token:
        req_headers['X-Service-Token'] = service_token
    conn.request(method, path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), method,),
             {'headers': req_headers}, resp, body)
    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Object HEAD failed', body)
    return resp_header_dict(resp)
|
|
|
|
|
|
|
|
|
|
|
def put_object(url, token=None, container=None, name=None, contents=None,
               content_length=None, etag=None, chunk_size=None,
               content_type=None, headers=None, http_conn=None, proxy=None,
               query_string=None, response_dict=None, service_token=None):
    """
    Put an object

    :param url: storage URL
    :param token: auth token; if None, no token will be sent
    :param container: container name that the object is in; if None, the
                      container name is expected to be part of the url
    :param name: object name to put; if None, the object name is expected to be
                 part of the url
    :param contents: a string, a file-like object or an iterable
                     to read object data from;
                     if None, a zero-byte put will be done
    :param content_length: value to send as content-length header; also limits
                           the amount read from contents; if None, it will be
                           computed via the contents or chunked transfer
                           encoding will be used
    :param etag: etag of contents; if None, no etag will be sent
    :param chunk_size: chunk size of data to write; it defaults to 65536;
                       used only if the contents object has a 'read'
                       method, e.g. file-like objects, ignored otherwise
    :param content_type: value to send as content-type header, overriding any
                         value included in the headers param; if None and no
                         value is found in the headers param, an empty string
                         value will be sent
    :param headers: additional headers to include in the request, if any
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param proxy: proxy to connect through, if any; None by default; str of the
                  format 'http://127.0.0.1:8888' to set one
    :param query_string: if set will be appended with '?' to generated path
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :returns: etag
    :raises ClientException: HTTP PUT request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url, proxy=proxy)
    # Build the request path; container and/or name may already be baked
    # into the url, in which case they are passed as None.
    path = parsed.path
    if container:
        path = '%s/%s' % (path.rstrip('/'), quote(container))
    if name:
        path = '%s/%s' % (path.rstrip('/'), quote(name))
    if query_string:
        path += '?' + query_string
    # Copy the caller's headers so we never mutate their dict.
    if headers:
        headers = dict(headers)
    else:
        headers = {}
    if token:
        headers['X-Auth-Token'] = token
    if service_token:
        headers['X-Service-Token'] = service_token
    if etag:
        headers['ETag'] = etag.strip('"')
    if content_length is not None:
        headers['Content-Length'] = str(content_length)
    else:
        # No explicit content_length argument: fall back to a
        # caller-supplied Content-Length header, if present.
        for n, v in headers.items():
            if n.lower() == 'content-length':
                content_length = int(v)
    if content_type is not None:
        headers['Content-Type'] = content_type
    elif 'Content-Type' not in headers:
        if StrictVersion(requests.__version__) < StrictVersion('2.4.0'):
            # python-requests sets application/x-www-form-urlencoded otherwise
            # if using python3.
            headers['Content-Type'] = ''
    if not contents:
        headers['Content-Length'] = '0'

    # Choose how to send the body:
    #  1. already-wrapped iterables go straight through;
    #  2. file-like objects ('read') are wrapped so the transfer is chunked
    #     (unknown length) or bounded (known length);
    #  3. anything else is handed to requests directly, wrapping bare
    #     iterables so requests streams them.
    if isinstance(contents, (ReadableToIterable, LengthWrapper)):
        conn.putrequest(path, headers=headers, data=contents)
    elif hasattr(contents, 'read'):
        if chunk_size is None:
            chunk_size = 65536

        if content_length is None:
            data = ReadableToIterable(contents, chunk_size, md5=False)
        else:
            data = LengthWrapper(contents, content_length, md5=False)

        conn.putrequest(path, headers=headers, data=data)
    else:
        if chunk_size is not None:
            # chunk_size only applies to objects with a 'read' method.
            warn_msg = ('%s object has no "read" method, ignoring chunk_size'
                        % type(contents).__name__)
            warnings.warn(warn_msg, stacklevel=2)
        # Match requests's is_stream test
        if hasattr(contents, '__iter__') and not isinstance(contents, (
                six.text_type, six.binary_type, list, tuple, dict)):
            contents = iter_wrapper(contents)
        conn.request('PUT', path, contents, headers)

    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'PUT',),
             {'headers': headers}, resp, body)

    store_response(resp, response_dict)

    if resp.status < 200 or resp.status >= 300:
        raise ClientException.from_response(resp, 'Object PUT failed', body)

    etag = resp.getheader('etag', '').strip('"')
    return etag
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def post_object(url, token, container, name, headers, http_conn=None,
                response_dict=None, service_token=None):
    """
    Update object metadata

    :param url: storage URL
    :param token: auth token
    :param container: container name that the object is in
    :param name: name of the object to update
    :param headers: additional headers to include in the request
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP POST request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)
    path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))

    # Seed the request headers with auth info first so caller-supplied
    # headers are able to override it.
    post_headers = {'X-Auth-Token': token}
    if service_token:
        post_headers['X-Service-Token'] = service_token
    if headers:
        post_headers.update(headers)

    conn.request('POST', path, '', post_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'POST',),
             {'headers': post_headers}, resp, body)

    store_response(resp, response_dict)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Object POST failed', body)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2016-02-15 12:14:17 +01:00
|
|
|
def copy_object(url, token, container, name, destination=None,
                headers=None, fresh_metadata=None, http_conn=None,
                response_dict=None, service_token=None):
    """
    Copy object

    :param url: storage URL
    :param token: auth token; if None, no token will be sent
    :param container: container name that the source object is in
    :param name: source object name
    :param destination: The container and object name of the destination object
                        in the form of /container/object; if None, the copy
                        will use the source as the destination.
    :param headers: additional headers to include in the request
    :param fresh_metadata: Enables object creation that omits existing user
                           metadata, default None
    :param http_conn: HTTP connection object (If None, it will create the
                      conn object)
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP COPY request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url)

    quoted_container = quote(container)
    quoted_name = quote(name)
    path = '%s/%s/%s' % (parsed.path.rstrip('/'), quoted_container,
                         quoted_name)

    # Work on a copy so the caller's headers dict is never mutated.
    copy_headers = dict(headers) if headers else {}

    # When no explicit destination is given, copy the object onto itself.
    if destination is not None:
        copy_headers['Destination'] = quote(destination)
    elif quoted_container and quoted_name:
        copy_headers['Destination'] = '/%s/%s' % (quoted_container,
                                                  quoted_name)

    if token is not None:
        copy_headers['X-Auth-Token'] = token
    if service_token is not None:
        copy_headers['X-Service-Token'] = service_token

    if fresh_metadata is not None:
        # Drop any caller-supplied spelling of the header before
        # setting the canonical one.
        stale = [hdr for hdr in copy_headers
                 if hdr.lower() == 'x-fresh-metadata']
        for fresh_hdr in stale:
            copy_headers.pop(fresh_hdr)
        copy_headers['X-Fresh-Metadata'] = (
            'true' if fresh_metadata else 'false')

    conn.request('COPY', path, '', copy_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'COPY',),
             {'headers': copy_headers}, resp, body)

    store_response(resp, response_dict)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Object COPY failed', body)
|
|
|
|
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def delete_object(url, token=None, container=None, name=None, http_conn=None,
                  headers=None, proxy=None, query_string=None,
                  response_dict=None, service_token=None):
    """
    Delete object

    :param url: storage URL
    :param token: auth token; if None, no token will be sent
    :param container: container name that the object is in; if None, the
                      container name is expected to be part of the url
    :param name: object name to delete; if None, the object name is expected
                 to be part of the url
    :param http_conn: a tuple of (parsed url, HTTPConnection object),
                      (If None, it will create the conn object)
    :param headers: additional headers to include in the request
    :param proxy: proxy to connect through, if any; None by default; str of
                  the format 'http://127.0.0.1:8888' to set one
    :param query_string: if set will be appended with '?' to generated path
    :param response_dict: an optional dictionary into which to place
                          the response - status, reason and headers
    :param service_token: service auth token
    :raises ClientException: HTTP DELETE request failed
    """
    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = http_connection(url, proxy=proxy)

    # Extend the base path with whichever of container/name were supplied;
    # either may already be embedded in the url.
    path = parsed.path
    for component in (container, name):
        if component:
            path = '%s/%s' % (path.rstrip('/'), quote(component))
    if query_string:
        path = '%s?%s' % (path, query_string)

    # Copy the caller's headers so we never mutate their dict.
    delete_headers = dict(headers) if headers else {}
    if token:
        delete_headers['X-Auth-Token'] = token
    if service_token:
        delete_headers['X-Service-Token'] = service_token

    conn.request('DELETE', path, '', delete_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log(('%s%s' % (url.replace(parsed.path, ''), path), 'DELETE',),
             {'headers': delete_headers}, resp, body)

    store_response(resp, response_dict)

    if not 200 <= resp.status < 300:
        raise ClientException.from_response(resp, 'Object DELETE failed',
                                            body)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
|
2014-01-13 22:39:28 +01:00
|
|
|
def get_capabilities(http_conn):
    """
    Get cluster capability infos.

    :param http_conn: a tuple of (parsed url, HTTPConnection object)
    :returns: a dict containing the cluster capabilities
    :raises ClientException: HTTP Capabilities GET failed
    """
    parsed, conn = http_conn
    req_headers = {'Accept-Encoding': 'gzip'}
    conn.request('GET', parsed.path, '', req_headers)
    resp = conn.getresponse()
    body = resp.read()
    http_log((parsed.geturl(), 'GET',), {'headers': req_headers}, resp, body)
    if not 200 <= resp.status < 300:
        raise ClientException.from_response(
            resp, 'Capabilities GET failed', body)
    return parse_api_response(resp_header_dict(resp), body)
|
2014-01-13 22:39:28 +01:00
|
|
|
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
class Connection(object):
|
2015-05-13 09:48:41 +00:00
|
|
|
|
|
|
|
"""
|
|
|
|
Convenience class to make requests that will also retry the request
|
|
|
|
|
|
|
|
Requests will have an X-Auth-Token header whose value is either
|
|
|
|
the preauthtoken or a token obtained from the auth service using
|
|
|
|
the user credentials provided as args to the constructor. If
|
|
|
|
os_options includes a service_username then requests will also have
|
|
|
|
an X-Service-Token header whose value is a token obtained from the
|
|
|
|
auth service using the service credentials. In this case the request
|
|
|
|
url will be set to the storage_url obtained from the auth service
|
|
|
|
for the service user, unless this is overridden by a preauthurl.
|
|
|
|
"""
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2012-10-23 09:18:29 +01:00
|
|
|
    def __init__(self, authurl=None, user=None, key=None, retries=5,
                 preauthurl=None, preauthtoken=None, snet=False,
                 starting_backoff=1, max_backoff=64, tenant_name=None,
                 os_options=None, auth_version="1", cacert=None,
                 insecure=False, cert=None, cert_key=None,
                 ssl_compression=True, retry_on_ratelimit=False,
                 timeout=None, session=None, force_auth_retry=False):
        """
        :param authurl: authentication URL
        :param user: user name to authenticate as
        :param key: key/password to authenticate with
        :param retries: Number of times to retry the request before failing
        :param preauthurl: storage URL (if you have already authenticated)
        :param preauthtoken: authentication token (if you have already
                             authenticated) note authurl/user/key/tenant_name
                             are not required when specifying preauthtoken
        :param snet: use SERVICENET internal network default is False
        :param starting_backoff: initial delay between retries (seconds)
        :param max_backoff: maximum delay between retries (seconds)
        :param auth_version: OpenStack auth version, default is 1.0
        :param tenant_name: The tenant/account name, required when connecting
                            to an auth 2.0 system.
        :param os_options: The OpenStack options which can have tenant_id,
                           auth_token, service_type, endpoint_type,
                           tenant_name, object_storage_url, region_name,
                           service_username, service_project_name, service_key
        :param insecure: Allow to access servers without checking SSL certs.
                         The server's certificate will not be verified.
        :param cert: Client certificate file to connect on SSL server
                     requiring SSL client certificate.
        :param cert_key: Client certificate private key file.
        :param ssl_compression: Whether to enable compression at the SSL layer.
                                If set to 'False' and the pyOpenSSL library is
                                present an attempt to disable SSL compression
                                will be made. This may provide a performance
                                increase for https upload/download operations.
        :param retry_on_ratelimit: by default, a ratelimited connection will
                                   raise an exception to the caller. Setting
                                   this parameter to True will cause a retry
                                   after a backoff.
        :param timeout: The connect timeout for the HTTP connection.
        :param session: A keystoneauth session object.
        :param force_auth_retry: reset auth info even if client got unexpected
                                 error except 401 Unauthorized.
        """
        self.session = session
        self.authurl = authurl
        self.user = user
        self.key = key
        self.retries = retries
        # Lazily-created (parsed url, HTTPConnection) tuple; see
        # http_connection()/close().
        self.http_conn = None
        # Count of request attempts made by the retry machinery.
        self.attempts = 0
        self.snet = snet
        self.starting_backoff = starting_backoff
        self.max_backoff = max_backoff
        self.auth_version = auth_version
        # Copy os_options so later mutation never affects the caller's dict.
        self.os_options = dict(os_options or {})
        if tenant_name:
            self.os_options['tenant_name'] = tenant_name
        if preauthurl:
            self.os_options['object_storage_url'] = preauthurl
        # Pre-authenticated values take priority over os_options entries.
        self.url = preauthurl or self.os_options.get('object_storage_url')
        self.token = preauthtoken or self.os_options.get('auth_token')
        # A service_username in os_options switches on the X-Service-Token
        # flow (see class docstring).
        if self.os_options.get('service_username', None):
            self.service_auth = True
        else:
            self.service_auth = False
        self.service_token = None
        self.cacert = cacert
        self.insecure = insecure
        self.cert = cert
        self.cert_key = cert_key
        self.ssl_compression = ssl_compression
        # Timestamp of the last successful auth; 0 means "never authed".
        self.auth_end_time = 0
        self.retry_on_ratelimit = retry_on_ratelimit
        self.timeout = timeout
        self.force_auth_retry = force_auth_retry
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2013-10-17 11:12:06 +08:00
|
|
|
def close(self):
|
2014-11-06 14:10:13 +00:00
|
|
|
if (self.http_conn and isinstance(self.http_conn, tuple)
|
|
|
|
and len(self.http_conn) > 1):
|
2013-10-17 11:12:06 +08:00
|
|
|
conn = self.http_conn[1]
|
2018-06-22 16:49:03 -07:00
|
|
|
conn.close()
|
|
|
|
self.http_conn = None
|
2013-10-17 11:12:06 +08:00
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def get_auth(self):
|
2015-07-24 10:57:29 -07:00
|
|
|
self.url, self.token = get_auth(self.authurl, self.user, self.key,
|
2016-03-29 19:10:42 +00:00
|
|
|
session=self.session, snet=self.snet,
|
2015-07-24 10:57:29 -07:00
|
|
|
auth_version=self.auth_version,
|
|
|
|
os_options=self.os_options,
|
|
|
|
cacert=self.cacert,
|
|
|
|
insecure=self.insecure,
|
2016-04-10 23:18:17 +02:00
|
|
|
cert=self.cert,
|
|
|
|
cert_key=self.cert_key,
|
2015-07-24 10:57:29 -07:00
|
|
|
timeout=self.timeout)
|
|
|
|
return self.url, self.token
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2015-05-13 09:48:41 +00:00
|
|
|
def get_service_auth(self):
    """Authenticate using the configured *service* credentials.

    Builds a reduced os_options dict from the ``service_*`` entries and
    returns the ``(url, token)`` pair from :func:`get_auth`.
    """
    opts = self.os_options
    # Only these three options are meaningful for the service account.
    service_options = {
        'tenant_name': opts.get('service_project_name', None),
        'region_name': opts.get('region_name', None),
        'object_storage_url': opts.get('object_storage_url', None),
    }
    return get_auth(self.authurl,
                    opts.get('service_username', None),
                    opts.get('service_key', None),
                    session=self.session,
                    snet=self.snet,
                    auth_version=self.auth_version,
                    os_options=service_options,
                    cacert=self.cacert,
                    insecure=self.insecure,
                    timeout=self.timeout)
|
|
|
|
|
2014-10-17 13:38:01 -06:00
|
|
|
def http_connection(self, url=None):
    """Create a new HTTP(S) connection for *url* (defaults to self.url)."""
    target_url = url or self.url
    return http_connection(target_url,
                           cacert=self.cacert,
                           insecure=self.insecure,
                           cert=self.cert,
                           cert_key=self.cert_key,
                           ssl_compression=self.ssl_compression,
                           timeout=self.timeout)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def _add_response_dict(self, target_dict, kwargs):
    """Fold the per-attempt response dict from *kwargs* into *target_dict*.

    Each attempt's dict is appended to ``target_dict['response_dicts']``
    and its keys are merged into *target_dict* itself, so the caller sees
    the latest attempt's values at the top level plus the full history.
    """
    if target_dict is None or 'response_dict' not in kwargs:
        return
    response_dict = kwargs['response_dict']
    target_dict.setdefault('response_dicts', []).append(response_dict)
    target_dict.update(response_dict)
|
|
|
|
|
2012-05-08 11:17:04 +01:00
|
|
|
def _retry(self, reset_func, func, *args, **kwargs):
    """Call *func* with retry, exponential back-off and re-auth handling.

    :param reset_func: optional callable invoked before each retry so the
        caller can rewind request state (e.g. seek an upload stream back
        to its starting position)
    :param func: a module-level API function taking ``(url, token, ...)``
    :returns: whatever *func* returns on success
    :raises: the last error when it is not retryable or the retry budget
        is exhausted
    """
    retried_auth = False
    backoff = self.starting_backoff
    # Pop caller-only kwargs so they are not forwarded to *func*.
    caller_response_dict = kwargs.pop('response_dict', None)
    self.attempts = kwargs.pop('attempts', 0)
    # `or retried_auth` grants exactly one extra attempt after a 401
    # triggered re-authentication, even if retries are exhausted.
    while self.attempts <= self.retries or retried_auth:
        self.attempts += 1
        try:
            if not self.url or not self.token:
                self.url, self.token = self.get_auth()
                # Drop any connection bound to stale credentials.
                self.close()
            if self.service_auth and not self.service_token:
                self.url, self.service_token = self.get_service_auth()
                self.close()
            self.auth_end_time = time()
            if not self.http_conn:
                self.http_conn = self.http_connection()
            kwargs['http_conn'] = self.http_conn
            # Give *func* a fresh dict per attempt; merged back below.
            if caller_response_dict is not None:
                kwargs['response_dict'] = {}
            rv = func(self.url, self.token, *args,
                      service_token=self.service_token, **kwargs)
            self._add_response_dict(caller_response_dict, kwargs)
            return rv
        except SSLError:
            # TLS/certificate problems will not fix themselves; fail fast.
            raise
        except (socket.error, RequestException):
            self._add_response_dict(caller_response_dict, kwargs)
            if self.attempts > self.retries:
                raise
            # The connection may be in an undefined state; rebuild it.
            self.http_conn = None
        except ClientException as err:
            self._add_response_dict(caller_response_dict, kwargs)
            if err.http_status == 401:
                # Token rejected: invalidate credentials and retry at
                # most once with a fresh token.
                if self.session:
                    should_retry = self.session.invalidate()
                else:
                    # Without a proper session, just check for auth creds
                    should_retry = all((self.authurl, self.user, self.key))

                self.url = self.token = self.service_token = None

                if retried_auth or not should_retry:
                    raise
                retried_auth = True
            elif self.attempts > self.retries or err.http_status is None:
                raise
            elif err.http_status == 408:
                # Request timeout: the connection is stale; rebuild it.
                self.http_conn = None
            elif 500 <= err.http_status <= 599:
                # Server-side error: worth another attempt as-is.
                pass
            elif self.retry_on_ratelimit and err.http_status == 498:
                # Rate-limited and the caller opted into retrying.
                pass
            else:
                raise

        if self.force_auth_retry:
            # Hot-fix mode: drop cached credentials on ANY retryable
            # error so the next attempt re-authenticates, covering
            # intermediaries (proxy/LB) that surface an upstream 401 as
            # a socket error instead of an HTTP response.
            self.url = self.token = self.service_token = None

        sleep(backoff)
        backoff = min(backoff * 2, self.max_backoff)
        if reset_func:
            reset_func(func, *args, **kwargs)
|
|
|
|
|
2016-09-19 23:18:18 +08:00
|
|
|
def head_account(self, headers=None):
    """Wrapper for :func:`head_account` with retry handling."""
    return self._retry(None, head_account, headers=headers)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
def get_account(self, marker=None, limit=None, prefix=None,
                end_marker=None, full_listing=False, headers=None,
                delimiter=None):
    """Wrapper for :func:`get_account` with retry handling."""
    # TODO(unknown): With full_listing=True this will restart the entire
    # listing with each retry. Need to make a better version that just
    # retries where it left off.
    listing_opts = dict(marker=marker, limit=limit, prefix=prefix,
                        end_marker=end_marker, full_listing=full_listing,
                        headers=headers, delimiter=delimiter)
    return self._retry(None, get_account, **listing_opts)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2015-06-11 14:33:39 -07:00
|
|
|
def post_account(self, headers, response_dict=None,
                 query_string=None, data=None):
    """Wrapper for :func:`post_account` with retry handling."""
    return self._retry(None, post_account, headers,
                       query_string=query_string,
                       data=data,
                       response_dict=response_dict)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2015-09-22 11:09:44 +01:00
|
|
|
def head_container(self, container, headers=None):
    """Wrapper for :func:`head_container` with retry handling."""
    return self._retry(None, head_container, container, headers=headers)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
def get_container(self, container, marker=None, limit=None, prefix=None,
                  delimiter=None, end_marker=None, path=None,
                  full_listing=False, headers=None, query_string=None):
    """Wrapper for :func:`get_container` with retry handling."""
    # TODO(unknown): With full_listing=True this will restart the entire
    # listing with each retry. Need to make a better version that just
    # retries where it left off.
    listing_opts = dict(marker=marker, limit=limit, prefix=prefix,
                        delimiter=delimiter, end_marker=end_marker,
                        path=path, full_listing=full_listing,
                        headers=headers, query_string=query_string)
    return self._retry(None, get_container, container, **listing_opts)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2016-02-23 15:02:03 -06:00
|
|
|
def put_container(self, container, headers=None, response_dict=None,
                  query_string=None):
    """Wrapper for :func:`put_container` with retry handling."""
    return self._retry(None, put_container, container,
                       headers=headers,
                       response_dict=response_dict,
                       query_string=query_string)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def post_container(self, container, headers, response_dict=None):
    """Wrapper for :func:`post_container` with retry handling."""
    return self._retry(None, post_container, container, headers,
                       response_dict=response_dict)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2016-02-23 15:02:03 -06:00
|
|
|
def delete_container(self, container, response_dict=None,
                     query_string=None, headers=None):
    """Wrapper for :func:`delete_container` with retry handling.

    NOTE: the historical signature used a mutable default argument
    (``headers={}``), which is shared between calls and a well-known
    Python pitfall; ``None`` is the safe, behaviorally-equivalent
    default (both are falsy to the wrapped function).
    """
    return self._retry(None, delete_container, container,
                       response_dict=response_dict,
                       query_string=query_string,
                       headers=headers)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2018-03-05 17:33:22 -08:00
|
|
|
def head_object(self, container, obj, headers=None, query_string=None):
    """Wrapper for :func:`head_object` with retry handling."""
    return self._retry(None, head_object, container, obj,
                       headers=headers, query_string=query_string)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2013-03-05 15:12:04 -08:00
|
|
|
def get_object(self, container, obj, resp_chunk_size=None,
               query_string=None, response_dict=None, headers=None):
    """Wrapper for :func:`get_object` with transparent read retries.

    When the response body is streamed in chunks, it is wrapped in a
    :class:`_RetryBody` so a dropped connection can resume the download
    mid-stream (using a Range request from the last byte read).
    """
    rheaders, body = self._retry(None, get_object, container, obj,
                                 resp_chunk_size=resp_chunk_size,
                                 query_string=query_string,
                                 response_dict=response_dict,
                                 headers=headers)
    # Resuming is only safe for plain (non-Range) chunked reads, while
    # retry budget remains, and when no transfer-encoding is in play.
    has_range_header = bool(headers) and any(
        k.lower() == 'range' for k in headers)
    if (not has_range_header and resp_chunk_size and
            self.attempts <= self.retries and
            rheaders.get('transfer-encoding') is None):
        body = _RetryBody(body.resp, self, container, obj,
                          resp_chunk_size=resp_chunk_size,
                          query_string=query_string,
                          response_dict=response_dict,
                          headers=headers)
    return rheaders, body
|
2012-05-08 11:17:04 +01:00
|
|
|
|
|
|
|
def put_object(self, container, obj, contents, content_length=None,
               etag=None, chunk_size=None, content_type=None,
               headers=None, query_string=None, response_dict=None):
    """Wrapper for :func:`put_object` with retry handling.

    Chooses a "reset" callable so that a retry can rewind *contents*
    before re-uploading:

    * ``str`` or empty contents need no reset (re-sent verbatim);
    * seekable file-like objects are rewound via ``seek``;
    * objects exposing a ``reset()`` method use that;
    * anything else aborts the retry with a :class:`ClientException`.
    """

    def _default_reset(*args, **kwargs):
        # Retrying is impossible: part of *contents* may already have
        # been consumed and cannot be replayed.
        raise ClientException('put_object(%r, %r, ...) failure and no '
                              'ability to reset contents for reupload.'
                              % (container, obj))

    if isinstance(contents, str) or not contents:
        # if its a str or None then you can retry as much as you want
        reset_func = None
    else:
        reset_func = _default_reset
        if self.retries > 0:
            tell = getattr(contents, 'tell', None)
            seek = getattr(contents, 'seek', None)
            reset = getattr(contents, 'reset', None)
            if tell and seek:
                orig_pos = tell()

                # PEP 8 (E731): use a def rather than assigning a lambda.
                def reset_func(*args, **kwargs):
                    seek(orig_pos)
            elif reset:
                reset_func = reset

    return self._retry(reset_func, put_object, container, obj, contents,
                       content_length=content_length, etag=etag,
                       chunk_size=chunk_size, content_type=content_type,
                       headers=headers, query_string=query_string,
                       response_dict=response_dict)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def post_object(self, container, obj, headers, response_dict=None):
    """Wrapper for :func:`post_object` with retry handling."""
    return self._retry(None, post_object, container, obj, headers,
                       response_dict=response_dict)
|
2012-05-08 11:17:04 +01:00
|
|
|
|
2016-02-15 12:14:17 +01:00
|
|
|
def copy_object(self, container, obj, destination=None, headers=None,
                fresh_metadata=None, response_dict=None):
    """Wrapper for :func:`copy_object` with retry handling."""
    return self._retry(None, copy_object, container, obj,
                       destination, headers, fresh_metadata,
                       response_dict=response_dict)
|
|
|
|
|
2013-06-12 12:02:02 +00:00
|
|
|
def delete_object(self, container, obj, query_string=None,
                  response_dict=None, headers=None):
    """Wrapper for :func:`delete_object` with retry handling."""
    return self._retry(None, delete_object, container, obj,
                       query_string=query_string,
                       response_dict=response_dict,
                       headers=headers)
|
2014-01-13 22:39:28 +01:00
|
|
|
|
|
|
|
def get_capabilities(self, url=None):
    """Fetch the cluster's /info capabilities document.

    Authenticates first if no storage URL is known yet, then reuses (or
    creates) the cached HTTP connection to query ``/info``.
    """
    url = url or self.url
    if not url:
        url, _token = self.get_auth()
    info_url = urlparse(urljoin(url, '/info'))
    if not self.http_conn:
        self.http_conn = self.http_connection(url)
    return get_capabilities((info_url, self.http_conn[1]))
|