Blackify openstack.object_store
Black used with the '-l 79 -S' flags.

A future change will ignore this commit in git-blame history by adding a
'git-blame-ignore-revs' file.

Change-Id: I9c6e6b898fc7e3a196725bd37a3b5bdc77060cd3
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
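The '-l 79 -S' flags limit lines to 79 characters and skip Black's string normalization, so existing single-quoted strings keep their quote style. As a rough sketch only (not part of this change), the same formatting can be reproduced through Black's Python API; this assumes a recent Black release and uses a made-up input snippet:

    # Minimal sketch: reformat a snippet the way this commit does.
    #   -l 79 -> line_length=79
    #   -S    -> string_normalization=False (keep existing quote style)
    import black

    SRC = (
        "response = session.post(request.url,\n"
        "                        headers=self._calculate_headers(metadata))\n"
    )

    mode = black.Mode(line_length=79, string_normalization=False)
    print(black.format_str(SRC, mode=mode))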
commit 4589e293e8 (parent 34da09f312)
@@ -28,13 +28,13 @@ class BaseResource(resource.Resource):
_last_headers = dict()

def __init__(self, metadata=None, **attrs):
"""Process and save metadata known at creation stage
"""
"""Process and save metadata known at creation stage"""
super().__init__(**attrs)
if metadata is not None:
for k, v in metadata.items():
if not k.lower().startswith(
self._custom_metadata_prefix.lower()):
self._custom_metadata_prefix.lower()
):
self.metadata[self._custom_metadata_prefix + k] = v
else:
self.metadata[k] = v

@@ -62,8 +62,8 @@ class BaseResource(resource.Resource):
def set_metadata(self, session, metadata, refresh=True):
request = self._prepare_request()
response = session.post(
request.url,
headers=self._calculate_headers(metadata))
request.url, headers=self._calculate_headers(metadata)
)
self._translate_response(response, has_body=False)
if refresh:
response = session.head(request.url)

@@ -74,10 +74,11 @@ class BaseResource(resource.Resource):
request = self._prepare_request()
headers = {key: '' for key in keys}
response = session.post(
request.url,
headers=self._calculate_headers(headers))
request.url, headers=self._calculate_headers(headers)
)
exceptions.raise_from_response(
response, error_message="Error deleting metadata keys")
response, error_message="Error deleting metadata keys"
)
return self

def _set_metadata(self, headers):

@@ -85,10 +86,8 @@ class BaseResource(resource.Resource):

for header in headers:
# RADOS and other stuff in front may actually lowcase headers
if header.lower().startswith(
self._custom_metadata_prefix.lower()
):
key = header[len(self._custom_metadata_prefix):].lower()
if header.lower().startswith(self._custom_metadata_prefix.lower()):
key = header[len(self._custom_metadata_prefix) :].lower()
self.metadata[key] = headers[header]

def _translate_response(self, response, has_body=None, error_message=None):

@@ -98,5 +97,6 @@ class BaseResource(resource.Resource):
# pops known headers.
self._last_headers = response.headers.copy()
super(BaseResource, self)._translate_response(
response, has_body=has_body, error_message=error_message)
response, has_body=has_body, error_message=error_message
)
self._set_metadata(response.headers)
@@ -40,7 +40,7 @@ class Proxy(proxy.Proxy):
"account": _account.Account,
"container": _container.Container,
"info": _info.Info,
"object": _obj.Object
"object": _obj.Object,
}

skip_discovery = True

@@ -60,19 +60,25 @@ class Proxy(proxy.Proxy):

# Split url into parts and exclude potential project_id in some urls
url_parts = [
x for x in url_path.split('/') if (
x
for x in url_path.split('/')
if (
x != project_id
and (
not project_id
or (project_id and x != 'AUTH_' + project_id)
))
)
)
]
# Strip leading version piece so that
# GET /v1/AUTH_xxx
# returns ['AUTH_xxx']
if (url_parts[0]
and url_parts[0][0] == 'v'
and url_parts[0][1] and url_parts[0][1].isdigit()):
if (
url_parts[0]
and url_parts[0][0] == 'v'
and url_parts[0][1]
and url_parts[0][1].isdigit()
):
url_parts = url_parts[1:]

# Strip out anything that's empty or None

@@ -152,8 +158,9 @@ class Proxy(proxy.Proxy):

:returns: ``None``
"""
self._delete(_container.Container, container,
ignore_missing=ignore_missing)
self._delete(
_container.Container, container, ignore_missing=ignore_missing
)

def get_container_metadata(self, container):
"""Get metadata for a container

@@ -219,8 +226,12 @@ class Proxy(proxy.Proxy):
container = self._get_container_name(container=container)

for obj in self._list(
_obj.Object, container=container,
paginated=True, format='json', **query):
_obj.Object,
container=container,
paginated=True,
format='json',
**query,
):
obj.container = container
yield obj

@@ -236,8 +247,12 @@ class Proxy(proxy.Proxy):
raise ValueError("container must be specified")

def get_object(
self, obj, container=None, resp_chunk_size=1024,
outfile=None, remember_content=False
self,
obj,
container=None,
resp_chunk_size=1024,
outfile=None,
remember_content=False,
):
"""Get the data associated with an object
@@ -262,20 +277,17 @@ class Proxy(proxy.Proxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found.
"""
container_name = self._get_container_name(
obj=obj, container=container)
container_name = self._get_container_name(obj=obj, container=container)

_object = self._get_resource(
_obj.Object, obj,
container=container_name)
_obj.Object, obj, container=container_name
)
request = _object._prepare_request()

get_stream = (outfile is not None)
get_stream = outfile is not None

response = self.get(
request.url,
headers=request.headers,
stream=get_stream
request.url, headers=request.headers, stream=get_stream
)
exceptions.raise_from_response(response)
_object._translate_response(response, has_body=False)

@@ -286,7 +298,8 @@ class Proxy(proxy.Proxy):
else:
outfile_handle = outfile
for chunk in response.iter_content(
resp_chunk_size, decode_unicode=False):
resp_chunk_size, decode_unicode=False
):
outfile_handle.write(chunk)
if isinstance(outfile, str):
outfile_handle.close()

@@ -308,10 +321,10 @@ class Proxy(proxy.Proxy):
:raises: :class:`~openstack.exceptions.ResourceNotFound`
when no resource can be found.
"""
container_name = self._get_container_name(
obj=obj, container=container)
container_name = self._get_container_name(obj=obj, container=container)
obj = self._get_resource(
_obj.Object, obj, container=container_name, **attrs)
_obj.Object, obj, container=container_name, **attrs
)
return obj.download(self)

def stream_object(self, obj, container=None, chunk_size=1024, **attrs):

@@ -326,18 +339,26 @@ class Proxy(proxy.Proxy):
when no resource can be found.
:returns: An iterator that iterates over chunk_size bytes
"""
container_name = self._get_container_name(
obj=obj, container=container)
container_name = self._get_container_name(obj=obj, container=container)
obj = self._get_resource(
_obj.Object, obj, container=container_name, **attrs)
_obj.Object, obj, container=container_name, **attrs
)
return obj.stream(self, chunk_size=chunk_size)

def create_object(
self, container, name, filename=None,
md5=None, sha256=None, segment_size=None,
use_slo=True, metadata=None,
generate_checksums=None, data=None,
**headers):
self,
container,
name,
filename=None,
md5=None,
sha256=None,
segment_size=None,
use_slo=True,
metadata=None,
generate_checksums=None,
data=None,
**headers,
):
"""Create a file object.

Automatically uses large-object segments if needed.
@@ -373,13 +394,14 @@ class Proxy(proxy.Proxy):
"""
if data is not None and filename:
raise ValueError(
"Both filename and data given. Please choose one.")
"Both filename and data given. Please choose one."
)
if data is not None and not name:
raise ValueError(
"name is a required parameter when data is given")
raise ValueError("name is a required parameter when data is given")
if data is not None and generate_checksums:
raise ValueError(
"checksums cannot be generated with data parameter")
"checksums cannot be generated with data parameter"
)
if generate_checksums is None:
if data is not None:
generate_checksums = False

@@ -400,17 +422,22 @@ class Proxy(proxy.Proxy):
metadata[self._connection._OBJECT_SHA256_KEY] = sha256

container_name = self._get_container_name(container=container)
endpoint = '{container}/{name}'.format(container=container_name,
name=name)
endpoint = '{container}/{name}'.format(
container=container_name, name=name
)

if data is not None:
self.log.debug(
"swift uploading data to %(endpoint)s",
{'endpoint': endpoint})
"swift uploading data to %(endpoint)s", {'endpoint': endpoint}
)
return self._create(
_obj.Object, container=container_name,
name=name, data=data, metadata=metadata,
**headers)
_obj.Object,
container=container_name,
name=name,
data=data,
metadata=metadata,
**headers,
)

# segment_size gets used as a step value in a range call, so needs
# to be an int

@@ -423,7 +450,8 @@ class Proxy(proxy.Proxy):

self._connection.log.debug(
"swift uploading %(filename)s to %(endpoint)s",
{'filename': filename, 'endpoint': endpoint})
{'filename': filename, 'endpoint': endpoint},
)

if metadata is not None:
# Rely on the class headers calculation for requested metadata

@@ -435,8 +463,13 @@ class Proxy(proxy.Proxy):

else:
self._upload_large_object(
endpoint, filename, headers,
file_size, segment_size, use_slo)
endpoint,
filename,
headers,
file_size,
segment_size,
use_slo,
)

# Backwards compat
upload_object = create_object
@@ -461,8 +494,12 @@ class Proxy(proxy.Proxy):
"""
container_name = self._get_container_name(obj, container)

self._delete(_obj.Object, obj, ignore_missing=ignore_missing,
container=container_name)
self._delete(
_obj.Object,
obj,
ignore_missing=ignore_missing,
container=container_name,
)

def get_object_metadata(self, obj, container=None):
"""Get metadata for an object.

@@ -522,7 +559,8 @@ class Proxy(proxy.Proxy):
return res

def is_object_stale(
self, container, name, filename, file_md5=None, file_sha256=None):
self, container, name, filename, file_md5=None, file_sha256=None
):
"""Check to see if an object matches the hashes of a file.

:param container: Name of the container.

@@ -538,37 +576,45 @@ class Proxy(proxy.Proxy):
except exceptions.NotFoundException:
self._connection.log.debug(
"swift stale check, no object: {container}/{name}".format(
container=container, name=name))
container=container, name=name
)
)
return True

if not (file_md5 or file_sha256):
(file_md5, file_sha256) = \
utils._get_file_hashes(filename)
(file_md5, file_sha256) = utils._get_file_hashes(filename)
md5_key = metadata.get(
self._connection._OBJECT_MD5_KEY,
metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, ''))
metadata.get(self._connection._SHADE_OBJECT_MD5_KEY, ''),
)
sha256_key = metadata.get(
self._connection._OBJECT_SHA256_KEY, metadata.get(
self._connection._SHADE_OBJECT_SHA256_KEY, ''))
self._connection._OBJECT_SHA256_KEY,
metadata.get(self._connection._SHADE_OBJECT_SHA256_KEY, ''),
)
up_to_date = utils._hashes_up_to_date(
md5=file_md5, sha256=file_sha256,
md5_key=md5_key, sha256_key=sha256_key)
md5=file_md5,
sha256=file_sha256,
md5_key=md5_key,
sha256_key=sha256_key,
)

if not up_to_date:
self._connection.log.debug(
"swift checksum mismatch: "
" %(filename)s!=%(container)s/%(name)s",
{'filename': filename, 'container': container, 'name': name})
{'filename': filename, 'container': container, 'name': name},
)
return True

self._connection.log.debug(
"swift object up to date: %(container)s/%(name)s",
{'container': container, 'name': name})
{'container': container, 'name': name},
)
return False

def _upload_large_object(
self, endpoint, filename,
headers, file_size, segment_size, use_slo):
self, endpoint, filename, headers, file_size, segment_size, use_slo
):
# If the object is big, we need to break it up into segments that
# are no larger than segment_size, upload each of them individually
# and then upload a manifest object. The segments can be uploaded in
@@ -584,28 +630,32 @@ class Proxy(proxy.Proxy):
# segment, the value a FileSegment file-like object that is a
# slice of the data for the segment.
segments = self._get_file_segments(
endpoint, filename, file_size, segment_size)
endpoint, filename, file_size, segment_size
)

# Schedule the segments for upload
for name, segment in segments.items():
# Async call to put - schedules execution and returns a future
segment_future = self._connection._pool_executor.submit(
self.put,
name, headers=headers, data=segment,
raise_exc=False)
self.put, name, headers=headers, data=segment, raise_exc=False
)
segment_futures.append(segment_future)
# TODO(mordred) Collect etags from results to add to this manifest
# dict. Then sort the list of dicts by path.
manifest.append(dict(
# While Object Storage usually expects the name to be
# urlencoded in most requests, the SLO manifest requires
# plain object names instead.
path='/{name}'.format(name=parse.unquote(name)),
size_bytes=segment.length))
manifest.append(
dict(
# While Object Storage usually expects the name to be
# urlencoded in most requests, the SLO manifest requires
# plain object names instead.
path='/{name}'.format(name=parse.unquote(name)),
size_bytes=segment.length,
)
)

# Try once and collect failed results to retry
segment_results, retry_results = self._connection._wait_for_futures(
segment_futures, raise_on_error=False)
segment_futures, raise_on_error=False
)

self._add_etag_to_manifest(segment_results, manifest)

@@ -616,37 +666,41 @@ class Proxy(proxy.Proxy):
segment.seek(0)
# Async call to put - schedules execution and returns a future
segment_future = self._connection._pool_executor.submit(
self.put,
name, headers=headers, data=segment)
self.put, name, headers=headers, data=segment
)
# TODO(mordred) Collect etags from results to add to this manifest
# dict. Then sort the list of dicts by path.
retry_futures.append(segment_future)

# If any segments fail the second time, just throw the error
segment_results, retry_results = self._connection._wait_for_futures(
retry_futures, raise_on_error=True)
retry_futures, raise_on_error=True
)

self._add_etag_to_manifest(segment_results, manifest)

try:
if use_slo:
return self._finish_large_object_slo(
endpoint, headers, manifest)
endpoint, headers, manifest
)
else:
return self._finish_large_object_dlo(
endpoint, headers)
return self._finish_large_object_dlo(endpoint, headers)
except Exception:
try:
segment_prefix = endpoint.split('/')[-1]
self.log.debug(
"Failed to upload large object manifest for %s. "
"Removing segment uploads.", segment_prefix)
"Removing segment uploads.",
segment_prefix,
)
self._delete_autocreated_image_objects(
segment_prefix=segment_prefix)
segment_prefix=segment_prefix
)
except Exception:
self.log.exception(
"Failed to cleanup image objects for %s:",
segment_prefix)
"Failed to cleanup image objects for %s:", segment_prefix
)
raise

def _finish_large_object_slo(self, endpoint, headers, manifest):
@@ -656,10 +710,13 @@ class Proxy(proxy.Proxy):
retries = 3
while True:
try:
return exceptions.raise_from_response(self.put(
endpoint,
params={'multipart-manifest': 'put'},
headers=headers, data=json.dumps(manifest))
return exceptions.raise_from_response(
self.put(
endpoint,
params={'multipart-manifest': 'put'},
headers=headers,
data=json.dumps(manifest),
)
)
except Exception:
retries -= 1

@@ -673,7 +730,8 @@ class Proxy(proxy.Proxy):
while True:
try:
return exceptions.raise_from_response(
self.put(endpoint, headers=headers))
self.put(endpoint, headers=headers)
)
except Exception:
retries -= 1
if retries == 0:

@@ -681,8 +739,7 @@ class Proxy(proxy.Proxy):

def _upload_object(self, endpoint, filename, headers):
with open(filename, 'rb') as dt:
return self.put(
endpoint, headers=headers, data=dt)
return self.put(endpoint, headers=headers, data=dt)

def _get_file_segments(self, endpoint, filename, file_size, segment_size):
# Use an ordered dict here so that testing can replicate things

@@ -690,10 +747,13 @@ class Proxy(proxy.Proxy):
for (index, offset) in enumerate(range(0, file_size, segment_size)):
remaining = file_size - (index * segment_size)
segment = _utils.FileSegment(
filename, offset,
segment_size if segment_size < remaining else remaining)
filename,
offset,
segment_size if segment_size < remaining else remaining,
)
name = '{endpoint}/{index:0>6}'.format(
endpoint=endpoint, index=index)
endpoint=endpoint, index=index
)
segments[name] = segment
return segments

@@ -710,7 +770,8 @@ class Proxy(proxy.Proxy):
server_max_file_size = DEFAULT_MAX_FILE_SIZE
self._connection.log.info(
"Swift capabilities not supported. "
"Using default max file size.")
"Using default max file size."
)
else:
raise
else:

@@ -740,9 +801,7 @@ class Proxy(proxy.Proxy):
continue
name = self._object_name_from_url(result.url)
for entry in manifest:
if entry['path'] == '/{name}'.format(
name=parse.unquote(name)
):
if entry['path'] == '/{name}'.format(name=parse.unquote(name)):
entry['etag'] = result.headers['Etag']

def get_info(self):
@@ -788,12 +847,16 @@ class Proxy(proxy.Proxy):
temp_url_key = None
if container:
container_meta = self.get_container_metadata(container)
temp_url_key = (container_meta.meta_temp_url_key_2
or container_meta.meta_temp_url_key)
temp_url_key = (
container_meta.meta_temp_url_key_2
or container_meta.meta_temp_url_key
)
if not temp_url_key:
account_meta = self.get_account_metadata()
temp_url_key = (account_meta.meta_temp_url_key_2
or account_meta.meta_temp_url_key)
temp_url_key = (
account_meta.meta_temp_url_key_2
or account_meta.meta_temp_url_key
)
if temp_url_key and not isinstance(temp_url_key, bytes):
temp_url_key = temp_url_key.encode('utf8')
return temp_url_key

@@ -807,12 +870,20 @@ class Proxy(proxy.Proxy):
if not temp_url_key:
raise exceptions.SDKException(
'temp_url_key was not given, nor was a temporary url key'
' found for the account or the container.')
' found for the account or the container.'
)
return temp_url_key

def generate_form_signature(
self, container, object_prefix, redirect_url, max_file_size,
max_upload_count, timeout, temp_url_key=None):
self,
container,
object_prefix,
redirect_url,
max_file_size,
max_upload_count,
timeout,
temp_url_key=None,
):
"""Generate a signature for a FormPost upload.

:param container: The value can be the name of a container or a

@@ -832,33 +903,50 @@ class Proxy(proxy.Proxy):
max_file_size = int(max_file_size)
if max_file_size < 1:
raise exceptions.SDKException(
'Please use a positive max_file_size value.')
'Please use a positive max_file_size value.'
)
max_upload_count = int(max_upload_count)
if max_upload_count < 1:
raise exceptions.SDKException(
'Please use a positive max_upload_count value.')
'Please use a positive max_upload_count value.'
)
if timeout < 1:
raise exceptions.SDKException(
'Please use a positive <timeout> value.')
'Please use a positive <timeout> value.'
)
expires = int(time.time() + int(timeout))

temp_url_key = self._check_temp_url_key(container=container,
temp_url_key=temp_url_key)
temp_url_key = self._check_temp_url_key(
container=container, temp_url_key=temp_url_key
)

res = self._get_resource(_container.Container, container)
endpoint = parse.urlparse(self.get_endpoint())
path = '/'.join([endpoint.path, res.name, object_prefix])

data = '%s\n%s\n%s\n%s\n%s' % (path, redirect_url, max_file_size,
max_upload_count, expires)
data = '%s\n%s\n%s\n%s\n%s' % (
path,
redirect_url,
max_file_size,
max_upload_count,
expires,
)
data = data.encode('utf8')
sig = hmac.new(temp_url_key, data, sha1).hexdigest()

return (expires, sig)

def generate_temp_url(
self, path, seconds, method, absolute=False, prefix=False,
iso8601=False, ip_range=None, temp_url_key=None):
self,
path,
seconds,
method,
absolute=False,
prefix=False,
iso8601=False,
ip_range=None,
temp_url_key=None,
):
"""Generates a temporary URL that gives unauthenticated access to the
Swift object.
@@ -894,7 +982,8 @@ class Proxy(proxy.Proxy):
formats = (
EXPIRES_ISO8601_FORMAT,
EXPIRES_ISO8601_FORMAT[:-1],
SHORT_EXPIRES_ISO8601_FORMAT)
SHORT_EXPIRES_ISO8601_FORMAT,
)
for f in formats:
try:
t = time.strptime(seconds, f)

@@ -919,8 +1008,10 @@ class Proxy(proxy.Proxy):
if timestamp < 0:
raise ValueError()
except ValueError:
raise ValueError('time must either be a whole number '
'or in specific ISO 8601 format.')
raise ValueError(
'time must either be a whole number '
'or in specific ISO 8601 format.'
)

if isinstance(path, bytes):
try:

@@ -931,50 +1022,61 @@ class Proxy(proxy.Proxy):
path_for_body = path

parts = path_for_body.split('/', 4)
if len(parts) != 5 or parts[0] or not all(
parts[1:(4 if prefix else 5)]):
if (
len(parts) != 5
or parts[0]
or not all(parts[1 : (4 if prefix else 5)])
):
if prefix:
raise ValueError('path must at least contain /v1/a/c/')
else:
raise ValueError('path must be full path to an object'
' e.g. /v1/a/c/o')
raise ValueError(
'path must be full path to an object' ' e.g. /v1/a/c/o'
)

standard_methods = ['GET', 'PUT', 'HEAD', 'POST', 'DELETE']
if method.upper() not in standard_methods:
self.log.warning('Non default HTTP method %s for tempurl '
'specified, possibly an error', method.upper())
self.log.warning(
'Non default HTTP method %s for tempurl '
'specified, possibly an error',
method.upper(),
)

if not absolute:
expiration = int(time.time() + timestamp)
else:
expiration = timestamp

hmac_parts = [method.upper(), str(expiration),
('prefix:' if prefix else '') + path_for_body]
hmac_parts = [
method.upper(),
str(expiration),
('prefix:' if prefix else '') + path_for_body,
]

if ip_range:
if isinstance(ip_range, bytes):
try:
ip_range = ip_range.decode('utf-8')
except UnicodeDecodeError:
raise ValueError(
'ip_range must be representable as UTF-8'
)
raise ValueError('ip_range must be representable as UTF-8')
hmac_parts.insert(0, "ip=%s" % ip_range)

hmac_body = u'\n'.join(hmac_parts)

temp_url_key = self._check_temp_url_key(temp_url_key=temp_url_key)

sig = hmac.new(temp_url_key, hmac_body.encode('utf-8'),
sha1).hexdigest()
sig = hmac.new(
temp_url_key, hmac_body.encode('utf-8'), sha1
).hexdigest()

if iso8601:
expiration = time.strftime(
EXPIRES_ISO8601_FORMAT, time.gmtime(expiration))
EXPIRES_ISO8601_FORMAT, time.gmtime(expiration)
)

temp_url = u'{path}?temp_url_sig={sig}&temp_url_expires={exp}'.format(
path=path_for_body, sig=sig, exp=expiration)
path=path_for_body, sig=sig, exp=expiration
)

if ip_range:
temp_url += u'&temp_url_ip_range={}'.format(ip_range)
@@ -1020,11 +1122,7 @@ class Proxy(proxy.Proxy):

# ========== Project Cleanup ==========
def _get_cleanup_dependencies(self):
return {
'object_store': {
'before': []
}
}
return {'object_store': {'before': []}}

def _service_cleanup(
self,

@@ -1032,7 +1130,7 @@ class Proxy(proxy.Proxy):
client_status_queue=None,
identified_resources=None,
filters=None,
resource_evaluation_fn=None
resource_evaluation_fn=None,
):
is_bulk_delete_supported = False
bulk_delete_max_per_request = None

@@ -1044,7 +1142,8 @@ class Proxy(proxy.Proxy):
bulk_delete = caps.swift.get("bulk_delete", {})
is_bulk_delete_supported = bulk_delete is not None
bulk_delete_max_per_request = bulk_delete.get(
"max_deletes_per_request", 100)
"max_deletes_per_request", 100
)

elements = []
for cont in self.containers():

@@ -1058,7 +1157,8 @@ class Proxy(proxy.Proxy):
client_status_queue=client_status_queue,
identified_resources=identified_resources,
filters=filters,
resource_evaluation_fn=resource_evaluation_fn)
resource_evaluation_fn=resource_evaluation_fn,
)
if need_delete:
if not is_bulk_delete_supported and not dry_run:
self.delete_object(obj, cont)

@@ -1083,7 +1183,8 @@ class Proxy(proxy.Proxy):
client_status_queue=client_status_queue,
identified_resources=identified_resources,
filters=filters,
resource_evaluation_fn=resource_evaluation_fn)
resource_evaluation_fn=resource_evaluation_fn,
)

def _bulk_delete(self, elements, dry_run=False):
data = "\n".join([parse.quote(x) for x in elements])

@@ -1093,6 +1194,6 @@ class Proxy(proxy.Proxy):
data=data,
headers={
'Content-Type': 'text/plain',
'Accept': 'application/json'
}
'Accept': 'application/json',
},
)
@@ -28,8 +28,9 @@ class Account(_base.BaseResource):
#: the account.
account_bytes_used = resource.Header("x-account-bytes-used", type=int)
#: The number of containers.
account_container_count = resource.Header("x-account-container-count",
type=int)
account_container_count = resource.Header(
"x-account-container-count", type=int
)
#: The number of objects in the account.
account_object_count = resource.Header("x-account-object-count", type=int)
#: The secret key value for temporary URLs. If not set,
@@ -25,7 +25,7 @@ class Container(_base.BaseResource):
"read_ACL": "x-container-read",
"write_ACL": "x-container-write",
"sync_to": "x-container-sync-to",
"sync_key": "x-container-sync-key"
"sync_key": "x-container-sync-key",
}

base_path = "/"

@@ -38,9 +38,7 @@ class Container(_base.BaseResource):
allow_list = True
allow_head = True

_query_mapping = resource.QueryParameters(
'prefix', 'format'
)
_query_mapping = resource.QueryParameters('prefix', 'format')

# Container body data (when id=None)
#: The name of the container.

@@ -54,10 +52,12 @@ class Container(_base.BaseResource):
# Container metadata (when id=name)
#: The number of objects.
object_count = resource.Header(
"x-container-object-count", type=int, alias='count')
"x-container-object-count", type=int, alias='count'
)
#: The count of bytes used in total.
bytes_used = resource.Header(
"x-container-bytes-used", type=int, alias='bytes')
"x-container-bytes-used", type=int, alias='bytes'
)
#: The timestamp of the transaction.
timestamp = resource.Header("x-timestamp")

@@ -94,8 +94,9 @@ class Container(_base.BaseResource):
#: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.Header("x-detect-content-type",
type=bool)
is_content_type_detected = resource.Header(
"x-detect-content-type", type=bool
)

#: Storage policy used by the container.
#: It is not possible to change policy of an existing container

@@ -136,9 +137,9 @@ class Container(_base.BaseResource):
:data:`Resource.allow_create` is not set to ``True``.
"""
request = self._prepare_request(
requires_id=True, prepend_key=prepend_key, base_path=base_path)
response = session.put(
request.url, headers=request.headers)
requires_id=True, prepend_key=prepend_key, base_path=base_path
)
response = session.put(request.url, headers=request.headers)

self._translate_response(response, has_body=False)
return self
@@ -34,8 +34,12 @@ class Info(resource.Resource):
tempurl = resource.Body("tempurl", type=dict)

def fetch(
self, session, requires_id=False,
base_path=None, skip_cache=False, error_message=None
self,
session,
requires_id=False,
base_path=None,
skip_cache=False,
error_message=None,
):
"""Get a remote resource based on this instance.

@@ -64,7 +68,8 @@ class Info(resource.Resource):
session = self._get_session(session)
endpoint = urllib.parse.urlparse(session.get_endpoint())
url = "{scheme}://{netloc}/info".format(
scheme=endpoint.scheme, netloc=endpoint.netloc)
scheme=endpoint.scheme, netloc=endpoint.netloc
)

microversion = self._get_microversion(session, action='fetch')
response = session.get(url, microversion=microversion)
@@ -30,7 +30,7 @@ class Object(_base.BaseResource):
"is_content_type_detected": "x-detect-content-type",
"manifest": "x-object-manifest",
# Rax hack - the need CORS as different header
"access_control_allow_origin": "access-control-allow-origin"
"access_control_allow_origin": "access-control-allow-origin",
}

base_path = "/%(container)s"

@@ -44,10 +44,14 @@ class Object(_base.BaseResource):
allow_head = True

_query_mapping = resource.QueryParameters(
'prefix', 'format',
'temp_url_sig', 'temp_url_expires',
'filename', 'multipart_manifest', 'symlink',
multipart_manifest='multipart-manifest'
'prefix',
'format',
'temp_url_sig',
'temp_url_expires',
'filename',
'multipart_manifest',
'symlink',
multipart_manifest='multipart-manifest',
)

# Data to be passed during a POST call to create an object on the server.

@@ -117,7 +121,8 @@ class Object(_base.BaseResource):
#: size of the response body. Instead it contains the size of
#: the object, in bytes.
content_length = resource.Header(
"content-length", type=int, alias='_bytes')
"content-length", type=int, alias='_bytes'
)
#: The MIME type of the object.
content_type = resource.Header("content-type", alias="_content_type")
#: The type of ranges that the object accepts.

@@ -136,8 +141,9 @@ class Object(_base.BaseResource):
etag = resource.Header("etag", alias='_hash')
#: Set to True if this object is a static large object manifest object.
#: *Type: bool*
is_static_large_object = resource.Header("x-static-large-object",
type=bool)
is_static_large_object = resource.Header(
"x-static-large-object", type=bool
)
#: If set, the value of the Content-Encoding metadata.
#: If not set, this header is not returned by this operation.
content_encoding = resource.Header("content-encoding")

@@ -164,9 +170,8 @@ class Object(_base.BaseResource):
#: The date and time that the object was created or the last
#: time that the metadata was changed.
last_modified_at = resource.Header(
"last-modified",
alias='_last_modified',
aka='updated_at')
"last-modified", alias='_last_modified', aka='updated_at'
)

# Headers for PUT and POST requests
#: Set to chunked to enable chunked transfer encoding. If used,

@@ -175,8 +180,9 @@ class Object(_base.BaseResource):
#: If set to true, Object Storage guesses the content type based
#: on the file extension and ignores the value sent in the
#: Content-Type header, if present. *Type: bool*
is_content_type_detected = resource.Header("x-detect-content-type",
type=bool)
is_content_type_detected = resource.Header(
"x-detect-content-type", type=bool
)
#: If set, this is the name of an object used to create the new
#: object by copying the X-Copy-From object. The value is in form
#: {container}/{object}. You must UTF-8-encode and then URL-encode

@@ -195,7 +201,8 @@ class Object(_base.BaseResource):

#: CORS for RAX (deviating from standard)
access_control_allow_origin = resource.Header(
"access-control-allow-origin")
"access-control-allow-origin"
)

has_body = False

@@ -209,8 +216,9 @@ class Object(_base.BaseResource):
def set_metadata(self, session, metadata):
# Filter out items with empty values so the create metadata behaviour
# is the same as account and container
filtered_metadata = \
{key: value for key, value in metadata.items() if value}
filtered_metadata = {
key: value for key, value in metadata.items() if value
}

# Update from remote if we only have locally created information
if not self.last_modified_at:

@@ -281,9 +289,11 @@ class Object(_base.BaseResource):

request = self._prepare_request()
response = session.post(
request.url, headers=self._calculate_headers(metadata))
request.url, headers=self._calculate_headers(metadata)
)
exceptions.raise_from_response(
response, error_message="Error deleting metadata keys")
response, error_message="Error deleting metadata keys"
)

# Only delete from local object if the remote delete was successful
for key in attr_keys_to_delete:

@@ -296,7 +306,8 @@ class Object(_base.BaseResource):
request = self._prepare_request()

response = session.get(
request.url, headers=request.headers, stream=stream)
request.url, headers=request.headers, stream=stream
)
exceptions.raise_from_response(response, error_message=error_message)
return response

@@ -306,16 +317,15 @@ class Object(_base.BaseResource):

def stream(self, session, error_message=None, chunk_size=1024):
response = self._download(
session, error_message=error_message, stream=True)
session, error_message=error_message, stream=True
)
return response.iter_content(chunk_size, decode_unicode=False)

def create(self, session, base_path=None, **params):
request = self._prepare_request(base_path=base_path)

response = session.put(
request.url,
data=self.data,
headers=request.headers
request.url, data=self.data, headers=request.headers
)
self._translate_response(response, has_body=False)
return self

@@ -339,6 +349,5 @@ class Object(_base.BaseResource):
headers['multipart-manifest'] = 'delete'

return session.delete(
request.url,
headers=headers,
microversion=microversion)
request.url, headers=headers, microversion=microversion
)
@@ -14,7 +14,6 @@ from openstack.tests.functional import base


class TestAccount(base.BaseFunctionalTest):

def setUp(self):
super(TestAccount, self).setUp()
self.require_service('object-store')
@@ -15,7 +15,6 @@ from openstack.tests.functional import base


class TestContainer(base.BaseFunctionalTest):

def setUp(self):
super(TestContainer, self).setUp()
self.require_service('object-store')

@@ -24,7 +23,9 @@ class TestContainer(base.BaseFunctionalTest):
container = self.conn.object_store.create_container(name=self.NAME)
self.addEmptyCleanup(
self.conn.object_store.delete_container,
self.NAME, ignore_missing=False)
self.NAME,
ignore_missing=False,
)
assert isinstance(container, _container.Container)
self.assertEqual(self.NAME, container.name)

@@ -43,21 +44,24 @@ class TestContainer(base.BaseFunctionalTest):
self.assertIsNone(container.read_ACL)
self.assertIsNone(container.write_ACL)
self.conn.object_store.set_container_metadata(
container, read_ACL='.r:*', write_ACL='demo:demo')
container, read_ACL='.r:*', write_ACL='demo:demo'
)
container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertEqual('.r:*', container.read_ACL)
self.assertEqual('demo:demo', container.write_ACL)

# update system metadata
self.conn.object_store.set_container_metadata(
container, read_ACL='.r:demo')
container, read_ACL='.r:demo'
)
container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertEqual('.r:demo', container.read_ACL)
self.assertEqual('demo:demo', container.write_ACL)

# set system metadata and custom metadata
self.conn.object_store.set_container_metadata(
container, k0='v0', sync_key='1234')
container, k0='v0', sync_key='1234'
)
container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertTrue(container.metadata)
self.assertIn('k0', container.metadata)

@@ -67,8 +71,9 @@ class TestContainer(base.BaseFunctionalTest):
self.assertEqual('1234', container.sync_key)

# unset system metadata
self.conn.object_store.delete_container_metadata(container,
['sync_key'])
self.conn.object_store.delete_container_metadata(
container, ['sync_key']
)
container = self.conn.object_store.get_container_metadata(self.NAME)
self.assertTrue(container.metadata)
self.assertIn('k0', container.metadata)
@@ -26,19 +26,25 @@ class TestObject(base.BaseFunctionalTest):
self.conn.object_store.create_container(name=self.FOLDER)
self.addCleanup(self.conn.object_store.delete_container, self.FOLDER)
self.sot = self.conn.object_store.upload_object(
container=self.FOLDER, name=self.FILE, data=self.DATA)
container=self.FOLDER, name=self.FILE, data=self.DATA
)
self.addEmptyCleanup(
self.conn.object_store.delete_object, self.sot,
ignore_missing=False)
self.conn.object_store.delete_object,
self.sot,
ignore_missing=False,
)

def test_list(self):
names = [o.name for o
in self.conn.object_store.objects(container=self.FOLDER)]
names = [
o.name
for o in self.conn.object_store.objects(container=self.FOLDER)
]
self.assertIn(self.FILE, names)

def test_download_object(self):
result = self.conn.object_store.download_object(
self.FILE, container=self.FOLDER)
self.FILE, container=self.FOLDER
)
self.assertEqual(self.DATA, result)
result = self.conn.object_store.download_object(self.sot)
self.assertEqual(self.DATA, result)

@@ -46,25 +52,29 @@ class TestObject(base.BaseFunctionalTest):
def test_system_metadata(self):
# get system metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.FILE, container=self.FOLDER
)
# TODO(shade) obj.bytes is coming up None on python3 but not python2
# self.assertGreaterEqual(0, obj.bytes)
self.assertIsNotNone(obj.etag)

# set system metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.FILE, container=self.FOLDER
)
self.assertIsNone(obj.content_disposition)
self.assertIsNone(obj.content_encoding)
self.conn.object_store.set_object_metadata(
obj, content_disposition='attachment', content_encoding='gzip')
obj, content_disposition='attachment', content_encoding='gzip'
)
obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('gzip', obj.content_encoding)

# update system metadata
self.conn.object_store.set_object_metadata(
obj, content_encoding='deflate')
obj, content_encoding='deflate'
)
obj = self.conn.object_store.get_object_metadata(obj)
self.assertEqual('attachment', obj.content_disposition)
self.assertEqual('deflate', obj.content_encoding)

@@ -79,7 +89,8 @@ class TestObject(base.BaseFunctionalTest):

# unset more system metadata
self.conn.object_store.delete_object_metadata(
obj, keys=['content_disposition'])
obj, keys=['content_disposition']
)
obj = self.conn.object_store.get_object_metadata(obj)
self.assertIn('k0', obj.metadata)
self.assertEqual('v0', obj.metadata['k0'])

@@ -90,7 +101,8 @@ class TestObject(base.BaseFunctionalTest):
def test_custom_metadata(self):
# get custom metadata
obj = self.conn.object_store.get_object_metadata(
self.FILE, container=self.FOLDER)
self.FILE, container=self.FOLDER
)
self.assertFalse(obj.metadata)

# set no custom metadata

@@ -112,8 +124,9 @@ class TestObject(base.BaseFunctionalTest):
self.assertEqual('v1', obj.metadata['k1'])

# set more custom metadata by named object and container
self.conn.object_store.set_object_metadata(self.FILE, self.FOLDER,
k2='v2')
self.conn.object_store.set_object_metadata(
self.FILE, self.FOLDER, k2='v2'
)
obj = self.conn.object_store.get_object_metadata(obj)
self.assertTrue(obj.metadata)
self.assertEqual(2, len(obj.metadata))
@@ -24,12 +24,11 @@ ACCOUNT_EXAMPLE = {
'x-account-container-count': '678',
'content-type': 'text/plain; charset=utf-8',
'x-account-object-count': '98765',
'x-timestamp': '1453413555.88937'
'x-timestamp': '1453413555.88937',
}


class TestAccount(base.TestCase):

def setUp(self):
super(TestAccount, self).setUp()
self.endpoint = self.cloud.object_store.get_endpoint() + '/'

@@ -49,28 +48,41 @@ class TestAccount(base.TestCase):
def test_make_it(self):
sot = account.Account(**ACCOUNT_EXAMPLE)
self.assertIsNone(sot.id)
self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-bytes-used']),
sot.account_bytes_used)
self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-container-count']),
sot.account_container_count)
self.assertEqual(int(ACCOUNT_EXAMPLE['x-account-object-count']),
sot.account_object_count)
self.assertEqual(
int(ACCOUNT_EXAMPLE['x-account-bytes-used']),
sot.account_bytes_used,
)
self.assertEqual(
int(ACCOUNT_EXAMPLE['x-account-container-count']),
sot.account_container_count,
)