py3: get proxy-server willing and able to respond to some API requests

I saw GET account/container/replicated object all work,
which is not too shabby.

Change-Id: I63408274fb76a4e9920c00a2ce2829ca6d9982ca
This commit is contained in:
Tim Burke 2018-06-28 11:27:23 -07:00
parent 956172623c
commit 78344bfe25
6 changed files with 30 additions and 22 deletions

View File

@@ -92,7 +92,7 @@ def account_listing_response(account, req, response_content_type, broker=None,
account_list = listing_formats.account_to_xml(data, account)
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
elif response_content_type.endswith('/json'):
account_list = json.dumps(data)
account_list = json.dumps(data).encode('ascii')
ret = HTTPOk(body=account_list, request=req, headers=resp_headers)
elif data:
account_list = listing_formats.listing_to_text(data)

View File

@@ -62,7 +62,9 @@ def get_listing_content_type(req):
def account_to_xml(listing, account_name):
doc = Element('account', name=account_name.decode('utf-8'))
if isinstance(account_name, bytes):
account_name = account_name.decode('utf-8')
doc = Element('account', name=account_name)
doc.text = '\n'
for record in listing:
if 'subdir' in record:
@@ -75,12 +77,14 @@ def account_to_xml(listing, account_name):
record.pop(field))
sub.tail = '\n'
return tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)
b"<?xml version='1.0' encoding='UTF-8'?>",
b'<?xml version="1.0" encoding="UTF-8"?>', 1)
def container_to_xml(listing, base_name):
doc = Element('container', name=base_name.decode('utf-8'))
if isinstance(base_name, bytes):
base_name = base_name.decode('utf-8')
doc = Element('container', name=base_name)
for record in listing:
if 'subdir' in record:
name = record.pop('subdir')
@@ -94,8 +98,8 @@ def container_to_xml(listing, base_name):
record.pop(field))
return tostring(doc, encoding='UTF-8').replace(
"<?xml version='1.0' encoding='UTF-8'?>",
'<?xml version="1.0" encoding="UTF-8"?>', 1)
b"<?xml version='1.0' encoding='UTF-8'?>",
b'<?xml version="1.0" encoding="UTF-8"?>', 1)
def listing_to_text(listing):
@@ -175,7 +179,7 @@ class ListingFilter(object):
body = b''.join(resp_iter)
try:
listing = json.loads(body)
listing = json.loads(body.decode('ascii'))
# Do a couple sanity checks
if not isinstance(listing, list):
raise ValueError

View File

@@ -294,12 +294,12 @@ class SymlinkContainerContext(WSGIContext):
:return: modified json body
"""
with closing_if_possible(resp_iter):
resp_body = ''.join(resp_iter)
body_json = json.loads(resp_body)
resp_body = b''.join(resp_iter)
body_json = json.loads(resp_body.decode('ascii'))
swift_version, account, _junk = split_path(req.path, 2, 3, True)
new_body = json.dumps(
[self._extract_symlink_path_json(obj_dict, swift_version, account)
for obj_dict in body_json])
for obj_dict in body_json]).encode('ascii')
self.update_content_length(len(new_body))
return [new_body]

View File

@@ -963,7 +963,7 @@ class ResumingGetter(object):
def iter_bytes_from_response_part(part_file):
nchunks = 0
buf = ''
buf = b''
while True:
try:
with ChunkReadTimeout(node_timeout):
@@ -980,7 +980,7 @@ class ResumingGetter(object):
six.reraise(exc_type, exc_value, exc_traceback)
except RangeAlreadyComplete:
break
buf = ''
buf = b''
new_source, new_node = self._get_source_and_node()
if new_source:
self.app.exception_occurred(
@@ -1017,7 +1017,7 @@ class ResumingGetter(object):
else:
self.skip_bytes -= len(buf)
self.bytes_used_from_backend += len(buf)
buf = ''
buf = b''
if not chunk:
if buf:
@@ -1025,7 +1025,7 @@ class ResumingGetter(object):
self.app.client_timeout):
self.bytes_used_from_backend += len(buf)
yield buf
buf = ''
buf = b''
break
if client_chunk_size is not None:
@@ -1041,7 +1041,7 @@ class ResumingGetter(object):
with ChunkWriteTimeout(self.app.client_timeout):
self.bytes_used_from_backend += len(buf)
yield buf
buf = ''
buf = b''
# This is for fairness; if the network is outpacing
# the CPU, we'll always be able to read and write

View File

@@ -219,7 +219,7 @@ class ContainerController(Controller):
end_marker <= objects[-1]['name'].encode('utf-8')):
break
resp.body = json.dumps(objects)
resp.body = json.dumps(objects).encode('ascii')
constrained = any(req.params.get(constraint) for constraint in (
'marker', 'end_marker', 'path', 'prefix', 'delimiter'))
if not constrained and len(objects) < req_limit:

View File

@@ -333,23 +333,25 @@ class Application(object):
raise ValueError(
"No policy found for override config, index: %s" % index)
override = self._make_policy_override(policy, conf, override_conf)
overrides[policy] = override
overrides[index] = override
return overrides
def get_policy_options(self, policy):
"""
Return policy specific options.
:param policy: an instance of :class:`BaseStoragePolicy`
:param policy: an instance of :class:`BaseStoragePolicy` or ``None``
:return: an instance of :class:`ProxyOverrideOptions`
"""
return self._override_options[policy]
return self._override_options[policy and policy.idx]
def check_config(self):
"""
Check the configuration for possible errors
"""
for policy, options in self._override_options.items():
for policy_idx, options in self._override_options.items():
policy = (None if policy_idx is None
else POLICIES.get_by_index(policy_idx))
if options.read_affinity and options.sorting_method != 'affinity':
self.logger.warning(
_("sorting_method is set to '%(method)s', not 'affinity'; "
@@ -625,8 +627,10 @@ class Application(object):
:param msg: error message
"""
self._incr_node_errors(node)
if isinstance(msg, bytes):
msg = msg.decode('utf-8')
self.logger.error(_('%(msg)s %(ip)s:%(port)s/%(device)s'),
{'msg': msg.decode('utf-8'), 'ip': node['ip'],
{'msg': msg, 'ip': node['ip'],
'port': node['port'], 'device': node['device']})
def iter_nodes(self, ring, partition, node_iter=None, policy=None):