Merge "Extract some closures to methods"
commit 0a970d0736
@@ -2620,17 +2620,9 @@ class ECFragGetter(object):
             it = self._get_response_parts_iter(req)
         return it
 
-    def _get_response_parts_iter(self, req):
-        try:
-            client_chunk_size = self.client_chunk_size
+    def get_next_doc_part(self):
         node_timeout = self.app.recoverable_node_timeout
 
-            # This is safe; it sets up a generator but does not call next()
-            # on it, so no IO is performed.
-            self.source_parts_iter = http_response_to_document_iters(
-                self.source, read_chunk_size=self.app.object_chunk_size)
-
-            def get_next_doc_part():
         while True:
             # the loop here is to resume if trying to parse
             # multipart/byteranges response raises a ChunkReadTimeout
@@ -2667,7 +2659,9 @@ class ECFragGetter(object):
                     new_source,
                     read_chunk_size=self.app.object_chunk_size)
 
-            def iter_bytes_from_response_part(part_file, nbytes):
+    def iter_bytes_from_response_part(self, part_file, nbytes):
+        client_chunk_size = self.client_chunk_size
+        node_timeout = self.app.recoverable_node_timeout
         nchunks = 0
         buf = b''
         part_file = ByteCountEnforcer(part_file, nbytes)
@@ -2712,7 +2706,7 @@ class ECFragGetter(object):
                         read_chunk_size=self.app.object_chunk_size)
                 try:
                     _junk, _junk, _junk, _junk, part_file = \
-                        get_next_doc_part()
+                        self.get_next_doc_part()
                 except StopIteration:
                     # it's not clear to me how to make
                     # get_next_doc_part raise StopIteration for the
@@ -2779,12 +2773,19 @@ class ECFragGetter(object):
                 if nchunks % 5 == 0:
                     sleep()
 
+    def _get_response_parts_iter(self, req):
+        try:
+            # This is safe; it sets up a generator but does not call next()
+            # on it, so no IO is performed.
+            self.source_parts_iter = http_response_to_document_iters(
+                self.source, read_chunk_size=self.app.object_chunk_size)
+
             part_iter = None
             try:
                 while True:
                     try:
                         start_byte, end_byte, length, headers, part = \
-                            get_next_doc_part()
+                            self.get_next_doc_part()
                     except StopIteration:
                         # it seems this is the only way out of the loop; not
                         # sure why the req.environ update is always needed
@@ -2801,7 +2802,8 @@ class ECFragGetter(object):
                                   if (end_byte is not None
                                       and start_byte is not None)
                                   else None)
-                    part_iter = iter_bytes_from_response_part(part, byte_count)
+                    part_iter = self.iter_bytes_from_response_part(
+                        part, byte_count)
                     yield {'start_byte': start_byte, 'end_byte': end_byte,
                            'entity_length': length, 'headers': headers,
                            'part_iter': part_iter}
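
For illustration only, and not Swift's actual code: the pattern applied by this change is to lift closures that previously captured state from an enclosing method's scope and turn them into methods that read the same state from self. A minimal standalone sketch of that before/after shape, with hypothetical names (Getter, get_next_part, iter_bytes_from_part, parts_iter):

# Hypothetical, simplified sketch of the closure-to-method extraction
# pattern; the names and behavior here are illustrative, not Swift's.
class Getter:
    def __init__(self, parts):
        # Stands in for the response parts held by the real getter.
        self.parts = parts

    # Before extraction, this logic would live in a closure defined inside
    # parts_iter(); as a method it takes its inputs explicitly or via self.
    def get_next_part(self, it):
        return next(it)

    def iter_bytes_from_part(self, part):
        # Yield the part's payload in fixed-size chunks.
        chunk_size = 2
        for i in range(0, len(part), chunk_size):
            yield part[i:i + chunk_size]

    def parts_iter(self):
        it = iter(self.parts)
        while True:
            try:
                part = self.get_next_part(it)
            except StopIteration:
                break
            yield {'part_iter': self.iter_bytes_from_part(part)}


if __name__ == '__main__':
    g = Getter([b'abcd', b'ef'])
    for doc in g.parts_iter():
        print(b''.join(doc['part_iter']))

One upside of the extraction, visible in the diff above, is that the helpers become individually testable and the remaining generator method is much shorter; the cost is that shared locals such as client_chunk_size and node_timeout must be re-derived from self inside each method.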