Merge "Use instance attr instead of non-local"

This commit is contained in:
Zuul
2023-04-24 17:27:17 +00:00
committed by Gerrit Code Review

View File

@@ -2627,15 +2627,14 @@ class ECFragGetter(object):
             # This is safe; it sets up a generator but does not call next()
             # on it, so no IO is performed.
-            parts_iter = [
-                http_response_to_document_iters(
-                    self.source, read_chunk_size=self.app.object_chunk_size)]
+            self.source_parts_iter = http_response_to_document_iters(
+                self.source, read_chunk_size=self.app.object_chunk_size)

             def get_next_doc_part():
                 while True:
                     # the loop here is to resume if trying to parse
                     # multipart/byteranges response raises a ChunkReadTimeout
-                    # and resets the parts_iter
+                    # and resets the source_parts_iter
                     try:
                         with WatchdogTimeout(self.app.watchdog, node_timeout,
                                              ChunkReadTimeout):
@@ -2647,7 +2646,7 @@ class ECFragGetter(object):
                         # we have a multipart/byteranges response; as it
                         # will read the MIME boundary and part headers.
                         start_byte, end_byte, length, headers, part = next(
-                            parts_iter[0])
+                            self.source_parts_iter)
                         return (start_byte, end_byte, length, headers, part)
                     except ChunkReadTimeout:
                         new_source, new_node = self._dig_for_source_and_node()
@@ -2663,9 +2662,10 @@ class ECFragGetter(object):
                             self.node = new_node
                             # This is safe; it sets up a generator but does
                             # not call next() on it, so no IO is performed.
-                            parts_iter[0] = http_response_to_document_iters(
-                                new_source,
-                                read_chunk_size=self.app.object_chunk_size)
+                            self.source_parts_iter = \
+                                http_response_to_document_iters(
+                                    new_source,
+                                    read_chunk_size=self.app.object_chunk_size)

             def iter_bytes_from_response_part(part_file, nbytes):
                 nchunks = 0
@@ -2706,9 +2706,10 @@ class ECFragGetter(object):
                                 # This is safe; it just sets up a generator but
                                 # does not call next() on it, so no IO is
                                 # performed.
-                                parts_iter[0] = http_response_to_document_iters(
-                                    new_source,
-                                    read_chunk_size=self.app.object_chunk_size)
+                                self.source_parts_iter = \
+                                    http_response_to_document_iters(
+                                        new_source,
+                                        read_chunk_size=self.app.object_chunk_size)
                             try:
                                 _junk, _junk, _junk, _junk, part_file = \
                                     get_next_doc_part()