Merge "Avoid usage of six.moves in local functions"

This commit is contained in:
Jenkins 2014-10-06 19:17:44 +00:00 committed by Gerrit Code Review
commit 1caaecc5d6
5 changed files with 18 additions and 15 deletions

View File

@@ -30,7 +30,7 @@ top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir)) os.pardir))
sys.path.insert(0, top_dir) sys.path.insert(0, top_dir)
import six from six.moves import range as compat_range
from zake import fake_client from zake import fake_client
from taskflow import exceptions as excp from taskflow import exceptions as excp
@@ -137,7 +137,7 @@ def producer(ident, client):
name = "P-%s" % (ident) name = "P-%s" % (ident)
safe_print(name, "started") safe_print(name, "started")
with backends.backend(name, SHARED_CONF.copy(), client=client) as board: with backends.backend(name, SHARED_CONF.copy(), client=client) as board:
for i in six.moves.xrange(0, PRODUCER_UNITS): for i in compat_range(0, PRODUCER_UNITS):
job_name = "%s-%s" % (name, i) job_name = "%s-%s" % (name, i)
details = { details = {
'color': random.choice(['red', 'blue']), 'color': random.choice(['red', 'blue']),
@@ -151,13 +151,13 @@ def producer(ident, client):
def main(): def main():
with contextlib.closing(fake_client.FakeClient()) as c: with contextlib.closing(fake_client.FakeClient()) as c:
created = [] created = []
for i in range(0, PRODUCERS): for i in compat_range(0, PRODUCERS):
p = threading.Thread(target=producer, args=(i + 1, c)) p = threading.Thread(target=producer, args=(i + 1, c))
p.daemon = True p.daemon = True
created.append(p) created.append(p)
p.start() p.start()
consumed = collections.deque() consumed = collections.deque()
for i in range(0, WORKERS): for i in compat_range(0, WORKERS):
w = threading.Thread(target=worker, args=(i + 1, c, consumed)) w = threading.Thread(target=worker, args=(i + 1, c, consumed))
w.daemon = True w.daemon = True
created.append(w) created.append(w)

View File

@@ -20,13 +20,13 @@ import os
import sys import sys
import threading import threading
import six
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir,
os.pardir)) os.pardir))
sys.path.insert(0, top_dir) sys.path.insert(0, top_dir)
from six.moves import range as compat_range
from taskflow import engines from taskflow import engines
from taskflow.engines.worker_based import worker from taskflow.engines.worker_based import worker
from taskflow.patterns import unordered_flow as uf from taskflow.patterns import unordered_flow as uf
@@ -84,7 +84,7 @@ class MandelCalculator(task.Task):
def mandelbrot(x, y, max_iters): def mandelbrot(x, y, max_iters):
c = complex(x, y) c = complex(x, y)
z = 0.0j z = 0.0j
for i in six.moves.xrange(max_iters): for i in compat_range(max_iters):
z = z * z + c z = z * z + c
if (z.real * z.real + z.imag * z.imag) >= 4: if (z.real * z.real + z.imag * z.imag) >= 4:
return i return i
@@ -95,10 +95,10 @@ class MandelCalculator(task.Task):
pixel_size_x = (max_x - min_x) / width pixel_size_x = (max_x - min_x) / width
pixel_size_y = (max_y - min_y) / height pixel_size_y = (max_y - min_y) / height
block = [] block = []
for y in six.moves.xrange(chunk[0], chunk[1]): for y in compat_range(chunk[0], chunk[1]):
row = [] row = []
imag = min_y + y * pixel_size_y imag = min_y + y * pixel_size_y
for x in six.moves.xrange(0, width): for x in compat_range(0, width):
real = min_x + x * pixel_size_x real = min_x + x * pixel_size_x
row.append(mandelbrot(real, imag, max_iters)) row.append(mandelbrot(real, imag, max_iters))
block.append(row) block.append(row)
@@ -133,7 +133,7 @@ def calculate(engine_conf):
# Compose our workflow. # Compose our workflow.
height, width = IMAGE_SIZE height, width = IMAGE_SIZE
chunk_size = int(math.ceil(height / float(CHUNK_COUNT))) chunk_size = int(math.ceil(height / float(CHUNK_COUNT)))
for i in six.moves.xrange(0, CHUNK_COUNT): for i in compat_range(0, CHUNK_COUNT):
chunk_name = 'chunk_%s' % i chunk_name = 'chunk_%s' % i
task_name = "calculation_%s" % i task_name = "calculation_%s" % i
# Break the calculation up into chunk size pieces. # Break the calculation up into chunk size pieces.
@@ -225,7 +225,7 @@ def create_fractal():
try: try:
# Create a set of workers to simulate actual remote workers. # Create a set of workers to simulate actual remote workers.
print('Running %s workers.' % (WORKERS)) print('Running %s workers.' % (WORKERS))
for i in range(0, WORKERS): for i in compat_range(0, WORKERS):
worker_conf['topic'] = 'calculator_%s' % (i + 1) worker_conf['topic'] = 'calculator_%s' % (i + 1)
worker_topics.append(worker_conf['topic']) worker_topics.append(worker_conf['topic'])
w = worker.Worker(**worker_conf) w = worker.Worker(**worker_conf)

View File

@@ -17,6 +17,7 @@
from kazoo import client from kazoo import client
from kazoo import exceptions as k_exc from kazoo import exceptions as k_exc
import six import six
from six.moves import zip as compat_zip
from taskflow import exceptions as exc from taskflow import exceptions as exc
from taskflow.utils import reflection from taskflow.utils import reflection
@@ -100,7 +101,7 @@ def checked_commit(txn):
return [] return []
results = txn.commit() results = txn.commit()
failures = [] failures = []
for op, result in six.moves.zip(txn.operations, results): for op, result in compat_zip(txn.operations, results):
if isinstance(result, k_exc.KazooException): if isinstance(result, k_exc.KazooException):
failures.append((op, result)) failures.append((op, result))
if len(results) < len(txn.operations): if len(results) < len(txn.operations):

View File

@@ -34,6 +34,8 @@ import traceback
from oslo.serialization import jsonutils from oslo.serialization import jsonutils
from oslo.utils import netutils from oslo.utils import netutils
import six import six
from six.moves import map as compat_map
from six.moves import range as compat_range
from six.moves.urllib import parse as urlparse from six.moves.urllib import parse as urlparse
from taskflow import exceptions as exc from taskflow import exceptions as exc
@@ -286,7 +288,7 @@ def item_from(container, index, name=None):
def get_duplicate_keys(iterable, key=None): def get_duplicate_keys(iterable, key=None):
if key is not None: if key is not None:
iterable = six.moves.map(key, iterable) iterable = compat_map(key, iterable)
keys = set() keys = set()
duplicates = set() duplicates = set()
for item in iterable: for item in iterable:
@@ -373,7 +375,7 @@ class ExponentialBackoff(object):
def __iter__(self): def __iter__(self):
if self.count <= 0: if self.count <= 0:
raise StopIteration() raise StopIteration()
for i in six.moves.range(0, self.count): for i in compat_range(0, self.count):
yield min(self.exponent ** i, self.max_backoff) yield min(self.exponent ** i, self.max_backoff)
def __str__(self): def __str__(self):

View File

@@ -54,7 +54,7 @@ builtins = _
exclude = .venv,.tox,dist,doc,./taskflow/openstack/common,*egg,.git,build,tools exclude = .venv,.tox,dist,doc,./taskflow/openstack/common,*egg,.git,build,tools
[hacking] [hacking]
import_exceptions = six.moves.mock import_exceptions = six.moves
taskflow.test.mock taskflow.test.mock
unittest.mock unittest.mock