Bump pyupgrade target to 3.10+

... according to the Python versions currently supported.

Change-Id: Ibc60bf2bb13a57625d6afd045be88d988f4b24a9
Signed-off-by: Takashi Kajinami <kajinamit@oss.nttdata.com>
This commit is contained in:
Takashi Kajinami
2025-10-05 19:46:33 +09:00
parent f6376b9e22
commit e3cca6cd18
33 changed files with 47 additions and 47 deletions

View File

@@ -22,4 +22,4 @@ repos:
rev: v3.20.0
hooks:
- id: pyupgrade
args: [--py3-only]
args: [--py310-plus]

View File

@@ -377,7 +377,7 @@ class Atom(metaclass=abc.ABCMeta):
"""
def __str__(self):
return '"{}=={}"'.format(self.name, misc.get_version_string(self))
return f'"{self.name}=={misc.get_version_string(self)}"'
def __repr__(self):
return '<{} {}>'.format(reflection.get_class_name(self), self)
return f'<{reflection.get_class_name(self)} {self}>'

View File

@@ -49,7 +49,7 @@ class Terminator:
def __init__(self, flow):
self._flow = flow
self._name = "{}[$]".format(self._flow.name)
self._name = f"{self._flow.name}[$]"
@property
def flow(self):

View File

@@ -143,7 +143,7 @@ class Proxy:
def _make_queue(self, routing_key, exchange, channel=None):
"""Make a named queue for the given exchange."""
queue_name = "{}_{}".format(self._exchange_name, routing_key)
queue_name = f"{self._exchange_name}_{routing_key}"
return kombu.Queue(name=queue_name,
routing_key=routing_key, durable=False,
exchange=exchange, auto_delete=True,

View File

@@ -43,7 +43,7 @@ def show_time(name):
start = time.time()
yield
end = time.time()
print(" -- {} took {:0.3f} seconds".format(name, end - start))
print(f" -- {name} took {end - start:0.3f} seconds")
# This affects how many volumes to create and how much time to *simulate*
@@ -83,7 +83,7 @@ class VolumeCreator(task.Task):
# volume create can be resumed/revert, and is much easier to use for
# audit and tracking purposes.
base_name = reflection.get_callable_name(self)
super().__init__(name="{}-{}".format(base_name, volume_id))
super().__init__(name=f"{base_name}-{volume_id}")
self._volume_id = volume_id
def execute(self):

View File

@@ -65,6 +65,6 @@ print("---------")
for path in backend.memory.ls_r(backend.memory.root_path, absolute=True):
value = backend.memory[path]
if value:
print("{} -> {}".format(path, value))
print(f"{path} -> {value}")
else:
print("%s" % (path))

View File

@@ -137,7 +137,7 @@ class ActivateDriver(task.Task):
def update_progress(self, progress, **kwargs):
# Override the parent method to also print out the status.
super().update_progress(progress, **kwargs)
print("{} is {:0.2f}% done".format(self.name, progress * 100))
print(f"{self.name} is {progress * 100:0.2f}% done")
class DeclareSuccess(task.Task):

View File

@@ -97,7 +97,7 @@ print("Single threaded engine result %s" % result)
for (name, value) in expected:
actual = result.get(name)
if actual != value:
sys.stderr.write("{} != {}\n".format(actual, value))
sys.stderr.write(f"{actual} != {value}\n")
unexpected += 1
result = taskflow.engines.run(
@@ -107,7 +107,7 @@ print("Multi threaded engine result %s" % result)
for (name, value) in expected:
actual = result.get(name)
if actual != value:
sys.stderr.write("{} != {}\n".format(actual, value))
sys.stderr.write(f"{actual} != {value}\n")
unexpected += 1
if unexpected:

View File

@@ -41,7 +41,7 @@ class PrinterTask(task.Task):
def execute(self, output):
if self._show_name:
print("{}: {}".format(self.name, output))
print(f"{self.name}: {output}")
else:
print(output)

View File

@@ -82,9 +82,9 @@ def dispatch_work(job):
def safe_print(name, message, prefix=""):
with STDOUT_LOCK:
if prefix:
print("{} {}: {}".format(prefix, name, message))
print(f"{prefix} {name}: {message}")
else:
print("{}: {}".format(name, message))
print(f"{name}: {message}")
def worker(ident, client, consumed):
@@ -136,7 +136,7 @@ def producer(ident, client):
safe_print(name, "started")
with backends.backend(name, SHARED_CONF.copy(), client=client) as board:
for i in range(0, PRODUCER_UNITS):
job_name = "{}-{}".format(name, i)
job_name = f"{name}-{i}"
details = {
'color': random.choice(['red', 'blue']),
}

View File

@@ -65,7 +65,7 @@ class CallTask(task.Task):
"""Task that calls person by number."""
def execute(self, person, number):
print('Calling {} {}.'.format(person, number))
print(f'Calling {person} {number}.')
# This is how it works for one person:
@@ -82,7 +82,7 @@ taskflow.engines.run(simple_flow, store={'person': 'Josh'})
# we use `rebind` argument of task constructor.
def subflow_factory(prefix):
def pr(what):
return '{}-{}'.format(prefix, what)
return f'{prefix}-{what}'
return lf.Flow(pr('flow')).add(
FetchNumberTask(pr('fetch'),

View File

@@ -60,7 +60,7 @@ import example_utils as eu # noqa
def print_task_states(flowdetail, msg):
eu.print_wrapped(msg)
print("Flow '{}' state: {}".format(flowdetail.name, flowdetail.state))
print(f"Flow '{flowdetail.name}' state: {flowdetail.state}")
# Sort by these so that our test validation doesn't get confused by the
# order in which the items in the flow detail can be in.
items = sorted((td.name, td.version, td.state, td.results)

View File

@@ -37,7 +37,7 @@ FINISHED_STATES = (states.SUCCESS, states.FAILURE, states.REVERTED)
def resume(flowdetail, backend):
print('Resuming flow {} {}'.format(flowdetail.name, flowdetail.uuid))
print(f'Resuming flow {flowdetail.name} {flowdetail.uuid}')
engine = taskflow.engines.load_from_detail(flow_detail=flowdetail,
backend=backend)
engine.run()

View File

@@ -110,7 +110,7 @@ class DownloadImages(task.Task):
def execute(self, image_locations):
for src, loc in image_locations.items():
with slow_down(1):
print("Downloading from {} => {}".format(src, loc))
print(f"Downloading from {src} => {loc}")
return sorted(image_locations.values())
@@ -149,7 +149,7 @@ class WriteNetworkSettings(task.Task):
def execute(self, download_paths, network_settings):
for j, path in enumerate(download_paths):
with slow_down(1):
print("Mounting {} to /tmp/{}".format(path, j))
print(f"Mounting {path} to /tmp/{j}")
for i, setting in enumerate(network_settings):
filename = ("/tmp/etc/sysconfig/network-scripts/"
"ifcfg-eth%s" % (i))

View File

@@ -49,4 +49,4 @@ e.compile()
e.prepare()
for i, st in enumerate(e.run_iter(), 1):
print("Transition {}: {}".format(i, st))
print(f"Transition {i}: {st}")

View File

@@ -45,7 +45,7 @@ class DelayedTask(task.Task):
self._wait_for = random.random()
def execute(self):
print("Running '{}' in thread '{}'".format(self.name, tu.get_ident()))
print(f"Running '{self.name}' in thread '{tu.get_ident()}'")
time.sleep(self._wait_for)

View File

@@ -62,7 +62,7 @@ while entries:
path = entries.pop()
value = backend.memory[path]
if value:
print("{} -> {}".format(path, value))
print(f"{path} -> {value}")
else:
print("%s" % (path))
entries.extend(os.path.join(path, child)

View File

@@ -58,7 +58,7 @@ from taskflow.utils import threading_utils
RUN_TIME = 5
REVIEW_CREATION_DELAY = 0.5
SCAN_DELAY = 0.1
NAME = "{}_{}".format(socket.getfqdn(), os.getpid())
NAME = f"{socket.getfqdn()}_{os.getpid()}"
# This won't really use zookeeper but will use a local version of it using
# the zake library that mimics an actual zookeeper cluster using threads and

View File

@@ -120,7 +120,7 @@ class FailureFormatter:
atom_attrs['provides'] = self._mask_keys(
provides, self._mask_outputs_keys)
if atom_attrs:
return "Atom '{}' {}".format(atom_name, atom_attrs)
return f"Atom '{atom_name}' {atom_attrs}"
else:
return "Atom '%s'" % (atom_name)
else:
@@ -170,7 +170,7 @@ class FailureFormatter:
rooted_tree = builder(graph, atom)
child_count = rooted_tree.child_count(only_direct=False)
buff.write_nl(
'{} {} (most recent first):'.format(child_count, kind))
f'{child_count} {kind} (most recent first):')
formatter = functools.partial(self._format_node, storage, cache)
direct_child_count = rooted_tree.child_count(only_direct=True)
for i, child in enumerate(rooted_tree, 1):

View File

@@ -377,14 +377,14 @@ class EtcdJobBoard(base.JobBoard):
"""Returns how many jobs are on this jobboard."""
return len(self._job_cache)
def get_owner_data(self, job: EtcdJob) -> typing.Optional[dict]:
def get_owner_data(self, job: EtcdJob) -> dict | None:
owner_key = job.key + self.LOCK_POSTFIX
owner_data = self.get_one(owner_key)
if not owner_data:
return None
return jsonutils.loads(owner_data)
def find_owner(self, job: EtcdJob) -> typing.Optional[dict]:
def find_owner(self, job: EtcdJob) -> dict | None:
"""Gets the owner of the job if one exists."""
data = self.get_owner_data(job)
if data:
@@ -396,7 +396,7 @@ class EtcdJobBoard(base.JobBoard):
return self.get_one(key)
def get_owner_and_data(self, job: EtcdJob) -> tuple[
typing.Optional[str], typing.Optional[bytes]]:
str | None, bytes | None]:
if self._client is None:
raise exc.JobFailure("Cannot retrieve information, "
"not connected")

View File

@@ -118,7 +118,7 @@ class PrintingDurationListener(DurationListener):
def _receiver(self, item_type, item_name, state):
super()._receiver(item_type, item_name, state)
if state in STARTING_STATES:
self._printer("'{}' {} started.".format(item_name, item_type))
self._printer(f"'{item_name}' {item_type} started.")
class EventTimeListener(base.Listener):

View File

@@ -62,7 +62,7 @@ def fetch(conf, namespace=BACKEND_NAMESPACE, **kwargs):
invoke_kwds=kwargs)
return mgr.driver
except RuntimeError as e:
raise exc.NotFound("Could not find backend {}: {}".format(backend, e))
raise exc.NotFound(f"Could not find backend {backend}: {e}")
@contextlib.contextmanager

View File

@@ -36,7 +36,7 @@ class GreaterThanEqual:
def match(self, other):
if other >= self.source:
return None
return matchers.Mismatch("{} was not >= {}".format(other, self.source))
return matchers.Mismatch(f"{other} was not >= {self.source}")
class FailureRegexpMatcher:

View File

@@ -71,7 +71,7 @@ class PersistenceTestMixin:
lb_ids = {}
for i in range(0, 10):
lb_id = uuidutils.generate_uuid()
lb_name = 'lb-{}-{}'.format(i, lb_id)
lb_name = f'lb-{i}-{lb_id}'
lb = models.LogBook(name=lb_name, uuid=lb_id)
lb_ids[lb_id] = True

View File

@@ -59,7 +59,7 @@ def _get_connect_string(backend, user, passwd, database=None, variant=None):
raise Exception("Unrecognized backend: '%s'" % backend)
if not database:
database = ''
return "{}://{}:{}@localhost/{}".format(backend, user, passwd, database)
return f"{backend}://{user}:{passwd}@localhost/{database}"
def _mysql_exists():

View File

@@ -70,7 +70,7 @@ class TestProxy(test.MockTestCase):
self.resetMasterMock()
def _queue_name(self, topic):
return "{}_{}".format(self.exchange, topic)
return f"{self.exchange}_{topic}"
def proxy_start_calls(self, calls, exc_type=mock.ANY):
return [

View File

@@ -29,15 +29,15 @@ def _common_format(g, edge_notation):
lines.append("Nodes: %s" % g.number_of_nodes())
for n, n_data in g.nodes(data=True):
if n_data:
lines.append(" - {} ({})".format(n, n_data))
lines.append(f" - {n} ({n_data})")
else:
lines.append(" - %s" % n)
lines.append("Edges: %s" % g.number_of_edges())
for (u, v, e_data) in g.edges(data=True):
if e_data:
lines.append(" {} {} {} ({})".format(u, edge_notation, v, e_data))
lines.append(f" {u} {edge_notation} {v} ({e_data})")
else:
lines.append(" {} {} {}".format(u, edge_notation, v))
lines.append(f" {u} {edge_notation} {v}")
return lines

View File

@@ -64,7 +64,7 @@ class OrderedSet(abc.Set, abc.Hashable):
return tuple(self)
def __repr__(self):
return "{}({})".format(type(self).__name__, list(self))
return f"{type(self).__name__}({list(self)})"
def copy(self):
"""Return a shallow copy of a set."""

View File

@@ -86,9 +86,9 @@ def make_banner(what, chapters):
sections = chapter_contents
for j, section in enumerate(sections):
if j + 1 < len(sections):
buf.write_nl(" {}. {}".format(j + 1, section))
buf.write_nl(f" {j + 1}. {section}")
else:
buf.write(" {}. {}".format(j + 1, section))
buf.write(f" {j + 1}. {section}")
else:
raise TypeError("Unsupported chapter contents"
" type: one of dict, list, tuple, set expected"

View File

@@ -39,7 +39,7 @@ def _parse_hosts(hosts):
if isinstance(hosts, (dict)):
host_ports = []
for (k, v) in hosts.items():
host_ports.append("{}:{}".format(k, v))
host_ports.append(f"{k}:{v}")
hosts = host_ports
if isinstance(hosts, (list, set, tuple)):
return ",".join([str(h) for h in hosts])
@@ -63,7 +63,7 @@ def prettify_failures(failures, limit=-1):
pass
pretty_op += "(%s)" % (", ".join(selected_attrs))
pretty_cause = reflection.get_class_name(r, fully_qualified=False)
prettier.append("{}@{}".format(pretty_cause, pretty_op))
prettier.append(f"{pretty_cause}@{pretty_op}")
if limit <= 0 or len(prettier) <= limit:
return ", ".join(prettier)
else:

View File

@@ -187,7 +187,7 @@ def find_subclasses(locations, base_cls, exclude_hidden=True):
except ValueError:
module = importutils.import_module(item)
else:
obj = importutils.import_class('{}.{}'.format(pkg, cls))
obj = importutils.import_class(f'{pkg}.{cls}')
if not reflection.is_subclass(obj, base_cls):
raise TypeError("Object '%s' (%s) is not a '%s' subclass"
% (item, type(item), base_cls))

View File

@@ -26,4 +26,4 @@ def version_string_with_package():
if TASK_PACKAGE is None:
return version_string()
else:
return "{}-{}".format(version_string(), TASK_PACKAGE)
return f"{version_string()}-{TASK_PACKAGE}"

View File

@@ -185,7 +185,7 @@ def main():
print(g.to_string().strip())
g.write(options.filename, format=options.format)
print("Created {} at '{}'".format(options.format, options.filename))
print(f"Created {options.format} at '{options.filename}'")
# To make the svg more pretty use the following:
# $ xsltproc ../diagram-tools/notugly.xsl ./states.svg > pretty-states.svg