Delay string interpolations at logging calls

This was already fixed in 4889dc1814, but we did not enforce this rule and
reintroduced "bad" string interpolations. This patch adds a hacking rule to
prevent us from doing this again.

Change-Id: I96d9a157d3887286542859d67138ffdae5a589f1
Author: Cyril Roelandt
Date:   2021-07-28 02:52:51 +02:00
parent 331ce59a50
commit 1962f47ed2
18 changed files with 35 additions and 34 deletions
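
For reference, the pattern being enforced is the lazy-interpolation form that the standard logging module supports: the format arguments are passed to the call, and the message is only rendered if a handler actually emits the record. A minimal illustration (names taken from the hunks below):

    # Discouraged: the message is formatted eagerly, even when DEBUG is off.
    LOG.debug("User not permitted by '%s' policy" % new_policy)

    # Preferred: interpolation is deferred to the logging machinery.
    LOG.debug("User not permitted by '%s' policy", new_policy)

    # Dict-style replacement fields work the same way: pass the mapping as the
    # next argument instead of applying '%' yourself.
    LOG.debug("Flow: %(task_type)s with ID %(id)s on %(repo)s",
              {'task_type': task_type, 'id': task_id, 'repo': image_repo})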

@@ -65,7 +65,7 @@ class CacheController(object):
                 req.context, image=image, enforcer=self.policy,
                 policy_str=new_policy).manage_image_cache()
         except exception.Forbidden:
-            LOG.debug("User not permitted by '%s' policy" % new_policy)
+            LOG.debug("User not permitted by '%s' policy", new_policy)
             raise webob.exc.HTTPForbidden()

         if not CONF.image_cache_dir:

@@ -281,7 +281,7 @@ class ImagesController(object):
                 raise webob.exc.HTTPBadGateway('Stage host is unavailable')
             req_id_hdr = 'x-openstack-request-id'
             if req_id_hdr in r.headers:
-                LOG.debug('Replying with remote request id %s' % (
+                LOG.debug('Replying with remote request id %s', (
                     r.headers[req_id_hdr]))
                 req.context.request_id = r.headers[req_id_hdr]
             if r.status_code // 100 != 2:
@@ -391,7 +391,7 @@ class ImagesController(object):
             if existing_store in stores:
                 LOG.debug("Removing store '%s' from all stores as "
                           "image is already available in that "
-                          "store." % existing_store)
+                          "store.", existing_store)
                 stores.remove(existing_store)
             if len(stores) == 0:

@@ -103,9 +103,9 @@ class ThreadPoolModel(object):

     def spawn(self, fn, *args, **kwargs):
         """Spawn a function with args using the thread pool."""
-        LOG.debug('Spawning with %s: %s(%s, %s)' % (
-            self.get_threadpool_executor_class().__name__,
-            fn, args, kwargs))
+        LOG.debug('Spawning with %s: %s(%s, %s)',
+                  self.get_threadpool_executor_class().__name__,
+                  fn, args, kwargs)
         return self.pool.submit(fn, *args, **kwargs)

     def map(self, fn, iterable):

@@ -274,7 +274,7 @@ def get_flow(**kwargs):
     task_type = kwargs.get('task_type')
     image_repo = kwargs.get('image_repo')

-    LOG.debug("Flow: %(task_type)s with ID %(id)s on %(repo)s" %
+    LOG.debug("Flow: %(task_type)s with ID %(id)s on %(repo)s",
               {'task_type': task_type, 'id': task_id, 'repo': image_repo})

     return lf.Flow(task_type).add(

@@ -150,7 +150,7 @@ class TaskExecutor(glance.async_.TaskExecutor):
         try:
             super(TaskExecutor, self).begin_processing(task_id)
         except exception.ImportTaskError as exc:
-            LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s') %
+            LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s'),
                       {'task_id': task_id, 'exc': exc.msg})
             task = self.task_repo.get(task_id)
             task.fail(exc.msg)
@@ -183,7 +183,7 @@ class TaskExecutor(glance.async_.TaskExecutor):
             self.task_repo.save(task)
         except Exception as exc:
             with excutils.save_and_reraise_exception():
-                LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s') %
+                LOG.error(_LE('Failed to execute task %(task_id)s: %(exc)s'),
                           {'task_id': task_id,
                            'exc': encodeutils.exception_to_unicode(exc)})
                 # TODO(sabari): Check for specific exceptions and update the

@@ -634,12 +634,12 @@ def replication_compare(options, args):
                 if image[key] != headers.get(key):
                     LOG.warning(_LW('%(image_id)s: field %(key)s differs '
                                     '(source is %(source_value)s, destination '
-                                    'is %(target_value)s)')
-                                % {'image_id': image['id'],
-                                   'key': key,
-                                   'source_value': image[key],
-                                   'target_value': headers.get(key,
-                                                               'undefined')})
+                                    'is %(target_value)s)'),
+                                {'image_id': image['id'],
+                                 'key': key,
+                                 'source_value': image[key],
+                                 'target_value': headers.get(key,
+                                                             'undefined')})
                     differences[image['id']] = 'diff'
                 else:
                     LOG.debug('%(image_id)s is identical',
@@ -647,9 +647,9 @@ def replication_compare(options, args):
         elif image['status'] == 'active':
             LOG.warning(_LW('Image %(image_id)s ("%(image_name)s") '
-                            'entirely missing from the destination')
-                        % {'image_id': image['id'],
-                           'image_name': image.get('name', '--unnamed')})
+                            'entirely missing from the destination'),
+                        {'image_id': image['id'],
+                         'image_name': image.get('name', '--unnamed')})
             differences[image['id']] = 'missing'

     return differences

@@ -170,7 +170,7 @@ class PropertyRules(object):
                 LOG.warning(
                     _LW('Property protection on operation %(operation)s'
                         ' for rule %(rule)s is not found. No role will be'
-                        ' allowed to perform this operation.') %
+                        ' allowed to perform this operation.'),
                     {'operation': operation,
                      'rule': property_exp})

@@ -124,7 +124,7 @@ def set_image_data(image, uri, task_id, backend=None):
         image.set_data(data_iter, backend=backend)
     except Exception as e:
         with excutils.save_and_reraise_exception():
-            LOG.warning("Task %(task_id)s failed with exception %(error)s" %
+            LOG.warning("Task %(task_id)s failed with exception %(error)s",
                         {"error": encodeutils.exception_to_unicode(e),
                          "task_id": task_id})
             LOG.info("Task %(task_id)s: Could not import image file"

@@ -152,7 +152,7 @@ def set_image_data(image, uri, task_id, backend=None, set_active=True,
     except Exception as e:
         with excutils.save_and_reraise_exception():
             LOG.warning(_LW("Task %(task_id)s failed with exception "
-                            "%(error)s") %
+                            "%(error)s"),
                         {"error": encodeutils.exception_to_unicode(e),
                          "task_id": task_id})
             LOG.info(_LI("Task %(task_id)s: Could not import image file"

@@ -673,14 +673,14 @@ class PosixServer(BaseServer):
                 self.stale_children.remove(pid)
                 LOG.info(_LI('Removed stale child %s'), pid)
             else:
-                LOG.warning(_LW('Unrecognised child %s') % pid)
+                LOG.warning(_LW('Unrecognised child %s'), pid)

     def _verify_and_respawn_children(self, pid, status):
         if len(self.stale_children) == 0:
             LOG.debug('No stale children')
         if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
             LOG.error(_LE('Not respawning child %d, cannot '
-                          'recover from termination') % pid)
+                          'recover from termination'), pid)
             if not self.children and not self.stale_children:
                 LOG.info(
                     _LI('All workers have terminated. Exiting'))

@@ -537,7 +537,7 @@ class TaskExecutorFactory(object):
         except ImportError:
             with excutils.save_and_reraise_exception():
                 LOG.exception(_LE("Failed to load the %s executor provided "
-                                  "in the config.") % CONF.task.task_executor)
+                                  "in the config."), CONF.task.task_executor)


 class MetadefNamespace(object):

@@ -371,7 +371,7 @@ class ImageCache(object):
         except Exception as e:
             LOG.exception(_LE("Exception encountered while tee'ing "
                               "image '%(image_id)s' into cache: %(error)s. "
-                              "Continuing with response.") %
+                              "Continuing with response."),
                           {'image_id': image_id,
                            'error': encodeutils.exception_to_unicode(e)})

@@ -348,7 +348,7 @@ class Driver(base.Driver):
             msg = (_LW("Fetch of cache file failed (%(e)s), rolling "
                        "back by moving '%(incomplete_path)s' to "
-                       "'%(invalid_path)s'") %
+                       "'%(invalid_path)s'"),
                    {'e': e,
                     'incomplete_path': incomplete_path,
                     'invalid_path': invalid_path})

@@ -51,7 +51,7 @@ class Prefetcher(base.CacheApp):
             return False

         if image.status != 'active':
-            LOG.warning(_LW("Image '%s' is not active. Not caching.") %
+            LOG.warning(_LW("Image '%s' is not active. Not caching."),
                         image_id)
             return False

@@ -410,7 +410,7 @@ class Scrubber(object):
             LOG.info(_LI("Image %s is scrubbed from a location."), image_id)
         except Exception as e:
             LOG.error(_LE("Unable to scrub image %(id)s from a location. "
-                          "Reason: %(exc)s ") %
+                          "Reason: %(exc)s "),
                       {'id': image_id,
                        'exc': encodeutils.exception_to_unicode(e)})
             raise

@@ -51,10 +51,10 @@ class TestImageImportLocking(functional.SynchronousAPIBase):
                           set_active=True):
             me = str(uuid.uuid4())
             while state['want_run'] == True:
-                LOG.info('fake_set_data running %s' % me)
+                LOG.info('fake_set_data running %s', me)
                 state['running'] = True
                 time.sleep(0.1)
-            LOG.info('fake_set_data ended %s' % me)
+            LOG.info('fake_set_data ended %s', me)

         # Constrain oslo timeutils time so we can manipulate it
         tf = time_fixture.TimeFixture()

@@ -314,16 +314,16 @@ def wait_for_fork(pid,
                     pass
                 elif not nice_timer.expired():
                     # Politely ask the process to GTFO
-                    LOG.warning('Killing child %i with SIGTERM' % pid)
+                    LOG.warning('Killing child %i with SIGTERM', pid)
                     os.kill(pid, signal.SIGTERM)
                 else:
                     # No more Mr. Nice Guy
-                    LOG.warning('Killing child %i with SIGKILL' % pid)
+                    LOG.warning('Killing child %i with SIGKILL', pid)
                     os.kill(pid, signal.SIGKILL)
                     expected_exitcode = signal.SIGKILL
                 time.sleep(1)
                 continue
-            LOG.info('waitpid(%i) returned %i,%i' % (pid, cpid, rc))
+            LOG.info('waitpid(%i) returned %i,%i', pid, cpid, rc)
             if rc != expected_exitcode:
                 raise RuntimeError('The exit code %d is not %d'
                                    % (rc, expected_exitcode))
@@ -332,7 +332,7 @@ def wait_for_fork(pid,
                 # Nothing to wait for
                 return 0
         except Exception as e:
-            LOG.error('Got wait error: %s' % e)
+            LOG.error('Got wait error: %s', e)
             if raise_error:
                 raise

@@ -131,6 +131,7 @@ ignore-path = .venv,.git,.tox,.eggs,*glance/locale*,*lib/python*,glance.egg*,api
 # W503 line break before binary operator - conflicting guidance
 # W504 line break after binary operator - conflicting guidance
 ignore = E402,E711,E712,H404,H405,W503,W504
+enable-extensions = H904
 exclude = .venv,.git,.tox,dist,doc,etc,*glance/locale*,*lib/python*,*egg,build

 [hacking]
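
For context, H904 is the opt-in hacking check behind this rule (delayed string interpolation at logging calls); because it is off by default, it has to be listed under enable-extensions rather than merely left out of the ignore list. With it enabled, the usual pep8 run (tox -e pep8) flags the eager form and accepts the deferred one, roughly:

    LOG.info('fake_set_data running %s' % me)   # flagged by H904
    LOG.info('fake_set_data running %s', me)    # clean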