Merge "Use LOG.warning instead of deprecated LOG.warn"
commit 464524db0d
@@ -20,3 +20,4 @@ glance Specific Commandments
 - [G328] Must use a dict comprehension instead of a dict constructor with
 a sequence of key-value pairs
 - [G329] Python 3: Do not use xrange.
+- [G330] Log.warn is deprecated. Enforce use of LOG.warning.
@@ -170,19 +170,19 @@ def check_quota(context, image_size, db_api, image_id=None):
 # exception is when there is no room left at all, thus we know
 # it will not fit
 if remaining <= 0:
-LOG.warn(_LW("User %(user)s attempted to upload an image of"
+LOG.warning(_LW("User %(user)s attempted to upload an image of"
 " unknown size that will exceed the quota."
 " %(remaining)d bytes remaining."),
 {'user': user, 'remaining': remaining})
 raise exception.StorageQuotaFull(image_size=image_size,
 remaining=remaining)
 return

 if image_size > remaining:
-LOG.warn(_LW("User %(user)s attempted to upload an image of size"
+LOG.warning(_LW("User %(user)s attempted to upload an image of size"
 " %(size)d that will exceed the quota. %(remaining)d"
 " bytes remaining."),
 {'user': user, 'size': image_size, 'remaining': remaining})
 raise exception.StorageQuotaFull(image_size=image_size,
 remaining=remaining)

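The change itself is mechanical: logging.Logger.warn is only a deprecated alias for logging.Logger.warning, so each call site switches to the canonical name while keeping the deferred-interpolation style seen in the hunk above. A minimal standalone sketch of that pattern (the user and remaining values here are hypothetical, purely for illustration):

```python
import logging

LOG = logging.getLogger(__name__)

# Hypothetical values, for illustration only.
user, remaining = "demo-user", 0

# Deprecated spelling: Logger.warn is just an alias for Logger.warning.
# LOG.warn(...)

# Preferred spelling; the substitution mapping is handed to the logger so the
# message is only formatted if the record is actually emitted.
LOG.warning("User %(user)s attempted to upload an image of unknown size; "
            "%(remaining)d bytes remaining.",
            {'user': user, 'remaining': remaining})
```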
@@ -73,7 +73,7 @@ class BaseContextMiddleware(wsgi.Middleware):
 try:
 request_id = resp.request.context.request_id
 except AttributeError:
-LOG.warn(_LW('Unable to retrieve request id from context'))
+LOG.warning(_LW('Unable to retrieve request id from context'))
 else:
 # For python 3 compatibility need to use bytes type
 prefix = b'req-' if isinstance(request_id, bytes) else 'req-'
@@ -189,7 +189,7 @@ class ImageDataController(object):
 "The image may have been deleted during the "
 "upload, cleaning up the chunks uploaded.") %
 image_id)
-LOG.warn(msg)
+LOG.warning(msg)
 # NOTE(sridevi): Cleaning up the uploaded chunks.
 try:
 image.delete()
@@ -116,7 +116,7 @@ class ImagesController(object):
 LOG.debug("User not permitted to create image")
 raise webob.exc.HTTPForbidden(explanation=e.msg)
 except exception.LimitExceeded as e:
-LOG.warn(encodeutils.exception_to_unicode(e))
+LOG.warning(encodeutils.exception_to_unicode(e))
 raise webob.exc.HTTPRequestEntityTooLarge(
 explanation=e.msg, request=req, content_type='text/plain')
 except exception.Duplicate as e:
@@ -379,7 +379,7 @@ class ImagesController(object):
 try:
 stores = utils.get_stores_from_request(req, body)
 except glance_store.UnknownScheme as exc:
-LOG.warn(exc.msg)
+LOG.warning(exc.msg)
 raise exception.Conflict(exc.msg)

 # NOTE(abhishekk): If all_stores is specified and import_method is
@@ -625,7 +625,7 @@ class ImagesController(object):
 except exception.StorageQuotaFull as e:
 msg = (_("Denying attempt to upload image because it exceeds the"
 " quota: %s") % encodeutils.exception_to_unicode(e))
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPRequestEntityTooLarge(
 explanation=msg, request=req, content_type='text/plain')
 except exception.LimitExceeded as e:
@@ -717,14 +717,14 @@ class ImagesController(object):
 except castellan_exception.Forbidden:
 msg = ('Not allowed to delete encryption key %s' %
 cinder_encryption_key_id)
-LOG.warn(msg)
+LOG.warning(msg)
 except (castellan_exception.ManagedObjectNotFoundError, KeyError):
 msg = 'Could not find encryption key %s' % cinder_encryption_key_id
-LOG.warn(msg)
+LOG.warning(msg)
 except castellan_exception.KeyManagerError:
 msg = ('Failed to delete cinder encryption key %s' %
 cinder_encryption_key_id)
-LOG.warn(msg)
+LOG.warning(msg)

 @utils.mutating
 def delete_from_store(self, req, store_id, image_id):
@@ -910,14 +910,14 @@ class ImagesController(object):
 except (glance_store.NotFound, exception.NotFound):
 msg = (_("Failed to find image %(image_id)s to delete") %
 {'image_id': image_id})
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPNotFound(explanation=msg)
 except glance_store.exceptions.InUseByStore as e:
 msg = (_("Image %(id)s could not be deleted "
 "because it is in use: %(exc)s") %
 {"id": image_id,
 "exc": e.msg})
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPConflict(explanation=msg)
 except glance_store.exceptions.HasSnapshot as e:
 raise webob.exc.HTTPConflict(explanation=e.msg)
@@ -1926,7 +1926,7 @@ def load_custom_properties():
 else:
 msg = (_LW('Could not find schema properties file %s. Continuing '
 'without custom properties') % filename)
-LOG.warn(msg)
+LOG.warning(msg)
 return {}


@@ -92,7 +92,7 @@ class TasksController(object):
 except exception.Forbidden as e:
 msg = (_LW("Forbidden to create task. Reason: %(reason)s")
 % {'reason': encodeutils.exception_to_unicode(e)})
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPForbidden(explanation=e.msg)
 return new_task

@@ -119,10 +119,10 @@ class TasksController(object):
 result['next_marker'] = tasks[-1].task_id
 except (exception.NotFound, exception.InvalidSortKey,
 exception.InvalidFilterRangeValue) as e:
-LOG.warn(encodeutils.exception_to_unicode(e))
+LOG.warning(encodeutils.exception_to_unicode(e))
 raise webob.exc.HTTPBadRequest(explanation=e.msg)
 except exception.Forbidden as e:
-LOG.warn(encodeutils.exception_to_unicode(e))
+LOG.warning(encodeutils.exception_to_unicode(e))
 raise webob.exc.HTTPForbidden(explanation=e.msg)
 result['tasks'] = tasks
 return result
@@ -138,14 +138,14 @@ class TasksController(object):
 msg = (_LW("Failed to find task %(task_id)s. Reason: %(reason)s")
 % {'task_id': task_id,
 'reason': encodeutils.exception_to_unicode(e)})
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPNotFound(explanation=e.msg)
 except exception.Forbidden as e:
 msg = (_LW("Forbidden to get task %(task_id)s. Reason:"
 " %(reason)s")
 % {'task_id': task_id,
 'reason': encodeutils.exception_to_unicode(e)})
-LOG.warn(msg)
+LOG.warning(msg)
 raise webob.exc.HTTPForbidden(explanation=e.msg)
 return task

@@ -98,7 +98,7 @@ class _Convert(task.Task):
 msg = _LW('The conversion format is None, please add a value '
 'for it in the config file for this task to '
 'work: %s')
-LOG.warn(msg, self.task_id)
+LOG.warning(msg, self.task_id)
 _Convert.conversion_missing_warned = True
 return

@@ -252,14 +252,15 @@ class OVAImageExtractor(object):
 self.interested_properties = properties.get(
 'cim_pasd', [])
 if not self.interested_properties:
-LOG.warn(_LW('OVF metadata of interest was not specified '
+msg = _LW('OVF metadata of interest was not specified '
 'in ovf-metadata.json config file. Please '
 'set "cim_pasd" to a list of interested '
 'CIM_ProcessorAllocationSettingData '
-'properties.'))
+'properties.')
+LOG.warning(msg)
 else:
-LOG.warn(_LW('OVF properties config file "ovf-metadata.json" was '
-'not found.'))
+LOG.warning(_LW('OVF properties config file "ovf-metadata.json" '
+'was not found.'))


 def get_flow(**kwargs):
@@ -75,5 +75,5 @@ class OptionalTask(task.Task):
 msg = (_LW("An optional task has failed, "
 "the failure was: %s") %
 encodeutils.exception_to_unicode(exc))
-LOG.warn(msg)
+LOG.warning(msg)
 return wrapper
@@ -636,24 +636,24 @@ def replication_compare(options, args):

 for key in image:
 if image[key] != headers.get(key):
-LOG.warn(_LW('%(image_id)s: field %(key)s differs '
+LOG.warning(_LW('%(image_id)s: field %(key)s differs '
 '(source is %(source_value)s, destination '
 'is %(target_value)s)')
 % {'image_id': image['id'],
 'key': key,
 'source_value': image[key],
 'target_value': headers.get(key,
 'undefined')})
 differences[image['id']] = 'diff'
 else:
 LOG.debug('%(image_id)s is identical',
 {'image_id': image['id']})

 elif image['status'] == 'active':
-LOG.warn(_LW('Image %(image_id)s ("%(image_name)s") '
+LOG.warning(_LW('Image %(image_id)s ("%(image_name)s") '
 'entirely missing from the destination')
 % {'image_id': image['id'],
 'image_name': image.get('name', '--unnamed')})
 differences[image['id']] = 'missing'

 return differences
@@ -75,7 +75,7 @@ def _load_strategies():
 msg = (_('%(strategy)s is registered as a module twice. '
 '%(module)s is not being used.') %
 {'strategy': strategy_name, 'module': module_name})
-LOG.warn(msg)
+LOG.warning(msg)
 else:
 # Initialize strategy module
 mgr.driver.init()
@@ -164,7 +164,7 @@ class PropertyRules(object):
 property_dict[operation] = permissions
 else:
 property_dict[operation] = []
-LOG.warn(
+LOG.warning(
 _LW('Property protection on operation %(operation)s'
 ' for rule %(rule)s is not found. No role will be'
 ' allowed to perform this operation.') %
@@ -125,9 +125,9 @@ def set_image_data(image, uri, task_id, backend=None):
 image.set_data(data_iter, backend=backend)
 except Exception as e:
 with excutils.save_and_reraise_exception():
-LOG.warn("Task %(task_id)s failed with exception %(error)s" %
+LOG.warning("Task %(task_id)s failed with exception %(error)s" %
 {"error": encodeutils.exception_to_unicode(e),
 "task_id": task_id})
 LOG.info("Task %(task_id)s: Could not import image file"
 " %(image_data)s", {"image_data": uri,
 "task_id": task_id})
@@ -152,9 +152,10 @@ def set_image_data(image, uri, task_id, backend=None, set_active=True,
 image.set_data(data_iter, backend=backend, set_active=set_active)
 except Exception as e:
 with excutils.save_and_reraise_exception():
-LOG.warn(_LW("Task %(task_id)s failed with exception %(error)s") %
-{"error": encodeutils.exception_to_unicode(e),
-"task_id": task_id})
+LOG.warning(_LW("Task %(task_id)s failed with exception "
+"%(error)s") %
+{"error": encodeutils.exception_to_unicode(e),
+"task_id": task_id})
 LOG.info(_LI("Task %(task_id)s: Could not import image file"
 " %(image_data)s"), {"image_data": uri,
 "task_id": task_id})
@@ -70,9 +70,9 @@ def safe_delete_from_backend(context, image_id, location):
 msg = ("The image data for %(iid)s was not found in the store. "
 "The image record has been updated to reflect "
 "this." % {'iid': image_id})
-LOG.warn(msg)
+LOG.warning(msg)
 except store_api.StoreDeleteNotSupported as e:
-LOG.warn(encodeutils.exception_to_unicode(e))
+LOG.warning(encodeutils.exception_to_unicode(e))
 except store_api.UnsupportedBackend:
 exc_type = sys.exc_info()[0].__name__
 msg = (_LE('Failed to delete image %(image_id)s from store: %(exc)s') %
@@ -674,7 +674,7 @@ class PosixServer(BaseServer):
 self.stale_children.remove(pid)
 LOG.info(_LI('Removed stale child %s'), pid)
 else:
-LOG.warn(_LW('Unrecognised child %s') % pid)
+LOG.warning(_LW('Unrecognised child %s') % pid)

 def _verify_and_respawn_children(self, pid, status):
 if len(self.stale_children) == 0:
@@ -432,7 +432,7 @@ def image_set_property_atomic(image_id, name, value):
 try:
 image = DATA['images'][image_id]
 except KeyError:
-LOG.warn(_LW('Could not find image %s'), image_id)
+LOG.warning(_LW('Could not find image %s'), image_id)
 raise exception.ImageNotFound()

 prop = _image_property_format(image_id,
@@ -445,7 +445,7 @@ def image_delete_property_atomic(image_id, name, value):
 try:
 image = DATA['images'][image_id]
 except KeyError:
-LOG.warn(_LW('Could not find image %s'), image_id)
+LOG.warning(_LW('Could not find image %s'), image_id)
 raise exception.ImageNotFound()

 for i, prop in enumerate(image['properties']):
@@ -460,16 +460,16 @@ def _image_get(context, image_id, force_show_deleted=False, status=None):
 try:
 image = DATA['images'][image_id]
 except KeyError:
-LOG.warn(_LW('Could not find image %s'), image_id)
+LOG.warning(_LW('Could not find image %s'), image_id)
 raise exception.ImageNotFound()

 if image['deleted'] and not (force_show_deleted
 or context.can_see_deleted):
-LOG.warn(_LW('Unable to get deleted image'))
+LOG.warning(_LW('Unable to get deleted image'))
 raise exception.ImageNotFound()

 if not is_image_visible(context, image):
-LOG.warn(_LW('Unable to get unowned image'))
+LOG.warning(_LW('Unable to get unowned image'))
 raise exception.Forbidden("Image not visible to you")

 return image
@@ -676,7 +676,7 @@ def image_location_update(context, image_id, location):
 if not updated:
 msg = (_("No location found with ID %(loc)s from image %(img)s") %
 dict(loc=loc_id, img=image_id))
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.NotFound(msg)


@@ -702,7 +702,7 @@ def image_location_delete(context, image_id, location_id, status,
 if not deleted:
 msg = (_("No location found with ID %(loc)s from image %(img)s") %
 dict(loc=location_id, img=image_id))
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.NotFound(msg)


@@ -990,12 +990,12 @@ def _task_get(context, task_id, force_show_deleted=False):
 task = DATA['tasks'][task_id]
 except KeyError:
 msg = _LW('Could not find task %s') % task_id
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.TaskNotFound(task_id=task_id)

 if task['deleted'] and not (force_show_deleted or context.can_see_deleted):
 msg = _LW('Unable to get deleted task %s') % task_id
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.TaskNotFound(task_id=task_id)

 if not _is_task_visible(context, task):
@@ -1171,7 +1171,7 @@ def _task_info_get(task_id):
 task_info = DATA['task_info'][task_id]
 except KeyError:
 msg = _LW('Could not find task info %s') % task_id
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.TaskNotFound(task_id=task_id)

 return task_info
@@ -1257,7 +1257,7 @@ def metadef_namespace_get_by_id(context, namespace_id):
 except StopIteration:
 msg = (_("Metadata definition namespace not found for id=%s")
 % namespace_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.MetadefNamespaceNotFound(msg)

 if not _is_namespace_visible(context, namespace):
@@ -1388,7 +1388,7 @@ def metadef_object_get_by_id(context, namespace_name, object_id):
 else:
 msg = (_("Metadata definition object not found for id=%s")
 % object_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.MetadefObjectNotFound(msg)


@@ -1644,7 +1644,7 @@ def metadef_property_get_by_id(context, namespace_name, property_id):
 else:
 msg = (_("Metadata definition property not found for id=%s")
 % property_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.MetadefPropertyNotFound(msg)


@@ -1849,7 +1849,7 @@ def metadef_tag_get_by_id(context, namespace_name, id):
 return tag
 else:
 msg = (_("Metadata definition tag not found for id=%s") % id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.MetadefTagNotFound(msg)


@@ -75,7 +75,7 @@ def _retry_on_deadlock(exc):
 """Decorator to retry a DB API call if Deadlock was received."""

 if isinstance(exc, db_exception.DBDeadlock):
-LOG.warn(_LW("Deadlock detected. Retrying..."))
+LOG.warning(_LW("Deadlock detected. Retrying..."))
 return True
 return False

@@ -132,7 +132,7 @@ def clear_db_env():

 def _check_mutate_authorization(context, image_ref):
 if not is_image_mutable(context, image_ref):
-LOG.warn(_LW("Attempted to modify image user did not own."))
+LOG.warning(_LW("Attempted to modify image user did not own."))
 msg = _("You do not own this image")
 if image_ref.visibility in ['private', 'shared']:
 exc_class = exception.Forbidden
@@ -365,7 +365,7 @@ def _paginate_query(query, model, limit, sort_keys, marker=None,
 if 'id' not in sort_keys:
 # TODO(justinsb): If this ever gives a false-positive, check
 # the actual primary key, rather than assuming its id
-LOG.warn(_LW('Id not in sort_keys; is sort_keys unique?'))
+LOG.warning(_LW('Id not in sort_keys; is sort_keys unique?'))

 assert(not (sort_dir and sort_dirs)) # nosec
 # nosec: This function runs safely if the assertion fails.
@@ -1087,7 +1087,7 @@ def image_location_update(context, image_id, location, session=None):
 except sa_orm.exc.NoResultFound:
 msg = (_("No location found with ID %(loc)s from image %(img)s") %
 dict(loc=loc_id, img=image_id))
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.NotFound(msg)


@@ -1113,7 +1113,7 @@ def image_location_delete(context, image_id, location_id, status,
 except sa_orm.exc.NoResultFound:
 msg = (_("No location found with ID %(loc)s from image %(img)s") %
 dict(loc=location_id, img=image_id))
-LOG.warn(msg)
+LOG.warning(msg)
 raise exception.NotFound(msg)


@@ -87,7 +87,7 @@ def _get(context, namespace_id, session):
 except sa_orm.exc.NoResultFound:
 msg = (_("Metadata definition namespace not found for id=%s")
 % namespace_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exc.MetadefNamespaceNotFound(msg)

 # Make sure they are allowed to view it.
@@ -34,7 +34,7 @@ def _get(context, object_id, session):
 except sa_orm.exc.NoResultFound:
 msg = (_("Metadata definition object not found for id=%s")
 % object_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exc.MetadefObjectNotFound(msg)

 return metadef_object
@@ -36,7 +36,7 @@ def _get(context, property_id, session):
 except sa_orm.exc.NoResultFound:
 msg = (_("Metadata definition property not found for id=%s")
 % property_id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exc.MetadefPropertyNotFound(msg)

 return property_rec
@@ -34,7 +34,7 @@ def _get(context, id, session):
 metadef_tag = query.one()
 except sa_orm.exc.NoResultFound:
 msg = (_LW("Metadata tag not found for id %s") % id)
-LOG.warn(msg)
+LOG.warning(msg)
 raise exc.MetadefTagNotFound(message=msg)
 return metadef_tag

@@ -522,7 +522,7 @@ class TaskExecutorFactory(object):
 if not TaskExecutorFactory.eventlet_deprecation_warned:
 msg = _LW("The `eventlet` executor has been deprecated. "
 "Use `taskflow` instead.")
-LOG.warn(msg)
+LOG.warning(msg)
 TaskExecutorFactory.eventlet_deprecation_warned = True
 task_executor = 'taskflow'

@@ -131,3 +131,16 @@ def check_python3_xrange(logical_line):
 if re.search(r"\bxrange\s*\(", logical_line):
 yield(0, "G329: Do not use xrange. Use range, or six.moves.range for "
 "large loops.")
+
+
+@core.flake8ext
+def no_log_warn(logical_line):
+"""Disallow 'LOG.warn('
+
+Use LOG.warning() instead of Deprecated LOG.warn().
+https://docs.python.org/3/library/logging.html#logging.warning
+"""
+
+msg = ("G330: LOG.warn is deprecated, please use LOG.warning!")
+if "LOG.warn(" in logical_line:
+yield (0, msg)
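The new G330 check follows the same logical-line contract as the existing hacking checks: it yields an (offset, message) pair for any line that still uses the deprecated alias. A quick way to sanity-check it directly (a sketch, assuming the checks module is importable as glance.hacking.checks, matching the tox.ini registration at the end of this diff):

```python
# Sketch only; assumes glance is installed so the hacking checks module can
# be imported as glance.hacking.checks (as registered in tox.ini below).
from glance.hacking import checks

# A logical line calling the deprecated alias is flagged with G330 ...
assert list(checks.no_log_warn('LOG.warn("something happened")'))

# ... while LOG.warning passes cleanly ("LOG.warning(" does not contain the
# substring "LOG.warn(" that the check looks for).
assert list(checks.no_log_warn('LOG.warning("something happened")')) == []
```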
@@ -172,11 +172,11 @@ class ImageCache(object):
 self.driver_class = importutils.import_class(driver_module)
 LOG.info(_LI("Image cache loaded driver '%s'."), driver_name)
 except ImportError as import_err:
-LOG.warn(_LW("Image cache driver "
+LOG.warning(_LW("Image cache driver "
 "'%(driver_name)s' failed to load. "
 "Got error: '%(import_err)s."),
 {'driver_name': driver_name,
 'import_err': import_err})

 driver_module = __name__ + '.drivers.sqlite.Driver'
 LOG.info(_LI("Defaulting to SQLite driver."))
@@ -193,11 +193,11 @@ class ImageCache(object):
 self.driver.configure()
 except exception.BadDriverConfiguration as config_err:
 driver_module = self.driver_class.__module__
-LOG.warn(_LW("Image cache driver "
+LOG.warning(_LW("Image cache driver "
 "'%(driver_module)s' failed to configure. "
 "Got error: '%(config_err)s"),
 {'driver_module': driver_module,
 'config_err': config_err})
 LOG.info(_LI("Defaulting to SQLite driver."))
 default_module = __name__ + '.drivers.sqlite.Driver'
 self.driver_class = importutils.import_class(default_module)
@@ -346,12 +346,13 @@ class Driver(base.Driver):
 if os.path.exists(incomplete_path):
 invalid_path = self.get_image_filepath(image_id, 'invalid')

-LOG.warn(_LW("Fetch of cache file failed (%(e)s), rolling "
+msg = (_LW("Fetch of cache file failed (%(e)s), rolling "
 "back by moving '%(incomplete_path)s' to "
 "'%(invalid_path)s'") %
 {'e': e,
 'incomplete_path': incomplete_path,
 'invalid_path': invalid_path})
+LOG.warning(msg)
 os.rename(incomplete_path, invalid_path)

 db.execute("""DELETE FROM cached_images
@@ -472,7 +473,7 @@ class Driver(base.Driver):
 msg = (_LW("Failed to delete file %(path)s. "
 "Got error: %(e)s"),
 dict(path=path, e=e))
-LOG.warn(msg)
+LOG.warning(msg)

 def get_queued_images(self):
 """
@@ -51,7 +51,8 @@ class Prefetcher(base.CacheApp):
 return False

 if image.status != 'active':
-LOG.warn(_LW("Image '%s' is not active. Not caching.") % image_id)
+LOG.warning(_LW("Image '%s' is not active. Not caching.") %
+image_id)
 return False

 for loc in image.locations:
@@ -85,8 +86,8 @@ class Prefetcher(base.CacheApp):
 results = pool.map(self.fetch_image_into_cache, images)
 successes = sum([1 for r in results if r is True])
 if successes != num_images:
-LOG.warn(_LW("Failed to successfully cache all "
+LOG.warning(_LW("Failed to successfully cache all "
 "images in queue."))
 return False

 LOG.info(_LI("Successfully cached all %d images"), num_images)
@@ -622,10 +622,10 @@ class ImageProxy(glance.domain.proxy.Image):

 return data
 except Exception as e:
-LOG.warn(_LW('Get image %(id)s data failed: '
+LOG.warning(_LW('Get image %(id)s data failed: '
 '%(err)s.'),
 {'id': self.image.image_id,
 'err': encodeutils.exception_to_unicode(e)})
 err = e
 # tried all locations
 LOG.error(_LE('Glance tried all active locations to get data for '
@@ -384,9 +384,9 @@ class Scrubber(object):
 {'status': 'deleted'})
 LOG.info(_LI("Image %s has been scrubbed successfully"), image_id)
 else:
-LOG.warn(_LW("One or more image locations couldn't be scrubbed "
-"from backend. Leaving image '%s' in 'pending_delete'"
-" status"), image_id)
+LOG.warning(_LW("One or more image locations couldn't be scrubbed "
+"from backend. Leaving image '%s' in "
+"'pending_delete' status"), image_id)

 def _delete_image_location_from_backend(self, image_id, loc_id, uri,
 backend=None):
@@ -108,3 +108,15 @@ class HackingTestCase(utils.BaseTestCase):
 self.assertEqual(0, len(list(func('for i in range(10)'))))
 self.assertEqual(0, len(list(func('for i in six.moves.range(10)'))))
 self.assertEqual(0, len(list(func('testxrange(10)'))))
+
+def test_no_log_warn(self):
+code = """
+LOG.warn("LOG.warn is deprecated")
+"""
+errors = [(1, 0, 'G330')]
+self._assert_has_errors(code, checks.no_log_warn,
+expected_errors=errors)
+code = """
+LOG.warning("LOG.warn is deprecated")
+"""
+self._assert_has_no_errors(code, checks.no_log_warn)
tox.ini
@@ -142,6 +142,7 @@ extension =
 G327 = checks:check_no_contextlib_nested
 G328 = checks:dict_constructor_with_list_copy
 G329 = checks:check_python3_xrange
+G330 = checks:no_log_warn
 paths = ./glance/hacking

 [testenv:docs]
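With the extension registered here, any flake8-based run that loads these local hacking plugins applies G330 alongside the existing Gxxx checks. For context, a toy sketch of the logical-line plugin contract that the registration relies on; this is illustrative only (flake8's real machinery is more involved), and the stand-in check simply mirrors the function added above:

```python
# Illustrative only: a stand-in mirroring the new check plus a toy driver
# showing the logical-line plugin contract behind the G330 registration.

def no_log_warn(logical_line):
    msg = "G330: LOG.warn is deprecated, please use LOG.warning!"
    if "LOG.warn(" in logical_line:
        yield (0, msg)


def run_checks(check, logical_lines):
    # flake8 feeds each logical line to every registered check and reports
    # each yielded (offset, text) pair against that line.
    for lineno, line in enumerate(logical_lines, start=1):
        for offset, text in check(line):
            yield (lineno, offset, text)


lines = ['LOG.warning("fine")', 'LOG.warn("deprecated")']
print(list(run_checks(no_log_warn, lines)))
# [(2, 0, 'G330: LOG.warn is deprecated, please use LOG.warning!')]
```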