debug level logs should not be translated
According to the OpenStack translation policy (available at https://wiki.openstack.org/wiki/LoggingStandards), debug messages should not be translated. As mentioned in several Nova changes by garyk, this helps prioritize log translation. This patch adds a new hacking check - N319 - that ensures debug log messages are not translated. Change-Id: I9dd958b904671a7eb95883026e14684469dc52d5 Closes-Bug: #1317847
This commit is contained in:
parent
edc4856e06
commit
86dd9ff66c
@ -8,4 +8,4 @@ glance Style Commandments
|
||||
glance Specific Commandments
|
||||
--------------------------
|
||||
|
||||
None so far
|
||||
- [G319] Validate that debug level logs are not translated
|
||||
|
@ -122,7 +122,7 @@ class CacheFilter(wsgi.Middleware):
|
||||
except webob.exc.HTTPForbidden:
|
||||
return None
|
||||
|
||||
LOG.debug(_("Cache hit for image '%s'"), image_id)
|
||||
LOG.debug("Cache hit for image '%s'", image_id)
|
||||
image_iterator = self.get_from_cache(image_id)
|
||||
method = getattr(self, '_process_%s_request' % version)
|
||||
|
||||
@ -235,7 +235,7 @@ class CacheFilter(wsgi.Middleware):
|
||||
|
||||
def _process_DELETE_response(self, resp, image_id):
|
||||
if self.cache.is_cached(image_id):
|
||||
LOG.debug(_("Removing image %s from cache"), image_id)
|
||||
LOG.debug("Removing image %s from cache", image_id)
|
||||
self.cache.delete_cached_image(image_id)
|
||||
return resp
|
||||
|
||||
|
@ -50,24 +50,24 @@ class VersionNegotiationFilter(wsgi.Middleware):
|
||||
|
||||
accept = str(req.accept)
|
||||
if accept.startswith('application/vnd.openstack.images-'):
|
||||
LOG.debug(_("Using media-type versioning"))
|
||||
LOG.debug("Using media-type versioning")
|
||||
token_loc = len('application/vnd.openstack.images-')
|
||||
req_version = accept[token_loc:]
|
||||
else:
|
||||
LOG.debug(_("Using url versioning"))
|
||||
LOG.debug("Using url versioning")
|
||||
# Remove version in url so it doesn't conflict later
|
||||
req_version = self._pop_path_info(req)
|
||||
|
||||
try:
|
||||
version = self._match_version_string(req_version)
|
||||
except ValueError:
|
||||
LOG.debug(_("Unknown version. Returning version choices."))
|
||||
LOG.debug("Unknown version. Returning version choices.")
|
||||
return self.versions_app
|
||||
|
||||
req.environ['api.version'] = version
|
||||
req.path_info = ''.join(('/v', str(version), req.path_info))
|
||||
LOG.debug(_("Matched version: v%d"), version)
|
||||
LOG.debug(_('new path %s'), req.path_info)
|
||||
LOG.debug("Matched version: v%d", version)
|
||||
LOG.debug('new path %s', req.path_info)
|
||||
return None
|
||||
|
||||
def _match_version_string(self, subject):
|
||||
|
@ -80,7 +80,7 @@ class Enforcer(object):
|
||||
rule_type = "default "
|
||||
|
||||
text_rules = dict((k, str(v)) for k, v in rules.items())
|
||||
msg = (_('Loaded %(rule_type)spolicy rules: %(text_rules)s') %
|
||||
msg = ('Loaded %(rule_type)spolicy rules: %(text_rules)s' %
|
||||
{'rule_type': rule_type, 'text_rules': text_rules})
|
||||
LOG.debug(msg)
|
||||
|
||||
@ -103,7 +103,7 @@ class Enforcer(object):
|
||||
"""
|
||||
mtime = os.path.getmtime(self.policy_path)
|
||||
if not self.policy_file_contents or mtime != self.policy_file_mtime:
|
||||
LOG.debug(_("Loading policy from %s") % self.policy_path)
|
||||
LOG.debug("Loading policy from %s" % self.policy_path)
|
||||
with open(self.policy_path) as fap:
|
||||
raw_contents = fap.read()
|
||||
rules_dict = jsonutils.loads(raw_contents)
|
||||
|
@ -38,12 +38,12 @@ class BaseController(object):
|
||||
try:
|
||||
return registry.get_image_metadata(context, image_id)
|
||||
except exception.NotFound:
|
||||
msg = _("Image with identifier %s not found") % image_id
|
||||
msg = "Image with identifier %s not found" % image_id
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPNotFound(
|
||||
msg, request=request, content_type='text/plain')
|
||||
except exception.Forbidden:
|
||||
msg = _("Forbidden image access")
|
||||
msg = "Forbidden image access"
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPForbidden(msg,
|
||||
request=request,
|
||||
@ -56,7 +56,7 @@ class BaseController(object):
|
||||
"""
|
||||
image = self.get_image_meta_or_404(request, image_id)
|
||||
if image['status'] != 'active':
|
||||
msg = _("Image %s is not active") % image_id
|
||||
msg = "Image %s is not active" % image_id
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPNotFound(
|
||||
msg, request=request, content_type='text/plain')
|
||||
|
@ -193,7 +193,7 @@ class Controller(controller.BaseController):
|
||||
for key in create_props:
|
||||
if (self.prop_enforcer.check_property_rules(
|
||||
key, 'create', req.context) is False):
|
||||
msg = _("Property '%s' is protected") % key
|
||||
msg = "Property '%s' is protected" % key
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg,
|
||||
request=req,
|
||||
@ -237,7 +237,7 @@ class Controller(controller.BaseController):
|
||||
key, 'update', req.context) is False and
|
||||
image_meta['properties'][key] !=
|
||||
orig_meta['properties'][key]) or not has_read):
|
||||
msg = _("Property '%s' is protected") % key
|
||||
msg = "Property '%s' is protected" % key
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg,
|
||||
request=req,
|
||||
@ -271,7 +271,7 @@ class Controller(controller.BaseController):
|
||||
orig_meta['properties'][key]
|
||||
elif (self.prop_enforcer.check_property_rules(
|
||||
key, 'delete', req.context) is False):
|
||||
msg = _("Property '%s' is protected") % key
|
||||
msg = "Property '%s' is protected" % key
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg,
|
||||
request=req,
|
||||
@ -425,7 +425,7 @@ class Controller(controller.BaseController):
|
||||
for scheme in schemes:
|
||||
if pieces.scheme == scheme:
|
||||
return source
|
||||
msg = _("External sourcing not supported for store %s") % source
|
||||
msg = "External sourcing not supported for store %s" % source
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=req,
|
||||
@ -498,7 +498,7 @@ class Controller(controller.BaseController):
|
||||
location = self._external_source(image_meta, req)
|
||||
store = image_meta.get('store')
|
||||
if store and store not in get_known_stores():
|
||||
msg = _("Required store %s is invalid") % store
|
||||
msg = "Required store %s is invalid" % store
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
content_type='text/plain')
|
||||
@ -510,7 +510,7 @@ class Controller(controller.BaseController):
|
||||
try:
|
||||
store = get_store_from_location(location)
|
||||
except exception.BadStoreUri:
|
||||
msg = _("Invalid location %s") % location
|
||||
msg = "Invalid location %s" % location
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=req,
|
||||
@ -533,7 +533,7 @@ class Controller(controller.BaseController):
|
||||
self.notifier.info("image.create", redact_loc(image_meta))
|
||||
return image_meta
|
||||
except exception.Duplicate:
|
||||
msg = (_("An image with identifier %s already exists") %
|
||||
msg = ("An image with identifier %s already exists" %
|
||||
image_meta['id'])
|
||||
LOG.debug(msg)
|
||||
raise HTTPConflict(explanation=msg,
|
||||
@ -547,7 +547,7 @@ class Controller(controller.BaseController):
|
||||
request=req,
|
||||
content_type="text/plain")
|
||||
except exception.Forbidden:
|
||||
msg = _("Forbidden to reserve image.")
|
||||
msg = "Forbidden to reserve image."
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg,
|
||||
request=req,
|
||||
@ -574,7 +574,7 @@ class Controller(controller.BaseController):
|
||||
copy_from)
|
||||
except Exception as e:
|
||||
upload_utils.safe_kill(req, image_meta['id'])
|
||||
msg = _("Copy from external source failed: %s") % e
|
||||
msg = "Copy from external source failed: %s" % e
|
||||
LOG.debug(msg)
|
||||
return
|
||||
image_meta['size'] = image_size or image_meta['size']
|
||||
@ -583,7 +583,7 @@ class Controller(controller.BaseController):
|
||||
req.get_content_type(('application/octet-stream',))
|
||||
except exception.InvalidContentType:
|
||||
upload_utils.safe_kill(req, image_meta['id'])
|
||||
msg = _("Content-Type must be application/octet-stream")
|
||||
msg = "Content-Type must be application/octet-stream"
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg)
|
||||
|
||||
@ -594,13 +594,13 @@ class Controller(controller.BaseController):
|
||||
store = self.get_store_or_400(req, scheme)
|
||||
|
||||
image_id = image_meta['id']
|
||||
LOG.debug(_("Setting image %s to status 'saving'"), image_id)
|
||||
LOG.debug("Setting image %s to status 'saving'", image_id)
|
||||
registry.update_image_metadata(req.context, image_id,
|
||||
{'status': 'saving'})
|
||||
|
||||
LOG.debug(_("Uploading image data for image %(image_id)s "
|
||||
"to %(scheme)s store"), {'image_id': image_id,
|
||||
'scheme': scheme})
|
||||
LOG.debug("Uploading image data for image %(image_id)s "
|
||||
"to %(scheme)s store", {'image_id': image_id,
|
||||
'scheme': scheme})
|
||||
|
||||
self.notifier.info("image.prepare", redact_loc(image_meta))
|
||||
|
||||
@ -642,13 +642,13 @@ class Controller(controller.BaseController):
|
||||
with excutils.save_and_reraise_exception():
|
||||
# Delete image data since it has been supersceded by another
|
||||
# upload and re-raise.
|
||||
LOG.debug(_("duplicate operation - deleting image data for "
|
||||
" %(id)s (location:%(location)s)") %
|
||||
LOG.debug("duplicate operation - deleting image data for "
|
||||
" %(id)s (location:%(location)s)" %
|
||||
{'id': image_id, 'location': image_meta['location']})
|
||||
upload_utils.initiate_deletion(req, image_meta['location'],
|
||||
image_id, CONF.delayed_delete)
|
||||
except exception.Invalid as e:
|
||||
msg = _("Failed to activate image. Got error: %(e)s") % {'e': e}
|
||||
msg = "Failed to activate image. Got error: %(e)s" % {'e': e}
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=req,
|
||||
@ -691,7 +691,7 @@ class Controller(controller.BaseController):
|
||||
location = image_meta.get('location')
|
||||
sources = filter(lambda x: x, (copy_from, location, image_data))
|
||||
if len(sources) >= 2:
|
||||
msg = _("It's invalid to provide multiple image sources.")
|
||||
msg = "It's invalid to provide multiple image sources."
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=req,
|
||||
@ -717,11 +717,11 @@ class Controller(controller.BaseController):
|
||||
# size provided by the client will be used as-is.
|
||||
if (image_size_store and
|
||||
image_size_store != image_size_meta):
|
||||
msg = _("Provided image size must match the stored "
|
||||
"image size. (provided size: %(ps)d, "
|
||||
"stored size: %(ss)d)") % {
|
||||
"ps": image_size_meta,
|
||||
"ss": image_size_store}
|
||||
msg = ("Provided image size must match the stored "
|
||||
"image size. (provided size: %(ps)d, "
|
||||
"stored size: %(ss)d)" % {
|
||||
"ps": image_size_meta,
|
||||
"ss": image_size_store})
|
||||
LOG.debug(msg)
|
||||
raise HTTPConflict(explanation=msg,
|
||||
request=req,
|
||||
@ -885,7 +885,7 @@ class Controller(controller.BaseController):
|
||||
self.update_store_acls(req, id, orig_or_updated_loc,
|
||||
public=is_public)
|
||||
except exception.BadStoreUri:
|
||||
msg = _("Invalid location %s") % location
|
||||
msg = "Invalid location %s" % location
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=req,
|
||||
@ -932,7 +932,7 @@ class Controller(controller.BaseController):
|
||||
image_data)
|
||||
|
||||
except exception.Invalid as e:
|
||||
msg = (_("Failed to update image metadata. Got error: %(e)s") %
|
||||
msg = ("Failed to update image metadata. Got error: %(e)s" %
|
||||
{'e': e})
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
@ -985,19 +985,19 @@ class Controller(controller.BaseController):
|
||||
|
||||
image = self.get_image_meta_or_404(req, id)
|
||||
if image['protected']:
|
||||
msg = _("Image is protected")
|
||||
msg = "Image is protected"
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg,
|
||||
request=req,
|
||||
content_type="text/plain")
|
||||
|
||||
if image['status'] == 'pending_delete':
|
||||
msg = (_("Forbidden to delete a %s image.") % image['status'])
|
||||
msg = "Forbidden to delete a %s image." % image['status']
|
||||
LOG.debug(msg)
|
||||
raise HTTPForbidden(explanation=msg, request=req,
|
||||
content_type="text/plain")
|
||||
elif image['status'] == 'deleted':
|
||||
msg = _("Image %s not found.") % id
|
||||
msg = "Image %s not found." % id
|
||||
LOG.debug(msg)
|
||||
raise HTTPNotFound(explanation=msg, request=req,
|
||||
content_type="text/plain")
|
||||
@ -1060,7 +1060,7 @@ class Controller(controller.BaseController):
|
||||
try:
|
||||
return get_store_from_scheme(request.context, scheme)
|
||||
except exception.UnknownScheme:
|
||||
msg = _("Store for scheme %s not found") % scheme
|
||||
msg = "Store for scheme %s not found" % scheme
|
||||
LOG.debug(msg)
|
||||
raise HTTPBadRequest(explanation=msg,
|
||||
request=request,
|
||||
|
@ -72,11 +72,11 @@ class Controller(controller.BaseController):
|
||||
try:
|
||||
members = registry.get_image_members(req.context, image_id)
|
||||
except exception.NotFound:
|
||||
msg = _("Image with identifier %s not found") % image_id
|
||||
msg = "Image with identifier %s not found" % image_id
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPNotFound(msg)
|
||||
except exception.Forbidden:
|
||||
msg = _("Unauthorized image access")
|
||||
msg = "Unauthorized image access"
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPForbidden(msg)
|
||||
return dict(members=members)
|
||||
|
@ -133,11 +133,11 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
|
||||
|
||||
# Update the database with the checksum returned
|
||||
# from the backend store
|
||||
LOG.debug(_("Updating image %(image_id)s data. "
|
||||
"Checksum set to %(checksum)s, size set "
|
||||
"to %(size)d"), {'image_id': image_id,
|
||||
'checksum': checksum,
|
||||
'size': size})
|
||||
LOG.debug("Updating image %(image_id)s data. "
|
||||
"Checksum set to %(checksum)s, size set "
|
||||
"to %(size)d", {'image_id': image_id,
|
||||
'checksum': checksum,
|
||||
'size': size})
|
||||
update_data = {'checksum': checksum,
|
||||
'size': size}
|
||||
try:
|
||||
@ -162,7 +162,7 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
|
||||
content_type='text/plain')
|
||||
|
||||
except exception.Duplicate as e:
|
||||
msg = _("Attempt to upload duplicate image: %s") % e
|
||||
msg = "Attempt to upload duplicate image: %s" % e
|
||||
LOG.debug(msg)
|
||||
# NOTE(dosaboy): do not delete the image since it is likely that this
|
||||
# conflict is a result of another concurrent upload that will be
|
||||
@ -173,7 +173,7 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
|
||||
content_type="text/plain")
|
||||
|
||||
except exception.Forbidden as e:
|
||||
msg = _("Forbidden upload attempt: %s") % e
|
||||
msg = "Forbidden upload attempt: %s" % e
|
||||
LOG.debug(msg)
|
||||
safe_kill(req, image_id)
|
||||
notifier.error('image.upload', msg)
|
||||
@ -231,7 +231,7 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
|
||||
safe_kill(req, image_id)
|
||||
|
||||
except (ValueError, IOError) as e:
|
||||
msg = _("Client disconnected before sending all data to backend")
|
||||
msg = "Client disconnected before sending all data to backend"
|
||||
LOG.debug(msg)
|
||||
safe_kill(req, image_id)
|
||||
raise webob.exc.HTTPBadRequest(explanation=msg,
|
||||
|
@ -86,7 +86,7 @@ class ImageDataController(object):
|
||||
content_type='text/plain')
|
||||
|
||||
except ValueError as e:
|
||||
LOG.debug(_("Cannot save data for image %(id)s: %(e)s"),
|
||||
LOG.debug("Cannot save data for image %(id)s: %(e)s",
|
||||
{'id': image_id, 'e': utils.exception_to_str(e)})
|
||||
self._restore(image_repo, image)
|
||||
raise webob.exc.HTTPBadRequest(explanation=
|
||||
@ -98,7 +98,7 @@ class ImageDataController(object):
|
||||
raise webob.exc.HTTPConflict(explanation=e.msg, request=req)
|
||||
|
||||
except exception.Forbidden as e:
|
||||
msg = (_("Not allowed to upload image data for image %s") %
|
||||
msg = ("Not allowed to upload image data for image %s" %
|
||||
image_id)
|
||||
LOG.debug(msg)
|
||||
raise webob.exc.HTTPForbidden(explanation=msg, request=req)
|
||||
|
@ -101,8 +101,8 @@ class ImageService(object):
|
||||
if self.auth_token:
|
||||
headers.setdefault('x-auth-token', self.auth_token)
|
||||
|
||||
logging.debug(_('Request: %(method)s http://%(server)s:%(port)s'
|
||||
'%(url)s with headers %(headers)s')
|
||||
logging.debug('Request: %(method)s http://%(server)s:%(port)s'
|
||||
'%(url)s with headers %(headers)s'
|
||||
% {'method': method,
|
||||
'server': self.conn.host,
|
||||
'port': self.conn.port,
|
||||
@ -114,7 +114,7 @@ class ImageService(object):
|
||||
headers = self._header_list_to_dict(response.getheaders())
|
||||
code = response.status
|
||||
code_description = httplib.responses[code]
|
||||
logging.debug(_('Response: %(code)s %(status)s %(headers)s')
|
||||
logging.debug('Response: %(code)s %(status)s %(headers)s'
|
||||
% {'code': code,
|
||||
'status': code_description,
|
||||
'headers': repr(headers)})
|
||||
@ -236,7 +236,7 @@ class ImageService(object):
|
||||
response = self._http_request('POST', url, headers, image_data)
|
||||
headers = self._header_list_to_dict(response.getheaders())
|
||||
|
||||
logging.debug(_('Image post done'))
|
||||
logging.debug('Image post done')
|
||||
body = response.read()
|
||||
return headers, body
|
||||
|
||||
@ -255,7 +255,7 @@ class ImageService(object):
|
||||
response = self._http_request('PUT', url, headers, '')
|
||||
headers = self._header_list_to_dict(response.getheaders())
|
||||
|
||||
logging.debug(_('Image post done'))
|
||||
logging.debug('Image post done')
|
||||
body = response.read()
|
||||
return headers, body
|
||||
|
||||
@ -289,7 +289,7 @@ def replication_size(options, args):
|
||||
client = imageservice(httplib.HTTPConnection(server, port),
|
||||
options.slavetoken)
|
||||
for image in client.get_images():
|
||||
logging.debug(_('Considering image: %(image)s') % {'image': image})
|
||||
logging.debug('Considering image: %(image)s' % {'image': image})
|
||||
if image['status'] == 'active':
|
||||
total_size += int(image['size'])
|
||||
count += 1
|
||||
@ -354,15 +354,15 @@ def _dict_diff(a, b):
|
||||
"""
|
||||
# Only things the master has which the slave lacks matter
|
||||
if set(a.keys()) - set(b.keys()):
|
||||
logging.debug(_('metadata diff -- master has extra keys: %(keys)s')
|
||||
logging.debug('metadata diff -- master has extra keys: %(keys)s'
|
||||
% {'keys': ' '.join(set(a.keys()) - set(b.keys()))})
|
||||
return True
|
||||
|
||||
for key in a:
|
||||
if str(a[key]) != str(b[key]):
|
||||
logging.debug(_('metadata diff -- value differs for key '
|
||||
'%(key)s: master "%(master_value)s" vs '
|
||||
'slave "%(slave_value)s"') %
|
||||
logging.debug('metadata diff -- value differs for key '
|
||||
'%(key)s: master "%(master_value)s" vs '
|
||||
'slave "%(slave_value)s"' %
|
||||
{'key': key, 'master_value': a[key],
|
||||
'slave_value': b[key]})
|
||||
return True
|
||||
@ -413,20 +413,20 @@ def replication_load(options, args):
|
||||
# Remove keys which don't make sense for replication
|
||||
for key in options.dontreplicate.split(' '):
|
||||
if key in meta:
|
||||
logging.debug(_('Stripping %(header)s from saved '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from saved '
|
||||
'metadata', {'header': key})
|
||||
del meta[key]
|
||||
|
||||
if _image_present(client, image_uuid):
|
||||
# NOTE(mikal): Perhaps we just need to update the metadata?
|
||||
# Note that we don't attempt to change an image file once it
|
||||
# has been uploaded.
|
||||
logging.debug(_('Image %s already present'), image_uuid)
|
||||
logging.debug('Image %s already present', image_uuid)
|
||||
headers = client.get_image_meta(image_uuid)
|
||||
for key in options.dontreplicate.split(' '):
|
||||
if key in headers:
|
||||
logging.debug(_('Stripping %(header)s from slave '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from slave '
|
||||
'metadata', {'header': key})
|
||||
del headers[key]
|
||||
|
||||
if _dict_diff(meta, headers):
|
||||
@ -482,7 +482,7 @@ def replication_livecopy(options, args):
|
||||
logging.info(_('Considering %(id)s') % {'id': image['id']})
|
||||
for key in options.dontreplicate.split(' '):
|
||||
if key in image:
|
||||
logging.debug(_('Stripping %(header)s from master metadata'),
|
||||
logging.debug('Stripping %(header)s from master metadata',
|
||||
{'header': key})
|
||||
del image[key]
|
||||
|
||||
@ -494,12 +494,12 @@ def replication_livecopy(options, args):
|
||||
if headers['status'] == 'active':
|
||||
for key in options.dontreplicate.split(' '):
|
||||
if key in image:
|
||||
logging.debug(_('Stripping %(header)s from master '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from master '
|
||||
'metadata', {'header': key})
|
||||
del image[key]
|
||||
if key in headers:
|
||||
logging.debug(_('Stripping %(header)s from slave '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from slave '
|
||||
'metadata', {'header': key})
|
||||
del headers[key]
|
||||
|
||||
if _dict_diff(image, headers):
|
||||
@ -553,12 +553,12 @@ def replication_compare(options, args):
|
||||
headers = slave_client.get_image_meta(image['id'])
|
||||
for key in options.dontreplicate.split(' '):
|
||||
if key in image:
|
||||
logging.debug(_('Stripping %(header)s from master '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from master '
|
||||
'metadata', {'header': key})
|
||||
del image[key]
|
||||
if key in headers:
|
||||
logging.debug(_('Stripping %(header)s from slave '
|
||||
'metadata'), {'header': key})
|
||||
logging.debug('Stripping %(header)s from slave '
|
||||
'metadata', {'header': key})
|
||||
del headers[key]
|
||||
|
||||
for key in image:
|
||||
@ -573,7 +573,7 @@ def replication_compare(options, args):
|
||||
'undefined')})
|
||||
differences[image['id']] = 'diff'
|
||||
else:
|
||||
logging.debug(_('%(image_id)s is identical')
|
||||
logging.debug('%(image_id)s is identical'
|
||||
% {'image_id': image['id']})
|
||||
|
||||
elif image['status'] == 'active':
|
||||
|
@ -306,7 +306,7 @@ class BaseClient(object):
|
||||
|
||||
<http|https>://<host>:port/doc_root
|
||||
"""
|
||||
LOG.debug(_("Configuring from URL: %s"), url)
|
||||
LOG.debug("Configuring from URL: %s", url)
|
||||
parsed = urlparse.urlparse(url)
|
||||
self.use_ssl = parsed.scheme == 'https'
|
||||
self.host = parsed.hostname
|
||||
@ -318,7 +318,7 @@ class BaseClient(object):
|
||||
if self.DEFAULT_DOC_ROOT:
|
||||
doc_root = self.DEFAULT_DOC_ROOT.lstrip('/')
|
||||
self.doc_root += '/' + doc_root
|
||||
msg = (_("Appending doc_root %(doc_root)s to URL %(url)s") %
|
||||
msg = ("Appending doc_root %(doc_root)s to URL %(url)s" %
|
||||
{'doc_root': doc_root, 'url': url})
|
||||
LOG.debug(msg)
|
||||
|
||||
|
@ -210,7 +210,7 @@ def load_paste_app(app_name, flavor=None, conf_file=None):
|
||||
|
||||
try:
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.debug(_("Loading %(app_name)s from %(conf_file)s"),
|
||||
logger.debug("Loading %(app_name)s from %(conf_file)s",
|
||||
{'conf_file': conf_file, 'app_name': app_name})
|
||||
|
||||
app = deploy.loadapp("config:%s" % conf_file, name=app_name)
|
||||
|
@ -436,7 +436,7 @@ def mutating(func):
|
||||
@functools.wraps(func)
|
||||
def wrapped(self, req, *args, **kwargs):
|
||||
if req.context.read_only:
|
||||
msg = _("Read-only access")
|
||||
msg = "Read-only access"
|
||||
LOG.debug(msg)
|
||||
raise exc.HTTPForbidden(msg, request=req,
|
||||
content_type="text/plain")
|
||||
|
@ -275,7 +275,7 @@ class Server(object):
|
||||
break
|
||||
eventlet.greenio.shutdown_safe(self.sock)
|
||||
self.sock.close()
|
||||
self.logger.debug(_('Exited'))
|
||||
self.logger.debug('Exited')
|
||||
|
||||
def wait(self):
|
||||
"""Wait until all servers have completed running."""
|
||||
|
@ -737,7 +737,7 @@ def task_update(context, task_id, values):
|
||||
try:
|
||||
task = DATA['tasks'][task_id]
|
||||
except KeyError:
|
||||
msg = (_("No task found with ID %s") % task_id)
|
||||
msg = "No task found with ID %s" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.TaskNotFound(task_id=task_id)
|
||||
|
||||
@ -769,7 +769,7 @@ def _task_get(context, task_id, force_show_deleted=False):
|
||||
raise exception.TaskNotFound(task_id=task_id)
|
||||
|
||||
if not _is_task_visible(context, task):
|
||||
msg = (_("Forbidding request, task %s is not visible") % task_id)
|
||||
msg = "Forbidding request, task %s is not visible" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(msg)
|
||||
|
||||
@ -787,7 +787,7 @@ def task_delete(context, task_id):
|
||||
DATA['tasks'][task_id]['updated_at'] = timeutils.utcnow()
|
||||
return copy.deepcopy(DATA['tasks'][task_id])
|
||||
except KeyError:
|
||||
msg = (_("No task found with ID %s") % task_id)
|
||||
msg = "No task found with ID %s" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.TaskNotFound(task_id=task_id)
|
||||
|
||||
@ -909,7 +909,7 @@ def _task_info_update(task_id, values):
|
||||
try:
|
||||
task_info = DATA['task_info'][task_id]
|
||||
except KeyError:
|
||||
msg = (_("No task info found with task id %s") % task_id)
|
||||
msg = "No task info found with task id %s" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.TaskNotFound(task_id=task_id)
|
||||
|
||||
|
@ -178,13 +178,13 @@ def _image_get(context, image_id, session=None, force_show_deleted=False):
|
||||
image = query.one()
|
||||
|
||||
except sa_orm.exc.NoResultFound:
|
||||
msg = (_("No image found with ID %s") % image_id)
|
||||
msg = "No image found with ID %s" % image_id
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
|
||||
# Make sure they can look at it
|
||||
if not is_image_visible(context, image):
|
||||
msg = (_("Forbidding request, image %s not visible") % image_id)
|
||||
msg = "Forbidding request, image %s not visible" % image_id
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(msg)
|
||||
|
||||
@ -1081,7 +1081,7 @@ def _task_info_get(context, task_id, session=None):
|
||||
try:
|
||||
task_info_ref = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
msg = (_("TaskInfo was not found for task with id %(task_id)s") %
|
||||
msg = ("TaskInfo was not found for task with id %(task_id)s" %
|
||||
{'task_id': task_id})
|
||||
LOG.debug(msg)
|
||||
task_info_ref = None
|
||||
@ -1247,13 +1247,13 @@ def _task_get(context, task_id, session=None, force_show_deleted=False):
|
||||
try:
|
||||
task_ref = query.one()
|
||||
except sa_orm.exc.NoResultFound:
|
||||
msg = (_("No task found with ID %s") % task_id)
|
||||
msg = "No task found with ID %s" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.TaskNotFound(task_id=task_id)
|
||||
|
||||
# Make sure the task is visible
|
||||
if not _is_task_visible(context, task_ref):
|
||||
msg = (_("Forbidding request, task %s is not visible") % task_id)
|
||||
msg = "Forbidding request, task %s is not visible" % task_id
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(msg)
|
||||
|
||||
|
0
glance/hacking/__init__.py
Normal file
0
glance/hacking/__init__.py
Normal file
36
glance/hacking/checks.py
Normal file
36
glance/hacking/checks.py
Normal file
@ -0,0 +1,36 @@
|
||||
# Copyright (c) 2014 OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
def no_translate_debug_logs(logical_line, filename):
    """Check that debug-level log messages are not translated.

    N319 -- per the OpenStack logging standards, debug-level log
    messages must not be wrapped in ``_()``: they are developer-facing
    and are deliberately excluded from translation to help prioritize
    translator effort.

    :param logical_line: the logical line of source being checked
    :param filename: path of the file the line belongs to; the check
                     only applies inside the listed glance directories
    :returns: yields ``(offset, message)`` tuples for each violation
    """
    # Directories in which the check is enforced.
    dirs = [
        "glance/api",
        "glance/cmd",
        "glance/common",
        "glance/db",
        "glance/domain",
        "glance/image_cache",
        "glance/quota",
        "glance/registry",
        "glance/store",
        "glance/tests",
    ]

    # any() is the idiomatic boolean test here: it short-circuits,
    # avoids building a throwaway list, and (unlike max() over a
    # list) does not raise ValueError if dirs were ever empty.
    if any(name in filename for name in dirs):
        if logical_line.startswith("LOG.debug(_("):
            yield(0, "N319: Don't translate debug level logs")
||||
|
||||
|
||||
def factory(register):
    """Hacking entry point: register all Glance-local style checks.

    :param register: callable provided by the hacking framework that
                     takes a check function and installs it
    """
    register(no_translate_debug_logs)
|
@ -172,12 +172,12 @@ class ImageCache(object):
|
||||
max_size = CONF.image_cache_max_size
|
||||
current_size = self.driver.get_cache_size()
|
||||
if max_size > current_size:
|
||||
LOG.debug(_("Image cache has free space, skipping prune..."))
|
||||
LOG.debug("Image cache has free space, skipping prune...")
|
||||
return (0, 0)
|
||||
|
||||
overage = current_size - max_size
|
||||
LOG.debug(_("Image cache currently %(overage)d bytes over max "
|
||||
"size. Starting prune to max size of %(max_size)d "),
|
||||
LOG.debug("Image cache currently %(overage)d bytes over max "
|
||||
"size. Starting prune to max size of %(max_size)d ",
|
||||
{'overage': overage, 'max_size': max_size})
|
||||
|
||||
total_bytes_pruned = 0
|
||||
@ -185,7 +185,7 @@ class ImageCache(object):
|
||||
entry = self.driver.get_least_recently_accessed()
|
||||
while entry and current_size > max_size:
|
||||
image_id, size = entry
|
||||
LOG.debug(_("Pruning '%(image_id)s' to free %(size)d bytes"),
|
||||
LOG.debug("Pruning '%(image_id)s' to free %(size)d bytes",
|
||||
{'image_id': image_id, 'size': size})
|
||||
self.driver.delete_cached_image(image_id)
|
||||
total_bytes_pruned = total_bytes_pruned + size
|
||||
@ -193,9 +193,9 @@ class ImageCache(object):
|
||||
current_size = current_size - size
|
||||
entry = self.driver.get_least_recently_accessed()
|
||||
|
||||
LOG.debug(_("Pruning finished pruning. "
|
||||
"Pruned %(total_files_pruned)d and "
|
||||
"%(total_bytes_pruned)d."),
|
||||
LOG.debug("Pruning finished pruning. "
|
||||
"Pruned %(total_files_pruned)d and "
|
||||
"%(total_bytes_pruned)d.",
|
||||
{'total_files_pruned': total_files_pruned,
|
||||
'total_bytes_pruned': total_bytes_pruned})
|
||||
return total_files_pruned, total_bytes_pruned
|
||||
@ -232,7 +232,7 @@ class ImageCache(object):
|
||||
if not self.driver.is_cacheable(image_id):
|
||||
return image_iter
|
||||
|
||||
LOG.debug(_("Tee'ing image '%s' into cache"), image_id)
|
||||
LOG.debug("Tee'ing image '%s' into cache", image_id)
|
||||
|
||||
return self.cache_tee_iter(image_id, image_iter, image_checksum)
|
||||
|
||||
|
@ -158,7 +158,7 @@ class Driver(base.Driver):
|
||||
"""
|
||||
Returns a list of records about cached images.
|
||||
"""
|
||||
LOG.debug(_("Gathering cached image entries."))
|
||||
LOG.debug("Gathering cached image entries.")
|
||||
with self.get_db() as db:
|
||||
cur = db.execute("""SELECT
|
||||
image_id, hits, last_accessed, last_modified, size
|
||||
@ -301,8 +301,8 @@ class Driver(base.Driver):
|
||||
def commit():
|
||||
with self.get_db() as db:
|
||||
final_path = self.get_image_filepath(image_id)
|
||||
LOG.debug(_("Fetch finished, moving "
|
||||
"'%(incomplete_path)s' to '%(final_path)s'"),
|
||||
LOG.debug("Fetch finished, moving "
|
||||
"'%(incomplete_path)s' to '%(final_path)s'",
|
||||
dict(incomplete_path=incomplete_path,
|
||||
final_path=final_path))
|
||||
os.rename(incomplete_path, final_path)
|
||||
@ -325,9 +325,9 @@ class Driver(base.Driver):
|
||||
if os.path.exists(incomplete_path):
|
||||
invalid_path = self.get_image_filepath(image_id, 'invalid')
|
||||
|
||||
LOG.debug(_("Fetch of cache file failed (%(e)s), rolling "
|
||||
"back by moving '%(incomplete_path)s' to "
|
||||
"'%(invalid_path)s'"),
|
||||
LOG.debug("Fetch of cache file failed (%(e)s), rolling "
|
||||
"back by moving '%(incomplete_path)s' to "
|
||||
"'%(invalid_path)s'",
|
||||
{'e': e,
|
||||
'incomplete_path': incomplete_path,
|
||||
'invalid_path': invalid_path})
|
||||
@ -485,7 +485,7 @@ class Driver(base.Driver):
|
||||
|
||||
def delete_cached_file(path):
|
||||
if os.path.exists(path):
|
||||
LOG.debug(_("Deleting image cache file '%s'"), path)
|
||||
LOG.debug("Deleting image cache file '%s'", path)
|
||||
os.unlink(path)
|
||||
else:
|
||||
LOG.warn(_("Cached image file '%s' doesn't exist, unable to"
|
||||
|
@ -142,7 +142,7 @@ class Driver(base.Driver):
|
||||
"""
|
||||
Returns a list of records about cached images.
|
||||
"""
|
||||
LOG.debug(_("Gathering cached image entries."))
|
||||
LOG.debug("Gathering cached image entries.")
|
||||
entries = []
|
||||
for path in get_all_regular_files(self.base_dir):
|
||||
image_id = os.path.basename(path)
|
||||
@ -270,25 +270,25 @@ class Driver(base.Driver):
|
||||
set_attr('hits', 0)
|
||||
|
||||
final_path = self.get_image_filepath(image_id)
|
||||
LOG.debug(_("Fetch finished, moving "
|
||||
"'%(incomplete_path)s' to '%(final_path)s'"),
|
||||
LOG.debug("Fetch finished, moving "
|
||||
"'%(incomplete_path)s' to '%(final_path)s'",
|
||||
dict(incomplete_path=incomplete_path,
|
||||
final_path=final_path))
|
||||
os.rename(incomplete_path, final_path)
|
||||
|
||||
# Make sure that we "pop" the image from the queue...
|
||||
if self.is_queued(image_id):
|
||||
LOG.debug(_("Removing image '%s' from queue after "
|
||||
"caching it."), image_id)
|
||||
LOG.debug("Removing image '%s' from queue after "
|
||||
"caching it.", image_id)
|
||||
os.unlink(self.get_image_filepath(image_id, 'queue'))
|
||||
|
||||
def rollback(e):
|
||||
set_attr('error', utils.exception_to_str(e))
|
||||
|
||||
invalid_path = self.get_image_filepath(image_id, 'invalid')
|
||||
LOG.debug(_("Fetch of cache file failed (%(e)s), rolling back by "
|
||||
"moving '%(incomplete_path)s' to "
|
||||
"'%(invalid_path)s'"),
|
||||
LOG.debug("Fetch of cache file failed (%(e)s), rolling back by "
|
||||
"moving '%(incomplete_path)s' to "
|
||||
"'%(invalid_path)s'",
|
||||
{'e': utils.exception_to_str(e),
|
||||
'incomplete_path': incomplete_path,
|
||||
'invalid_path': invalid_path})
|
||||
@ -351,7 +351,7 @@ class Driver(base.Driver):
|
||||
return False
|
||||
|
||||
path = self.get_image_filepath(image_id, 'queue')
|
||||
LOG.debug(_("Queueing image '%s'."), image_id)
|
||||
LOG.debug("Queueing image '%s'.", image_id)
|
||||
|
||||
# Touch the file to add it to the queue
|
||||
with open(path, "w"):
|
||||
@ -381,13 +381,13 @@ class Driver(base.Driver):
|
||||
mtime = os.path.getmtime(path)
|
||||
age = now - mtime
|
||||
if not grace:
|
||||
LOG.debug(_("No grace period, reaping '%(path)s'"
|
||||
" immediately"), {'path': path})
|
||||
LOG.debug("No grace period, reaping '%(path)s'"
|
||||
" immediately", {'path': path})
|
||||
delete_cached_file(path)
|
||||
reaped += 1
|
||||
elif age > grace:
|
||||
LOG.debug(_("Cache entry '%(path)s' exceeds grace period, "
|
||||
"(%(age)i s > %(grace)i s)"),
|
||||
LOG.debug("Cache entry '%(path)s' exceeds grace period, "
|
||||
"(%(age)i s > %(grace)i s)",
|
||||
{'path': path, 'age': age, 'grace': grace})
|
||||
delete_cached_file(path)
|
||||
reaped += 1
|
||||
@ -436,7 +436,7 @@ def get_all_regular_files(basepath):
|
||||
|
||||
def delete_cached_file(path):
|
||||
if os.path.exists(path):
|
||||
LOG.debug(_("Deleting image cache file '%s'"), path)
|
||||
LOG.debug("Deleting image cache file '%s'", path)
|
||||
os.unlink(path)
|
||||
else:
|
||||
LOG.warn(_("Cached image file '%s' doesn't exist, unable to"
|
||||
|
@ -53,7 +53,7 @@ class Prefetcher(base.CacheApp):
|
||||
|
||||
location = image_meta['location']
|
||||
image_data, image_size = glance.store.get_from_backend(ctx, location)
|
||||
LOG.debug(_("Caching image '%s'"), image_id)
|
||||
LOG.debug("Caching image '%s'", image_id)
|
||||
cache_tee_iter = self.cache.cache_tee_iter(image_id, image_data,
|
||||
image_meta['checksum'])
|
||||
# Image is tee'd into cache and checksum verified
|
||||
@ -65,11 +65,11 @@ class Prefetcher(base.CacheApp):
|
||||
|
||||
images = self.cache.get_queued_images()
|
||||
if not images:
|
||||
LOG.debug(_("Nothing to prefetch."))
|
||||
LOG.debug("Nothing to prefetch.")
|
||||
return True
|
||||
|
||||
num_images = len(images)
|
||||
LOG.debug(_("Found %d images to prefetch"), num_images)
|
||||
LOG.debug("Found %d images to prefetch", num_images)
|
||||
|
||||
pool = eventlet.GreenPool(num_images)
|
||||
results = pool.imap(self.fetch_image_into_cache, images)
|
||||
|
@ -441,9 +441,9 @@ class Controller(object):
|
||||
|
||||
purge_props = req.headers.get("X-Glance-Registry-Purge-Props", "false")
|
||||
try:
|
||||
LOG.debug(_("Updating image %(id)s with metadata: "
|
||||
"%(image_data)r"), {'id': id,
|
||||
'image_data': image_data})
|
||||
LOG.debug("Updating image %(id)s with metadata: "
|
||||
"%(image_data)r", {'id': id,
|
||||
'image_data': image_data})
|
||||
image_data = _normalize_image_location_for_db(image_data)
|
||||
if purge_props == "true":
|
||||
purge_props = True
|
||||
|
@ -294,7 +294,7 @@ class Controller(object):
|
||||
if members:
|
||||
self.db_api.image_member_delete(req.context, members[0]['id'])
|
||||
else:
|
||||
msg = (_("%(id)s is not a member of image %(image_id)s") %
|
||||
msg = ("%(id)s is not a member of image %(image_id)s" %
|
||||
{'id': id, 'image_id': image_id})
|
||||
LOG.debug(msg)
|
||||
msg = _("Membership could not be found.")
|
||||
|
@ -156,21 +156,21 @@ def get_image_metadata(context, image_id):
|
||||
|
||||
|
||||
def add_image_metadata(context, image_meta):
|
||||
LOG.debug(_("Adding image metadata..."))
|
||||
LOG.debug("Adding image metadata...")
|
||||
c = get_registry_client(context)
|
||||
return c.add_image(image_meta)
|
||||
|
||||
|
||||
def update_image_metadata(context, image_id, image_meta,
|
||||
purge_props=False, from_state=None):
|
||||
LOG.debug(_("Updating image metadata for image %s..."), image_id)
|
||||
LOG.debug("Updating image metadata for image %s...", image_id)
|
||||
c = get_registry_client(context)
|
||||
return c.update_image(image_id, image_meta, purge_props=purge_props,
|
||||
from_state=from_state)
|
||||
|
||||
|
||||
def delete_image_metadata(context, image_id):
|
||||
LOG.debug(_("Deleting image metadata for image %s..."), image_id)
|
||||
LOG.debug("Deleting image metadata for image %s...", image_id)
|
||||
c = get_registry_client(context)
|
||||
return c.delete_image(image_id)
|
||||
|
||||
|
@ -108,8 +108,8 @@ class RegistryClient(BaseClient):
|
||||
**kwargs)
|
||||
status = res.status
|
||||
request_id = res.getheader('x-openstack-request-id')
|
||||
msg = (_("Registry request %(method)s %(action)s HTTP %(status)s"
|
||||
" request id %(request_id)s") %
|
||||
msg = ("Registry request %(method)s %(action)s HTTP %(status)s"
|
||||
" request id %(request_id)s" %
|
||||
{'method': method, 'action': action,
|
||||
'status': status, 'request_id': request_id})
|
||||
LOG.debug(msg)
|
||||
|
@ -378,10 +378,10 @@ class Daemon(object):
|
||||
LOG.info(msg)
|
||||
|
||||
def _run(self, application):
|
||||
LOG.debug(_("Running application"))
|
||||
LOG.debug("Running application")
|
||||
self.pool.spawn_n(application.run, self.pool, self.event)
|
||||
eventlet.spawn_after(self.wakeup_time, self._run, application)
|
||||
LOG.debug(_("Next run scheduled in %s seconds") % self.wakeup_time)
|
||||
LOG.debug("Next run scheduled in %s seconds" % self.wakeup_time)
|
||||
|
||||
|
||||
class Scrubber(object):
|
||||
@ -449,7 +449,7 @@ class Scrubber(object):
|
||||
uri = crypt.urlsafe_decrypt(CONF.metadata_encryption_key, uri)
|
||||
|
||||
try:
|
||||
LOG.debug(_("Deleting URI from image %(image_id)s.") %
|
||||
LOG.debug("Deleting URI from image %(image_id)s." %
|
||||
{'image_id': image_id})
|
||||
|
||||
# Here we create a request context with credentials to support
|
||||
|
@ -166,7 +166,7 @@ def _register_stores(store_classes):
|
||||
def _get_store_class(store_entry):
|
||||
store_cls = None
|
||||
try:
|
||||
LOG.debug(_("Attempting to import store %s"), store_entry)
|
||||
LOG.debug("Attempting to import store %s", store_entry)
|
||||
store_cls = importutils.import_class(store_entry)
|
||||
except exception.NotFound:
|
||||
raise BackendException('Unable to load store. '
|
||||
@ -208,9 +208,9 @@ def create_stores():
|
||||
% store_cls)
|
||||
else:
|
||||
if store_cls not in store_classes:
|
||||
LOG.debug(_("Registering store %(cls)s with schemes "
|
||||
"%(schemes)s"), {'cls': store_cls,
|
||||
'schemes': schemes})
|
||||
LOG.debug("Registering store %(cls)s with schemes "
|
||||
"%(schemes)s", {'cls': store_cls,
|
||||
'schemes': schemes})
|
||||
store_classes.add(store_cls)
|
||||
scheme_map = {}
|
||||
for scheme in schemes:
|
||||
@ -222,7 +222,7 @@ def create_stores():
|
||||
location.register_scheme_map(scheme_map)
|
||||
store_count += 1
|
||||
else:
|
||||
LOG.debug(_("Store %s already registered"), store_cls)
|
||||
LOG.debug("Store %s already registered", store_cls)
|
||||
_register_stores(store_classes)
|
||||
return store_count
|
||||
|
||||
@ -426,7 +426,7 @@ def set_acls(context, location_uri, public=False, read_tenants=None,
|
||||
store.set_acls(loc, public=public, read_tenants=read_tenants,
|
||||
write_tenants=write_tenants)
|
||||
except NotImplementedError:
|
||||
LOG.debug(_("Skipping store.set_acls... not implemented."))
|
||||
LOG.debug("Skipping store.set_acls... not implemented.")
|
||||
|
||||
|
||||
class ImageRepoProxy(glance.domain.proxy.Repo):
|
||||
|
@ -65,7 +65,7 @@ class Store(object):
|
||||
"""
|
||||
if not self.store_location_class:
|
||||
class_name = "%s.StoreLocation" % (self.__module__)
|
||||
LOG.debug(_("Late loading location class %s"), class_name)
|
||||
LOG.debug("Late loading location class %s", class_name)
|
||||
self.store_location_class = importutils.import_class(class_name)
|
||||
return self.store_location_class
|
||||
|
||||
|
@ -85,7 +85,7 @@ def get_cinderclient(context):
|
||||
service_name=service_name,
|
||||
endpoint_type=endpoint_type)
|
||||
|
||||
LOG.debug(_('Cinderclient connection created using URL: %s') % url)
|
||||
LOG.debug('Cinderclient connection created using URL: %s' % url)
|
||||
|
||||
c = cinderclient.Client(context.user,
|
||||
context.auth_tok,
|
||||
|
@ -75,7 +75,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
self.scheme = pieces.scheme
|
||||
path = (pieces.netloc + pieces.path).strip()
|
||||
if path == '':
|
||||
reason = _("No path specified in URI: %s") % uri
|
||||
reason = "No path specified in URI: %s" % uri
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri('No path specified')
|
||||
self.path = path
|
||||
@ -307,7 +307,7 @@ class Store(glance.store.base.Store):
|
||||
:raises `glance.exception.NotFound` if image does not exist
|
||||
"""
|
||||
filepath, filesize = self._resolve_location(location)
|
||||
msg = _("Found image at %s. Returning in ChunkedFile.") % filepath
|
||||
msg = "Found image at %s. Returning in ChunkedFile." % filepath
|
||||
LOG.debug(msg)
|
||||
return (ChunkedFile(filepath), filesize)
|
||||
|
||||
@ -322,7 +322,7 @@ class Store(glance.store.base.Store):
|
||||
:rtype int
|
||||
"""
|
||||
filepath, filesize = self._resolve_location(location)
|
||||
msg = _("Found image at %s.") % filepath
|
||||
msg = "Found image at %s." % filepath
|
||||
LOG.debug(msg)
|
||||
return filesize
|
||||
|
||||
@ -341,7 +341,7 @@ class Store(glance.store.base.Store):
|
||||
fn = loc.path
|
||||
if os.path.exists(fn):
|
||||
try:
|
||||
LOG.debug(_("Deleting image at %(fn)s"), {'fn': fn})
|
||||
LOG.debug("Deleting image at %(fn)s", {'fn': fn})
|
||||
os.unlink(fn)
|
||||
except OSError:
|
||||
raise exception.Forbidden(_("You cannot delete file %s") % fn)
|
||||
@ -448,8 +448,8 @@ class Store(glance.store.base.Store):
|
||||
checksum_hex = checksum.hexdigest()
|
||||
metadata = self._get_metadata()
|
||||
|
||||
LOG.debug(_("Wrote %(bytes_written)d bytes to %(filepath)s with "
|
||||
"checksum %(checksum_hex)s"),
|
||||
LOG.debug("Wrote %(bytes_written)d bytes to %(filepath)s with "
|
||||
"checksum %(checksum_hex)s",
|
||||
{'bytes_written': bytes_written,
|
||||
'filepath': filepath,
|
||||
'checksum_hex': checksum_hex})
|
||||
|
@ -154,8 +154,8 @@ class Store(glance.store.base.Store):
|
||||
parsed = urlparse.urlparse(store_location.get_uri())
|
||||
return self.fs.get(parsed.netloc)
|
||||
except gridfs.errors.NoFile:
|
||||
msg = _("Could not find %s image in GridFS") % \
|
||||
store_location.get_uri()
|
||||
msg = ("Could not find %s image in GridFS"
|
||||
% store_location.get_uri())
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
|
||||
@ -180,9 +180,9 @@ class Store(glance.store.base.Store):
|
||||
raise exception.Duplicate(_("GridFS already has an image at "
|
||||
"location %s") % loc.get_uri())
|
||||
|
||||
LOG.debug(_("Adding a new image to GridFS with id %(id)s and "
|
||||
"size %(size)s") % {'id': image_id,
|
||||
'size': image_size})
|
||||
LOG.debug("Adding a new image to GridFS with id %(id)s and "
|
||||
"size %(size)s" % {'id': image_id,
|
||||
'size': image_size})
|
||||
|
||||
try:
|
||||
self.fs.put(image_file, _id=image_id)
|
||||
@ -193,10 +193,10 @@ class Store(glance.store.base.Store):
|
||||
with excutils.save_and_reraise_exception():
|
||||
self.fs.delete(image_id)
|
||||
|
||||
LOG.debug(_("Uploaded image %(id)s, md5 %(md5)s, length %(length)s "
|
||||
"to GridFS") % {'id': image._id,
|
||||
'md5': image.md5,
|
||||
'length': image.length})
|
||||
LOG.debug("Uploaded image %(id)s, md5 %(md5)s, length %(length)s "
|
||||
"to GridFS" % {'id': image._id,
|
||||
'md5': image.md5,
|
||||
'length': image.length})
|
||||
|
||||
return (loc.get_uri(), image.length, image.md5, {})
|
||||
|
||||
@ -212,4 +212,4 @@ class Store(glance.store.base.Store):
|
||||
"""
|
||||
image = self._get_file(location)
|
||||
self.fs.delete(image._id)
|
||||
LOG.debug(_("Deleted image %s from GridFS"), image._id)
|
||||
LOG.debug("Deleted image %s from GridFS", image._id)
|
||||
|
@ -82,8 +82,8 @@ def register_scheme_map(scheme_map):
|
||||
"""
|
||||
for (k, v) in scheme_map.items():
|
||||
if k not in SCHEME_TO_CLS_MAP:
|
||||
LOG.debug(_("Registering scheme %(k)s with %(v)s") % {'k': k,
|
||||
'v': v})
|
||||
LOG.debug("Registering scheme %(k)s with %(v)s" % {'k': k,
|
||||
'v': v})
|
||||
SCHEME_TO_CLS_MAP[k] = v
|
||||
|
||||
|
||||
|
@ -106,17 +106,17 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
prefix = 'rbd://'
|
||||
if not uri.startswith(prefix):
|
||||
reason = _('URI must start with rbd://')
|
||||
msg = (_("Invalid URI: %(uri)s: %(reason)s") % {'uri': uri,
|
||||
'reason': reason})
|
||||
msg = "Invalid URI: %(uri)s: %(reason)s" % {'uri': uri,
|
||||
'reason': reason}
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
# convert to ascii since librbd doesn't handle unicode
|
||||
try:
|
||||
ascii_uri = str(uri)
|
||||
except UnicodeError:
|
||||
reason = _('URI contains non-ascii characters')
|
||||
msg = (_("Invalid URI: %(uri)s: %(reason)s") % {'uri': uri,
|
||||
'reason': reason})
|
||||
reason = 'URI contains non-ascii characters'
|
||||
msg = "Invalid URI: %(uri)s: %(reason)s" % {'uri': uri,
|
||||
'reason': reason}
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
pieces = ascii_uri[len(prefix):].split('/')
|
||||
@ -127,15 +127,15 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
self.fsid, self.pool, self.image, self.snapshot = \
|
||||
map(urlparse.unquote, pieces)
|
||||
else:
|
||||
reason = _('URI must have exactly 1 or 4 components')
|
||||
msg = (_("Invalid URI: %(uri)s: %(reason)s") % {'uri': uri,
|
||||
'reason': reason})
|
||||
reason = 'URI must have exactly 1 or 4 components'
|
||||
msg = "Invalid URI: %(uri)s: %(reason)s" % {'uri': uri,
|
||||
'reason': reason}
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
if any(map(lambda p: p == '', pieces)):
|
||||
reason = _('URI cannot contain empty components')
|
||||
msg = (_("Invalid URI: %(uri)s: %(reason)s") % {'uri': uri,
|
||||
'reason': reason})
|
||||
reason = 'URI cannot contain empty components'
|
||||
msg = "Invalid URI: %(uri)s: %(reason)s" % {'uri': uri,
|
||||
'reason': reason}
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
|
||||
@ -233,7 +233,7 @@ class Store(glance.store.base.Store):
|
||||
img_info = image.stat()
|
||||
return img_info['size']
|
||||
except rbd.ImageNotFound:
|
||||
msg = _('RBD image %s does not exist') % loc.get_uri()
|
||||
msg = 'RBD image %s does not exist' % loc.get_uri()
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
|
||||
@ -280,9 +280,9 @@ class Store(glance.store.base.Store):
|
||||
try:
|
||||
image.unprotect_snap(snapshot_name)
|
||||
except rbd.ImageBusy:
|
||||
log_msg = _("snapshot %(image)s@%(snap)s "
|
||||
"could not be unprotected because "
|
||||
"it is in use")
|
||||
log_msg = ("snapshot %(image)s@%(snap)s "
|
||||
"could not be unprotected because "
|
||||
"it is in use")
|
||||
LOG.debug(log_msg %
|
||||
{'image': image_name,
|
||||
'snap': snapshot_name})
|
||||
@ -295,8 +295,8 @@ class Store(glance.store.base.Store):
|
||||
raise exception.NotFound(
|
||||
_("RBD image %s does not exist") % image_name)
|
||||
except rbd.ImageBusy:
|
||||
log_msg = _("image %s could not be removed "
|
||||
"because it is in use")
|
||||
log_msg = ("image %s could not be removed "
|
||||
"because it is in use")
|
||||
LOG.debug(log_msg % image_name)
|
||||
raise exception.InUseByStore()
|
||||
|
||||
@ -323,8 +323,8 @@ class Store(glance.store.base.Store):
|
||||
fsid = conn.get_fsid()
|
||||
with conn.open_ioctx(self.pool) as ioctx:
|
||||
order = int(math.log(self.chunk_size, 2))
|
||||
LOG.debug(_('creating image %(name)s with order %(order)d and '
|
||||
'size %(size)d'),
|
||||
LOG.debug('creating image %(name)s with order %(order)d and '
|
||||
'size %(size)d',
|
||||
{'name': text_type(image_name),
|
||||
'order': order,
|
||||
'size': image_size})
|
||||
@ -354,10 +354,10 @@ class Store(glance.store.base.Store):
|
||||
chunk_length = len(chunk)
|
||||
length = offset + chunk_length
|
||||
bytes_written += chunk_length
|
||||
LOG.debug(_("resizing image to %s KiB") %
|
||||
LOG.debug("resizing image to %s KiB" %
|
||||
(length / units.Ki))
|
||||
image.resize(length)
|
||||
LOG.debug(_("writing chunk at offset %s") %
|
||||
LOG.debug("writing chunk at offset %s" %
|
||||
(offset))
|
||||
offset += image.write(chunk, offset)
|
||||
checksum.update(chunk)
|
||||
|
@ -112,15 +112,15 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
# s3://accesskey:secretkey@https://s3.amazonaws.com/bucket/key-id
|
||||
# are immediately rejected.
|
||||
if uri.count('://') != 1:
|
||||
reason = _("URI cannot contain more than one occurrence "
|
||||
"of a scheme. If you have specified a URI like "
|
||||
"s3://accesskey:secretkey@"
|
||||
"https://s3.amazonaws.com/bucket/key-id"
|
||||
", you need to change it to use the "
|
||||
"s3+https:// scheme, like so: "
|
||||
"s3+https://accesskey:secretkey@"
|
||||
"s3.amazonaws.com/bucket/key-id")
|
||||
LOG.debug(_("Invalid store uri: %s") % reason)
|
||||
reason = ("URI cannot contain more than one occurrence "
|
||||
"of a scheme. If you have specified a URI like "
|
||||
"s3://accesskey:secretkey@"
|
||||
"https://s3.amazonaws.com/bucket/key-id"
|
||||
", you need to change it to use the "
|
||||
"s3+https:// scheme, like so: "
|
||||
"s3+https://accesskey:secretkey@"
|
||||
"s3.amazonaws.com/bucket/key-id")
|
||||
LOG.debug("Invalid store uri: %s" % reason)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
|
||||
pieces = urlparse.urlparse(uri)
|
||||
@ -146,7 +146,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
self.accesskey = access_key
|
||||
self.secretkey = secret_key
|
||||
except IndexError:
|
||||
reason = _("Badly formed S3 credentials %s") % creds
|
||||
reason = "Badly formed S3 credentials %s" % creds
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri()
|
||||
else:
|
||||
@ -162,7 +162,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
reason = _("Badly formed S3 URI. Missing s3 service URL.")
|
||||
raise exception.BadStoreUri()
|
||||
except IndexError:
|
||||
reason = _("Badly formed S3 URI: %s") % uri
|
||||
reason = "Badly formed S3 URI: %s" % uri
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri()
|
||||
|
||||
@ -249,8 +249,8 @@ class Store(glance.store.base.Store):
|
||||
def _option_get(self, param):
|
||||
result = getattr(CONF, param)
|
||||
if not result:
|
||||
reason = (_("Could not find %(param)s in configuration "
|
||||
"options.") % {'param': param})
|
||||
reason = ("Could not find %(param)s in configuration "
|
||||
"options." % {'param': param})
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreConfiguration(store_name="s3",
|
||||
reason=reason)
|
||||
@ -304,12 +304,12 @@ class Store(glance.store.base.Store):
|
||||
|
||||
key = get_key(bucket_obj, loc.key)
|
||||
|
||||
msg = _("Retrieved image object from S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(accesskey)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)") % ({'s3_host': loc.s3serviceurl,
|
||||
'accesskey': loc.accesskey,
|
||||
'bucket': loc.bucket,
|
||||
'obj_name': loc.key})
|
||||
msg = ("Retrieved image object from S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(accesskey)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)" % ({'s3_host': loc.s3serviceurl,
|
||||
'accesskey': loc.accesskey,
|
||||
'bucket': loc.bucket,
|
||||
'obj_name': loc.key}))
|
||||
LOG.debug(msg)
|
||||
|
||||
return key
|
||||
@ -368,12 +368,12 @@ class Store(glance.store.base.Store):
|
||||
"location %s") %
|
||||
_sanitize(loc.get_uri()))
|
||||
|
||||
msg = _("Adding image object to S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(access_key)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)") % ({'s3_host': self.s3_host,
|
||||
'access_key': self.access_key,
|
||||
'bucket': self.bucket,
|
||||
'obj_name': obj_name})
|
||||
msg = ("Adding image object to S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(access_key)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)" % ({'s3_host': self.s3_host,
|
||||
'access_key': self.access_key,
|
||||
'bucket': self.bucket,
|
||||
'obj_name': obj_name}))
|
||||
LOG.debug(msg)
|
||||
|
||||
key = bucket_obj.new_key(obj_name)
|
||||
@ -390,8 +390,8 @@ class Store(glance.store.base.Store):
|
||||
# take this opportunity to calculate the image checksum while
|
||||
# writing the tempfile, so we don't need to call key.compute_md5()
|
||||
|
||||
msg = _("Writing request body file to temporary file "
|
||||
"for %s") % _sanitize(loc.get_uri())
|
||||
msg = ("Writing request body file to temporary file "
|
||||
"for %s" % _sanitize(loc.get_uri()))
|
||||
LOG.debug(msg)
|
||||
|
||||
tmpdir = self.s3_store_object_buffer_dir
|
||||
@ -402,7 +402,7 @@ class Store(glance.store.base.Store):
|
||||
temp_file.write(chunk)
|
||||
temp_file.flush()
|
||||
|
||||
msg = (_("Uploading temporary file to S3 for %s") %
|
||||
msg = ("Uploading temporary file to S3 for %s" %
|
||||
_sanitize(loc.get_uri()))
|
||||
LOG.debug(msg)
|
||||
|
||||
@ -411,8 +411,8 @@ class Store(glance.store.base.Store):
|
||||
size = key.size
|
||||
checksum_hex = checksum.hexdigest()
|
||||
|
||||
LOG.debug(_("Wrote %(size)d bytes to S3 key named %(obj_name)s "
|
||||
"with checksum %(checksum_hex)s"),
|
||||
LOG.debug("Wrote %(size)d bytes to S3 key named %(obj_name)s "
|
||||
"with checksum %(checksum_hex)s",
|
||||
{'size': size, 'obj_name': obj_name,
|
||||
'checksum_hex': checksum_hex})
|
||||
|
||||
@ -439,12 +439,12 @@ class Store(glance.store.base.Store):
|
||||
# Close the key when we're through.
|
||||
key = get_key(bucket_obj, loc.key)
|
||||
|
||||
msg = _("Deleting image object from S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(accesskey)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)") % ({'s3_host': loc.s3serviceurl,
|
||||
'accesskey': loc.accesskey,
|
||||
'bucket': loc.bucket,
|
||||
'obj_name': loc.key})
|
||||
msg = ("Deleting image object from S3 using (s3_host=%(s3_host)s, "
|
||||
"access_key=%(accesskey)s, bucket=%(bucket)s, "
|
||||
"key=%(obj_name)s)" % ({'s3_host': loc.s3serviceurl,
|
||||
'accesskey': loc.accesskey,
|
||||
'bucket': loc.bucket,
|
||||
'obj_name': loc.key}))
|
||||
LOG.debug(msg)
|
||||
|
||||
return key.delete()
|
||||
@ -461,7 +461,7 @@ def get_bucket(conn, bucket_id):
|
||||
|
||||
bucket = conn.get_bucket(bucket_id)
|
||||
if not bucket:
|
||||
msg = _("Could not find bucket with ID %s") % bucket_id
|
||||
msg = "Could not find bucket with ID %s" % bucket_id
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
|
||||
@ -525,7 +525,7 @@ def get_key(bucket, obj):
|
||||
|
||||
key = bucket.get_key(obj)
|
||||
if not key or not key.exists():
|
||||
msg = (_("Could not find key %(obj)s in bucket %(bucket)s") %
|
||||
msg = ("Could not find key %(obj)s in bucket %(bucket)s" %
|
||||
{'obj': obj, 'bucket': bucket})
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
|
@ -211,13 +211,13 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
# swift://user:pass@http://authurl.com/v1/container/obj
|
||||
# are immediately rejected.
|
||||
if uri.count('://') != 1:
|
||||
reason = _("URI cannot contain more than one occurrence "
|
||||
"of a scheme. If you have specified a URI like "
|
||||
"swift://user:pass@http://authurl.com/v1/container/obj"
|
||||
", you need to change it to use the "
|
||||
"swift+http:// scheme, like so: "
|
||||
"swift+http://user:pass@authurl.com/v1/container/obj")
|
||||
LOG.debug(_("Invalid store URI: %(reason)s"), {'reason': reason})
|
||||
reason = ("URI cannot contain more than one occurrence "
|
||||
"of a scheme. If you have specified a URI like "
|
||||
"swift://user:pass@http://authurl.com/v1/container/obj"
|
||||
", you need to change it to use the "
|
||||
"swift+http:// scheme, like so: "
|
||||
"swift+http://user:pass@authurl.com/v1/container/obj")
|
||||
LOG.debug("Invalid store URI: %(reason)s", {'reason': reason})
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
|
||||
pieces = urlparse.urlparse(uri)
|
||||
@ -243,7 +243,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
if creds:
|
||||
cred_parts = creds.split(':')
|
||||
if len(cred_parts) != 2:
|
||||
reason = (_("Badly formed credentials in Swift URI."))
|
||||
reason = "Badly formed credentials in Swift URI."
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri()
|
||||
user, key = cred_parts
|
||||
@ -261,7 +261,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
path_parts.insert(0, netloc)
|
||||
self.auth_or_store_url = '/'.join(path_parts)
|
||||
except IndexError:
|
||||
reason = _("Badly formed Swift URI.")
|
||||
reason = "Badly formed Swift URI."
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri()
|
||||
|
||||
@ -372,7 +372,7 @@ class BaseStore(glance.store.base.Store):
|
||||
|
||||
def _delete_stale_chunks(self, connection, container, chunk_list):
|
||||
for chunk in chunk_list:
|
||||
LOG.debug(_("Deleting chunk %s") % chunk)
|
||||
LOG.debug("Deleting chunk %s" % chunk)
|
||||
try:
|
||||
connection.delete_object(container, chunk)
|
||||
except Exception:
|
||||
@ -388,8 +388,8 @@ class BaseStore(glance.store.base.Store):
|
||||
|
||||
self._create_container_if_missing(location.container, connection)
|
||||
|
||||
LOG.debug(_("Adding image object '%(obj_name)s' "
|
||||
"to Swift") % dict(obj_name=location.obj))
|
||||
LOG.debug("Adding image object '%(obj_name)s' "
|
||||
"to Swift" % dict(obj_name=location.obj))
|
||||
try:
|
||||
if image_size > 0 and image_size < self.large_object_size:
|
||||
# Image size is known, and is less than large_object_size.
|
||||
@ -408,8 +408,8 @@ class BaseStore(glance.store.base.Store):
|
||||
# image_size == 0 is when we don't know the size
|
||||
# of the image. This can occur with older clients
|
||||
# that don't inspect the payload size.
|
||||
LOG.debug(_("Cannot determine image size. Adding as a "
|
||||
"segmented object to Swift."))
|
||||
LOG.debug("Cannot determine image size. Adding as a "
|
||||
"segmented object to Swift.")
|
||||
total_chunks = '?'
|
||||
|
||||
checksum = hashlib.md5()
|
||||
@ -444,10 +444,10 @@ class BaseStore(glance.store.base.Store):
|
||||
written_chunks)
|
||||
|
||||
bytes_read = reader.bytes_read
|
||||
msg = (_("Wrote chunk %(chunk_name)s (%(chunk_id)d/"
|
||||
"%(total_chunks)s) of length %(bytes_read)d "
|
||||
"to Swift returning MD5 of content: "
|
||||
"%(chunk_etag)s") %
|
||||
msg = ("Wrote chunk %(chunk_name)s (%(chunk_id)d/"
|
||||
"%(total_chunks)s) of length %(bytes_read)d "
|
||||
"to Swift returning MD5 of content: "
|
||||
"%(chunk_etag)s" %
|
||||
{'chunk_name': chunk_name,
|
||||
'chunk_id': chunk_id,
|
||||
'total_chunks': total_chunks,
|
||||
@ -458,7 +458,7 @@ class BaseStore(glance.store.base.Store):
|
||||
if bytes_read == 0:
|
||||
# Delete the last chunk, because it's of zero size.
|
||||
# This will happen if size == 0.
|
||||
LOG.debug(_("Deleting final zero-length chunk"))
|
||||
LOG.debug("Deleting final zero-length chunk")
|
||||
connection.delete_object(location.container,
|
||||
chunk_name)
|
||||
break
|
||||
@ -607,7 +607,7 @@ class SingleTenantStore(BaseStore):
|
||||
|
||||
def get_connection(self, location):
|
||||
if not location.user:
|
||||
reason = (_("Location is missing user:password information."))
|
||||
reason = "Location is missing user:password information."
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri(message=reason)
|
||||
|
||||
@ -619,8 +619,8 @@ class SingleTenantStore(BaseStore):
|
||||
try:
|
||||
tenant_name, user = location.user.split(':')
|
||||
except ValueError:
|
||||
reason = (_("Badly formed tenant:user '%(user)s' in "
|
||||
"Swift URI") % {'user': location.user})
|
||||
reason = ("Badly formed tenant:user '%(user)s' in "
|
||||
"Swift URI" % {'user': location.user})
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri()
|
||||
else:
|
||||
|
@ -178,8 +178,7 @@ class StoreLocation(glance.store.location.StoreLocation):
|
||||
self.path = path
|
||||
self.query = query
|
||||
return
|
||||
reason = (_('Badly formed VMware datastore URI %(uri)s.')
|
||||
% {'uri': uri})
|
||||
reason = 'Badly formed VMware datastore URI %(uri)s.' % {'uri': uri}
|
||||
LOG.debug(reason)
|
||||
raise exception.BadStoreUri(reason)
|
||||
|
||||
@ -361,8 +360,8 @@ class Store(glance.store.base.Store):
|
||||
|
||||
def _query(self, location, method, headers, depth=0):
|
||||
if depth > MAX_REDIRECTS:
|
||||
msg = (_("The HTTP URL exceeded %(max_redirects)s maximum "
|
||||
"redirects.") % {'max_redirects': MAX_REDIRECTS})
|
||||
msg = ("The HTTP URL exceeded %(max_redirects)s maximum "
|
||||
"redirects." % {'max_redirects': MAX_REDIRECTS})
|
||||
LOG.debug(msg)
|
||||
raise exception.MaxRedirectsExceeded(redirects=MAX_REDIRECTS)
|
||||
loc = location.store_location
|
||||
@ -375,18 +374,18 @@ class Store(glance.store.base.Store):
|
||||
{'image': location.image_id})
|
||||
if resp.status >= 400:
|
||||
if resp.status == httplib.NOT_FOUND:
|
||||
msg = _('VMware datastore could not find image at URI.')
|
||||
msg = 'VMware datastore could not find image at URI.'
|
||||
LOG.debug(msg)
|
||||
raise exception.NotFound(msg)
|
||||
msg = (_('HTTP request returned a %(status)s status code.')
|
||||
msg = ('HTTP request returned a %(status)s status code.'
|
||||
% {'status': resp.status})
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(msg)
|
||||
location_header = resp.getheader('location')
|
||||
if location_header:
|
||||
if resp.status not in (301, 302):
|
||||
msg = (_("The HTTP URL %(path)s attempted to redirect "
|
||||
"with an invalid %(status)s status code.")
|
||||
msg = ("The HTTP URL %(path)s attempted to redirect "
|
||||
"with an invalid %(status)s status code."
|
||||
% {'path': loc.path, 'status': resp.status})
|
||||
LOG.debug(msg)
|
||||
raise exception.BadStoreUri(msg)
|
||||
|
@ -140,7 +140,7 @@ class TestMigrations(test_utils.BaseTestCase):
|
||||
|
||||
# Load test databases from the config file. Only do this
|
||||
# once. No need to re-run this on each test...
|
||||
LOG.debug(_('config_path is %s'),
|
||||
LOG.debug('config_path is %s',
|
||||
text_type(TestMigrations.CONFIG_FILE_PATH))
|
||||
if os.path.exists(TestMigrations.CONFIG_FILE_PATH):
|
||||
cp = ConfigParser.RawConfigParser()
|
||||
@ -334,7 +334,7 @@ class TestMigrations(test_utils.BaseTestCase):
|
||||
init_version + 1)
|
||||
self.assertEqual(init_version + 1, db_version())
|
||||
|
||||
LOG.debug(_('latest version is %s'), TestMigrations.REPOSITORY.latest)
|
||||
LOG.debug('latest version is %s', TestMigrations.REPOSITORY.latest)
|
||||
|
||||
for version in xrange(init_version + 2,
|
||||
TestMigrations.REPOSITORY.latest + 1):
|
||||
|
Loading…
Reference in New Issue
Block a user