Remove log translations
Log messages are no longer being translated. This removes all use of the _LE, _LI, and _LW translation markers to simplify logging and to avoid confusion with new contributions.

See:
http://lists.openstack.org/pipermail/openstack-i18n/2016-November/002574.html
http://lists.openstack.org/pipermail/openstack-dev/2017-March/113365.html

Change-Id: Ieec8028305099422e1b0f8fc84bc90c9ca6c694f
parent 34df3cd915
commit 8f10215ffd
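Every hunk below applies the same mechanical substitution: the _LE/_LI/_LW (and _LC) wrapper is dropped from the log call, while the format string and its interpolation arguments stay exactly as they were, and user-facing messages wrapped with _() keep their translation. A minimal standalone sketch of the pattern, using plain stdlib logging and an illustrative function name rather than Heat's actual modules:

import logging

LOG = logging.getLogger(__name__)


# Before: the message was wrapped in a log-translation marker imported from
# heat.common.i18n, e.g.
#     LOG.warning(_LW("Network '%s' still in use."), network_id)
#
# After: the marker is gone, but the format string and its arguments are
# still passed to the logger separately, so %-interpolation stays lazy and
# only happens when the record is actually emitted.
def warn_network_in_use(network_id):
    LOG.warning("Network '%s' still in use.", network_id)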
@@ -20,13 +20,12 @@ An OpenStack REST API to Heat.
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
-LOG.warning(_LW('DEPRECATED: `heat-api` script is deprecated. Please use the '
-                'system level heat binaries installed to start '
-                'any of the heat services.'))
+LOG.warning('DEPRECATED: `heat-api` script is deprecated. Please use the '
+            'system level heat binaries installed to start '
+            'any of the heat services.')
 
 import os
 import sys
@@ -22,13 +22,12 @@ AMQP RPC to implement them.
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
-LOG.warning(_LW('DEPRECATED: `heat-api-cfn` script is deprecated. Please use '
-                'the system level heat binaries installed to start '
-                'any of the heat services.'))
+LOG.warning('DEPRECATED: `heat-api-cfn` script is deprecated. Please use '
+            'the system level heat binaries installed to start '
+            'any of the heat services.')
 
 import os
 import sys
@@ -22,13 +22,12 @@ implement them.
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
-LOG.warning(_LW('DEPRECATED: `heat-api-cloudwatch` script is deprecated. '
-                'Please use the system level heat binaries installed to '
-                'start any of the heat services.'))
+LOG.warning('DEPRECATED: `heat-api-cloudwatch` script is deprecated. '
+            'Please use the system level heat binaries installed to '
+            'start any of the heat services.')
 
 import os
 import sys
@@ -22,13 +22,12 @@ engine.
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
-LOG.warning(_LW('DEPRECATED: `heat-engine` script is deprecated. '
-                'Please use the system level heat binaries installed to '
-                'start any of the heat services.'))
+LOG.warning('DEPRECATED: `heat-engine` script is deprecated. '
+            'Please use the system level heat binaries installed to '
+            'start any of the heat services.')
 
 import os
 import sys
@@ -15,12 +15,11 @@
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
-LOG.warning(_LW('DEPRECATED: `heat-manage` script is deprecated. Please use '
-                'the system level heat-manage binary.'))
+LOG.warning('DEPRECATED: `heat-manage` script is deprecated. Please use '
+            'the system level heat-manage binary.')
 
 import os
 import sys
@@ -21,7 +21,6 @@ import six
 
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LW
 from heat.engine import attributes
 from heat.engine import constraints
 from heat.engine import properties
@@ -554,8 +553,8 @@ def available_resource_mapping():
     if DOCKER_INSTALLED:
         return resource_mapping()
     else:
-        LOG.warning(_LW("Docker plug-in loaded, but docker lib "
-                        "not installed."))
+        LOG.warning("Docker plug-in loaded, but docker lib "
+                    "not installed.")
         return {}
@@ -21,12 +21,9 @@ from oslo_log import log as logging
 from oslo_utils import importutils
 
 from heat.common import exception
-from heat.common.i18n import _LE
-from heat.common.i18n import _LI
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger('heat.common.keystoneclient')
-LOG.info(_LI("Keystone V2 loaded"))
+LOG.info("Keystone V2 loaded")
 
 
 class KeystoneClientV2(object):
@@ -100,8 +97,8 @@ class KeystoneClientV2(object):
             kwargs['tenant_name'] = self.context.project_name
             kwargs['tenant_id'] = self.context.tenant_id
         else:
-            LOG.error(_LE("Keystone v2 API connection failed, no password "
-                          "or auth_token!"))
+            LOG.error("Keystone v2 API connection failed, no password "
+                      "or auth_token!")
             raise exception.AuthorizationFailure()
         kwargs['cacert'] = self._get_client_option('ca_file')
         kwargs['insecure'] = self._get_client_option('insecure')
@@ -115,7 +112,7 @@ class KeystoneClientV2(object):
         if auth_kwargs:
             # Sanity check
             if not client.auth_ref.trust_scoped:
-                LOG.error(_LE("v2 trust token re-scoping failed!"))
+                LOG.error("v2 trust token re-scoping failed!")
                 raise exception.AuthorizationFailure()
             # All OK so update the context with the token
             self.context.auth_token = client.auth_ref.auth_token
@@ -123,8 +120,8 @@ class KeystoneClientV2(object):
             # Ensure the v2 API we're using is not impacted by keystone
             # bug #1239303, otherwise we can't trust the user_id
             if self.context.trustor_user_id != client.auth_ref.user_id:
-                LOG.error(_LE("Trust impersonation failed, bug #1239303 "
-                              "suspected, you may need a newer keystone"))
+                LOG.error("Trust impersonation failed, bug #1239303 "
+                          "suspected, you may need a newer keystone")
                 raise exception.AuthorizationFailure()
 
         return client
@@ -164,8 +161,8 @@ class KeystoneClientV2(object):
         Returns the keystone ID of the resulting user
         """
         if len(username) > 64:
-            LOG.warning(_LW("Truncating the username %s to the last 64 "
-                            "characters."), username)
+            LOG.warning("Truncating the username %s to the last 64 "
+                        "characters.", username)
             # get the last 64 characters of the username
             username = username[-64:]
         user = self.client.users.create(username,
@@ -188,8 +185,8 @@ class KeystoneClientV2(object):
             self.client.roles.add_user_role(user.id, role_id,
                                             self.context.tenant_id)
         else:
-            LOG.error(_LE("Failed to add user %(user)s to role %(role)s, "
-                          "check role exists!"),
+            LOG.error("Failed to add user %(user)s to role %(role)s, "
+                      "check role exists!",
                       {'user': username,
                        'role': cfg.CONF.heat_stack_user_role})
@@ -25,8 +25,6 @@ from swiftclient import utils as swiftclient_utils
 from troveclient import client as tc
 
 from heat.common import exception
-from heat.common.i18n import _LI
-from heat.common.i18n import _LW
 from heat.engine.clients import client_plugin
 from heat.engine.clients.os import cinder
 from heat.engine.clients.os import glance
@@ -57,7 +55,7 @@ class RackspaceClientPlugin(client_plugin.ClientPlugin):
         """Create an authenticated client context."""
         self.pyrax = pyrax.create_context("rackspace")
         self.pyrax.auth_endpoint = self.context.auth_url
-        LOG.info(_LI("Authenticating username: %s"),
+        LOG.info("Authenticating username: %s",
                  self.context.username)
         tenant = self.context.tenant_id
         tenant_name = self.context.tenant
@@ -65,9 +63,9 @@ class RackspaceClientPlugin(client_plugin.ClientPlugin):
                                   tenant_id=tenant,
                                   tenant_name=tenant_name)
         if not self.pyrax.authenticated:
-            LOG.warning(_LW("Pyrax Authentication Failed."))
+            LOG.warning("Pyrax Authentication Failed.")
             raise exception.AuthorizationFailure()
-        LOG.info(_LI("User %s authenticated successfully."),
+        LOG.info("User %s authenticated successfully.",
                  self.context.username)
@@ -19,7 +19,6 @@ import six
 
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LI
 from heat.engine import attributes
 from heat.engine import constraints
 from heat.engine import function
@@ -1184,7 +1183,7 @@ class CloudLoadBalancer(resource.Resource):
             raise exception.InvalidTemplateAttribute(resource=self.name,
                                                      key=key)
         function = attribute_function[key]
-        LOG.info(_LI('%(name)s.GetAtt(%(key)s) == %(function)s'),
+        LOG.info('%(name)s.GetAtt(%(key)s) == %(function)s',
                  {'name': self.name, 'key': key, 'function': function})
         return function
@@ -17,7 +17,6 @@ from oslo_log import log as logging
 
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LW
 from heat.engine import constraints
 from heat.engine import properties
 from heat.engine.resources.openstack.nova import server
@@ -187,7 +186,7 @@ class CloudServer(server.Server):
             reason = server.metadata.get('rackconnect_unprocessable_reason',
                                          None)
             if reason is not None:
-                LOG.warning(_LW("RackConnect unprocessable reason: %s"),
+                LOG.warning("RackConnect unprocessable reason: %s",
                             reason)
 
             msg = _("RackConnect automation has completed")
@@ -15,7 +15,6 @@ from oslo_log import log as logging
 import six
 
 from heat.common.i18n import _
-from heat.common.i18n import _LW
 from heat.engine import attributes
 from heat.engine import constraints
 from heat.engine import properties
@@ -108,8 +107,8 @@ class CloudNetwork(resource.Resource):
             try:
                 self._network = self.cloud_networks().get(self.resource_id)
             except NotFound:
-                LOG.warning(_LW("Could not find network %s but resource id is"
-                                " set."), self.resource_id)
+                LOG.warning("Could not find network %s but resource id is"
+                            " set.", self.resource_id)
         return self._network
 
     def cloud_networks(self):
@@ -139,7 +138,7 @@ class CloudNetwork(resource.Resource):
         try:
             network.delete()
         except NetworkInUse:
-            LOG.warning(_LW("Network '%s' still in use."), network.id)
+            LOG.warning("Network '%s' still in use.", network.id)
         else:
             self._delete_issued = True
         return False
@@ -22,8 +22,6 @@ import webob
 from heat.api.aws import exception
 from heat.common import endpoint_utils
 from heat.common.i18n import _
-from heat.common.i18n import _LE
-from heat.common.i18n import _LI
 from heat.common import wsgi
 
 LOG = logging.getLogger(__name__)
@@ -161,14 +159,14 @@ class EC2Token(wsgi.Middleware):
         # here so that we can use both authentication methods.
         # Returning here just means the user didn't supply AWS
         # authentication and we'll let the app try native keystone next.
-        LOG.info(_LI("Checking AWS credentials.."))
+        LOG.info("Checking AWS credentials..")
 
         signature = self._get_signature(req)
         if not signature:
             if 'X-Auth-User' in req.headers:
                 return self.application
             else:
-                LOG.info(_LI("No AWS Signature found."))
+                LOG.info("No AWS Signature found.")
                 raise exception.HeatIncompleteSignatureError()
 
         access = self._get_access(req)
@@ -176,14 +174,14 @@ class EC2Token(wsgi.Middleware):
             if 'X-Auth-User' in req.headers:
                 return self.application
             else:
-                LOG.info(_LI("No AWSAccessKeyId/Authorization Credential"))
+                LOG.info("No AWSAccessKeyId/Authorization Credential")
                 raise exception.HeatMissingAuthenticationTokenError()
 
-        LOG.info(_LI("AWS credentials found, checking against keystone."))
+        LOG.info("AWS credentials found, checking against keystone.")
 
         if not auth_uri:
-            LOG.error(_LE("Ec2Token authorization failed, no auth_uri "
-                          "specified in config file"))
+            LOG.error("Ec2Token authorization failed, no auth_uri "
+                      "specified in config file")
             raise exception.HeatInternalFailureError(_('Service '
                                                        'misconfigured'))
         # Make a copy of args for authentication and signature verification.
@@ -207,7 +205,7 @@ class EC2Token(wsgi.Middleware):
         headers = {'Content-Type': 'application/json'}
 
         keystone_ec2_uri = self._conf_get_keystone_ec2_uri(auth_uri)
-        LOG.info(_LI('Authenticating with %s'), keystone_ec2_uri)
+        LOG.info('Authenticating with %s', keystone_ec2_uri)
         response = requests.post(keystone_ec2_uri, data=creds_json,
                                  headers=headers,
                                  verify=self.ssl_options['verify'],
@@ -220,7 +218,7 @@ class EC2Token(wsgi.Middleware):
             roles = [role['name']
                      for role in result['token'].get('roles', [])]
         except (AttributeError, KeyError):
-            LOG.info(_LI("AWS authentication failure."))
+            LOG.info("AWS authentication failure.")
             # Try to extract the reason for failure so we can return the
             # appropriate AWS error via raising an exception
             try:
@@ -235,7 +233,7 @@ class EC2Token(wsgi.Middleware):
             else:
                 raise exception.HeatAccessDeniedError()
         else:
-            LOG.info(_LI("AWS authentication successful."))
+            LOG.info("AWS authentication successful.")
 
         # Authenticated!
         ec2_creds = {'ec2Credentials': {'access': access,
@@ -19,7 +19,6 @@ import re
 from oslo_log import log as logging
 
 from heat.api.aws import exception
-from heat.common.i18n import _LE
 
 LOG = logging.getLogger(__name__)
@@ -98,7 +97,7 @@ def get_param_value(params, key):
     try:
         return params[key]
     except KeyError:
-        LOG.error(_LE("Request does not contain %s parameter!"), key)
+        LOG.error("Request does not contain %s parameter!", key)
         raise exception.HeatMissingParameterError(key)
@@ -22,7 +22,6 @@ from heat.api.aws import exception
 from heat.api.aws import utils as api_utils
 from heat.common import exception as heat_exception
 from heat.common.i18n import _
-from heat.common.i18n import _LI
 from heat.common import identifier
 from heat.common import policy
 from heat.common import template_format
@@ -426,7 +425,7 @@ class StackController(object):
             msg = _("The Template must be a JSON or YAML document.")
             return exception.HeatInvalidParameterValueError(detail=msg)
 
-        LOG.info(_LI('validate_template'))
+        LOG.info('validate_template')
 
         def format_validate_parameter(key, value):
             """Reformat engine output into AWS "ValidateTemplate" format."""
@@ -21,8 +21,6 @@ from heat.api.aws import exception
 from heat.api.aws import utils as api_utils
 from heat.common import exception as heat_exception
 from heat.common.i18n import _
-from heat.common.i18n import _LE
-from heat.common.i18n import _LW
 from heat.common import policy
 from heat.common import wsgi
 from heat.rpc import api as rpc_api
@@ -199,7 +197,7 @@ class WatchController(object):
                     # Filter criteria not met, return None
                     return
             except KeyError:
-                LOG.warning(_LW("Invalid filter key %s, ignoring"), f)
+                LOG.warning("Invalid filter key %s, ignoring", f)
 
             return result
@@ -250,8 +248,8 @@ class WatchController(object):
         # need to process (each dict) for dimensions
        metric_data = api_utils.extract_param_list(parms, prefix='MetricData')
         if not len(metric_data):
-            LOG.error(_LE("Request does not contain required MetricData"))
-            return exception.HeatMissingParameterError("MetricData list")
+            LOG.error("Request does not contain required MetricData")
+            return exception.HeatMissingParameterError(_("MetricData list"))
 
         watch_name = None
         dimensions = []
@@ -24,7 +24,6 @@ from heat.api.openstack.v1.views import stacks_view
 from heat.common import context
 from heat.common import environment_format
 from heat.common.i18n import _
-from heat.common.i18n import _LW
 from heat.common import identifier
 from heat.common import param_utils
 from heat.common import serializers
@@ -324,7 +323,7 @@ class StackController(object):
                                               not_tags=not_tags,
                                               not_tags_any=not_tags_any)
         except AttributeError as ex:
-            LOG.warning(_LW("Old Engine Version: %s"), ex)
+            LOG.warning("Old Engine Version: %s", ex)
 
         return stacks_view.collection(req, stacks=stacks,
                                       count=count,
@@ -30,7 +30,6 @@ from oslo_service import systemd
 import six
 
 from heat.common import config
-from heat.common.i18n import _LI
 from heat.common import messaging
 from heat.common import profiler
 from heat.common import wsgi
@@ -55,7 +54,7 @@ def launch_api(setup_logging=True):
 
     port = cfg.CONF.heat_api.bind_port
     host = cfg.CONF.heat_api.bind_host
-    LOG.info(_LI('Starting Heat REST API on %(host)s:%(port)s'),
+    LOG.info('Starting Heat REST API on %(host)s:%(port)s',
             {'host': host, 'port': port})
     profiler.setup('heat-api', host)
     gmr.TextGuruMeditation.setup_autorun(version)
@@ -32,7 +32,6 @@ from oslo_service import systemd
 import six
 
 from heat.common import config
-from heat.common.i18n import _LI
 from heat.common import messaging
 from heat.common import profiler
 from heat.common import wsgi
@@ -59,7 +58,7 @@ def launch_cfn_api(setup_logging=True):
 
     port = cfg.CONF.heat_api_cfn.bind_port
     host = cfg.CONF.heat_api_cfn.bind_host
-    LOG.info(_LI('Starting Heat API on %(host)s:%(port)s'),
+    LOG.info('Starting Heat API on %(host)s:%(port)s',
             {'host': host, 'port': port})
     profiler.setup('heat-api-cfn', host)
     gmr.TextGuruMeditation.setup_autorun(version)
@@ -32,7 +32,6 @@ from oslo_service import systemd
 import six
 
 from heat.common import config
-from heat.common.i18n import _LI
 from heat.common import messaging
 from heat.common import profiler
 from heat.common import wsgi
@@ -59,7 +58,7 @@ def launch_cloudwatch_api(setup_logging=True):
 
     port = cfg.CONF.heat_api_cloudwatch.bind_port
     host = cfg.CONF.heat_api_cloudwatch.bind_host
-    LOG.info(_LI('Starting Heat CloudWatch API on %(host)s:%(port)s'),
+    LOG.info('Starting Heat CloudWatch API on %(host)s:%(port)s',
             {'host': host, 'port': port})
     profiler.setup('heat-api-cloudwatch', host)
     gmr.TextGuruMeditation.setup_autorun(version)
@@ -32,7 +32,6 @@ from oslo_reports import guru_meditation_report as gmr
 from oslo_service import service
 
 from heat.common import config
-from heat.common.i18n import _LC
 from heat.common import messaging
 from heat.common import profiler
 from heat.engine import template
@@ -60,7 +59,7 @@ def launch_engine(setup_logging=True):
     try:
         mgr = template._get_template_extension_manager()
     except template.TemplatePluginNotRegistered as ex:
-        LOG.critical(_LC("%s"), ex)
+        LOG.critical("%s", ex)
     if not mgr or not mgr.names():
         sys.exit("ERROR: No template format plugins registered")
@@ -22,7 +22,6 @@ from osprofiler import opts as profiler
 
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LW
 from heat.common import wsgi
 
 
@@ -375,8 +374,8 @@ def startup_sanity_check():
             not cfg.CONF.stack_user_domain_name):
         # FIXME(shardy): Legacy fallback for folks using old heat.conf
         # files which lack domain configuration
-        LOG.warning(_LW('stack_user_domain_id or stack_user_domain_name not '
-                        'set in heat.conf falling back to using default'))
+        LOG.warning('stack_user_domain_id or stack_user_domain_name not '
+                    'set in heat.conf falling back to using default')
     else:
         domain_admin_user = cfg.CONF.stack_domain_admin
         domain_admin_password = cfg.CONF.stack_domain_admin_password
@@ -389,7 +388,7 @@ def startup_sanity_check():
     auth_key_len = len(cfg.CONF.auth_encryption_key)
     if auth_key_len in (16, 24):
         LOG.warning(
-            _LW('Please update auth_encryption_key to be 32 characters.'))
+            'Please update auth_encryption_key to be 32 characters.')
     elif auth_key_len != 32:
         raise exception.Error(_('heat.conf misconfigured, auth_encryption_key '
                                 'must be 32 characters'))
@@ -28,7 +28,6 @@ import six
 from heat.common import config
 from heat.common import endpoint_utils
 from heat.common import exception
-from heat.common.i18n import _LE
 from heat.common import policy
 from heat.common import wsgi
 from heat.db.sqlalchemy import api as db_api
@@ -235,9 +234,9 @@ class RequestContext(context.RequestContext):
         if auth_uri:
             return auth_uri
         else:
-            LOG.error(_LE('Keystone API endpoint not provided. Set '
-                          'auth_uri in section [clients_keystone] '
-                          'of the configuration file.'))
+            LOG.error('Keystone API endpoint not provided. Set '
+                      'auth_uri in section [clients_keystone] '
+                      'of the configuration file.')
             raise exception.AuthorizationFailure()
 
     @property
@@ -247,8 +246,8 @@ class RequestContext(context.RequestContext):
                 cfg.CONF, TRUSTEE_CONF_GROUP, trust_id=self.trust_id)
 
             if not self._trusts_auth_plugin:
-                LOG.error(_LE('Please add the trustee credentials you need '
-                              'to the %s section of your heat.conf file.'),
+                LOG.error('Please add the trustee credentials you need '
+                          'to the %s section of your heat.conf file.',
                           TRUSTEE_CONF_GROUP)
                 raise exception.AuthorizationFailure()
@@ -276,8 +275,8 @@ class RequestContext(context.RequestContext):
                 user_domain_id=self.user_domain,
                 auth_url=self.keystone_v3_endpoint)
 
-        LOG.error(_LE("Keystone API connection failed, no password "
-                      "trust or auth_token!"))
+        LOG.error("Keystone API connection failed, no password "
+                  "trust or auth_token!")
         raise exception.AuthorizationFailure()
 
     def reload_auth_plugin(self):
@@ -24,7 +24,6 @@ from oslo_utils import excutils
 import six
 
 from heat.common.i18n import _
-from heat.common.i18n import _LE
 
 _FATAL_EXCEPTION_FORMAT_ERRORS = False
 
@@ -69,9 +68,9 @@ class HeatException(Exception):
                     reraise=_FATAL_EXCEPTION_FORMAT_ERRORS):
                 # kwargs doesn't match a variable in the message
                 # log the issue and the kwargs
-                LOG.exception(_LE('Exception in string format operation'))
+                LOG.exception('Exception in string format operation')
                 for name, value in six.iteritems(kwargs):
-                    LOG.error(_LE("%(name)s: %(value)s"),
+                    LOG.error("%(name)s: %(value)s",
                               {'name': name, 'value': value})  # noqa
 
         if self.error_code:
@@ -27,16 +27,6 @@ _translators = i18n.TranslatorFactory(domain='heat')
 # The primary translation function using the well-known name "_"
 _ = _translators.primary
 
-# Translators for log levels.
-#
-# The abbreviated names are meant to reflect the usual use of a short
-# name like '_'. The "L" is for "log" and the other letter comes from
-# the level.
-_LI = _translators.log_info
-_LW = _translators.log_warning
-_LE = _translators.log_error
-_LC = _translators.log_critical
-
 
 def repr_wrapper(klass):
     """A decorator that defines __repr__ method under Python 2.
@@ -16,8 +16,6 @@
 
 from oslo_log import log as logging
 
-from heat.common.i18n import _LE
-from heat.common.i18n import _LI
 from heat.engine import resources
 
 LOG = logging.getLogger(__name__)
@@ -42,19 +40,19 @@ def get_plug_point_class_instances():
         slps = resources.global_env().get_stack_lifecycle_plugins()
         pp_classes = [cls for name, cls in slps]
     except Exception:
-        LOG.exception(_LE("failed to get lifecycle plug point classes"))
+        LOG.exception("failed to get lifecycle plug point classes")
 
     for ppc in pp_classes:
         try:
             pp_class_instances.append(ppc())
         except Exception:
             LOG.exception(
-                _LE("failed to instantiate stack lifecycle class %s"), ppc)
+                "failed to instantiate stack lifecycle class %s", ppc)
     try:
         pp_class_instances = sorted(pp_class_instances,
                                     key=lambda ppci: ppci.get_ordinal())
     except Exception:
-        LOG.exception(_LE("failed to sort lifecycle plug point classes"))
+        LOG.exception("failed to sort lifecycle plug point classes")
     return pp_class_instances
@@ -108,13 +106,13 @@ def _do_ops(cinstances, opname, cnxt, stack, current_stack=None, action=None,
             op(cnxt, stack, current_stack, action)
             success_count += 1
         except Exception as ex:
-            LOG.exception(_LE(
-                "%(opname)s %(ci)s failed for %(a)s on %(sid)s"),
+            LOG.exception(
+                "%(opname)s %(ci)s failed for %(a)s on %(sid)s",
                 {'opname': opname, 'ci': type(ci),
                  'a': action, 'sid': stack.id})
             failure = True
            failure_exception_message = ex.args[0] if ex.args else str(ex)
            break
-        LOG.info(_LI("done with class=%(c)s, stackid=%(sid)s, action=%(a)s"),
+        LOG.info("done with class=%(c)s, stackid=%(sid)s, action=%(a)s",
                  {'c': type(ci), 'sid': stack.id, 'a': action})
     return (failure, failure_exception_message, success_count)
@@ -26,7 +26,6 @@ import types
 from oslo_log import log as logging
 import six
 
-from heat.common.i18n import _LE
 
 LOG = logging.getLogger(__name__)
 
@@ -97,7 +96,7 @@ def load_modules(package, ignore_error=False):
         try:
             module = _import_module(importer, module_name, package)
         except ImportError:
-            LOG.error(_LE('Failed to import module %s'), module_name)
+            LOG.error('Failed to import module %s', module_name)
             if not ignore_error:
                 raise
         else:
@@ -13,14 +13,13 @@
 
 from oslo_log import log as logging
 
-from heat.common.i18n import _LW
 
 LOG = logging.getLogger(__name__)
 
 
 def log_fail_msg(manager, entrypoint, exception):
-    LOG.warning(_LW('Encountered exception while loading %(module_name)s: '
-                    '"%(message)s". Not using %(name)s.'),
+    LOG.warning('Encountered exception while loading %(module_name)s: '
+                '"%(message)s". Not using %(name)s.',
                 {'module_name': entrypoint.module_name,
                  'message': exception.message,
                  'name': entrypoint.name})
@@ -16,7 +16,6 @@ from oslo_log import log as logging
 import osprofiler.initializer
 
 from heat.common import context
-from heat.common.i18n import _LW
 
 cfg.CONF.import_opt('enabled', 'heat.common.config', group='profiler')
 
@@ -31,14 +30,14 @@ def setup(binary, host):
             project="heat",
             service=binary,
             host=host)
-        LOG.warning(_LW("OSProfiler is enabled.\nIt means that person who "
-                        "knows any of hmac_keys that are specified in "
-                        "/etc/heat/heat.conf can trace his requests. \n"
-                        "In real life only operator can read this file so "
-                        "there is no security issue. Note that even if person "
-                        "can trigger profiler, only admin user can retrieve "
-                        "trace information.\n"
-                        "To disable OSprofiler set in heat.conf:\n"
-                        "[profiler]\nenabled=false"))
+        LOG.warning("OSProfiler is enabled.\nIt means that person who "
+                    "knows any of hmac_keys that are specified in "
+                    "/etc/heat/heat.conf can trace his requests. \n"
+                    "In real life only operator can read this file so "
+                    "there is no security issue. Note that even if person "
+                    "can trigger profiler, only admin user can retrieve "
+                    "trace information.\n"
+                    "To disable OSprofiler set in heat.conf:\n"
+                    "[profiler]\nenabled=false")
     else:
         osprofiler.web.disable()
@@ -21,7 +21,6 @@ from six.moves import urllib
 
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LI
 
 cfg.CONF.import_opt('max_template_size', 'heat.common.config')
 
@@ -40,7 +39,7 @@ def get(url, allowed_schemes=('http', 'https')):
     the allowed_schemes argument.
     Raise an IOError if getting the data fails.
     """
-    LOG.info(_LI('Fetching data from %s'), url)
+    LOG.info('Fetching data from %s', url)
 
     components = urllib.parse.urlparse(url)
 
@@ -70,10 +69,11 @@ def get(url, allowed_schemes=('http', 'https')):
             for chunk in reader:
                 result += chunk
                 if len(result) > cfg.CONF.max_template_size:
-                    raise URLFetchError("Template exceeds maximum allowed size (%s"
-                                        " bytes)" % cfg.CONF.max_template_size)
+                    raise URLFetchError(_("Template exceeds maximum allowed size "
+                                          "(%s bytes)") %
+                                        cfg.CONF.max_template_size)
             return result
 
     except exceptions.RequestException as ex:
-        LOG.info(_LI('Failed to retrieve template: %s') % ex)
+        LOG.info('Failed to retrieve template: %s', ex)
         raise URLFetchError(_('Failed to retrieve template from %s') % url)
@@ -48,9 +48,6 @@ import webob.exc
 from heat.api.aws import exception as aws_exception
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LE
-from heat.common.i18n import _LI
-from heat.common.i18n import _LW
 from heat.common import serializers
 
 
@@ -275,7 +272,7 @@ class Server(object):
 
     def kill_children(self, *args):
         """Kills the entire process group."""
-        LOG.error(_LE('SIGTERM received'))
+        LOG.error('SIGTERM received')
         signal.signal(signal.SIGTERM, signal.SIG_IGN)
         signal.signal(signal.SIGINT, signal.SIG_IGN)
         self.running = False
@@ -283,7 +280,7 @@ class Server(object):
 
     def hup(self, *args):
         """Reloads configuration files with zero down time."""
-        LOG.error(_LE('SIGHUP received'))
+        LOG.error('SIGHUP received')
         signal.signal(signal.SIGHUP, signal.SIG_IGN)
         raise exception.SIGHUPInterrupt
 
@@ -315,7 +312,7 @@ class Server(object):
         else:
             childs_num = workers
 
-        LOG.info(_LI("Starting %d workers"), workers)
+        LOG.info("Starting %d workers", workers)
         signal.signal(signal.SIGTERM, self.kill_children)
         signal.signal(signal.SIGINT, self.kill_children)
         signal.signal(signal.SIGHUP, self.hup)
@@ -333,7 +330,7 @@ class Server(object):
                 if err.errno not in (errno.EINTR, errno.ECHILD):
                     raise
             except KeyboardInterrupt:
-                LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
+                LOG.info('Caught keyboard interrupt. Exiting.')
                 os.killpg(0, signal.SIGTERM)
                 break
             except exception.SIGHUPInterrupt:
@@ -417,22 +414,22 @@ class Server(object):
    def _remove_children(self, pid):
         if pid in self.children:
             self.children.remove(pid)
-            LOG.info(_LI('Removed dead child %s'), pid)
+            LOG.info('Removed dead child %s', pid)
         elif pid in self.stale_children:
             self.stale_children.remove(pid)
-            LOG.info(_LI('Removed stale child %s'), pid)
+            LOG.info('Removed stale child %s', pid)
         else:
-            LOG.warning(_LW('Unrecognised child %s'), pid)
+            LOG.warning('Unrecognised child %s', pid)
 
     def _verify_and_respawn_children(self, pid, status):
         if len(self.stale_children) == 0:
             LOG.debug('No stale children')
         if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
-            LOG.error(_LE('Not respawning child %d, cannot '
-                          'recover from termination'), pid)
+            LOG.error('Not respawning child %d, cannot '
+                      'recover from termination', pid)
             if not self.children and not self.stale_children:
                 LOG.info(
-                    _LI('All workers have terminated. Exiting'))
+                    'All workers have terminated. Exiting')
                 self.running = False
             else:
                 if len(self.children) < self.conf.workers:
@@ -509,12 +506,12 @@ class Server(object):
                 # exit on sighup
                 self._sock = None
             self.run_server()
-            LOG.info(_LI('Child %d exiting normally'), os.getpid())
+            LOG.info('Child %d exiting normally', os.getpid())
             # self.pool.waitall() is now called in wsgi's server so
             # it's safe to exit here
             sys.exit(0)
         else:
-            LOG.info(_LI('Started child %s'), pid)
+            LOG.info('Started child %s', pid)
             self.children.add(pid)
 
     def run_server(self):
@@ -541,7 +538,7 @@ class Server(object):
 
     def _single_run(self, application, sock):
         """Start a WSGI server in a new green thread."""
-        LOG.info(_LI("Starting single process server"))
+        LOG.info("Starting single process server")
         eventlet.wsgi.server(sock, application,
                              custom_pool=self.pool,
                              url_length_limit=URL_LENGTH_LIMIT,
@@ -838,7 +835,7 @@ class Resource(object):
             action_result = self.dispatch(self.controller, action,
                                           request, **action_args)
         except TypeError as err:
-            LOG.error(_LE('Exception handling resource: %s'), err)
+            LOG.error('Exception handling resource: %s', err)
             msg = _('The server could not comply with the request since '
                     'it is either malformed or otherwise incorrect.')
             err = webob.exc.HTTPBadRequest(msg)
@@ -860,7 +857,7 @@ class Resource(object):
                 raise
             if isinstance(err, webob.exc.HTTPServerError):
                 LOG.error(
-                    _LE("Returning %(code)s to user: %(explanation)s"),
+                    "Returning %(code)s to user: %(explanation)s",
                     {'code': err.code, 'explanation': err.explanation})
             http_exc = translate_exception(err, request.best_match_language())
             raise exception.HTTPExceptionDisguise(http_exc)
@@ -899,8 +896,7 @@ class Resource(object):
                     err_body = action_result.get_unserialized_body()
                     serializer.default(action_result, err_body)
                 except Exception:
-                    LOG.warning(_LW("Unable to serialize exception "
-                                    "response"))
+                    LOG.warning("Unable to serialize exception response")
 
             return action_result
 
@@ -934,7 +930,7 @@ class Resource(object):
 
    def log_exception(err, exc_info):
        args = {'exc_info': exc_info} if cfg.CONF.debug else {}
-       LOG.error(_LE("Unexpected error occurred serving API: %s"), err,
+       LOG.error("Unexpected error occurred serving API: %s", err,
                  **args)
@@ -35,9 +35,6 @@ from sqlalchemy.orm import aliased as orm_aliased
 from heat.common import crypt
 from heat.common import exception
 from heat.common.i18n import _
-from heat.common.i18n import _LE
-from heat.common.i18n import _LI
-from heat.common.i18n import _LW
 from heat.db.sqlalchemy import filters as db_filters
 from heat.db.sqlalchemy import migration
 from heat.db.sqlalchemy import models
@@ -1294,7 +1291,7 @@ def _purge_stacks(stack_infos, engine, meta):
     syncpoint = sqlalchemy.Table('sync_point', meta, autoload=True)
 
     stack_info_str = ','.join([str(i) for i in stack_infos])
-    LOG.info("Purging stacks %s" % stack_info_str)
+    LOG.info("Purging stacks %s", stack_info_str)
 
     # TODO(cwolfe): find a way to make this re-entrant with
     # reasonably sized transactions (good luck), or add
@@ -1475,8 +1472,8 @@ def _db_encrypt_or_decrypt_template_params(
         for raw_template in next_batch:
             try:
                 if verbose:
-                    LOG.info(_LI("Processing raw_template %(id)d..."),
-                             {'id': raw_template.id})
+                    LOG.info("Processing raw_template %s...",
+                             raw_template.id)
                 env = raw_template.environment
                 needs_update = False
@@ -1524,16 +1521,16 @@ def _db_encrypt_or_decrypt_template_params(
                     raw_template_update(ctxt, raw_template.id,
                                         {'environment': newenv})
             except Exception as exc:
-                LOG.exception(_LE('Failed to %(crypt_action)s parameters '
-                                  'of raw template %(id)d'),
+                LOG.exception('Failed to %(crypt_action)s parameters '
+                              'of raw template %(id)d',
                               {'id': raw_template.id,
                                'crypt_action': _crypt_action(encrypt)})
                 excs.append(exc)
                 continue
             finally:
                 if verbose:
-                    LOG.info(_LI("Finished %(crypt_action)s processing of "
-                                 "raw_template %(id)d."),
+                    LOG.info("Finished %(crypt_action)s processing of "
+                             "raw_template %(id)d.",
                              {'id': raw_template.id,
                               'crypt_action': _crypt_action(encrypt)})
         next_batch = list(itertools.islice(template_batches, batch_size))
@@ -1560,8 +1557,8 @@ def _db_encrypt_or_decrypt_resource_prop_data_legacy(
                 continue
             try:
                 if verbose:
-                    LOG.info(_LI("Processing resource %(id)d..."),
-                             {'id': resource.id})
+                    LOG.info("Processing resource %s...",
+                             resource.id)
                 if encrypt:
                     result = crypt.encrypted_dict(resource.properties_data,
                                                   encryption_key)
@@ -1573,16 +1570,16 @@ def _db_encrypt_or_decrypt_resource_prop_data_legacy(
                      'properties_data_encrypted': encrypt},
                     resource.atomic_key)
            except Exception as exc:
-                LOG.exception(_LE('Failed to %(crypt_action)s '
-                                  'properties_data of resource %(id)d') %
+                LOG.exception('Failed to %(crypt_action)s '
+                              'properties_data of resource %(id)d' %
                               {'id': resource.id,
                                'crypt_action': _crypt_action(encrypt)})
                 excs.append(exc)
                 continue
            finally:
                 if verbose:
-                    LOG.info(_LI("Finished processing resource "
-                                 "%(id)d."), {'id': resource.id})
+                    LOG.info("Finished processing resource %s.",
+                             resource.id)
         next_batch = list(itertools.islice(resource_batches, batch_size))
     return excs
@@ -1607,8 +1604,8 @@ def _db_encrypt_or_decrypt_resource_prop_data(
                 continue
             try:
                 if verbose:
-                    LOG.info(_LI("Processing resource_properties_data "
-                                 "%(id)d..."), {'id': rpd.id})
+                    LOG.info("Processing resource_properties_data "
+                             "%s...", rpd.id)
                 if encrypt:
                     result = crypt.encrypted_dict(rpd.data,
                                                   encryption_key)
@@ -1619,8 +1616,8 @@ def _db_encrypt_or_decrypt_resource_prop_data(
                      'encrypted': encrypt})
             except Exception as exc:
                 LOG.exception(
-                    _LE("Failed to %(crypt_action)s "
-                        "data of resource_properties_data %(id)d") %
+                    "Failed to %(crypt_action)s "
+                    "data of resource_properties_data %(id)d" %
                     {'id': rpd.id,
                      'crypt_action': _crypt_action(encrypt)})
                 excs.append(exc)
@@ -1628,8 +1625,8 @@ def _db_encrypt_or_decrypt_resource_prop_data(
             finally:
                 if verbose:
                     LOG.info(
-                        _LI("Finished processing resource_properties_data"
-                            " %(id)d."), {'id': rpd.id})
+                        "Finished processing resource_properties_data"
+                        " %s.", rpd.id)
         next_batch = list(itertools.islice(rpd_batches, batch_size))
     return excs
@@ -1706,10 +1703,10 @@ def db_properties_data_migrate(ctxt, batch_size=50):
             encrypted = resource.properties_data_encrypted
             if encrypted is None:
                 LOG.warning(
-                    _LW('Unexpected: resource.encrypted is None for '
-                        'resource id %(id)d for legacy '
-                        'resource.properties_data, assuming False.'),
-                    {'id': resource.id})
+                    'Unexpected: resource.encrypted is None for '
+                    'resource id %s for legacy '
+                    'resource.properties_data, assuming False.',
+                    resource.id)
                 encrypted = False
             rsrc_prop_data = resource_prop_data_create(
                 ctxt, {'encrypted': encrypted,
@@ -1720,8 +1717,8 @@ def db_properties_data_migrate(ctxt, batch_size=50):
                      'rsrc_prop_data_id': rsrc_prop_data.id},
                     resource.atomic_key)
             except Exception:
-                LOG.exception(_LE('Failed to migrate properties_data for '
-                                  'resource %(id)d'), {'id': resource.id})
+                LOG.exception('Failed to migrate properties_data for '
+                              'resource %d', resource.id)
                 continue
         next_batch = list(itertools.islice(resource_batches, batch_size))
 
@@ -1743,8 +1740,8 @@ def db_properties_data_migrate(ctxt, batch_size=50):
                 event.update({'resource_properties': None,
                               'rsrc_prop_data_id': rsrc_prop_data.id})
             except Exception:
-                LOG.exception(_LE('Failed to migrate resource_properties '
-                                  'for event %(id)d'), {'id': event.id})
+                LOG.exception('Failed to migrate resource_properties '
+                              'for event %d', event.id)
                 continue
         next_batch = list(itertools.islice(event_batches, batch_size))
@@ -18,7 +18,6 @@ from oslo_utils import timeutils
 import six
 
 from heat.common.i18n import _
-from heat.common.i18n import _LE
 from heat.common import param_utils
 from heat.common import template_format
 from heat.common import timeutils as heat_timeutils
@@ -41,7 +40,7 @@ def extract_args(params):
         try:
             timeout = int(timeout_mins)
         except (ValueError, TypeError):
-            LOG.exception(_LE('Timeout conversion failed'))
+            LOG.exception('Timeout conversion failed')
         else:
             if timeout > 0:
                 kwargs[rpc_api.PARAM_TIMEOUT] = timeout
@@ -481,7 +480,7 @@ def format_watch_data(wd, rule_names):
     if len(metric) == 1:
         metric_name, metric_data = metric[0]
     else:
-        LOG.error(_LE("Unexpected number of keys in watch_data.data!"))
+        LOG.error("Unexpected number of keys in watch_data.data!")
         return
 
     result = {