Merge "Remove translation of log messages"

commit c3ffbf14d2
Authored by Jenkins on 2017-06-08 02:53:53 +00:00; committed by Gerrit Code Review.
29 changed files with 162 additions and 206 deletions
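The change is mechanical and repeated across all 29 files: the oslo.i18n log-translation markers (u._LE, u._LI, u._LW, imported via "from barbican import i18n as u") are dropped from LOG.* calls, the import itself is removed where nothing else uses it, and lazy argument interpolation is preserved. A minimal before/after sketch (the standalone logging setup and the project_id value are illustrative, not taken from the diff):

    import logging

    LOG = logging.getLogger(__name__)
    project_id = 'demo-project'  # illustrative value

    # Before (pattern removed by this commit; `u` was barbican's i18n module):
    #     LOG.info(u._LI('Created a secret for project: %s'), project_id)
    # After: a plain message string; interpolation is still deferred to the
    # logger rather than done eagerly with %.
    LOG.info('Created a secret for project: %s', project_id)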

View File

@@ -51,7 +51,7 @@ def load_body(req, resp=None, validator=None):
         body = req.body_file.read(CONF.max_allowed_request_size_in_bytes)
         req.body_file.seek(0)
     except IOError:
-        LOG.exception(u._LE("Problem reading request JSON stream."))
+        LOG.exception("Problem reading request JSON stream.")
         pecan.abort(500, u._('Read Error'))

     try:
@@ -61,7 +61,7 @@ def load_body(req, resp=None, validator=None):
         parsed_body = json.loads(body)
         strip_whitespace(parsed_body)
     except ValueError:
-        LOG.exception(u._LE("Problem loading request JSON."))
+        LOG.exception("Problem loading request JSON.")
         pecan.abort(400, u._('Malformed JSON'))

     if validator:
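Note what this first file already makes clear: translation of user-facing strings is kept; u._('Read Error') and u._('Malformed JSON') still reach the client translated, and only the wrappers around log messages go away. A small runnable sketch of the resulting split (the _() stand-in below is hypothetical; barbican's real one comes from oslo.i18n):

    import logging

    LOG = logging.getLogger(__name__)

    def _(msg):
        # Stand-in for oslo.i18n's translation function (u._ in barbican).
        return msg

    try:
        raise IOError('stream closed')
    except IOError:
        LOG.exception("Problem reading request JSON stream.")  # log text: not translated
        error_body = _('Read Error')  # user-facing text: still translatable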

View File

@@ -31,7 +31,6 @@ from oslo_log import log
 from barbican.api.controllers import versions
 from barbican.api import hooks
 from barbican.common import config
-from barbican import i18n as u
 from barbican.model import repositories
 from barbican import queue
@@ -87,7 +86,7 @@ def main_app(func):
         if newrelic_loaded:
             wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)
         LOG = log.getLogger(__name__)
-        LOG.info(u._LI('Barbican app created and initialized'))
+        LOG.info('Barbican app created and initialized')
         return wsgi_app
     return _wrapper

View File

@@ -102,7 +102,7 @@ def handle_exceptions(operation_name=u._('System')):
         try:
             return fn(inst, *args, **kwargs)
         except exc.HTTPError:
-            LOG.exception(u._LE('Webob error seen'))
+            LOG.exception('Webob error seen')
             raise  # Already converted to Webob exception, just reraise
         # In case PolicyNotAuthorized, we do not want to expose payload by
         # logging exception, so just LOG.error

View File

@@ -31,8 +31,8 @@ from barbican.tasks import certificate_resources as cert_resources
 LOG = utils.getLogger(__name__)

-_DEPRECATION_MSG = u._LW('%s has been deprecated in the Newton release. It '
-                         'will be removed in the Pike release.')
+_DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \
+                   'It will be removed in the Pike release.'

 def _certificate_authority_not_found():
@@ -254,7 +254,7 @@ class CertificateAuthorityController(controllers.ACLMixin):
     @controllers.enforce_rbac('certificate_authority:delete')
     def on_delete(self, external_project_id, **kwargs):
         cert_resources.delete_subordinate_ca(external_project_id, self.ca)
-        LOG.info(u._LI('Deleted CA for project: %s'), external_project_id)
+        LOG.info('Deleted CA for project: %s', external_project_id)

 class CertificateAuthoritiesController(controllers.ACLMixin):
@@ -493,7 +493,7 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
-        LOG.info(u._LI('Created a sub CA for project: %s'),
+        LOG.info('Created a sub CA for project: %s',
                  external_project_id)
         return {'ca_ref': url}

View File

@@ -68,7 +68,7 @@ class ContainerConsumerController(controllers.ACLMixin):
         dict_fields = consumer.to_dict_fields()
-        LOG.info(u._LI('Retrieved a consumer for project: %s'),
+        LOG.info('Retrieved a consumer for project: %s',
                  external_project_id)
         return hrefs.convert_to_hrefs(
@@ -132,7 +132,7 @@ class ContainerConsumersController(controllers.ACLMixin):
         )
         resp_ctrs_overall.update({'total': total})
-        LOG.info(u._LI('Retrieved a consumer list for project: %s'),
+        LOG.info('Retrieved a consumer list for project: %s',
                  external_project_id)
         return resp_ctrs_overall
@@ -158,7 +158,7 @@ class ContainerConsumersController(controllers.ACLMixin):
         url = hrefs.convert_consumer_to_href(new_consumer.container_id)
         pecan.response.headers['Location'] = url
-        LOG.info(u._LI('Created a consumer for project: %s'),
+        LOG.info('Created a consumer for project: %s',
                  external_project_id)
         return self._return_container_data(self.container_id)
@@ -196,11 +196,11 @@ class ContainerConsumersController(controllers.ACLMixin):
             self.consumer_repo.delete_entity_by_id(consumer.id,
                                                    external_project_id)
         except exception.NotFound:
-            LOG.exception(u._LE('Problem deleting consumer'))
+            LOG.exception('Problem deleting consumer')
             _consumer_not_found()
         ret_data = self._return_container_data(self.container_id)
-        LOG.info(u._LI('Deleted a consumer for project: %s'),
+        LOG.info('Deleted a consumer for project: %s',
                  external_project_id)
         return ret_data

View File

@@ -74,7 +74,7 @@ class ContainerController(controllers.ACLMixin):
         for secret_ref in dict_fields['secret_refs']:
             hrefs.convert_to_hrefs(secret_ref)
-        LOG.info(u._LI('Retrieved container for project: %s'),
+        LOG.info('Retrieved container for project: %s',
                  external_project_id)
         return hrefs.convert_to_hrefs(
             hrefs.convert_to_hrefs(dict_fields)
@@ -95,10 +95,10 @@ class ContainerController(controllers.ACLMixin):
                 external_project_id=external_project_id
             )
         except exception.NotFound:
-            LOG.exception(u._LE('Problem deleting container'))
+            LOG.exception('Problem deleting container')
             container_not_found()
-        LOG.info(u._LI('Deleted container for project: %s'),
+        LOG.info('Deleted container for project: %s',
                  external_project_id)
         for consumer in container_consumers[0]:
@@ -175,7 +175,7 @@ class ContainersController(controllers.ACLMixin):
         )
         resp_ctrs_overall.update({'total': total})
-        LOG.info(u._LI('Retrieved container list for project: %s'), project_id)
+        LOG.info('Retrieved container list for project: %s', project_id)
         return resp_ctrs_overall

     @index.when(method='POST', template='json')
@@ -219,7 +219,7 @@ class ContainersController(controllers.ACLMixin):
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
-        LOG.info(u._LI('Created a container for project: %s'),
+        LOG.info('Created a container for project: %s',
                  external_project_id)
         return {'container_ref': url}
@@ -285,7 +285,7 @@ class ContainersSecretsController(controllers.ACLMixin):
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
-        LOG.info(u._LI('Created a container secret for project: %s'),
+        LOG.info('Created a container secret for project: %s',
                  external_project_id)
         return {'container_ref': url}
@@ -325,5 +325,5 @@ class ContainersSecretsController(controllers.ACLMixin):
             container_secret.id, external_project_id)
         pecan.response.status = 204
-        LOG.info(u._LI('Deleted container secret for project: %s'),
+        LOG.info('Deleted container secret for project: %s',
                  external_project_id)

View File

@@ -27,8 +27,8 @@ from barbican.queue import client as async_client
 LOG = utils.getLogger(__name__)

-_DEPRECATION_MSG = u._LW('%s has been deprecated in the Newton release. It '
-                         'will be removed in the Pike release.')
+_DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \
+                   'It will be removed in the Pike release.'

 def _order_not_found():

View File

@@ -91,7 +91,7 @@ class ProjectQuotasController(controllers.ACLMixin):
                                     validator=self.validator)
         self.quota_driver.set_project_quotas(self.passed_project_id,
                                              kwargs['project_quotas'])
-        LOG.info(u._LI('Put Project Quotas'))
+        LOG.info('Put Project Quotas')
         pecan.response.status = 204

     @index.when(method='DELETE', template='json')
@@ -103,10 +103,10 @@ class ProjectQuotasController(controllers.ACLMixin):
         try:
             self.quota_driver.delete_project_quotas(self.passed_project_id)
         except exception.NotFound:
-            LOG.info(u._LI('Delete Project Quotas - Project not found'))
+            LOG.info('Delete Project Quotas - Project not found')
             _project_quotas_not_found()
         else:
-            LOG.info(u._LI('Delete Project Quotas'))
+            LOG.info('Delete Project Quotas')
             pecan.response.status = 204

View File

@@ -118,12 +118,12 @@ class SecretController(controllers.ACLMixin):
         if controllers.is_json_request_accept(pecan.request):
             resp = self._on_get_secret_metadata(self.secret, **kwargs)
-            LOG.info(u._LI('Retrieved secret metadata for project: %s'),
+            LOG.info('Retrieved secret metadata for project: %s',
                      external_project_id)
             return resp
         else:
-            LOG.warning(u._LW('Decrypted secret %s requested using deprecated '
-                              'API call.'), self.secret.id)
+            LOG.warning('Decrypted secret %s requested using deprecated '
+                        'API call.', self.secret.id)
             return self._on_get_secret_payload(self.secret,
                                                external_project_id,
                                                **kwargs)
@@ -202,7 +202,7 @@ class SecretController(controllers.ACLMixin):
                                           external_project_id,
                                           **kwargs)
-        LOG.info(u._LI('Retrieved secret payload for project: %s'),
+        LOG.info('Retrieved secret payload for project: %s',
                  external_project_id)
         return resp
@@ -243,7 +243,7 @@ class SecretController(controllers.ACLMixin):
             secret_model=self.secret,
             project_model=project_model,
             transport_key_id=transport_key_id)
-        LOG.info(u._LI('Updated secret for project: %s'), external_project_id)
+        LOG.info('Updated secret for project: %s', external_project_id)

     @index.when(method='DELETE')
     @utils.allow_all_content_types
@@ -251,7 +251,7 @@ class SecretController(controllers.ACLMixin):
     @controllers.enforce_rbac('secret:delete')
     def on_delete(self, external_project_id, **kwargs):
         plugin.delete_secret(self.secret, external_project_id)
-        LOG.info(u._LI('Deleted secret for project: %s'), external_project_id)
+        LOG.info('Deleted secret for project: %s', external_project_id)

 class SecretsController(controllers.ACLMixin):
@@ -405,7 +405,7 @@ class SecretsController(controllers.ACLMixin):
         )
         secrets_resp_overall.update({'total': total})
-        LOG.info(u._LI('Retrieved secret list for project: %s'),
+        LOG.info('Retrieved secret list for project: %s',
                  external_project_id)
         return secrets_resp_overall
@@ -446,7 +446,7 @@ class SecretsController(controllers.ACLMixin):
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
-        LOG.info(u._LI('Created a secret for project: %s'),
+        LOG.info('Created a secret for project: %s',
                  external_project_id)
         if transport_key_model is not None:
             tkey_url = hrefs.convert_transport_key_to_href(

View File

@@ -74,7 +74,7 @@ class TransportKeyController(controllers.ACLMixin):
             # TODO(alee) response should be 204 on success
             # pecan.response.status = 204
         except exception.NotFound:
-            LOG.exception(u._LE('Problem deleting transport_key'))
+            LOG.exception('Problem deleting transport_key')
             _transport_key_not_found()

View File

@@ -35,7 +35,7 @@ class BaseContextMiddleware(mw.Middleware):
             resp.headers['x-openstack-request-id'] = resp.request.request_id

-        LOG.info(u._LI('Processed request: %(status)s - %(method)s %(url)s'),
+        LOG.info('Processed request: %(status)s - %(method)s %(url)s',
                 {"status": resp.status,
                  "method": resp.request.method,
                  "url": resp.request.url})

View File

@@ -22,7 +22,6 @@ import sys
 sys.path.insert(0, os.getcwd())

 from barbican.common import config
-from barbican import i18n as u
 from barbican.model import clean
 from barbican.model.migration import commands
 from oslo_log import log
@@ -176,8 +175,7 @@ def main():
         dm.execute()
     except Exception as ex:
         if not _exception_is_successful_exit(ex):
-            LOG.exception(u._LE('Problem seen trying to run'
-                                ' barbican db manage'))
+            LOG.exception('Problem seen trying to run barbican db manage')
             sys.stderr.write("ERROR: {0}\n".format(ex))
             sys.exit(1)

View File

@@ -40,7 +40,6 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 from barbican.common import config
-from barbican import i18n as u
 from barbican import queue
 from barbican.queue import keystone_listener
 from barbican import version
@@ -66,7 +65,7 @@ def main():
         log.setup(CONF, 'barbican')
         LOG = log.getLogger(__name__)
-        LOG.info(u._LI("Booting up Barbican Keystone listener node..."))
+        LOG.info("Booting up Barbican Keystone listener node...")

         # Queuing initialization
         queue.init(CONF)
@@ -77,8 +76,7 @@ def main():
                 keystone_listener.MessageServer(CONF)
             ).wait()
         else:
-            LOG.info(u._LI("Exiting as Barbican Keystone listener"
-                           " is not enabled..."))
+            LOG.info("Exiting as Barbican Keystone listener is not enabled...")
     except RuntimeError as e:
         fail(1, e)

View File

@@ -298,10 +298,10 @@ def setup_remote_pydev_debug():
                                 stdoutToServer=True,
                                 stderrToServer=True)
     except Exception:
-        LOG.exception(u._LE('Unable to join debugger, please '
-                            'make sure that the debugger processes is '
-                            'listening on debug-host \'%(debug-host)s\' '
-                            'debug-port \'%(debug-port)s\'.'),
+        LOG.exception('Unable to join debugger, please '
+                      'make sure that the debugger processes is '
+                      'listening on debug-host \'%(debug-host)s\' '
+                      'debug-port \'%(debug-port)s\'.',
                       {'debug-host': CONF.pydev_debug_host,
                        'debug-port': CONF.pydev_debug_port})
         raise

View File

@@ -278,7 +278,7 @@ class NewSecretValidator(ValidatorBase):
                 expiration_tz = timeutils.parse_isotime(expiration_raw.strip())
                 expiration = timeutils.normalize_time(expiration_tz)
             except ValueError:
-                LOG.exception(u._("Problem parsing expiration date"))
+                LOG.exception("Problem parsing expiration date")
                 raise exception.InvalidObject(
                     schema=schema_name,
                     reason=u._("Invalid date for 'expiration'"),
@@ -334,7 +334,7 @@ class NewSecretValidator(ValidatorBase):
             try:
                 base64.b64decode(payload)
             except Exception:
-                LOG.exception(u._("Problem parsing payload"))
+                LOG.exception("Problem parsing payload")
                 raise exception.InvalidObject(
                     schema=schema_name,
                     reason=u._("Invalid payload for payload_content_encoding"),
@@ -687,7 +687,7 @@ class TypeOrderValidator(ValidatorBase, CACommonHelpersMixin):
                 expiration_tz = timeutils.parse_isotime(expiration_raw)
                 expiration = timeutils.normalize_time(expiration_tz)
             except ValueError:
-                LOG.exception(u._("Problem parsing expiration date"))
+                LOG.exception("Problem parsing expiration date")
                 raise exception.InvalidObject(schema=schema_name,
                                               reason=u._("Invalid date "
                                                          "for 'expiration'"),

View File

@@ -14,7 +14,6 @@
 #    limitations under the License.
 from barbican.common import config
-from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories as repo
 from oslo_log import log
@@ -60,8 +59,8 @@ def cleanup_unassociated_projects():
     query = session.query(models.Project)
     query = query.filter(models.Project.id.in_(sub_query))
     delete_count = query.delete(synchronize_session='fetch')
-    LOG.info(u._LI("Cleaned up %(delete_count)s entries for "
-                   "%(project_name)s") %
+    LOG.info("Cleaned up %(delete_count)s entries for "
+             "%(project_name)s",
             {'delete_count': str(delete_count),
              'project_name': models.Project.__name__})
     return delete_count
@@ -98,8 +97,8 @@ def cleanup_parent_with_no_child(parent_model, child_model,
     if threshold_date:
         query = query.filter(parent_model.deleted_at <= threshold_date)
     delete_count = query.delete(synchronize_session='fetch')
-    LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(parent_name)s "
-                   "with no children in %(child_name)s") %
+    LOG.info("Cleaned up %(delete_count)s entries for %(parent_name)s "
+             "with no children in %(child_name)s",
             {'delete_count': delete_count,
              'parent_name': parent_model.__name__,
              'child_name': child_model.__name__})
@@ -120,7 +119,7 @@ def cleanup_softdeletes(model, threshold_date=None):
     if threshold_date:
         query = query.filter(model.deleted_at <= threshold_date)
     delete_count = query.delete()
-    LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(model_name)s") %
+    LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s",
             {'delete_count': delete_count,
              'model_name': model.__name__})
     return delete_count
@@ -172,7 +171,7 @@ def cleanup_all(threshold_date=None):
     # TODO(edtubill) Clean up projects that were soft deleted by
     # the keystone listener

-    LOG.info(u._LI("Cleaned up %s soft deleted entries"), total)
+    LOG.info("Cleaned up %s soft deleted entries", total)
     return total
@@ -295,9 +294,9 @@ def soft_delete_expired_secrets(threshold_date):
     children_count, acl_total = _soft_delete_expired_secret_children(
         threshold_date)
     update_count += children_count
-    LOG.info(u._LI("Soft deleted %(update_count)s entries due to secret "
-                   "expiration and %(acl_total)s secret acl entries "
-                   "were removed from the database") %
+    LOG.info("Soft deleted %(update_count)s entries due to secret "
+             "expiration and %(acl_total)s secret acl entries "
+             "were removed from the database",
             {'update_count': update_count,
              'acl_total': acl_total})
     return update_count + acl_total
@@ -324,7 +323,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
     if log_file:
         CONF.set_override('log_file', log_file)

-    LOG.info(u._LI("Cleaning up soft deletions in the barbican database"))
+    LOG.info("Cleaning up soft deletions in the barbican database")
     log.setup(CONF, 'barbican')

     cleanup_total = 0
@@ -353,7 +352,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
         repo.commit()

     except Exception as ex:
-        LOG.exception(u._LE('Failed to clean up soft deletions in database.'))
+        LOG.exception('Failed to clean up soft deletions in database.')
         repo.rollback()
         cleanup_total = 0  # rollback happened, no entries affected
         raise ex
@@ -372,6 +371,5 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
         log.setup(CONF, 'barbican')  # reset the overrides

-    LOG.info(u._LI("Cleaning of database affected %s entries"),
-             cleanup_total)
-    LOG.info(u._LI('DB clean up finished in %s seconds'), elapsed_time)
+    LOG.info("Cleaning of database affected %s entries", cleanup_total)
+    LOG.info('DB clean up finished in %s seconds', elapsed_time)
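Beyond stripping the markers, several hunks in this file also replace eager %-formatting (rendering the message before handing it to the logger) with a mapping passed as a logging argument, so formatting is deferred until a handler actually emits the record. A short sketch of the difference, with illustrative values:

    import logging

    LOG = logging.getLogger(__name__)
    delete_count, model_name = 3, 'Secret'  # illustrative values

    # Before: the string was rendered eagerly, even with INFO disabled:
    #     LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s" %
    #              {'delete_count': delete_count, 'model_name': model_name})
    # After: the stdlib logger applies a single dict argument lazily.
    LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s",
             {'delete_count': delete_count, 'model_name': model_name})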

View File

@@ -28,7 +28,6 @@ from alembic import config as alembic_config

 from barbican.common import config
 from barbican.common import utils
-from barbican import i18n as u

 LOG = utils.getLogger(__name__)
@@ -45,8 +44,8 @@ def init_config(sql_url=None):
                          "the CLI or the configuration file.")

     if sqlalchemy_url and 'sqlite' in sqlalchemy_url:
-        LOG.warning(u._('!!! Limited support for migration commands using'
-                        ' sqlite databases; This operation may not succeed.'))
+        LOG.warning('!!! Limited support for migration commands using'
+                    ' sqlite databases; This operation may not succeed.')

     config = alembic_config.Config(
         os.path.join(os.path.dirname(__file__), 'alembic.ini')

View File

@@ -95,7 +95,7 @@ def hard_reset():
 def setup_database_engine_and_factory():
     global sa_logger, _SESSION_FACTORY, _ENGINE

-    LOG.info(u._LI('Setting up database engine and session factory'))
+    LOG.info('Setting up database engine and session factory')
     if CONF.debug:
         sa_logger = logging.getLogger('sqlalchemy.engine')
         sa_logger.setLevel(logging.DEBUG)
@@ -198,7 +198,7 @@ def _get_engine(engine):
             _auto_generate_tables(engine, tables)
         else:
-            LOG.info(u._LI('Not auto-creating barbican registry DB'))
+            LOG.info('Not auto-creating barbican registry DB')

     return engine
@@ -245,11 +245,11 @@ def _create_engine(connection, **engine_args):
 def _auto_generate_tables(engine, tables):
     if tables and 'alembic_version' in tables:
         # Upgrade the database to the latest version.
-        LOG.info(u._LI('Updating schema to latest version'))
+        LOG.info('Updating schema to latest version')
         commands.upgrade()
     else:
         # Create database tables from our models.
-        LOG.info(u._LI('Auto-creating barbican registry DB'))
+        LOG.info('Auto-creating barbican registry DB')
         models.BASE.metadata.create_all(engine)

         # Sync the alembic version 'head' with current models.
@@ -267,7 +267,7 @@ def wrap_db_error(f):
             remaining_attempts = CONF.sql_max_retries
             while True:
-                LOG.warning(u._LW('SQL connection failed. %d attempts left.'),
+                LOG.warning('SQL connection failed. %d attempts left.',
                             remaining_attempts)
                 remaining_attempts -= 1
                 time.sleep(CONF.sql_retry_interval)
@@ -371,7 +371,7 @@ class BaseRepo(object):
             entity = query.one()
         except sa_orm.exc.NoResultFound:
-            LOG.exception(u._LE("Not found for %s"), entity_id)
+            LOG.exception("Not found for %s", entity_id)
             entity = None
             if not suppress_exception:
                 _raise_entity_not_found(self._do_entity_name(), entity_id)
@@ -406,7 +406,7 @@ class BaseRepo(object):
             LOG.debug("Saving entity...")
             entity.save(session=session)
         except db_exc.DBDuplicateEntry as e:
-            LOG.exception(u._LE('Problem saving entity for create'))
+            LOG.exception('Problem saving entity for create')
             error_msg = re.sub('[()]', '', str(e.args))
             raise exception.ConstraintCheck(error=error_msg)
@@ -558,8 +558,7 @@ class BaseRepo(object):
                 # Its a soft delete so its more like entity update
                 entity.delete(session=session)
         except sqlalchemy.exc.SQLAlchemyError:
-            LOG.exception(u._LE('Problem finding project related entity to '
-                                'delete'))
+            LOG.exception('Problem finding project related entity to delete')
             if not suppress_exception:
                 raise exception.BarbicanException(u._('Error deleting project '
                                                       'entities for '
@@ -595,7 +594,7 @@ class ProjectRepo(BaseRepo):
         except sa_orm.exc.NoResultFound:
             entity = None
             if not suppress_exception:
-                LOG.exception(u._LE("Problem getting Project %s"),
+                LOG.exception("Problem getting Project %s",
                               external_project_id)
                 raise exception.NotFound(u._(
                     "No {entity_name} found with keystone-ID {id}").format(
@@ -798,7 +797,7 @@ class SecretRepo(BaseRepo):
         except sa_orm.exc.NoResultFound:
             entity = None
             if not suppress_exception:
-                LOG.exception(u._LE("Problem getting secret %s"),
+                LOG.exception("Problem getting secret %s",
                               entity_id)
                 raise exception.NotFound(u._(
                     "No secret found with secret-ID {id}").format(
@@ -1338,8 +1337,7 @@ class ContainerRepo(BaseRepo):
         except sa_orm.exc.NoResultFound:
             entity = None
             if not suppress_exception:
-                LOG.exception(u._LE("Problem getting container %s"),
-                              entity_id)
+                LOG.exception("Problem getting container %s", entity_id)
                 raise exception.NotFound(u._(
                     "No container found with container-ID {id}").format(
                         entity_name=self._do_entity_name(),
@@ -2275,7 +2273,7 @@ class ProjectSecretStoreRepo(BaseRepo):
         try:
             entity = query.one()
         except sa_orm.exc.NoResultFound:
-            LOG.info(u._LE("No preferred secret store found for project = %s"),
+            LOG.info("No preferred secret store found for project = %s",
                      project_id)
             entity = None
             if not suppress_exception:

View File

@@ -152,7 +152,7 @@ class P11CryptoPlugin(plugin.CryptoPluginBase):
         try:
             return func(*args, **kwargs)
         except (exception.PKCS11Exception) as pe:
-            LOG.warning(u._LW("Reinitializing PKCS#11 library: %s"), pe)
+            LOG.warning("Reinitializing PKCS#11 library: %s", pe)
             self._reinitialize_pkcs11()
             return func(*args, **kwargs)

View File

@@ -56,10 +56,10 @@ class SimpleCryptoPlugin(c.CryptoPluginBase):
     def __init__(self, conf=CONF):
         self.master_kek = conf.simple_crypto_plugin.kek
         self.plugin_name = conf.simple_crypto_plugin.plugin_name
-        LOG.warning(u._LW("This plugin is NOT meant for a production "
-                          "environment. This is meant just for development "
-                          "and testing purposes. Please use another plugin "
-                          "for production."))
+        LOG.warning("This plugin is NOT meant for a production "
+                    "environment. This is meant just for development "
+                    "and testing purposes. Please use another plugin "
+                    "for production.")

     def get_plugin_name(self):
         return self.plugin_name

View File

@@ -69,8 +69,8 @@ def _create_nss_db_if_needed(nss_db_path, nss_password):
             nss_db_path, nss_password, over_write=True)
         return True
     else:
-        LOG.info(u._LI("The nss_db_path provided already exists, so the "
-                       "database is assumed to be already set up."))
+        LOG.info("The nss_db_path provided already exists, so the "
+                 "database is assumed to be already set up.")
         return False
@@ -87,8 +87,8 @@ def _setup_nss_db_services(conf):
     nss_db_path, nss_password = (conf.dogtag_plugin.nss_db_path,
                                  conf.dogtag_plugin.nss_password)
     if nss_db_path is None:
-        LOG.warning(u._LW("nss_db_path was not provided so the crypto "
-                          "provider functions were not initialized."))
+        LOG.warning("nss_db_path was not provided so the crypto "
+                    "provider functions were not initialized.")
         return None
     if nss_password is None:
         raise ValueError(u._("nss_password is required"))
@@ -110,8 +110,8 @@ def _import_kra_transport_cert_to_nss_db(conf, crypto):
         transport_cert = systemcert_client.get_transport_cert()
         crypto.import_cert(KRA_TRANSPORT_NICK, transport_cert, "u,u,u")
     except Exception as e:
-        LOG.error(u._LE("Error in importing transport cert."
-                        " KRA may not be enabled: %s"), e)
+        LOG.error("Error in importing transport cert."
+                  " KRA may not be enabled: %s", e)

 def create_connection(conf, subsystem_path):
@@ -640,7 +640,7 @@ def _catch_subca_deletion_exceptions(ca_related_function):
         try:
             return ca_related_function(self, *args, **kwargs)
         except pki.ResourceNotFoundException as e:
-            LOG.warning(u._LI("Sub-CA already deleted"))
+            LOG.warning("Sub-CA already deleted")
             pass
         except pki.PKIException as e:
             raise exception.SubCADeletionErrors(reason=e.message)
@@ -699,7 +699,7 @@ class DogtagCAPlugin(cm.CertificatePluginBase):
                     "%Y-%m-%d %H:%M:%S.%f"
                 )
             except (ValueError, TypeError):
-                LOG.warning(u._LI("Invalid data read from expiration file"))
+                LOG.warning("Invalid data read from expiration file")
                 self.expiration = datetime.utcnow()
         return self._expiration
@@ -738,15 +738,15 @@ class DogtagCAPlugin(cm.CertificatePluginBase):
                 feature_client = feature.FeatureClient(connection)
                 authority_feature = feature_client.get_feature("authority")
                 if authority_feature.enabled:
-                    LOG.info(u._LI("Sub-CAs are enabled by Dogtag server"))
+                    LOG.info("Sub-CAs are enabled by Dogtag server")
                     return True
                 else:
-                    LOG.info(u._LI("Sub-CAs are not enabled by Dogtag server"))
+                    LOG.info("Sub-CAs are not enabled by Dogtag server")
             except (request_exceptions.HTTPError,
                     pki.ResourceNotFoundException):
-                LOG.info(u._LI("Sub-CAs are not supported by Dogtag server"))
+                LOG.info("Sub-CAs are not supported by Dogtag server")
         else:
-            LOG.info(u._LI("Sub-CAs are not supported by Dogtag client"))
+            LOG.info("Sub-CAs are not supported by Dogtag client")
         return False

     def _get_request_id(self, order_id, plugin_meta, operation):

View File

@@ -656,7 +656,7 @@ class CertificatePluginManager(named.NamedExtensionManager):
             new_ca_infos = cert_plugin.get_ca_info()
         except Exception as e:
             # The plugin gave an invalid CA, log and return
-            LOG.error(u._LE("ERROR getting CA from plugin: %s"),
+            LOG.error("ERROR getting CA from plugin: %s",
                       encodeutils.exception_to_unicode(e))
             return
@@ -689,7 +689,7 @@ class CertificatePluginManager(named.NamedExtensionManager):
                 self._add_ca(plugin_name, add_id, new_ca_infos[add_id])
             except Exception as e:
                 # The plugin gave an invalid CA, log and continue
-                LOG.error(u._LE("ERROR adding CA from plugin: %s"),
+                LOG.error("ERROR adding CA from plugin: %s",
                           encodeutils.exception_to_unicode(e))

     def _add_ca(self, plugin_name, plugin_ca_id, ca_info):

View File

@@ -245,8 +245,8 @@ class KMIPSecretStore(ss.SecretStoreBase):
         config = conf.kmip_plugin

         if not getattr(ssl, config.ssl_version, None):
-            LOG.error(u._LE("The configured SSL version (%s) is not available"
-                            " on the system."), config.ssl_version)
+            LOG.error("The configured SSL version (%s) is not available"
+                      " on the system.", config.ssl_version)

         self.client = client.ProxyKmipClient(
             hostname=config.host,
@@ -292,7 +292,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                           "uuid: %s", uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
-            LOG.exception(u._LE("Error opening or writing to client"))
+            LOG.exception("Error opening or writing to client")
             raise ss.SecretGeneralException(e)

     def generate_asymmetric_key(self, key_spec):
@@ -341,7 +341,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                                            public_key_metadata,
                                            passphrase_metadata)
         except Exception as e:
-            LOG.exception(u._LE("Error opening or writing to client"))
+            LOG.exception("Error opening or writing to client")
             raise ss.SecretGeneralException(e)

     def store_secret(self, secret_dto):
@@ -375,7 +375,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                 LOG.debug("SUCCESS: Key stored with uuid: %s", uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
-            LOG.exception(u._LE("Error opening or writing to client"))
+            LOG.exception("Error opening or writing to client")
             raise ss.SecretGeneralException(e)

     def get_secret(self, secret_type, secret_metadata):
@@ -396,7 +396,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                 managed_object = self.client.get(uuid)
                 return self._get_barbican_secret(managed_object, secret_type)
         except Exception as e:
-            LOG.exception(u._LE("Error opening or writing to client"))
+            LOG.exception("Error opening or writing to client")
             raise ss.SecretGeneralException(e)

     def generate_supports(self, key_spec):
@@ -435,7 +435,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                 LOG.debug("Opened connection to KMIP client")
                 self.client.destroy(uuid)
         except Exception as e:
-            LOG.exception(u._LE("Error opening or writing to client"))
+            LOG.exception("Error opening or writing to client")
             raise ss.SecretGeneralException(e)

     def store_secret_supports(self, key_spec):
@@ -567,7 +567,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
                 reason=result.result_reason,
                 message=result.result_message
             )
-            LOG.error(u._LE("ERROR from KMIP server: %s"), msg)
+            LOG.error("ERROR from KMIP server: %s", msg)
             raise ss.SecretGeneralException(msg)

     def _validate_keyfile_permissions(self, path):

View File

@@ -15,7 +15,6 @@
 Default implementation of Barbican certificate processing plugins and support.
 """
 from barbican.common import utils
-from barbican import i18n as u
 from barbican.plugin.interface import certificate_manager as cert

 LOG = utils.getLogger(__name__)
@@ -51,7 +50,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
                  populated by the plugin implementation
         :rtype: :class:`ResultDTO`
         """
-        LOG.info(u._LI('Invoking issue_certificate_request()'))
+        LOG.info('Invoking issue_certificate_request()')
         return cert.ResultDTO(
             cert.CertificateStatus.WAITING_FOR_CA,
             retry_msec=MSEC_UNTIL_CHECK_STATUS)
@@ -71,7 +70,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
                  populated by the plugin implementation
         :rtype: :class:`ResultDTO`
         """
-        LOG.info(u._LI('Invoking modify_certificate_request()'))
+        LOG.info('Invoking modify_certificate_request()')
         return cert.ResultDTO(cert.CertificateStatus.WAITING_FOR_CA)

     def cancel_certificate_request(self, order_id, order_meta, plugin_meta,
@@ -89,7 +88,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
                  populated by the plugin implementation
         :rtype: :class:`ResultDTO`
         """
-        LOG.info(u._LI('Invoking cancel_certificate_request()'))
+        LOG.info('Invoking cancel_certificate_request()')
         return cert.ResultDTO(cert.CertificateStatus.REQUEST_CANCELED)

     def check_certificate_status(self, order_id, order_meta, plugin_meta,
@@ -107,7 +106,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
                  populated by the plugin implementation
         :rtype: :class:`ResultDTO`
         """
-        LOG.info(u._LI('Invoking check_certificate_status()'))
+        LOG.info('Invoking check_certificate_status()')
         return cert.ResultDTO(cert.CertificateStatus.CERTIFICATE_GENERATED)

     def supports(self, certificate_spec):
@@ -145,7 +144,7 @@ class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase):
                  the certificate
         :returns: None
         """
-        LOG.info(u._LI('Invoking notify_certificate_is_ready()'))
+        LOG.info('Invoking notify_certificate_is_ready()')

     def notify_ca_is_unavailable(
             self, project_id, order_ref, error_msg, retry_in_msec):
@@ -158,4 +157,4 @@ class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase):
             If this is 0, then no attempt will be made.
         :returns: None
         """
-        LOG.info(u._LI('Invoking notify_ca_is_unavailable()'))
+        LOG.info('Invoking notify_ca_is_unavailable()')

View File

@@ -17,7 +17,6 @@
 Utilities to support plugins and plugin managers.
 """
 from barbican.common import utils
-from barbican import i18n as u

 LOG = utils.getLogger(__name__)
@@ -42,10 +41,7 @@ def instantiate_plugins(extension_manager, invoke_args=(), invoke_kwargs={}):
             plugin_instance = ext.plugin(*invoke_args, **invoke_kwargs)
         except Exception:
             LOG.logger.disabled = False  # Ensure not suppressing logs.
-            LOG.exception(
-                u._LE("Problem seen creating plugin: '%s'"),
-                ext.name
-            )
+            LOG.exception("Problem seen creating plugin: '%s'", ext.name)
         else:
             ext.obj = plugin_instance

View File

@@ -24,7 +24,6 @@ from oslo_service import service

 from barbican.common import config
 from barbican.common import utils
-from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories
 from barbican.queue import client as async_client
@@ -74,11 +73,11 @@ class PeriodicServer(service.Service):
         self.order_retry_repo = repositories.get_order_retry_tasks_repository()

     def start(self):
-        LOG.info(u._LI("Starting the PeriodicServer"))
+        LOG.info("Starting the PeriodicServer")
         super(PeriodicServer, self).start()

     def stop(self, graceful=True):
-        LOG.info(u._LI("Halting the PeriodicServer"))
+        LOG.info("Halting the PeriodicServer")
         super(PeriodicServer, self).stop(graceful=graceful)

     @periodic_task.periodic_task
@@ -92,25 +91,22 @@ class PeriodicServer(service.Service):
         try:
             total_tasks_processed = self._process_retry_tasks()
         except Exception:
-            LOG.exception(
-                u._LE("Problem seen processing scheduled retry tasks")
-            )
+            LOG.exception("Problem seen processing scheduled retry tasks")

         # Return the next delay before this method is invoked again.
         check_again_in_seconds = _compute_next_periodic_interval()
-        LOG.info(
-            u._LI("Done processing '%(total)s' tasks, will check again in "
-                  "'%(next)s' seconds."),
-            {
-                'total': total_tasks_processed,
-                'next': check_again_in_seconds
-            }
-        )
+        LOG.info("Done processing '%(total)s' tasks, will check again in "
+                 "'%(next)s' seconds.",
+                 {
                     'total': total_tasks_processed,
                     'next': check_again_in_seconds
+                 }
+                 )
         return check_again_in_seconds

     def _process_retry_tasks(self):
         """Scan for and then re-queue tasks that are ready to retry."""
-        LOG.info(u._LI("Processing scheduled retry tasks:"))
+        LOG.info("Processing scheduled retry tasks:")

         # Retrieve tasks to retry.
         entities, total = self._retrieve_tasks()
@@ -160,16 +156,14 @@ class PeriodicServer(service.Service):
                       "kwargs '{2}')".format(
                           retry_task_name, retry_args, retry_kwargs))
         except Exception:
-            LOG.exception(
-                u._LE(
-                    "Problem enqueuing method '%(name)s' with args '%(args)s' "
-                    "and kwargs '%(kwargs)s'."),
-                {
-                    'name': retry_task_name,
-                    'args': retry_args,
-                    'kwargs': retry_kwargs
-                }
-            )
+            LOG.exception("Problem enqueuing method '%(name)s' with args "
+                          "'%(args)s' and kwargs '%(kwargs)s'.",
+                          {
                              'name': retry_task_name,
                              'args': retry_args,
                              'kwargs': retry_kwargs
+                          }
+                          )
             repositories.rollback()
         finally:
             repositories.clear()

View File

@@ -29,7 +29,6 @@ except ImportError:
 from oslo_service import service

 from barbican.common import utils
-from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories
 from barbican import queue
@@ -63,13 +62,10 @@ def retryable_order(fn):
         retry_rpc_method = schedule_order_retry_tasks(
             fn, result, *args, **kwargs)
         if retry_rpc_method:
-            LOG.info(
-                u._LI("Scheduled RPC method for retry: '%s'"),
-                retry_rpc_method)
+            LOG.info("Scheduled RPC method for retry: '%s'", retry_rpc_method)
         else:
-            LOG.info(
-                u._LI("Task '%s' did not have to be retried"),
-                find_function_name(fn, if_no_name='???'))
+            LOG.info("Task '%s' did not have to be retried",
+                     find_function_name(fn, if_no_name='???'))

     return wrapper
@@ -84,15 +80,13 @@ def transactional(fn):
         if not queue.is_server_side():
             # Non-server mode directly invokes tasks.
             fn(*args, **kwargs)
-            LOG.info(u._LI("Completed worker task: '%s'"), fn_name)
+            LOG.info("Completed worker task: '%s'", fn_name)
         else:
             # Manage session/transaction.
             try:
                 fn(*args, **kwargs)
                 repositories.commit()
-                LOG.info(
-                    u._LI("Completed worker task (post-commit): '%s'"),
-                    fn_name)
+                LOG.info("Completed worker task (post-commit): '%s'", fn_name)
             except Exception:
                 """NOTE: Wrapped functions must process with care!
@@ -100,10 +94,9 @@ def transactional(fn):
                 including any updates made to entities such as setting error
                 codes and error messages.
                 """
-                LOG.exception(
-                    u._LE("Problem seen processing worker task: '%s'"),
-                    fn_name
-                )
+                LOG.exception("Problem seen processing worker task: '%s'",
+                              fn_name
+                              )
                 repositories.rollback()
             finally:
                 repositories.clear()
@@ -212,10 +205,8 @@ class Tasks(object):
     @retryable_order
     def process_type_order(self, context, order_id, project_id, request_id):
         """Process TypeOrder."""
-        message = u._LI(
-            "Processing type order: "
-            "order ID is '%(order)s' and request ID is '%(request)s'"
-        )
+        message = "Processing type order: order ID is '%(order)s' and " \
+                  "request ID is '%(request)s'"
         LOG.info(message, {'order': order_id, 'request': request_id})
         return resources.BeginTypeOrder().process_and_suppress_exceptions(
             order_id, project_id)
@@ -226,10 +217,9 @@ class Tasks(object):
     def update_order(self, context, order_id, project_id,
                      updated_meta, request_id):
         """Update Order."""
-        message = u._LI(
-            "Processing update order: "
-            "order ID is '%(order)s' and request ID is '%(request)s'"
-        )
+        message = "Processing update order: order ID is '%(order)s' and " \
+                  "request ID is '%(request)s'"
         LOG.info(message, {'order': order_id, 'request': request_id})
         return resources.UpdateOrder().process_and_suppress_exceptions(
             order_id, project_id, updated_meta)
@@ -240,10 +230,8 @@ class Tasks(object):
     def check_certificate_status(self, context, order_id,
                                  project_id, request_id):
         """Check the status of a certificate order."""
-        message = u._LI(
-            "Processing check certificate status on order: "
-            "order ID is '%(order)s' and request ID is '%(request)s'"
-        )
+        message = "Processing check certificate status on order: " \
+                  "order ID is '%(order)s' and request ID is '%(request)s'"
         LOG.info(message, {'order': order_id, 'request': request_id})

         check_cert_order = resources.CheckCertificateStatusOrder()
@@ -277,11 +265,11 @@ class TaskServer(Tasks, service.Service):
             endpoints=[self])

     def start(self):
-        LOG.info(u._LI("Starting the TaskServer"))
+        LOG.info("Starting the TaskServer")
         self._server.start()
         super(TaskServer, self).start()

     def stop(self):
-        LOG.info(u._LI("Halting the TaskServer"))
+        LOG.info("Halting the TaskServer")
         super(TaskServer, self).stop()
         self._server.stop()

View File

@@ -72,19 +72,16 @@ class KeystoneEventConsumer(resources.BaseTask):
     def handle_error(self, project, status, message, exception,
                      project_id=None, resource_type=None, operation_type=None):
         LOG.error(
-            u._LE(
-                'Error processing Keystone event, project_id=%(project_id)s, '
-                'event resource=%(resource)s, event operation=%(operation)s, '
-                'status=%(status)s, error message=%(message)s'
-            ),
+            'Error processing Keystone event, project_id=%(project_id)s, '
+            'event resource=%(resource)s, event operation=%(operation)s, '
+            'status=%(status)s, error message=%(message)s',
             {
                 'project_id': project.project_id,
                 'resource': resource_type,
                 'operation': operation_type,
                 'status': status,
                 'message': message
-            }
-        )
+            })

     def handle_success(self, project, result, project_id=None,
                        resource_type=None, operation_type=None):
@@ -92,11 +89,9 @@ class KeystoneEventConsumer(resources.BaseTask):
         # only pertains to long-running tasks. See the documentation for
         # BaseTask for more details.
         LOG.info(
-            u._LI(
-                'Successfully handled Keystone event, '
-                'project_id=%(project_id)s, event resource=%(resource)s, '
-                'event operation=%(operation)s'
-            ),
+            'Successfully handled Keystone event, '
+            'project_id=%(project_id)s, event resource=%(resource)s, '
+            'event operation=%(operation)s',
             {
                 'project_id': project_id,
                 'resource': resource_type,
@@ -118,9 +113,8 @@ class KeystoneEventConsumer(resources.BaseTask):
        etc.) performed on Keystone resource.
        """
        if project is None:
-            LOG.info(u._LI('No action is needed as there are no Barbican '
-                           'resources present for Keystone '
-                           'project_id=%s'), project_id)
+            LOG.info('No action is needed as there are no Barbican resources '
+                     'present for Keystone project_id=%s', project_id)
             return

         # barbican entities use projects table 'id' field as foreign key.
@@ -132,5 +126,5 @@ class KeystoneEventConsumer(resources.BaseTask):
         # reached here means there is no error so log the successful
         # cleanup log entry.
-        LOG.info(u._LI('Successfully completed Barbican resources cleanup for '
-                       'Keystone project_id=%s'), project_id)
+        LOG.info('Successfully completed Barbican resources cleanup for '
+                 'Keystone project_id=%s', project_id)

View File

@@ -67,10 +67,8 @@ class BaseTask(object):
         try:
             return self.process(*args, **kwargs)
         except Exception:
-            LOG.exception(
-                u._LE(
-                    "Suppressing exception while trying to "
-                    "process task '%s'."), self.get_name())
+            LOG.exception("Suppressing exception while trying to "
+                          "process task '%s'.", self.get_name())

     def process(self, *args, **kwargs):
         """A template method for all asynchronous tasks.
@@ -93,16 +91,15 @@ class BaseTask(object):
             entity = self.retrieve_entity(*args, **kwargs)
         except Exception:
             # Serious error!
-            LOG.exception(u._LE("Could not retrieve information needed to "
-                                "process task '%s'."), name)
+            LOG.exception("Could not retrieve information needed to "
+                          "process task '%s'.", name)
             raise

         # Process the target entity.
         try:
             result = self.handle_processing(entity, *args, **kwargs)
         except Exception as e_orig:
-            LOG.exception(u._LE("Could not perform processing for "
-                                "task '%s'."), name)
+            LOG.exception("Could not perform processing for task '%s'.", name)

             # Handle failure to process entity.
             try:
@@ -111,19 +108,17 @@ class BaseTask(object):
                 self.handle_error(entity, status, message, e_orig,
                                   *args, **kwargs)
             except Exception:
-                LOG.exception(u._LE("Problem handling an error for task '%s', "
-                                    "raising original "
-                                    "exception."), name)
+                LOG.exception("Problem handling an error for task '%s', "
+                              "raising original exception.", name)
             raise e_orig

         # Handle successful conclusion of processing.
         try:
             self.handle_success(entity, result, *args, **kwargs)
         except Exception:
-            LOG.exception(u._LE("Could not process after successfully "
-                                "executing task '%s'."), name)
+            LOG.exception("Could not process after successfully "
+                          "executing task '%s'.", name)
             raise

         return result

     @abc.abstractmethod