From d035e754991e517e33559efdf61c8ce06542336c Mon Sep 17 00:00:00 2001
From: Jeremy Liu
Date: Wed, 2 Nov 2016 23:13:53 +0800
Subject: [PATCH] Remove translations for debug level log

According to the oslo.i18n guidelines, debug-level log messages should
not be translated; they are intended for developers [1].

[1] http://docs.openstack.org/developer/oslo.i18n/guidelines.html#log-translation

Change-Id: Id0562a6836878385912c8d6a95ed1bbc54f5ea77
---
 barbican/api/controllers/cas.py           |  8 ++---
 barbican/api/controllers/consumers.py     |  4 +--
 barbican/api/controllers/containers.py    |  2 +-
 barbican/api/controllers/orders.py        |  8 ++---
 barbican/api/controllers/secretmeta.py    | 18 +++++-----
 barbican/api/controllers/secrets.py       |  8 ++---
 barbican/api/controllers/secretstores.py  | 20 ++++++------
 barbican/api/controllers/transportkeys.py |  6 ++--
 barbican/api/middleware/simple.py         |  3 +-
 barbican/cmd/db_manage.py                 |  2 +-
 barbican/cmd/retry_scheduler.py           |  3 +-
 barbican/cmd/worker.py                    |  3 +-
 barbican/common/resources.py              |  3 +-
 barbican/model/clean.py                   | 18 +++++-----
 barbican/model/repositories.py            |  6 ++--
 barbican/plugin/dogtag.py                 |  4 +--
 barbican/plugin/kmip_secret_store.py      | 40 +++++++++++------------
 barbican/plugin/snakeoil_ca.py            |  6 ++--
 barbican/queue/keystone_listener.py       |  9 +++--
 barbican/tasks/certificate_resources.py   |  3 +-
 barbican/tasks/keystone_consumer.py       |  2 +-
 barbican/tasks/resources.py               | 18 +++++-----
 22 files changed, 94 insertions(+), 100 deletions(-)

diff --git a/barbican/api/controllers/cas.py b/barbican/api/controllers/cas.py
index 31171d3e8..fb1ab5dc8 100644
--- a/barbican/api/controllers/cas.py
+++ b/barbican/api/controllers/cas.py
@@ -439,8 +439,8 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Retrieve project preferred CA'))
     @controllers.enforce_rbac('certificate_authorities:get_preferred_ca')
     def preferred(self, external_project_id, **kw):
-        LOG.debug(u._('Start certificate_authorities get'
-                      ' project preferred CA'))
+        LOG.debug('Start certificate_authorities get'
+                  ' project preferred CA')
         project = res.get_or_create_project(external_project_id)
@@ -458,7 +458,7 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
     @controllers.enforce_rbac('certificate_authorities:post')
     @controllers.enforce_content_types(['application/json'])
     def on_post(self, external_project_id, **kwargs):
-        LOG.debug(u._('Start on_post for project-ID %s:...'),
+        LOG.debug('Start on_post for project-ID %s:...',
                   external_project_id)
         data = api.load_body(pecan.request, validator=self.validator)
@@ -480,7 +480,7 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
         )
         url = hrefs.convert_certificate_authority_to_href(new_ca.id)
-        LOG.debug(u._('URI to sub-CA is %s'), url)
+        LOG.debug('URI to sub-CA is %s', url)
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
diff --git a/barbican/api/controllers/consumers.py b/barbican/api/controllers/consumers.py
index 2e238ce82..7766998d5 100644
--- a/barbican/api/controllers/consumers.py
+++ b/barbican/api/controllers/consumers.py
@@ -102,8 +102,8 @@ class ContainerConsumersController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('ContainerConsumers(s) retrieval'))
     @controllers.enforce_rbac('consumers:get')
     def on_get(self, external_project_id, **kw):
-        LOG.debug(u._('Start consumers on_get '
-                      'for container-ID %s:'), self.container_id)
+        LOG.debug('Start consumers on_get '
+                  'for container-ID %s:', self.container_id)
         result = self.consumer_repo.get_by_container_id(
             self.container_id,
diff --git a/barbican/api/controllers/containers.py b/barbican/api/controllers/containers.py
index 20ff6c17b..cd9f45676 100644
--- a/barbican/api/controllers/containers.py
+++ b/barbican/api/controllers/containers.py
@@ -281,7 +281,7 @@ class ContainersSecretsController(controllers.ACLMixin):
         self.container_secret_repo.save(new_container_secret)
         url = hrefs.convert_container_to_href(self.container.id)
-        LOG.debug(u._('URI to container is %s'), url)
+        LOG.debug('URI to container is %s', url)
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
diff --git a/barbican/api/controllers/orders.py b/barbican/api/controllers/orders.py
index 126a2db9d..0463596eb 100644
--- a/barbican/api/controllers/orders.py
+++ b/barbican/api/controllers/orders.py
@@ -161,8 +161,8 @@ class OrdersController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Order(s) retrieval'))
     @controllers.enforce_rbac('orders:get')
     def on_get(self, external_project_id, **kw):
-        LOG.debug(u._('Start orders on_get '
-                      'for project-ID %s:'), external_project_id)
+        LOG.debug('Start orders on_get '
+                  'for project-ID %s:', external_project_id)
         result = self.order_repo.get_by_create_date(
             external_project_id,
             offset_arg=kw.get('offset', 0),
@@ -205,8 +205,8 @@ class OrdersController(controllers.ACLMixin):
         order_meta = body.get('meta')
         request_type = order_meta.get('request_type')
-        LOG.debug(u._('Processing order type %(order_type)s,'
-                      ' request type %(request_type)s') %
+        LOG.debug('Processing order type %(order_type)s,'
+                  ' request type %(request_type)s' %
                   {'order_type': order_type,
                    'request_type': request_type})
diff --git a/barbican/api/controllers/secretmeta.py b/barbican/api/controllers/secretmeta.py
index 2d4055686..f0bada25e 100644
--- a/barbican/api/controllers/secretmeta.py
+++ b/barbican/api/controllers/secretmeta.py
@@ -52,8 +52,8 @@ class SecretMetadataController(controllers.ACLMixin):
     def on_get(self, external_project_id, **kwargs):
         """Handles retrieval of existing secret metadata requests."""
-        LOG.debug(u._('Start secret metadata on_get '
-                      'for secret-ID %s:'), self.secret.id)
+        LOG.debug('Start secret metadata on_get '
+                  'for secret-ID %s:', self.secret.id)
         resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
         pecan.response.status = 200
@@ -67,13 +67,13 @@ class SecretMetadataController(controllers.ACLMixin):
     def on_put(self, external_project_id, **kwargs):
         """Handles creation/update of secret metadata."""
         data = api.load_body(pecan.request, validator=self.metadata_validator)
-        LOG.debug(u._('Start secret metadata on_put...%s'), data)
+        LOG.debug('Start secret metadata on_put...%s', data)
         self.user_meta_repo.create_replace_user_metadata(self.secret.id,
                                                          data)
         url = hrefs.convert_user_meta_to_href(self.secret.id)
-        LOG.debug(u._('URI to secret metadata is %s'), url)
+        LOG.debug('URI to secret metadata is %s', url)
         pecan.response.status = 201
         return {'metadata_ref': url}
@@ -95,12 +95,12 @@ class SecretMetadataController(controllers.ACLMixin):
             pecan.abort(409, u._('Conflict. Key in request is already in the '
                                 'secret metadata'))
-        LOG.debug(u._('Start secret metadatum on_post...%s'), metadata)
+        LOG.debug('Start secret metadatum on_post...%s', metadata)
         self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
                                                           key, value)
         url = hrefs.convert_user_meta_to_href(self.secret.id)
-        LOG.debug(u._('URI to secret metadata is %s'), url)
+        LOG.debug('URI to secret metadata is %s', url)
         pecan.response.status = 201
         return {'metadata_ref': url + "/%s {key: %s, value:%s}" % (key,
@@ -126,8 +126,8 @@ class SecretMetadatumController(controllers.ACLMixin):
     def on_get(self, external_project_id, remainder, **kwargs):
         """Handles retrieval of existing secret metadatum."""
-        LOG.debug(u._('Start secret metadatum on_get '
-                      'for secret-ID %s:'), self.secret.id)
+        LOG.debug('Start secret metadatum on_get '
+                  'for secret-ID %s:', self.secret.id)
         metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
         if remainder in metadata:
@@ -157,7 +157,7 @@ class SecretMetadatumController(controllers.ACLMixin):
                   'request url.'
             pecan.abort(409, msg)
         else:
-            LOG.debug(u._('Start secret metadatum on_put...%s'), metadata)
+            LOG.debug('Start secret metadatum on_put...%s', metadata)
             self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
                                                               key, value)
diff --git a/barbican/api/controllers/secrets.py b/barbican/api/controllers/secrets.py
index 200163883..4e7622054 100644
--- a/barbican/api/controllers/secrets.py
+++ b/barbican/api/controllers/secrets.py
@@ -344,8 +344,8 @@ class SecretsController(controllers.ACLMixin):
         def secret_fields(field):
             return putil.mime_types.augment_fields_with_content_types(field)
-        LOG.debug(u._('Start secrets on_get '
-                      'for project-ID %s:'), external_project_id)
+        LOG.debug('Start secrets on_get '
+                  'for project-ID %s:', external_project_id)
         name = kw.get('name', '')
         if name:
@@ -413,7 +413,7 @@ class SecretsController(controllers.ACLMixin):
     @controllers.enforce_rbac('secrets:post')
     @controllers.enforce_content_types(['application/json'])
     def on_post(self, external_project_id, **kwargs):
-        LOG.debug(u._('Start on_post for project-ID %s:...'),
+        LOG.debug('Start on_post for project-ID %s:...',
                   external_project_id)
         data = api.load_body(pecan.request, validator=self.validator)
@@ -440,7 +440,7 @@ class SecretsController(controllers.ACLMixin):
             transport_key_id=data.get('transport_key_id'))
         url = hrefs.convert_secret_to_href(new_secret.id)
-        LOG.debug(u._('URI to secret is %s'), url)
+        LOG.debug('URI to secret is %s', url)
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
diff --git a/barbican/api/controllers/secretstores.py b/barbican/api/controllers/secretstores.py
index e121365e9..b328568ed 100644
--- a/barbican/api/controllers/secretstores.py
+++ b/barbican/api/controllers/secretstores.py
@@ -56,7 +56,7 @@ class PreferredSecretStoreController(controllers.ACLMixin):
     """Handles preferred secret store set/removal requests."""
     def __init__(self, secret_store):
-        LOG.debug(u._('=== Creating PreferredSecretStoreController ==='))
+        LOG.debug('=== Creating PreferredSecretStoreController ===')
         self.secret_store = secret_store
         self.proj_store_repo = repo.get_project_secret_store_repository()
@@ -68,8 +68,8 @@ class PreferredSecretStoreController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Removing preferred secret store'))
     @controllers.enforce_rbac('secretstore_preferred:delete')
     def on_delete(self, external_project_id, **kw):
-        LOG.debug(u._('Start: Remove project preferred secret-store for store'
-                      ' id %s'), self.secret_store.id)
+        LOG.debug('Start: Remove project preferred secret-store for store'
+                  ' id %s', self.secret_store.id)
         project = res.get_or_create_project(external_project_id)
@@ -87,8 +87,8 @@ class PreferredSecretStoreController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Setting preferred secret store'))
     @controllers.enforce_rbac('secretstore_preferred:post')
     def on_post(self, external_project_id, **kwargs):
-        LOG.debug(u._('Start: Set project preferred secret-store for store '
-                      'id %s'), self.secret_store.id)
+        LOG.debug('Start: Set project preferred secret-store for store '
+                  'id %s', self.secret_store.id)
         project = res.get_or_create_project(external_project_id)
@@ -102,7 +102,7 @@ class SecretStoreController(controllers.ACLMixin):
     """Handles secret store retrieval requests."""
     def __init__(self, secret_store):
-        LOG.debug(u._('=== Creating SecretStoreController ==='))
+        LOG.debug('=== Creating SecretStoreController ===')
         self.secret_store = secret_store
     @pecan.expose()
@@ -160,8 +160,8 @@ class SecretStoresController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('List available secret stores'))
     @controllers.enforce_rbac('secretstores:get')
     def on_get(self, external_project_id, **kw):
-        LOG.debug(u._('Start SecretStoresController on_get: listing secret '
-                      'stores'))
+        LOG.debug('Start SecretStoresController on_get: listing secret '
+                  'stores')
         if not utils.is_multiple_backends_enabled():
             _multiple_backends_not_enabled()
@@ -182,7 +182,7 @@ class SecretStoresController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Retrieve global default secret store'))
     @controllers.enforce_rbac('secretstores:get_global_default')
     def get_global_default(self, external_project_id, **kw):
-        LOG.debug(u._('Start secret-stores get global default secret store'))
+        LOG.debug('Start secret-stores get global default secret store')
         if not utils.is_multiple_backends_enabled():
             _multiple_backends_not_enabled()
@@ -197,7 +197,7 @@ class SecretStoresController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Retrieve project preferred store'))
     @controllers.enforce_rbac('secretstores:get_preferred')
     def get_preferred(self, external_project_id, **kw):
-        LOG.debug(u._('Start secret-stores get preferred secret store'))
+        LOG.debug('Start secret-stores get preferred secret store')
         if not utils.is_multiple_backends_enabled():
             _multiple_backends_not_enabled()
diff --git a/barbican/api/controllers/transportkeys.py b/barbican/api/controllers/transportkeys.py
index a4bb18d88..5beb880c9 100644
--- a/barbican/api/controllers/transportkeys.py
+++ b/barbican/api/controllers/transportkeys.py
@@ -54,7 +54,7 @@ class TransportKeyController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Transport Key retrieval'))
     @controllers.enforce_rbac('transport_key:get')
     def on_get(self, external_project_id):
-        LOG.debug(u._("== Getting transport key for %s"), external_project_id)
+        LOG.debug("== Getting transport key for %s", external_project_id)
         transport_key = self.repo.get(entity_id=self.transport_key_id)
         if not transport_key:
             _transport_key_not_found()
@@ -66,7 +66,7 @@ class TransportKeyController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Transport Key deletion'))
     @controllers.enforce_rbac('transport_key:delete')
     def on_delete(self, external_project_id, **kwargs):
-        LOG.debug(u._("== Deleting transport key ==="))
+        LOG.debug("== Deleting transport key ===")
         try:
             self.repo.delete_entity_by_id(
                 entity_id=self.transport_key_id,
@@ -153,7 +153,7 @@ class TransportKeysController(controllers.ACLMixin):
         self.repo.create_from(new_key)
         url = hrefs.convert_transport_key_to_href(new_key.id)
-        LOG.debug(u._('URI to transport key is %s'), url)
+        LOG.debug('URI to transport key is %s', url)
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
diff --git a/barbican/api/middleware/simple.py b/barbican/api/middleware/simple.py
index 34c535e17..5995b68ca 100644
--- a/barbican/api/middleware/simple.py
+++ b/barbican/api/middleware/simple.py
@@ -20,7 +20,6 @@ purposes only.
 from barbican.api import middleware
 from barbican.common import utils
-from barbican import i18n as u
 LOG = utils.getLogger(__name__)
@@ -32,5 +31,5 @@ class SimpleFilter(middleware.Middleware):
     def process_request(self, req):
         """Just announce we have been called."""
-        LOG.debug(u._("Calling SimpleFilter"))
+        LOG.debug("Calling SimpleFilter")
         return None
diff --git a/barbican/cmd/db_manage.py b/barbican/cmd/db_manage.py
index 3917a53ae..084137e42 100644
--- a/barbican/cmd/db_manage.py
+++ b/barbican/cmd/db_manage.py
@@ -137,7 +137,7 @@ class DatabaseManager(object):
     def upgrade(self, args):
         """Process the 'upgrade' Alembic command."""
-        LOG.debug(u._("Performing database schema migration..."))
+        LOG.debug("Performing database schema migration...")
         commands.upgrade(to_version=args.version, sql_url=args.dburl)
     def history(self, args):
diff --git a/barbican/cmd/retry_scheduler.py b/barbican/cmd/retry_scheduler.py
index fc47d074d..cd9b3fb4a 100644
--- a/barbican/cmd/retry_scheduler.py
+++ b/barbican/cmd/retry_scheduler.py
@@ -37,7 +37,6 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 from barbican.common import config
-from barbican import i18n as u
 from barbican import queue
 from barbican.queue import retry_scheduler
@@ -57,7 +56,7 @@ def main():
         # Import and configure logging.
         log.setup(CONF, 'barbican-retry-scheduler')
         LOG = log.getLogger(__name__)
-        LOG.debug(u._("Booting up Barbican worker retry/scheduler node..."))
+        LOG.debug("Booting up Barbican worker retry/scheduler node...")
         # Queuing initialization (as a client only).
         queue.init(CONF, is_server_side=False)
diff --git a/barbican/cmd/worker.py b/barbican/cmd/worker.py
index c6c2833a2..3f59f1e86 100644
--- a/barbican/cmd/worker.py
+++ b/barbican/cmd/worker.py
@@ -37,7 +37,6 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 from barbican.common import config
-from barbican import i18n as u
 from barbican import queue
 from barbican.queue import server
@@ -57,7 +56,7 @@ def main():
         # Import and configure logging.
         log.setup(CONF, 'barbican')
         LOG = log.getLogger(__name__)
-        LOG.debug(u._("Booting up Barbican worker node..."))
+        LOG.debug("Booting up Barbican worker node...")
         # Queuing initialization
         queue.init(CONF)
diff --git a/barbican/common/resources.py b/barbican/common/resources.py
index 26c8544b5..4f9067dd7 100644
--- a/barbican/common/resources.py
+++ b/barbican/common/resources.py
@@ -17,7 +17,6 @@ Shared business logic.
""" from barbican.common import utils -from barbican import i18n as u from barbican.model import models from barbican.model import repositories @@ -43,7 +42,7 @@ def get_or_create_project(project_id): project = project_repo.find_by_external_project_id(project_id, suppress_exception=True) if not project: - LOG.debug(u._('Creating project for %s'), project_id) + LOG.debug('Creating project for %s', project_id) project = models.Project() project.external_id = project_id project.status = models.States.ACTIVE diff --git a/barbican/model/clean.py b/barbican/model/clean.py index 5f61f239b..853187d58 100644 --- a/barbican/model/clean.py +++ b/barbican/model/clean.py @@ -36,7 +36,7 @@ def cleanup_unassociated_projects(): This looks for projects that have no children entries on the dependent tables and removes them. """ - LOG.debug(u._("Cleaning up unassociated projects")) + LOG.debug("Cleaning up unassociated projects") session = repo.get_session() project_children_tables = [models.Order, models.KEKDatum, @@ -48,7 +48,7 @@ def cleanup_unassociated_projects(): models.ProjectCertificateAuthority, models.ProjectQuotas] children_names = map(lambda child: child.__name__, project_children_tables) - LOG.debug(u._("Children tables for Project table being checked: %s"), + LOG.debug("Children tables for Project table being checked: %s", str(children_names)) sub_query = session.query(models.Project.id) for model in project_children_tables: @@ -82,8 +82,8 @@ def cleanup_parent_with_no_child(parent_model, child_model, :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from database """ - LOG.debug(u._("Cleaning soft deletes for %(parent_name)s without " - "a child in %(child_name)s") % + LOG.debug("Cleaning soft deletes for %(parent_name)s without " + "a child in %(child_name)s" % {'parent_name': parent_model.__name__, 'child_name': child_model.__name__}) session = repo.get_session() @@ -113,7 +113,7 @@ def cleanup_softdeletes(model, threshold_date=None): :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from the database """ - LOG.debug(u._("Cleaning soft deletes: %s"), model.__name__) + LOG.debug("Cleaning soft deletes: %s", model.__name__) session = repo.get_session() query = session.query(model) query = query.filter_by(deleted=True) @@ -135,8 +135,8 @@ def cleanup_all(threshold_date=None): :param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from the database """ - LOG.debug(u._("Cleaning up soft deletions where deletion date" - " is older than %s"), str(threshold_date)) + LOG.debug("Cleaning up soft deletions where deletion date" + " is older than %s", str(threshold_date)) total = 0 total += cleanup_softdeletes(models.TransportKey, threshold_date=threshold_date) @@ -247,7 +247,7 @@ def _soft_delete_expired_secret_children(threshold_date): models.EncryptedDatum, models.ContainerSecret] children_names = map(lambda child: child.__name__, secret_children) - LOG.debug(u._("Children tables for Secret table being checked: %s"), + LOG.debug("Children tables for Secret table being checked: %s", str(children_names)) session = repo.get_session() update_count = 0 @@ -288,7 +288,7 @@ def soft_delete_expired_secrets(threshold_date): """ # Note: sqllite does not support multiple table updates so # several db updates are used instead - LOG.debug(u._('Soft deleting expired secrets older than: %s'), + LOG.debug('Soft deleting 
              str(threshold_date))
     update_count = _soft_delete_expired_secrets(threshold_date)
diff --git a/barbican/model/repositories.py b/barbican/model/repositories.py
index 451b5adff..d4c10a4e4 100644
--- a/barbican/model/repositories.py
+++ b/barbican/model/repositories.py
@@ -229,8 +229,8 @@ def is_db_connection_error(args):
 def _create_engine(connection, **engine_args):
-    LOG.debug(u._('Sql connection: please check "sql_connection" property in '
-                  'barbican configuration file; Args: %s'), engine_args)
+    LOG.debug('Sql connection: please check "sql_connection" property in '
+              'barbican configuration file; Args: %s', engine_args)
     engine = sqlalchemy.create_engine(connection, **engine_args)
@@ -310,7 +310,7 @@ def clean_paging_values(offset_arg=0, limit_arg=CONF.default_limit_paging):
     except ValueError:
         limit = CONF.default_limit_paging
-    LOG.debug(u._("Clean paging values limit=%(limit)s, offset=%(offset)s") %
+    LOG.debug("Clean paging values limit=%(limit)s, offset=%(offset)s" %
              {'limit': limit, 'offset': offset})
diff --git a/barbican/plugin/dogtag.py b/barbican/plugin/dogtag.py
index 7a8e4554e..ee40d0f06 100644
--- a/barbican/plugin/dogtag.py
+++ b/barbican/plugin/dogtag.py
@@ -198,7 +198,7 @@ class DogtagKRAPlugin(sstore.SecretStoreBase):
     def __init__(self, conf=CONF):
         """Constructor - create the keyclient."""
-        LOG.debug(u._("starting DogtagKRAPlugin init"))
+        LOG.debug("starting DogtagKRAPlugin init")
         connection = create_connection(conf, 'kra')
         # create kraclient
@@ -208,7 +208,7 @@ class DogtagKRAPlugin(sstore.SecretStoreBase):
         self.keyclient.set_transport_cert(KRA_TRANSPORT_NICK)
         self.plugin_name = conf.dogtag_plugin.plugin_name
-        LOG.debug(u._("completed DogtagKRAPlugin init"))
+        LOG.debug("completed DogtagKRAPlugin init")
     def get_plugin_name(self):
         return self.plugin_name
diff --git a/barbican/plugin/kmip_secret_store.py b/barbican/plugin/kmip_secret_store.py
index 65d3341c8..c88a4cf27 100644
--- a/barbican/plugin/kmip_secret_store.py
+++ b/barbican/plugin/kmip_secret_store.py
@@ -203,7 +203,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         }
         self.pkcs1_only = conf.kmip_plugin.pkcs1_only
         if self.pkcs1_only:
-            LOG.debug(u._("KMIP secret store only supports PKCS#1"))
+            LOG.debug("KMIP secret store only supports PKCS#1")
             del self.valid_alg_dict[ss.KeyAlgorithm.DSA]
         self.kmip_barbican_alg_map = {
             enums.CryptographicAlgorithm.AES: ss.KeyAlgorithm.AES,
@@ -270,7 +270,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: dictionary holding key_id returned by server
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
         """
-        LOG.debug(u._("Starting symmetric key generation with KMIP plugin"))
+        LOG.debug("Starting symmetric key generation with KMIP plugin")
         if not self.generate_supports(key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 key_spec.alg)
@@ -284,11 +284,11 @@ class KMIPSecretStore(ss.SecretStoreBase):
         algorithm = self._get_kmip_algorithm(key_spec.alg.lower())
         try:
             with self.client:
-                LOG.debug(u._("Opened connection to KMIP client for secret "
-                              "generation"))
+                LOG.debug("Opened connection to KMIP client for secret "
+                          "generation")
                 uuid = self.client.create(algorithm, key_spec.bit_length)
-                LOG.debug(u._("SUCCESS: Symmetric key generated with "
-                              "uuid: %s"), uuid)
+                LOG.debug("SUCCESS: Symmetric key generated with "
+                          "uuid: %s", uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
@@ -306,7 +306,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
                  KMIPSecretStoreActionNotSupported
         """
-        LOG.debug(u._("Starting asymmetric key generation with KMIP plugin"))
+        LOG.debug("Starting asymmetric key generation with KMIP plugin")
         if not self.generate_supports(key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 key_spec.alg)
@@ -324,13 +324,13 @@ class KMIPSecretStore(ss.SecretStoreBase):
         try:
             with self.client:
-                LOG.debug(u._("Opened connection to KMIP client for "
-                              "asymmetric secret generation"))
+                LOG.debug("Opened connection to KMIP client for "
+                          "asymmetric secret generation")
                 public_uuid, private_uuid = self.client.create_key_pair(
                     algorithm, length)
-                LOG.debug(u._("SUCCESS: Asymmetric key pair generated with "
-                              "public key uuid: %(public_uuid)s and "
-                              "private key uuid: %(private_uuid)s") %
+                LOG.debug("SUCCESS: Asymmetric key pair generated with "
+                          "public key uuid: %(public_uuid)s and "
+                          "private key uuid: %(private_uuid)s" %
                           {'public_uuid': public_uuid,
                            'private_uuid': private_uuid})
                 private_key_metadata = {KMIPSecretStore.KEY_UUID: private_uuid}
@@ -352,7 +352,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: Dictionary holding the key_uuid assigned by KMIP
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
         """
-        LOG.debug(u._("Starting secret storage with KMIP plugin"))
+        LOG.debug("Starting secret storage with KMIP plugin")
         if not self.store_secret_supports(secret_dto.key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 secret_dto.key_spec.alg)
@@ -369,9 +369,9 @@ class KMIPSecretStore(ss.SecretStoreBase):
         try:
             with self.client:
-                LOG.debug(u._("Opened connection to KMIP client"))
+                LOG.debug("Opened connection to KMIP client")
                 uuid = self.client.register(secret)
-                LOG.debug(u._("SUCCESS: Key stored with uuid: %s"), uuid)
+                LOG.debug("SUCCESS: Key stored with uuid: %s", uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
@@ -386,12 +386,12 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: SecretDTO of the retrieved Secret
         :raises: SecretGeneralException
         """
-        LOG.debug(u._("Starting secret retrieval with KMIP plugin"))
+        LOG.debug("Starting secret retrieval with KMIP plugin")
         uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID])
         try:
             with self.client:
-                LOG.debug(u._("Opened connection to KMIP client for secret "
-                              "retrieval"))
+                LOG.debug("Opened connection to KMIP client for secret "
+                          "retrieval")
                 managed_object = self.client.get(uuid)
                 return self._get_barbican_secret(managed_object, secret_type)
         except Exception as e:
@@ -427,11 +427,11 @@ class KMIPSecretStore(ss.SecretStoreBase):
                            {'key_uuid': }
         :raises: SecretGeneralException
         """
-        LOG.debug(u._("Starting secret deletion with KMIP plugin"))
+        LOG.debug("Starting secret deletion with KMIP plugin")
         uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID])
         try:
             with self.client:
-                LOG.debug(u._("Opened connection to KMIP client"))
+                LOG.debug("Opened connection to KMIP client")
                 self.client.destroy(uuid)
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
diff --git a/barbican/plugin/snakeoil_ca.py b/barbican/plugin/snakeoil_ca.py
index aa9989cc2..c30842dcc 100644
--- a/barbican/plugin/snakeoil_ca.py
+++ b/barbican/plugin/snakeoil_ca.py
@@ -212,12 +212,12 @@ class SnakeoilCA(object):
     def ensure_exists(self):
         if not self.exists:
-            LOG.debug(u._('Keypair not found, creating new cert/key'))
+            LOG.debug('Keypair not found, creating new cert/key')
             self.cert, self.key, self.chain, self.pkcs7 = (
                 self.create_keypair())
     def create_keypair(self):
-        LOG.debug(u._('Generating Snakeoil CA'))
+        LOG.debug('Generating Snakeoil CA')
         key = crypto.PKey()
         key.generate_key(crypto.TYPE_RSA, self.key_size)
@@ -241,7 +241,7 @@ class SnakeoilCA(object):
         cert.sign(self.signing_key, 'sha256')
-        LOG.debug(u._('Snakeoil CA cert/key generated'))
+        LOG.debug('Snakeoil CA cert/key generated')
         chain = b''
         if self.parent_chain_path:
diff --git a/barbican/queue/keystone_listener.py b/barbican/queue/keystone_listener.py
index 68597912c..53ebdd4e2 100644
--- a/barbican/queue/keystone_listener.py
+++ b/barbican/queue/keystone_listener.py
@@ -20,7 +20,6 @@ import oslo_messaging
 from oslo_service import service
 from barbican.common import utils
-from barbican import i18n as u
 from barbican import queue
 from barbican.tasks import keystone_consumer
@@ -70,10 +69,10 @@ class NotificationTask(object):
         """
-        LOG.debug(u._("Input keystone event publisher_id = %s"), publisher_id)
-        LOG.debug(u._("Input keystone event payload = %s"), payload)
-        LOG.debug(u._("Input keystone event type = %s"), event_type)
-        LOG.debug(u._("Input keystone event metadata = %s"), metadata)
+        LOG.debug("Input keystone event publisher_id = %s", publisher_id)
+        LOG.debug("Input keystone event payload = %s", payload)
+        LOG.debug("Input keystone event type = %s", event_type)
+        LOG.debug("Input keystone event metadata = %s", metadata)
         project_id = self._parse_payload_for_project_id(payload)
         resource_type, operation_type = self._parse_event_type(event_type)
         LOG.debug('Keystone Event: resource type={0}, operation type={1}, '
diff --git a/barbican/tasks/certificate_resources.py b/barbican/tasks/certificate_resources.py
index bda749c9a..7fd57e35b 100644
--- a/barbican/tasks/certificate_resources.py
+++ b/barbican/tasks/certificate_resources.py
@@ -20,7 +20,6 @@ from barbican.common import exception as excep
 from barbican.common import hrefs
 from barbican.common import resources as res
 import barbican.common.utils as utils
-from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories as repos
 from barbican.plugin.interface import certificate_manager as cert
@@ -355,7 +354,7 @@ def _add_private_key_to_generated_cert_container(container_id, order_model,
 def modify_certificate_request(order_model, updated_meta):
     """Update the order with CA."""
     # TODO(chellygel): Add the modify certificate request logic.
-    LOG.debug(u._('in modify_certificate_request'))
+    LOG.debug('in modify_certificate_request')
     raise NotImplementedError  # pragma: no cover
diff --git a/barbican/tasks/keystone_consumer.py b/barbican/tasks/keystone_consumer.py
index 3bdc435c6..e41b6fa5c 100644
--- a/barbican/tasks/keystone_consumer.py
+++ b/barbican/tasks/keystone_consumer.py
@@ -37,7 +37,7 @@ class KeystoneEventConsumer(resources.BaseTask):
     def __init__(self, db_start=rep.start, db_commit=rep.commit,
                  db_rollback=rep.rollback, db_clear=rep.clear):
-        LOG.debug(u._('Creating KeystoneEventConsumer task processor'))
+        LOG.debug('Creating KeystoneEventConsumer task processor')
         self.db_start = db_start
         self.db_commit = db_commit
diff --git a/barbican/tasks/resources.py b/barbican/tasks/resources.py
index 167625607..dbefc5679 100644
--- a/barbican/tasks/resources.py
+++ b/barbican/tasks/resources.py
@@ -241,7 +241,7 @@ class BeginTypeOrder(BaseTask):
     def __init__(self):
         super(BeginTypeOrder, self).__init__()
-        LOG.debug(u._('Creating BeginTypeOrder task processor'))
+        LOG.debug('Creating BeginTypeOrder task processor')
         self.project_repo = rep.get_project_repository()
         self.helper = _OrderTaskHelper()
@@ -286,7 +286,7 @@ class BeginTypeOrder(BaseTask):
                 project
             )
             order.secret_id = new_secret.id
-            LOG.debug(u._("...done creating keys order's secret."))
+            LOG.debug("...done creating keys order's secret.")
         elif order_type == models.OrderType.ASYMMETRIC:
             # Create asymmetric Secret
             new_container = plugin.generate_asymmetric_secret(
@@ -295,14 +295,14 @@ class BeginTypeOrder(BaseTask):
                     'application/octet-stream'),
                 project)
             order.container_id = new_container.id
-            LOG.debug(u._("...done creating asymmetric order's secret."))
+            LOG.debug("...done creating asymmetric order's secret.")
         elif order_type == models.OrderType.CERTIFICATE:
             # Request a certificate
             new_container = cert.issue_certificate_request(
                 order, project, result_follow_on)
             if new_container:
                 order.container_id = new_container.id
-            LOG.debug(u._("...done requesting a certificate."))
+            LOG.debug("...done requesting a certificate.")
         else:
             raise NotImplementedError(
                 u._('Order type "{order_type}" not implemented.').format(
                     order_type=order_type))
@@ -327,7 +327,7 @@ class UpdateOrder(BaseTask):
     def __init__(self):
         super(UpdateOrder, self).__init__()
-        LOG.debug(u._('Creating UpdateOrder task processor'))
+        LOG.debug('Creating UpdateOrder task processor')
         self.helper = _OrderTaskHelper()
     def retrieve_entity(self, *args, **kwargs):
@@ -349,13 +349,13 @@ class UpdateOrder(BaseTask):
         if order_type == models.OrderType.CERTIFICATE:
             # Update a certificate request
             cert.modify_certificate_request(order, updated_meta)
-            LOG.debug(u._("...done updating a certificate order."))
+            LOG.debug("...done updating a certificate order.")
         else:
             raise NotImplementedError(
                 u._('Order type "{order_type}" not implemented.').format(
                     order_type=order_type))
-        LOG.debug(u._("...done updating order."))
+        LOG.debug("...done updating order.")
     def handle_error(self, order, status, message, exception,
                      *args, **kwargs):
@@ -374,7 +374,7 @@ class CheckCertificateStatusOrder(BaseTask):
         return u._('Check Certificate Order Status')
     def __init__(self):
-        LOG.debug(u._('Creating CheckCertificateStatusOrder task processor'))
+        LOG.debug('Creating CheckCertificateStatusOrder task processor')
         self.project_repo = rep.get_project_repository()
         self.helper = _OrderTaskHelper()
@@ -410,7 +410,7 @@ class CheckCertificateStatusOrder(BaseTask):
             order, project, result_follow_on)
         if new_container:
             order.container_id = new_container.id
-        LOG.debug(u._("...done checking status of a certificate order."))
+        LOG.debug("...done checking status of a certificate order.")
         return result_follow_on
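
The sketch below illustrates the logging convention this patch applies, using only helpers that already appear in the hunks above (barbican.common.utils.getLogger and the barbican.i18n module imported as u, with its u._ and u._LE markers). The function names store_key and _do_store are hypothetical and exist only for illustration: debug messages stay as plain, untranslated strings with lazy %-style arguments, while error and user-facing messages keep their translation markers.

    from barbican.common import utils
    from barbican import i18n as u

    LOG = utils.getLogger(__name__)


    def _do_store(key_id):
        # Hypothetical stand-in for a real storage backend call.
        pass


    def store_key(key_id):
        # Debug output is developer-facing: no translation wrapper, and the
        # value is passed as a logger argument rather than %-formatted eagerly.
        LOG.debug('Starting storage for key %s', key_id)
        try:
            _do_store(key_id)
        except Exception:
            # Error-level and user-facing messages keep their i18n markers.
            LOG.exception(u._LE("Error storing key"))
            raise ValueError(u._('Key "{key_id}" could not be stored.').format(
                key_id=key_id))
        LOG.debug('SUCCESS: key %s stored', key_id)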