Use international logging messages
Change-Id: I3d85951930be83417c8f459da21962eb77189e43
parent 306b2ac592
commit 20b790beb2
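
This change wraps log and exception messages in the oslo i18n translation markers (u._ for translatable text, u._LI for INFO-level and u._LE for ERROR-level log messages) and adds the missing `from barbican import i18n as u` imports. As a rough sketch of what those markers resolve to (the exact contents of barbican/i18n.py are assumed here, following the standard oslo.i18n pattern):

    # Hypothetical sketch of barbican/i18n.py based on the usual
    # oslo.i18n setup; only the markers used in this commit are shown.
    import oslo_i18n

    _translators = oslo_i18n.TranslatorFactory(domain='barbican')

    _ = _translators.primary       # u._():   translatable messages
    _LI = _translators.log_info    # u._LI(): translated INFO log messages
    _LE = _translators.log_error   # u._LE(): translated ERROR log messages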
@@ -439,7 +439,8 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Retrieve project preferred CA'))
     @controllers.enforce_rbac('certificate_authorities:get_preferred_ca')
     def preferred(self, external_project_id, **kw):
-        LOG.debug('Start certificate_authorities get project preferred CA')
+        LOG.debug(u._('Start certificate_authorities get'
+                      ' project preferred CA'))
 
         project = res.get_or_create_project(external_project_id)
 
@@ -457,7 +458,8 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
     @controllers.enforce_rbac('certificate_authorities:post')
     @controllers.enforce_content_types(['application/json'])
     def on_post(self, external_project_id, **kwargs):
-        LOG.debug('Start on_post for project-ID %s:...', external_project_id)
+        LOG.debug(u._('Start on_post for project-ID %s:...'),
+                  external_project_id)
 
         data = api.load_body(pecan.request, validator=self.validator)
         project = res.get_or_create_project(external_project_id)
@@ -478,7 +480,7 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
         )
 
         url = hrefs.convert_certificate_authority_to_href(new_ca.id)
-        LOG.debug('URI to sub-CA is %s', url)
+        LOG.debug(u._('URI to sub-CA is %s'), url)
 
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
@@ -89,8 +89,8 @@ class ContainerConsumersController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('ContainerConsumers(s) retrieval'))
     @controllers.enforce_rbac('consumers:get')
     def on_get(self, external_project_id, **kw):
-        LOG.debug('Start consumers on_get '
-                  'for container-ID %s:', self.container_id)
+        LOG.debug(u._('Start consumers on_get '
+                      'for container-ID %s:'), self.container_id)
 
         try:
             self.container_repo.get(self.container_id, external_project_id)
@@ -274,7 +274,7 @@ class ContainersSecretsController(controllers.ACLMixin):
         self.container_secret_repo.save(new_container_secret)
 
         url = hrefs.convert_container_to_href(self.container.id)
-        LOG.debug('URI to container is %s', url)
+        LOG.debug(u._('URI to container is %s'), url)
 
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
@@ -161,8 +161,8 @@ class OrdersController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Order(s) retrieval'))
     @controllers.enforce_rbac('orders:get')
     def on_get(self, external_project_id, **kw):
-        LOG.debug('Start orders on_get '
-                  'for project-ID %s:', external_project_id)
+        LOG.debug(u._('Start orders on_get '
+                      'for project-ID %s:'), external_project_id)
 
         result = self.order_repo.get_by_create_date(
             external_project_id, offset_arg=kw.get('offset', 0),
@@ -205,8 +205,10 @@ class OrdersController(controllers.ACLMixin):
         order_meta = body.get('meta')
         request_type = order_meta.get('request_type')
 
-        LOG.debug('Processing order type %s, request type %s',
-                  order_type, request_type)
+        LOG.debug(u._('Processing order type %(order_type)s,'
+                      ' request type %(request_type)s') %
+                  {'order_type': order_type,
+                   'request_type': request_type})
 
         if order_type == models.OrderType.CERTIFICATE:
             validators.validate_ca_id(project.id, body.get('meta'))
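
Messages with more than one substitution switch from positional %s arguments to named %(...)s placeholders interpolated with a dict, so a translation can reorder the substitutions without breaking the formatting. A minimal illustration (generic Python, not tied to this diff):

    # Named placeholders let a translated string reorder values safely.
    msg = u'request type %(request_type)s for order %(order_type)s'
    print(msg % {'order_type': 'certificate', 'request_type': 'custom'})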
@@ -52,8 +52,8 @@ class SecretMetadataController(controllers.ACLMixin):
     def on_get(self, external_project_id, **kwargs):
         """Handles retrieval of existing secret metadata requests."""
 
-        LOG.debug('Start secret metadata on_get '
-                  'for secret-ID %s:', self.secret.id)
+        LOG.debug(u._('Start secret metadata on_get '
+                      'for secret-ID %s:'), self.secret.id)
 
         resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
         pecan.response.status = 200
@@ -67,13 +67,13 @@ class SecretMetadataController(controllers.ACLMixin):
     def on_put(self, external_project_id, **kwargs):
         """Handles creation/update of secret metadata."""
         data = api.load_body(pecan.request, validator=self.metadata_validator)
-        LOG.debug('Start secret metadata on_put...%s', data)
+        LOG.debug(u._('Start secret metadata on_put...%s'), data)
 
         self.user_meta_repo.create_replace_user_metadata(self.secret.id,
                                                          data)
 
         url = hrefs.convert_user_meta_to_href(self.secret.id)
-        LOG.debug('URI to secret metadata is %s', url)
+        LOG.debug(u._('URI to secret metadata is %s'), url)
 
         pecan.response.status = 201
         return {'metadata_ref': url}
@@ -95,12 +95,12 @@ class SecretMetadataController(controllers.ACLMixin):
             pecan.abort(409, u._('Conflict. Key in request is already in the '
                                  'secret metadata'))
 
-        LOG.debug('Start secret metadatum on_post...%s', metadata)
+        LOG.debug(u._('Start secret metadatum on_post...%s'), metadata)
         self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
                                                           key, value)
 
         url = hrefs.convert_user_meta_to_href(self.secret.id)
-        LOG.debug('URI to secret metadata is %s', url)
+        LOG.debug(u._('URI to secret metadata is %s'), url)
 
         pecan.response.status = 201
         return {'metadata_ref': url + "/%s {key: %s, value:%s}" % (key,
@@ -126,8 +126,8 @@ class SecretMetadatumController(controllers.ACLMixin):
     def on_get(self, external_project_id, remainder, **kwargs):
         """Handles retrieval of existing secret metadatum."""
 
-        LOG.debug('Start secret metadatum on_get '
-                  'for secret-ID %s:', self.secret.id)
+        LOG.debug(u._('Start secret metadatum on_get '
+                      'for secret-ID %s:'), self.secret.id)
 
         metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id)
         if remainder in metadata:
@@ -157,7 +157,7 @@ class SecretMetadatumController(controllers.ACLMixin):
                   'request url.'
             pecan.abort(409, msg)
         else:
-            LOG.debug('Start secret metadatum on_put...%s', metadata)
+            LOG.debug(u._('Start secret metadatum on_put...%s'), metadata)
 
             self.user_meta_repo.create_replace_user_metadatum(self.secret.id,
                                                               key, value)
@@ -337,8 +337,8 @@ class SecretsController(controllers.ACLMixin):
         def secret_fields(field):
             return putil.mime_types.augment_fields_with_content_types(field)
 
-        LOG.debug('Start secrets on_get '
-                  'for project-ID %s:', external_project_id)
+        LOG.debug(u._('Start secrets on_get '
+                      'for project-ID %s:'), external_project_id)
 
         name = kw.get('name', '')
         if name:
@@ -406,7 +406,8 @@ class SecretsController(controllers.ACLMixin):
     @controllers.enforce_rbac('secrets:post')
     @controllers.enforce_content_types(['application/json'])
     def on_post(self, external_project_id, **kwargs):
-        LOG.debug('Start on_post for project-ID %s:...', external_project_id)
+        LOG.debug(u._('Start on_post for project-ID %s:...'),
+                  external_project_id)
 
         data = api.load_body(pecan.request, validator=self.validator)
         project = res.get_or_create_project(external_project_id)
@@ -432,7 +433,7 @@ class SecretsController(controllers.ACLMixin):
             transport_key_id=data.get('transport_key_id'))
 
         url = hrefs.convert_secret_to_href(new_secret.id)
-        LOG.debug('URI to secret is %s', url)
+        LOG.debug(u._('URI to secret is %s'), url)
 
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
@@ -49,7 +49,7 @@ class TransportKeyController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Transport Key retrieval'))
     @controllers.enforce_rbac('transport_key:get')
     def on_get(self, external_project_id):
-        LOG.debug("== Getting transport key for %s", external_project_id)
+        LOG.debug(u._("== Getting transport key for %s"), external_project_id)
         transport_key = self.repo.get(entity_id=self.transport_key_id)
         if not transport_key:
             _transport_key_not_found()
@@ -61,7 +61,7 @@ class TransportKeyController(controllers.ACLMixin):
     @controllers.handle_exceptions(u._('Transport Key deletion'))
     @controllers.enforce_rbac('transport_key:delete')
     def on_delete(self, external_project_id, **kwargs):
-        LOG.debug("== Deleting transport key ===")
+        LOG.debug(u._("== Deleting transport key ==="))
         try:
             self.repo.delete_entity_by_id(
                 entity_id=self.transport_key_id,
@@ -146,7 +146,7 @@ class TransportKeysController(controllers.ACLMixin):
         self.repo.create_from(new_key)
 
         url = hrefs.convert_transport_key_to_href(new_key.id)
-        LOG.debug('URI to transport key is %s', url)
+        LOG.debug(u._('URI to transport key is %s'), url)
 
         pecan.response.status = 201
         pecan.response.headers['Location'] = url
@@ -21,6 +21,7 @@ purposes only.
 from barbican.api import middleware
 from barbican.common import config
 from barbican.common import utils
+from barbican import i18n as u
 
 LOG = utils.getLogger(__name__)
 CONF = config.CONF
@@ -33,5 +34,5 @@ class SimpleFilter(middleware.Middleware):
 
     def process_request(self, req):
         """Just announce we have been called."""
-        LOG.debug("Calling SimpleFilter")
+        LOG.debug(u._("Calling SimpleFilter"))
         return None
@@ -22,6 +22,7 @@ import sys
 sys.path.insert(0, os.getcwd())
 
 from barbican.common import config
+from barbican import i18n as u
 from barbican.model import clean
 from barbican.model.migration import commands
 from oslo_log import log
@@ -136,7 +137,7 @@ class DatabaseManager(object):
 
     def upgrade(self, args):
         """Process the 'upgrade' Alembic command."""
-        LOG.debug("Performing database schema migration...")
+        LOG.debug(u._("Performing database schema migration..."))
         commands.upgrade(to_version=args.version, sql_url=args.dburl)
 
     def history(self, args):
@@ -175,7 +176,8 @@ def main():
         dm.execute()
     except Exception as ex:
         if not _exception_is_successfull_exit(ex):
-            LOG.exception('Problem seen trying to run barbican db manage')
+            LOG.exception(u._LE('Problem seen trying to run'
+                                ' barbican db manage'))
         sys.stderr.write("ERROR: {0}\n".format(ex))
         sys.exit(1)
 
@@ -40,6 +40,7 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 
 
 from barbican.common import config
+from barbican import i18n as u
 from barbican import queue
 from barbican.queue import keystone_listener
 
@@ -62,7 +63,7 @@ def main():
         log.setup(CONF, 'barbican')
 
         LOG = log.getLogger(__name__)
-        LOG.info("Booting up Barbican Keystone listener node...")
+        LOG.info(u._LI("Booting up Barbican Keystone listener node..."))
 
         # Queuing initialization
         queue.init(CONF)
@@ -73,7 +74,8 @@ def main():
                 keystone_listener.MessageServer(CONF)
             ).wait()
         else:
-            LOG.info("Exiting as Barbican Keystone listener is not enabled...")
+            LOG.info(u._LI("Exiting as Barbican Keystone listener"
+                           " is not enabled..."))
     except RuntimeError as e:
         fail(1, e)
 
@@ -37,6 +37,7 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 
 
 from barbican.common import config
+from barbican import i18n as u
 from barbican import queue
 from barbican.queue import retry_scheduler
 
@@ -56,7 +57,7 @@ def main():
         # Import and configure logging.
         log.setup(CONF, 'barbican-retry-scheduler')
         LOG = log.getLogger(__name__)
-        LOG.debug("Booting up Barbican worker retry/scheduler node...")
+        LOG.debug(u._("Booting up Barbican worker retry/scheduler node..."))
 
         # Queuing initialization (as a client only).
         queue.init(CONF, is_server_side=False)
@@ -37,6 +37,7 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
 
 
 from barbican.common import config
+from barbican import i18n as u
 from barbican import queue
 from barbican.queue import server
 
@@ -56,7 +57,7 @@ def main():
         # Import and configure logging.
         log.setup(CONF, 'barbican')
         LOG = log.getLogger(__name__)
-        LOG.debug("Booting up Barbican worker node...")
+        LOG.debug(u._("Booting up Barbican worker node..."))
 
         # Queuing initialization
         queue.init(CONF)
@@ -17,6 +17,7 @@
 Shared business logic.
 """
 from barbican.common import utils
+from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories
 
@@ -42,7 +43,7 @@ def get_or_create_project(project_id):
     project = project_repo.find_by_external_project_id(project_id,
                                                        suppress_exception=True)
     if not project:
-        LOG.debug('Creating project for %s', project_id)
+        LOG.debug(u._('Creating project for %s'), project_id)
         project = models.Project()
         project.external_id = project_id
         project.status = models.States.ACTIVE
@@ -279,7 +279,7 @@ class NewSecretValidator(ValidatorBase):
                 expiration_tz = timeutils.parse_isotime(expiration_raw.strip())
                 expiration = timeutils.normalize_time(expiration_tz)
             except ValueError:
-                LOG.exception("Problem parsing expiration date")
+                LOG.exception(u._("Problem parsing expiration date"))
                 raise exception.InvalidObject(
                     schema=schema_name,
                     reason=u._("Invalid date for 'expiration'"),
@@ -336,7 +336,7 @@ class NewSecretValidator(ValidatorBase):
             try:
                 base64.b64decode(payload)
             except Exception:
-                LOG.exception("Problem parsing payload")
+                LOG.exception(u._("Problem parsing payload"))
                 raise exception.InvalidObject(
                     schema=schema_name,
                     reason=u._("Invalid payload for payload_content_encoding"),
@@ -689,7 +689,7 @@ class TypeOrderValidator(ValidatorBase, CACommonHelpersMixin):
                 expiration_tz = timeutils.parse_isotime(expiration_raw)
                 expiration = timeutils.normalize_time(expiration_tz)
             except ValueError:
-                LOG.exception("Problem parsing expiration date")
+                LOG.exception(u._("Problem parsing expiration date"))
                 raise exception.InvalidObject(schema=schema_name,
                                               reason=u._("Invalid date "
                                                          "for 'expiration'"),
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 from barbican.common import config
+from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories as repo
 from oslo_log import log
@@ -35,7 +36,7 @@ def cleanup_unassociated_projects():
     This looks for projects that have no children entries on the dependent
     tables and removes them.
     """
-    LOG.debug("Cleaning up unassociated projects")
+    LOG.debug(u._("Cleaning up unassociated projects"))
     session = repo.get_session()
     project_children_tables = [models.Order,
                                models.KEKDatum,
@@ -47,7 +48,7 @@ def cleanup_unassociated_projects():
                                models.ProjectCertificateAuthority,
                                models.ProjectQuotas]
     children_names = map(lambda child: child.__name__, project_children_tables)
-    LOG.debug("Children tables for Project table being checked: %s",
+    LOG.debug(u._("Children tables for Project table being checked: %s"),
               str(children_names))
     sub_query = session.query(models.Project.id)
     for model in project_children_tables:
@@ -59,8 +60,10 @@ def cleanup_unassociated_projects():
     query = session.query(models.Project)
     query = query.filter(models.Project.id.in_(sub_query))
     delete_count = query.delete(synchronize_session='fetch')
-    LOG.info("Cleaned up %s entries for %s", str(delete_count),
-             models.Project.__name__)
+    LOG.info(u._LI("Cleaned up %(delete_count)s entries for "
                   "%(project_name)s") %
+             {'delete_count': str(delete_count),
+              'project_name': models.Project.__name__})
     return delete_count
 
 
@@ -79,9 +82,10 @@ def cleanup_parent_with_no_child(parent_model, child_model,
     :param threshold_date: soft deletions older than this date will be removed
     :returns: total number of entries removed from database
     """
-    LOG.debug("Cleaning soft deletes for %s without a child in %s",
-              parent_model.__name__,
-              child_model.__name__)
+    LOG.debug(u._("Cleaning soft deletes for %(parent_name)s without "
+                  "a child in %(child_name)s") %
+              {'parent_name': parent_model.__name__,
+               'child_name': child_model.__name__})
     session = repo.get_session()
     sub_query = session.query(parent_model.id)
     sub_query = sub_query.outerjoin(child_model)
@@ -94,8 +98,11 @@ def cleanup_parent_with_no_child(parent_model, child_model,
     if threshold_date:
         query = query.filter(parent_model.deleted_at <= threshold_date)
     delete_count = query.delete(synchronize_session='fetch')
-    LOG.info("Cleaned up %s entries for %s with no children in %s",
-             delete_count, parent_model.__name__, child_model.__name__)
+    LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(parent_name)s "
+                   "with no children in %(child_name)s") %
+             {'delete_count': delete_count,
+              'parent_name': parent_model.__name__,
+              'child_name': child_model.__name__})
     return delete_count
 
 
@@ -106,15 +113,16 @@ def cleanup_softdeletes(model, threshold_date=None):
     :param threshold_date: soft deletions older than this date will be removed
     :returns: total number of entries removed from the database
     """
-    LOG.debug("Cleaning soft deletes: %s", model.__name__)
+    LOG.debug(u._("Cleaning soft deletes: %s"), model.__name__)
     session = repo.get_session()
     query = session.query(model)
     query = query.filter_by(deleted=True)
     if threshold_date:
         query = query.filter(model.deleted_at <= threshold_date)
     delete_count = query.delete()
-    LOG.info("Cleaned up %s entries for %s", delete_count,
-             model.__name__)
+    LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(model_name)s") %
+             {'delete_count': delete_count,
+              'model_name': model.__name__})
     return delete_count
 
 
@@ -127,8 +135,8 @@ def cleanup_all(threshold_date=None):
    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from the database
    """
-    LOG.debug("Cleaning up soft deletions where deletion date"
-              " is older than %s", str(threshold_date))
+    LOG.debug(u._("Cleaning up soft deletions where deletion date"
+                  " is older than %s"), str(threshold_date))
     total = 0
     total += cleanup_softdeletes(models.TransportKey,
                                  threshold_date=threshold_date)
@@ -164,7 +172,7 @@ def cleanup_all(threshold_date=None):
     # TODO(edtubill) Clean up projects that were soft deleted by
     # the keystone listener
 
-    LOG.info("Cleaned up %s soft deleted entries", total)
+    LOG.info(u._LI("Cleaned up %s soft deleted entries"), total)
     return total
 
 
@@ -239,7 +247,7 @@ def _soft_delete_expired_secret_children(threshold_date):
                        models.EncryptedDatum,
                        models.ContainerSecret]
     children_names = map(lambda child: child.__name__, secret_children)
-    LOG.debug("Children tables for Secret table being checked: %s",
+    LOG.debug(u._("Children tables for Secret table being checked: %s"),
              str(children_names))
     session = repo.get_session()
     update_count = 0
@@ -280,16 +288,18 @@ def soft_delete_expired_secrets(threshold_date):
     """
     # Note: sqllite does not support multiple table updates so
     # several db updates are used instead
-    LOG.debug('Soft deleting expired secrets older than: %s',
+    LOG.debug(u._('Soft deleting expired secrets older than: %s'),
               str(threshold_date))
     update_count = _soft_delete_expired_secrets(threshold_date)
 
     children_count, acl_total = _soft_delete_expired_secret_children(
         threshold_date)
     update_count += children_count
-    LOG.info("Soft deleted %s entries due to secret expiration"
-             " and %s secret acl entries were removed from the database",
-             update_count, acl_total)
+    LOG.info(u._LI("Soft deleted %(update_count)s entries due to secret "
+                   "expiration and %(acl_total)s secret acl entries "
+                   "were removed from the database") %
+             {'update_count': update_count,
+              'acl_total': acl_total})
     return update_count + acl_total
 
 
@@ -314,7 +324,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
     if log_file:
         CONF.set_override('log_file', log_file)
 
-    LOG.info("Cleaning up soft deletions in the barbican database")
+    LOG.info(u._LI("Cleaning up soft deletions in the barbican database"))
     log.setup(CONF, 'barbican')
 
     cleanup_total = 0
@@ -343,7 +353,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
         repo.commit()
 
     except Exception as ex:
-        LOG.exception('Failed to clean up soft deletions in database.')
+        LOG.exception(u._LE('Failed to clean up soft deletions in database.'))
         repo.rollback()
         cleanup_total = 0  # rollback happened, no entries affected
         raise ex
@@ -362,6 +372,6 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
 
     log.setup(CONF, 'barbican')  # reset the overrides
 
-    LOG.info("Cleaning of database affected %s entries",
+    LOG.info(u._LI("Cleaning of database affected %s entries"),
              cleanup_total)
-    LOG.info('DB clean up finished in %s seconds', elapsed_time)
+    LOG.info(u._LI('DB clean up finished in %s seconds'), elapsed_time)
@@ -28,6 +28,7 @@ from alembic import config as alembic_config
 
 from barbican.common import config
 from barbican.common import utils
+from barbican import i18n as u
 
 LOG = utils.getLogger(__name__)
 
@@ -44,8 +45,8 @@ def init_config(sql_url=None):
                          "the CLI or the configuration file.")
 
     if sqlalchemy_url and 'sqlite' in sqlalchemy_url:
-        LOG.warning('!!! Limited support for migration commands using sqlite'
-                    ' databases; This operation may not succeed.')
+        LOG.warning(u._('!!! Limited support for migration commands using'
+                        ' sqlite databases; This operation may not succeed.'))
 
     config = alembic_config.Config(
         os.path.join(os.path.dirname(__file__), 'alembic.ini')
@@ -90,7 +90,7 @@ def hard_reset():
 def setup_database_engine_and_factory():
     global sa_logger, _SESSION_FACTORY, _ENGINE
 
-    LOG.info('Setting up database engine and session factory')
+    LOG.info(u._LI('Setting up database engine and session factory'))
     if CONF.debug:
         sa_logger = logging.getLogger('sqlalchemy.engine')
         sa_logger.setLevel(logging.DEBUG)
@@ -214,8 +214,8 @@ def is_db_connection_error(args):
 
 
 def _create_engine(connection, **engine_args):
-    LOG.debug('Sql connection: please check "sql_connection" property in '
-              'barbican configuration file; Args: %s', engine_args)
+    LOG.debug(u._('Sql connection: please check "sql_connection" property in '
+                  'barbican configuration file; Args: %s'), engine_args)
 
     engine = sqlalchemy.create_engine(connection, **engine_args)
 
@@ -295,9 +295,9 @@ def clean_paging_values(offset_arg=0, limit_arg=CONF.default_limit_paging):
     except ValueError:
         limit = CONF.default_limit_paging
 
-    LOG.debug("Clean paging values limit=%s, offset=%s",
-              limit, offset
-              )
+    LOG.debug(u._("Clean paging values limit=%(limit)s, offset=%(offset)s") %
+              {'limit': limit,
+               'offset': offset})
 
     return offset, limit
 
@@ -139,7 +139,7 @@ class P11CryptoPlugin(plugin.CryptoPluginBase):
             try:
                 return func(*args, **kwargs)
             except (exception.PKCS11Exception) as pe:
-                LOG.warn("Reinitializing PKCS#11 library: {e}".format(e=pe))
+                LOG.warning("Reinitializing PKCS#11 library: {e}".format(e=pe))
                 self._reinitialize_pkcs11()
                 return func(*args, **kwargs)
 
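
The hunk above is a drive-by cleanup rather than an i18n change: Logger.warn is a deprecated alias of Logger.warning in the Python standard library's logging module, so the call is renamed without adding a translation marker.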
@@ -195,7 +195,7 @@ class DogtagKRAPlugin(sstore.SecretStoreBase):
 
     def __init__(self, conf=CONF):
         """Constructor - create the keyclient."""
-        LOG.debug("starting DogtagKRAPlugin init")
+        LOG.debug(u._("starting DogtagKRAPlugin init"))
         connection = create_connection(conf, 'kra')
 
         # create kraclient
@@ -204,7 +204,7 @@ class DogtagKRAPlugin(sstore.SecretStoreBase):
 
         self.keyclient.set_transport_cert(KRA_TRANSPORT_NICK)
 
-        LOG.debug("completed DogtagKRAPlugin init")
+        LOG.debug(u._("completed DogtagKRAPlugin init"))
 
     def store_secret(self, secret_dto):
         """Store a secret in the KRA
@@ -200,7 +200,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         }
         self.pkcs1_only = conf.kmip_plugin.pkcs1_only
         if self.pkcs1_only:
-            LOG.debug("KMIP secret store only supports PKCS#1")
+            LOG.debug(u._("KMIP secret store only supports PKCS#1"))
             del self.valid_alg_dict[ss.KeyAlgorithm.DSA]
         self.kmip_barbican_alg_map = {
             enums.CryptographicAlgorithm.AES: ss.KeyAlgorithm.AES,
@@ -262,7 +262,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: dictionary holding key_id returned by server
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
         """
-        LOG.debug("Starting symmetric key generation with KMIP plugin")
+        LOG.debug(u._("Starting symmetric key generation with KMIP plugin"))
         if not self.generate_supports(key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 key_spec.alg)
@@ -276,11 +276,11 @@ class KMIPSecretStore(ss.SecretStoreBase):
         algorithm = self._get_kmip_algorithm(key_spec.alg.lower())
         try:
             with self.client:
-                LOG.debug("Opened connection to KMIP client for secret " +
-                          "generation")
+                LOG.debug(u._("Opened connection to KMIP client for secret "
+                              "generation"))
                 uuid = self.client.create(algorithm, key_spec.bit_length)
-                LOG.debug("SUCCESS: Symmetric key generated with "
-                          "uuid: %s", uuid)
+                LOG.debug(u._("SUCCESS: Symmetric key generated with "
+                              "uuid: %s"), uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
@@ -298,7 +298,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
             KMIPSecretStoreActionNotSupported
         """
-        LOG.debug("Starting asymmetric key generation with KMIP plugin")
+        LOG.debug(u._("Starting asymmetric key generation with KMIP plugin"))
         if not self.generate_supports(key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 key_spec.alg)
@@ -316,13 +316,15 @@ class KMIPSecretStore(ss.SecretStoreBase):
 
         try:
             with self.client:
-                LOG.debug("Opened connection to KMIP client for asymmetric " +
-                          "secret generation")
+                LOG.debug(u._("Opened connection to KMIP client for "
+                              "asymmetric secret generation"))
                 public_uuid, private_uuid = self.client.create_key_pair(
                     algorithm, length)
-                LOG.debug("SUCCESS: Asymmetric key pair generated with "
-                          "public key uuid: %s and private key uuid: %s",
-                          public_uuid, private_uuid)
+                LOG.debug(u._("SUCCESS: Asymmetric key pair generated with "
+                              "public key uuid: %(public_uuid)s and "
+                              "private key uuid: %(private_uuid)s") %
+                          {'public_uuid': public_uuid,
+                           'private_uuid': private_uuid})
                 private_key_metadata = {KMIPSecretStore.KEY_UUID: private_uuid}
                 public_key_metadata = {KMIPSecretStore.KEY_UUID: public_uuid}
                 passphrase_metadata = None
@@ -342,7 +344,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: Dictionary holding the key_uuid assigned by KMIP
         :raises: SecretGeneralException, SecretAlgorithmNotSupportedException
         """
-        LOG.debug("Starting secret storage with KMIP plugin")
+        LOG.debug(u._("Starting secret storage with KMIP plugin"))
         if not self.store_secret_supports(secret_dto.key_spec):
             raise ss.SecretAlgorithmNotSupportedException(
                 secret_dto.key_spec.alg)
@@ -359,9 +361,9 @@ class KMIPSecretStore(ss.SecretStoreBase):
 
         try:
             with self.client:
-                LOG.debug("Opened connection to KMIP client")
+                LOG.debug(u._("Opened connection to KMIP client"))
                 uuid = self.client.register(secret)
-                LOG.debug("SUCCESS: Key stored with uuid: %s", uuid)
+                LOG.debug(u._("SUCCESS: Key stored with uuid: %s"), uuid)
                 return {KMIPSecretStore.KEY_UUID: uuid}
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
@@ -376,12 +378,12 @@ class KMIPSecretStore(ss.SecretStoreBase):
         :returns: SecretDTO of the retrieved Secret
         :raises: SecretGeneralException
         """
-        LOG.debug("Starting secret retrieval with KMIP plugin")
+        LOG.debug(u._("Starting secret retrieval with KMIP plugin"))
         uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID])
         try:
             with self.client:
-                LOG.debug("Opened connection to KMIP client for secret " +
-                          "retrieval")
+                LOG.debug(u._("Opened connection to KMIP client for secret "
+                              "retrieval"))
                 managed_object = self.client.get(uuid)
                 return self._get_barbican_secret(managed_object, secret_type)
         except Exception as e:
@@ -417,11 +419,11 @@ class KMIPSecretStore(ss.SecretStoreBase):
             {'key_uuid': <uuid of key>}
         :raises: SecretGeneralException
         """
-        LOG.debug("Starting secret deletion with KMIP plugin")
+        LOG.debug(u._("Starting secret deletion with KMIP plugin"))
         uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID])
         try:
             with self.client:
-                LOG.debug("Opened connection to KMIP client")
+                LOG.debug(u._("Opened connection to KMIP client"))
                 self.client.destroy(uuid)
         except Exception as e:
             LOG.exception(u._LE("Error opening or writing to client"))
@@ -212,12 +212,12 @@ class SnakeoilCA(object):
 
     def ensure_exists(self):
         if not self.exists:
-            LOG.debug('Keypair not found, creating new cert/key')
+            LOG.debug(u._('Keypair not found, creating new cert/key'))
             self.cert, self.key, self.chain, self.pkcs7 = (
                 self.create_keypair())
 
     def create_keypair(self):
-        LOG.debug('Generating Snakeoil CA')
+        LOG.debug(u._('Generating Snakeoil CA'))
         key = crypto.PKey()
         key.generate_key(crypto.TYPE_RSA, self.key_size)
 
@@ -241,7 +241,7 @@ class SnakeoilCA(object):
 
         cert.sign(self.signing_key, 'sha256')
 
-        LOG.debug('Snakeoil CA cert/key generated')
+        LOG.debug(u._('Snakeoil CA cert/key generated'))
 
         chain = b''
         if self.parent_chain_path:
@@ -17,6 +17,7 @@
 Client-side (i.e. API side) classes and logic.
 """
 from barbican.common import utils
+from barbican import i18n as u
 from barbican import queue
 from barbican.queue import server
 
@@ -92,9 +93,10 @@ class _DirectTaskInvokerClient(object):
         try:
             getattr(self._tasks, method_name)(context, **kwargs)
         except Exception:
-            LOG.exception(">>>>> Task exception seen for synchronous task "
-                          "invocation, so handling exception to mimic "
-                          "asynchronous behavior.")
+            LOG.exception(
+                u._(">>>>> Task exception seen for synchronous task "
+                    "invocation, so handling exception to mimic "
+                    "asynchronous behavior."))
 
     def call(self, context, method_name, **kwargs):
         raise ValueError("No support for call() client methods.")
@@ -20,6 +20,7 @@ import oslo_messaging
 from oslo_service import service
 
 from barbican.common import utils
+from barbican import i18n as u
 from barbican import queue
 from barbican.tasks import keystone_consumer
 
@@ -69,10 +70,10 @@ class NotificationTask(object):
 
         """
 
-        LOG.debug("Input keystone event publisher_id = %s", publisher_id)
-        LOG.debug("Input keystone event payload = %s", payload)
-        LOG.debug("Input keystone event type = %s", event_type)
-        LOG.debug("Input keystone event metadata = %s", metadata)
+        LOG.debug(u._("Input keystone event publisher_id = %s"), publisher_id)
+        LOG.debug(u._("Input keystone event payload = %s"), payload)
+        LOG.debug(u._("Input keystone event type = %s"), event_type)
+        LOG.debug(u._("Input keystone event metadata = %s"), metadata)
         project_id = self._parse_payload_for_project_id(payload)
         resource_type, operation_type = self._parse_event_type(event_type)
         LOG.debug('Keystone Event: resource type={0}, operation type={1}, '
@@ -74,11 +74,11 @@ class PeriodicServer(service.Service):
         self.order_retry_repo = repositories.get_order_retry_tasks_repository()
 
     def start(self):
-        LOG.info("Starting the PeriodicServer")
+        LOG.info(u._LI("Starting the PeriodicServer"))
         super(PeriodicServer, self).start()
 
     def stop(self, graceful=True):
-        LOG.info("Halting the PeriodicServer")
+        LOG.info(u._LI("Halting the PeriodicServer"))
         super(PeriodicServer, self).stop(graceful=graceful)
 
     @periodic_task.periodic_task
@@ -20,6 +20,7 @@ from barbican.common import exception as excep
 from barbican.common import hrefs
 from barbican.common import resources as res
 import barbican.common.utils as utils
+from barbican import i18n as u
 from barbican.model import models
 from barbican.model import repositories as repos
 from barbican.plugin.interface import certificate_manager as cert
@@ -354,7 +355,7 @@ def _add_private_key_to_generated_cert_container(container_id, order_model,
 def modify_certificate_request(order_model, updated_meta):
     """Update the order with CA."""
     # TODO(chellygel): Add the modify certificate request logic.
-    LOG.debug('in modify_certificate_request')
+    LOG.debug(u._('in modify_certificate_request'))
     raise NotImplementedError  # pragma: no cover
 
 
@@ -37,7 +37,7 @@ class KeystoneEventConsumer(resources.BaseTask):
 
     def __init__(self, db_start=rep.start, db_commit=rep.commit,
                  db_rollback=rep.rollback, db_clear=rep.clear):
-        LOG.debug('Creating KeystoneEventConsumer task processor')
+        LOG.debug(u._('Creating KeystoneEventConsumer task processor'))
 
         self.db_start = db_start
         self.db_commit = db_commit
@@ -241,7 +241,7 @@ class BeginTypeOrder(BaseTask):
 
     def __init__(self):
         super(BeginTypeOrder, self).__init__()
-        LOG.debug('Creating BeginTypeOrder task processor')
+        LOG.debug(u._('Creating BeginTypeOrder task processor'))
         self.project_repo = rep.get_project_repository()
         self.helper = _OrderTaskHelper()
 
@@ -286,7 +286,7 @@ class BeginTypeOrder(BaseTask):
                 project
             )
             order.secret_id = new_secret.id
-            LOG.debug("...done creating keys order's secret.")
+            LOG.debug(u._("...done creating keys order's secret."))
         elif order_type == models.OrderType.ASYMMETRIC:
             # Create asymmetric Secret
             new_container = plugin.generate_asymmetric_secret(
@@ -295,14 +295,14 @@ class BeginTypeOrder(BaseTask):
                     'application/octet-stream'),
                 project)
             order.container_id = new_container.id
-            LOG.debug("...done creating asymmetric order's secret.")
+            LOG.debug(u._("...done creating asymmetric order's secret."))
         elif order_type == models.OrderType.CERTIFICATE:
             # Request a certificate
             new_container = cert.issue_certificate_request(
                 order, project, result_follow_on)
             if new_container:
                 order.container_id = new_container.id
-            LOG.debug("...done requesting a certificate.")
+            LOG.debug(u._("...done requesting a certificate."))
         else:
             raise NotImplementedError(
                 u._('Order type "{order_type}" not implemented.').format(
@@ -327,7 +327,7 @@ class UpdateOrder(BaseTask):
 
     def __init__(self):
         super(UpdateOrder, self).__init__()
-        LOG.debug('Creating UpdateOrder task processor')
+        LOG.debug(u._('Creating UpdateOrder task processor'))
         self.helper = _OrderTaskHelper()
 
     def retrieve_entity(self, *args, **kwargs):
@@ -349,13 +349,13 @@ class UpdateOrder(BaseTask):
         if order_type == models.OrderType.CERTIFICATE:
             # Update a certificate request
             cert.modify_certificate_request(order, updated_meta)
-            LOG.debug("...done updating a certificate order.")
+            LOG.debug(u._("...done updating a certificate order."))
         else:
            raise NotImplementedError(
                u._('Order type "{order_type}" not implemented.').format(
                    order_type=order_type))
 
-        LOG.debug("...done updating order.")
+        LOG.debug(u._("...done updating order."))
 
     def handle_error(self, order, status, message, exception,
                      *args, **kwargs):
@@ -374,7 +374,7 @@ class CheckCertificateStatusOrder(BaseTask):
         return u._('Check Certificate Order Status')
 
     def __init__(self):
-        LOG.debug('Creating CheckCertificateStatusOrder task processor')
+        LOG.debug(u._('Creating CheckCertificateStatusOrder task processor'))
         self.project_repo = rep.get_project_repository()
         self.helper = _OrderTaskHelper()
 
@@ -410,7 +410,7 @@ class CheckCertificateStatusOrder(BaseTask):
             order, project, result_follow_on)
         if new_container:
             order.container_id = new_container.id
-        LOG.debug("...done checking status of a certificate order.")
+        LOG.debug(u._("...done checking status of a certificate order."))
 
         return result_follow_on
 
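
Note that where a message has a single interpolation argument, the diff keeps the argument out of the translated string (e.g. LOG.debug(u._('URI to secret is %s'), url)), so interpolation stays deferred until the logger actually emits the record; only the multi-argument messages switch to eager %-interpolation over a named dict. A minimal, Barbican-independent illustration of the two styles:

    import logging

    LOG = logging.getLogger(__name__)
    url = 'http://localhost:9311/v1/secrets/123'  # example value

    # Deferred: the logger combines format string and argument only
    # if the DEBUG level is enabled, saving work on filtered records.
    LOG.debug('URI to secret is %s', url)

    # Eager: the %-formatting runs before the call, even when the
    # message is ultimately discarded by the level filter.
    LOG.debug('Processing order type %(order_type)s' % {'order_type': 'key'})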