Make clean up of soft deletions configurable
Adds the following features to the command:

1) Be able to set minimum number of days to keep soft deletions
2) Clean unassociated projects
3) Soft delete secrets that are expired
4) Set verbose flag
5) Set the log file location

Documentation for running the command was also added.

This is the second CR for cleaning up the barbican database:
1) Simple soft deletion clean up for barbican-db-manage.
2) Make clean up configurable and add documentation.

Change-Id: I1b2360d967bf4b8378eda4766c7ef3113eedffad
Partially-implements: blueprint clean-db-soft-deletes
This commit is contained in:
parent ff92d132ca
commit acd998c66f
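For orientation, the change boils down to a new, fully parameterized clean entry point. A minimal sketch of driving it programmatically, mirroring the clean.clean_command() signature introduced below (the connection URL and log path are illustrative assumptions, not values from this change):

    # Sketch only: argument names follow clean.clean_command() in this change;
    # the database URL and log path below are assumed, not taken from the tree.
    from barbican.model import clean

    clean.clean_command(
        sql_url='mysql+pymysql://barbican:secret@localhost/barbican',
        min_num_days=90,                      # keep soft deletions newer than 90 days
        do_clean_unassociated_projects=True,  # drop projects with no child rows
        do_soft_delete_expired_secrets=True,  # mark expired secrets deleted first
        verbose=True,                         # echo log events to the console
        log_file='/var/log/barbican/clean.log')  # overrides log_file from barbican.conf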
@@ -30,6 +30,7 @@ from oslo_log import log as logging

from barbican.cmd import pkcs11_kek_rewrap as pkcs11_rewrap
from barbican.common import config
from barbican.model import clean
from barbican.model.migration import commands
from barbican.plugin.crypto import pkcs11
import barbican.version

@@ -51,12 +52,41 @@ class DbCommands(object):

    description = "Subcommands for managing barbican database"

    cleanup_description = "Cleanup soft-deleted secrets in database"
    clean_description = "Clean up soft deletions in the database"

    @args('--db-url', '-d', metavar='<db-url>', dest='dburl',
          help='barbican database URL')
    def cleanup(self, dburl=None):
        raise NotImplementedError
    @args('--min-days', '-m', metavar='<min-days>', dest='min_days', type=int,
          default=90, help='minimum number of days to keep soft deletions. '
                           'default is %(default)s days.')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show verbose information about the clean up.')
    @args('--log-file', '-L', metavar='<log-file>', type=str, default=None,
          dest='log_file', help='Set log file location. '
                                'Default value for log_file can be found in barbican.conf')
    @args('--clean-unassociated-projects', '-p', action='store_true',
          dest='do_clean_unassociated_projects', default=False,
          help='Remove projects that have no '
               'associated resources.')
    @args('--soft-delete-expired-secrets', '-e', action='store_true',
          dest='do_soft_delete_expired_secrets', default=False,
          help='Soft delete secrets that are expired.')
    def clean(self, dburl=None, min_days=None, verbose=None, log_file=None,
              do_clean_unassociated_projects=None,
              do_soft_delete_expired_secrets=None):
        """Clean soft deletions in the database"""
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        clean.clean_command(
            sql_url=dburl,
            min_num_days=min_days,
            do_clean_unassociated_projects=do_clean_unassociated_projects,
            do_soft_delete_expired_secrets=do_soft_delete_expired_secrets,
            verbose=verbose,
            log_file=log_file)

    revision_description = "Create a new database version file"

@@ -26,7 +26,6 @@ from barbican.model import clean
from barbican.model.migration import commands
from oslo_log import log


# Import and configure logging.
CONF = config.CONF
log.setup(CONF, 'barbican')

@@ -107,6 +106,26 @@ class DatabaseManager(object):
        create_parser = self.subparsers.add_parser(
            'clean',
            help='Clean up soft deletions in the database')
        create_parser.add_argument(
            '--min-days', '-m', type=int, default=90,
            help='minimum number of days to keep soft deletions. default is'
                 ' %(default)s days.')
        create_parser.add_argument('--clean-unassociated-projects', '-p',
                                   action="store_true",
                                   help='Remove projects that have no '
                                        'associated resources.')
        create_parser.add_argument('--soft-delete-expired-secrets', '-e',
                                   action="store_true",
                                   help='Soft delete expired secrets.')
        create_parser.add_argument('--verbose', '-V', action='store_true',
                                   help='Show full information about the'
                                        ' cleanup')
        create_parser.add_argument('--log-file', '-L',
                                   default=CONF.log_file,
                                   type=str,
                                   help='Set log file location. '
                                        'Default value for log_file can be '
                                        'found in barbican.conf')
        create_parser.set_defaults(func=self.clean)

    def revision(self, args):

@@ -127,7 +146,13 @@ class DatabaseManager(object):
        commands.current(args.verbose, sql_url=args.dburl)

    def clean(self, args):
        clean.clean_command(args.dburl)
        clean.clean_command(
            sql_url=args.dburl,
            min_num_days=args.min_days,
            do_clean_unassociated_projects=args.clean_unassociated_projects,
            do_soft_delete_expired_secrets=args.soft_delete_expired_secrets,
            verbose=args.verbose,
            log_file=args.log_file)

    def execute(self):
        """Parse the command line arguments."""

@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.


from barbican.common import config
from barbican.model import models
from barbican.model import repositories as repo

@@ -22,6 +21,8 @@ from oslo_utils import timeutils

from sqlalchemy import sql as sa_sql

import datetime

# Import and configure logging.
CONF = config.CONF
log.setup(CONF, 'barbican')

@@ -33,8 +34,44 @@ def _exception_is_successful_exit(thrown_exception):
            (thrown_exception.code is None or thrown_exception.code == 0))


def cleanup_parent_with_no_child(parent_model, child_model):
    """Clean up soft deletions in parent that do not have references in child
def cleanup_unassociated_projects():
    """Clean up unassociated projects.

    This looks for projects that have no children entries on the dependent
    tables and removes them.
    """
    LOG.debug("Cleaning up unassociated projects")
    session = repo.get_session()
    project_children_tables = [models.Order,
                               models.KEKDatum,
                               models.Secret,
                               models.ContainerConsumerMetadatum,
                               models.Container,
                               models.PreferredCertificateAuthority,
                               models.CertificateAuthority,
                               models.ProjectCertificateAuthority,
                               models.ProjectQuotas]
    children_names = map(lambda child: child.__name__, project_children_tables)
    LOG.debug("Children tables for Project table being checked: %s",
              str(children_names))
    sub_query = session.query(models.Project.id)
    for model in project_children_tables:
        sub_query = sub_query.outerjoin(model,
                                        models.Project.id == model.project_id)
        sub_query = sub_query.filter(model.id == None)  # nopep8
    sub_query = sub_query.subquery()
    sub_query = sa_sql.select([sub_query])
    query = session.query(models.Project)
    query = query.filter(models.Project.id.in_(sub_query))
    delete_count = query.delete(synchronize_session='fetch')
    LOG.info("Cleaned up %s entries for %s", str(delete_count),
             models.Project.__name__)
    return delete_count


def cleanup_parent_with_no_child(parent_model, child_model,
                                 threshold_date=None):
    """Clean up soft deletions in parent that do not have references in child.

    Before running this function, the child table should be cleaned of
    soft deletions. This function left outer joins the parent and child

@@ -44,6 +81,7 @@ def cleanup_parent_with_no_child(parent_model, child_model):

    :param parent_model: table class for parent
    :param child_model: table class for child which restricts parent deletion
    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from database
    """
    LOG.debug("Cleaning soft deletes for %s without a child in %s",

@@ -58,93 +96,281 @@ def cleanup_parent_with_no_child(parent_model, child_model):
    query = session.query(parent_model)
    query = query.filter(parent_model.id.in_(sub_query))
    query = query.filter(parent_model.deleted)
    if threshold_date:
        query = query.filter(parent_model.deleted_at <= threshold_date)
    delete_count = query.delete(synchronize_session='fetch')
    LOG.info("Cleaned up %s entries for %s with no children in %s",
             delete_count, parent_model.__name__, child_model.__name__)
    return delete_count


def cleanup_softdeletes(model):
def cleanup_softdeletes(model, threshold_date=None):
    """Remove soft deletions from a table.

    :param model: table class to remove soft deletions
    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from the database
    """
    LOG.debug("Cleaning soft deletes: %s", model.__name__)
    session = repo.get_session()
    query = session.query(model)
    query = query.filter_by(deleted=True)
    if threshold_date:
        query = query.filter(model.deleted_at <= threshold_date)
    delete_count = query.delete()
    LOG.info("Cleaned up %s entries for %s", delete_count,
             model.__name__)
    return delete_count


def cleanup_all():
def cleanup_all(threshold_date=None):
    """Clean up the main soft deletable resources.

    This function contains an order of calls to
    clean up the soft-deletable resources.

    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from the database
    """
    LOG.debug("Cleaning up soft deletions where deletion date"
              " is older than %s", str(threshold_date))
    total = 0
    total += cleanup_softdeletes(models.TransportKey)
    total += cleanup_softdeletes(models.TransportKey,
                                 threshold_date=threshold_date)

    total += cleanup_softdeletes(models.OrderBarbicanMetadatum)
    total += cleanup_softdeletes(models.OrderRetryTask)
    total += cleanup_softdeletes(models.OrderPluginMetadatum)
    total += cleanup_parent_with_no_child(models.Order, models.OrderRetryTask)
    total += cleanup_softdeletes(models.OrderBarbicanMetadatum,
                                 threshold_date=threshold_date)
    total += cleanup_softdeletes(models.OrderRetryTask,
                                 threshold_date=threshold_date)
    total += cleanup_softdeletes(models.OrderPluginMetadatum,
                                 threshold_date=threshold_date)
    total += cleanup_parent_with_no_child(models.Order, models.OrderRetryTask,
                                          threshold_date=threshold_date)

    total += cleanup_softdeletes(models.EncryptedDatum)
    total += cleanup_softdeletes(models.SecretStoreMetadatum)
    total += cleanup_softdeletes(models.ContainerSecret)
    total += cleanup_softdeletes(models.EncryptedDatum,
                                 threshold_date=threshold_date)
    total += cleanup_softdeletes(models.SecretUserMetadatum,
                                 threshold_date=threshold_date)
    total += cleanup_softdeletes(models.SecretStoreMetadatum,
                                 threshold_date=threshold_date)
    total += cleanup_softdeletes(models.ContainerSecret,
                                 threshold_date=threshold_date)

    total += cleanup_parent_with_no_child(models.Secret, models.Order)
    total += cleanup_parent_with_no_child(models.Secret, models.Order,
                                          threshold_date=threshold_date)

    total += cleanup_softdeletes(models.ContainerConsumerMetadatum)
    total += cleanup_parent_with_no_child(models.Container, models.Order)
    total += cleanup_softdeletes(models.KEKDatum)
    total += cleanup_softdeletes(models.ContainerConsumerMetadatum,
                                 threshold_date=threshold_date)
    total += cleanup_parent_with_no_child(models.Container, models.Order,
                                          threshold_date=threshold_date)
    total += cleanup_softdeletes(models.KEKDatum,
                                 threshold_date=threshold_date)

    # TODO(edtubill) Clean up projects that were soft deleted by
    # the keystone listener

    LOG.info("Cleaning of database resulted in removing %s entries", total)
    LOG.info("Cleaned up %s soft deleted entries", total)
    return total


def clean_command(sql_url=None):
def _soft_delete_expired_secrets(threshold_date):
    """Soft delete expired secrets.

    :param threshold_date: secrets that have expired past this date
                           will be soft deleted
    :returns: total number of secrets that were soft deleted
    """
    current_time = timeutils.utcnow()
    session = repo.get_session()
    query = session.query(models.Secret.id)
    query = query.filter(~models.Secret.deleted)
    query = query.filter(
        models.Secret.expiration <= threshold_date
    )
    update_count = query.update(
        {
            models.Secret.deleted: True,
            models.Secret.deleted_at: current_time
        },
        synchronize_session='fetch')
    return update_count


def _hard_delete_acls_for_soft_deleted_secrets():
    """Remove acl entries for secrets that have been soft deleted.

    Removes entries in SecretACL and SecretACLUser which are for secrets
    that have been soft deleted.
    """
    session = repo.get_session()
    acl_user_sub_query = session.query(models.SecretACLUser.id)
    acl_user_sub_query = acl_user_sub_query.join(models.SecretACL)
    acl_user_sub_query = acl_user_sub_query.join(models.Secret)
    acl_user_sub_query = acl_user_sub_query.filter(models.Secret.deleted)
    acl_user_sub_query = acl_user_sub_query.subquery()
    acl_user_sub_query = sa_sql.select([acl_user_sub_query])

    acl_user_query = session.query(models.SecretACLUser)
    acl_user_query = acl_user_query.filter(
        models.SecretACLUser.id.in_(acl_user_sub_query))
    acl_total = acl_user_query.delete(synchronize_session='fetch')

    acl_sub_query = session.query(models.SecretACL.id)
    acl_sub_query = acl_sub_query.join(models.Secret)
    acl_sub_query = acl_sub_query.filter(models.Secret.deleted)
    acl_sub_query = acl_sub_query.subquery()
    acl_sub_query = sa_sql.select([acl_sub_query])

    acl_query = session.query(models.SecretACL)
    acl_query = acl_query.filter(
        models.SecretACL.id.in_(acl_sub_query))
    acl_total += acl_query.delete(synchronize_session='fetch')
    return acl_total


def _soft_delete_expired_secret_children(threshold_date):
    """Soft delete the children tables of expired secrets.

    Soft deletes the children tables and hard deletes the ACL children
    tables of the expired secrets.
    :param threshold_date: threshold date for secret expiration
    :returns: returns a pair for number of soft delete children and deleted
              ACLs
    """
    current_time = timeutils.utcnow()

    secret_children = [models.SecretStoreMetadatum,
                       models.SecretUserMetadatum,
                       models.EncryptedDatum,
                       models.ContainerSecret]
    children_names = map(lambda child: child.__name__, secret_children)
    LOG.debug("Children tables for Secret table being checked: %s",
              str(children_names))
    session = repo.get_session()
    update_count = 0

    for table in secret_children:
        # Go through children and soft delete them
        sub_query = session.query(table.id)
        sub_query = sub_query.join(models.Secret)
        sub_query = sub_query.filter(
            models.Secret.expiration <= threshold_date
        )
        sub_query = sub_query.subquery()
        sub_query = sa_sql.select([sub_query])
        query = session.query(table)
        query = query.filter(table.id.in_(sub_query))
        current_update_count = query.update(
            {
                table.deleted: True,
                table.deleted_at: current_time
            },
            synchronize_session='fetch')
        update_count += current_update_count

    session.flush()
    acl_total = _hard_delete_acls_for_soft_deleted_secrets()
    return update_count, acl_total


def soft_delete_expired_secrets(threshold_date):
    """Soft deletes secrets that are past expiration date.

    The expired secrets and its children are marked for deletion.
    ACLs are soft deleted and then purged from the database.

    :param threshold_date: secrets that have expired past this date
                           will be soft deleted
    :returns: the sum of soft deleted entries and hard deleted acl entries
    """
    # Note: sqllite does not support multiple table updates so
    # several db updates are used instead
    LOG.debug('Soft deleting expired secrets older than: %s',
              str(threshold_date))
    update_count = _soft_delete_expired_secrets(threshold_date)

    children_count, acl_total = _soft_delete_expired_secret_children(
        threshold_date)
    update_count += children_count
    LOG.info("Soft deleted %s entries due to secret expiration"
             " and %s secret acl entries were removed from the database",
             update_count, acl_total)
    return update_count + acl_total


def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
                  do_soft_delete_expired_secrets, verbose, log_file):
    """Clean command to clean up the database.

    :param sql_url: sql connection string to connect to a database
    :param min_num_days: clean up soft deletions older than this date
    :param do_clean_unassociated_projects: If True, clean up
                                           unassociated projects
    :param do_soft_delete_expired_secrets: If True, soft delete secrets
                                           that have expired
    :param verbose: If True, log and print more information
    :param log_file: If set, override the log_file configured
    """
    # TODO(edtubill) Make unit test for this method

    start_messg = "Cleaning up soft deletions in the barbican database"
    LOG.info(start_messg)
    if verbose:
        # The verbose flag prints out log events to the screen, otherwise
        # the log events will only go to the log file
        CONF.set_override('debug', True)

    if log_file:
        CONF.set_override('log_file', log_file)

    LOG.info("Cleaning up soft deletions in the barbican database")
    log.setup(CONF, 'barbican')

    cleanup_total = 0
    current_time = timeutils.utcnow()
    stop_watch = timeutils.StopWatch()
    stop_watch.start()
    try:
        if sql_url:
            CONF.set_override('sql_connection', sql_url)
        repo.setup_database_engine_and_factory()
        cleanup_all()

        if do_clean_unassociated_projects:
            cleanup_total += cleanup_unassociated_projects()

        if do_soft_delete_expired_secrets:
            cleanup_total += soft_delete_expired_secrets(
                threshold_date=current_time)

        threshold_date = None
        if min_num_days >= 0:
            threshold_date = current_time - datetime.timedelta(
                days=min_num_days)
        else:
            threshold_date = current_time
        cleanup_total += cleanup_all(threshold_date=threshold_date)
        repo.commit()

    except Exception as ex:
        if not _exception_is_successful_exit(ex):
            LOG.exception('Failed to clean up soft deletions in database.')
            LOG.exception(ex.message)
            repo.rollback()
            cleanup_total = 0  # rollback happened, no entries affected
            raise ex
    finally:
        stop_watch.stop()
        elapsed_time = stop_watch.elapsed()
        finish_messg = 'DB clean up finished in {0} seconds'.format(
            elapsed_time)
        if verbose:
            CONF.clear_override('debug')

        LOG.info(finish_messg)
        if log_file:
            CONF.clear_override('log_file')
        repo.clear()

        if sql_url:
            CONF.clear_override('sql_connection')

        log.setup(CONF, 'barbican')  # reset the overrides

        LOG.info("Cleaning of database affected %s entries",
                 cleanup_total)
        LOG.info('DB clean up finished in %s seconds', elapsed_time)

@@ -88,6 +88,37 @@ class TestBarbicanManage(TestBarbicanManageBase):
            ['barbican.cmd.barbican_manage', 'db', 'history'], mock_history,
            False, sql_url='mockdburl')

    @mock.patch('barbican.model.clean.clean_command')
    def test_db_clean_no_args(self, mock_clean_command):
        manager.CONF.set_override('log_file', 'mock_log_file')
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'clean'],
            func_name=mock_clean_command,
            sql_url='mockdburl',
            min_num_days=90,
            do_clean_unassociated_projects=False,
            do_soft_delete_expired_secrets=False,
            verbose=False,
            log_file='mock_log_file')
        manager.CONF.clear_override('log_file')

    @mock.patch('barbican.model.clean.clean_command')
    def test_db_clean_with_args(self, mock_clean_command):
        manager.CONF.set_override('log_file', 'mock_log_file')
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'clean',
             '--db-url', 'somewhere', '--min-days', '180',
             '--clean-unassociated-projects', '--soft-delete-expired-secrets',
             '--verbose', '--log-file', '/tmp/whatevs'],
            func_name=mock_clean_command,
            sql_url='somewhere',
            min_num_days=180,
            do_clean_unassociated_projects=True,
            do_soft_delete_expired_secrets=True,
            verbose=True,
            log_file='/tmp/whatevs')
        manager.CONF.clear_override('log_file')

    @mock.patch('barbican.model.migration.commands.current')
    def test_db_current(self, mock_current):
        self._main_test_helper(

@@ -19,6 +19,8 @@ from barbican.model import repositories as repos
from barbican.tests import database_utils as utils
from sqlalchemy.exc import IntegrityError

import datetime


def _create_project(project_name):
    """Wrapper to create a project and clean"""

@@ -43,6 +45,15 @@ def _entry_exists(entry):
    return count >= 1


def _entry_is_soft_deleted(entry):
    model = entry.__class__
    entry_id = entry.id
    session = repos.get_session()
    query = session.query(model)
    result = query.filter(model.id == entry_id).first().deleted
    return result


def _setup_entry(name, *args, **kwargs):
    func_name = "create_" + name
    if not hasattr(utils, func_name):

@@ -74,6 +85,7 @@ class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase):
        secret1.delete()
        secret2.delete()
        clean.cleanup_parent_with_no_child(models.Secret, models.Order)

        # Assert that only secret2 is removed
        self.assertTrue(_entry_exists(secret1))
        self.assertFalse(_entry_exists(secret2))

@@ -101,16 +113,19 @@ class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase):
        # create secret and secret_meta
        secret = _setup_entry('secret', project=project)
        secret_metadatum = _setup_entry('secret_metadatum', secret=secret)
        secret_user_metadatum = _setup_entry('secret_user_metadatum',
                                             secret=secret)
        kek_datum = _setup_entry('kek_datum', project=project)
        enc_datum = _setup_entry('encrypted_datum', secret=secret,
                                 kek_datum=kek_datum)
        # delete secret, it should automatically delete
        # secret_meta and enc_datum
        # secret_metadatum, enc_datum, and secret_user_metadatum
        # kek_datum should still exist
        secret.delete()
        clean.cleanup_all()
        self.assertFalse(_entry_exists(secret))
        self.assertFalse(_entry_exists(secret_metadatum))
        self.assertFalse(_entry_exists(secret_user_metadatum))
        self.assertFalse(_entry_exists(enc_datum))
        self.assertTrue(_entry_exists(kek_datum))

@@ -217,6 +232,114 @@ class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase):
        self.assertFalse(_entry_exists(order))
        self.assertFalse(_entry_exists(order_retry_task))

    @_create_project("my keystone id")
    def test_cleanup_soft_deletion_date(self, project):
        """Test cleaning up entries within date"""
        secret = _setup_entry('secret', project=project)
        order = _setup_entry('order', project=project, secret=secret)
        current_time = datetime.datetime.utcnow()
        tomorrow = current_time + datetime.timedelta(days=1)
        yesterday = current_time - datetime.timedelta(days=1)
        secret.delete()
        order.delete()

        # Assert that nothing is deleted due to date
        clean.cleanup_softdeletes(models.Order, threshold_date=yesterday)
        clean.cleanup_parent_with_no_child(models.Secret, models.Order,
                                           threshold_date=yesterday)
        self.assertTrue(_entry_exists(secret))
        self.assertTrue(_entry_exists(order))

        # Assert that everything is deleted due to date
        clean.cleanup_softdeletes(models.Order, threshold_date=tomorrow)
        clean.cleanup_parent_with_no_child(models.Secret, models.Order,
                                           threshold_date=tomorrow)
        self.assertFalse(_entry_exists(secret))
        self.assertFalse(_entry_exists(order))

    @_create_project("my keystone id")
    def test_soft_deleting_expired_secrets(self, project):
        """Test soft deleting secrets that are expired"""

        current_time = datetime.datetime.utcnow()
        tomorrow = current_time + datetime.timedelta(days=1)
        yesterday = current_time - datetime.timedelta(days=1)

        not_expired_secret = _setup_entry('secret', project=project)
        expired_secret = _setup_entry('secret', project=project)
        not_expired_secret.expiration = tomorrow
        expired_secret.expiration = yesterday

        # Create children for expired secret
        expired_secret_store_metadatum = _setup_entry('secret_metadatum',
                                                      secret=expired_secret)
        expired_secret_user_metadatum = _setup_entry('secret_user_metadatum',
                                                     secret=expired_secret)
        kek_datum = _setup_entry('kek_datum', project=project)
        expired_enc_datum = _setup_entry('encrypted_datum',
                                         secret=expired_secret,
                                         kek_datum=kek_datum)
        container = _setup_entry('container', project=project)
        expired_container_secret = _setup_entry('container_secret',
                                                container=container,
                                                secret=expired_secret)
        expired_acl_secret = _setup_entry('acl_secret',
                                          secret=expired_secret,
                                          user_ids=["fern", "chris"])

        clean.soft_delete_expired_secrets(current_time)
        self.assertTrue(_entry_is_soft_deleted(expired_secret))
        self.assertFalse(_entry_is_soft_deleted(not_expired_secret))

        # Make sure the children of the expired secret are soft deleted as well
        self.assertTrue(_entry_is_soft_deleted(expired_enc_datum))
        self.assertTrue(_entry_is_soft_deleted(expired_container_secret))
        self.assertTrue(_entry_is_soft_deleted(expired_secret_store_metadatum))
        self.assertTrue(_entry_is_soft_deleted(expired_secret_user_metadatum))
        self.assertFalse(_entry_exists(expired_acl_secret))

    def test_cleaning_unassociated_projects(self):
        """Test cleaning projects that have no child entries"""
        childless_project = _setup_entry('project',
                                         external_id="childless project")
        project_with_children = _setup_entry(
            'project',
            external_id="project with children")

        project_children_list = list()
        project_children_list.append(
            _setup_entry('kek_datum', project=project_with_children))

        project_children_list.append(
            _setup_entry('secret', project=project_with_children))

        container = _setup_entry('container', project=project_with_children)
        project_children_list.append(container)
        project_children_list.append(
            _setup_entry('container_consumer_meta', container=container))
        cert_authority = _setup_entry('certificate_authority',
                                      project=project_with_children)
        project_children_list.append(cert_authority)
        project_children_list.append(
            _setup_entry('preferred_cert_authority',
                         cert_authority=cert_authority))

        project_children_list.append(
            _setup_entry('project_cert_authority',
                         certificate_authority=cert_authority))
        project_children_list.append(_setup_entry('project_quotas',
                                     project=project_with_children))

        clean.cleanup_unassociated_projects()
        self.assertTrue(_entry_exists(project_with_children))
        self.assertFalse(_entry_exists(childless_project))

        container.delete()
        map(lambda child: child.delete(), project_children_list)
        clean.cleanup_all()
        clean.cleanup_unassociated_projects()
        self.assertFalse(_entry_exists(project_with_children))

    @_create_project("my integrity error keystone id")
    def test_db_cleanup_raise_integrity_error(self, project):
        """Test that an integrity error is thrown

@@ -106,6 +106,16 @@ def create_secret_metadatum(secret=None, key="key", value="value",
    return secret_meta


def create_secret_user_metadatum(secret=None, key="user_key",
                                 value="user_value", session=None):
    secret_user_metadatum = models.SecretUserMetadatum(key, value)
    secret_user_metadatum.secret_id = secret.id
    secret_user_metadatum_repo = repositories.get_secret_user_meta_repository()
    secret_user_metadatum_repo.create_from(secret_user_metadatum,
                                           session=session)
    return secret_user_metadatum


def create_container(project=None, session=None):
    container = models.Container()
    container.project_id = project.id

@@ -230,6 +240,14 @@ def create_project_quotas(project=None, parsed_project_quotas=None,
    return project_quota


def create_acl_secret(secret=None, user_ids=[], session=None):
    acl_secret = models.SecretACL(secret.id, "read")
    acl_secret.secret_id = secret.id
    acl_secret_repo = repositories.get_secret_acl_repository()
    acl_secret_repo.create_from(acl_secret, session=session)
    return acl_secret


class RepositoryTestCase(oslotest.BaseTestCase):
    """Base test case class for in-memory database unit tests.

@@ -52,6 +52,11 @@ Barbican Database

    Show current revision of database.

``barbican-manage db clean [--db-url] [--verbose] [--min-days] [--clean-unassociated-projects] [--soft-delete-expired-secrets] [--log-file]``

    Clean up soft deletions in the database. More documentation can be
    found here: :doc:`Database Cleaning <database_cleaning>`

Barbican PKCS11/HSM
~~~~~~~~~~~~~~~~~~~

doc/source/admin-guide-cloud/database_cleaning.rst (new file, 70 lines)
@@ -0,0 +1,70 @@
Database Cleaning
=================

Entries in the Barbican database are soft deleted and can build up over time.
These entries can be cleaned up with the clean up command. The command
can be used with a cron job to clean the database automatically at intervals.


Commands
--------

The command ```barbican-manage db clean``` can be used to clean up the database.
By default, it will remove soft deletions that are at least 90 days old since
deletion.

```barbican-manage db clean --min-days 180``` (```-m```) will go
through the database and remove soft deleted entries that are at least 180 days
old since deletion. The default value is 90 days. Passing a value of
```--min-days 0``` will delete all soft-deleted entries up to today.
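Internally, the retention window is turned into a threshold date; a minimal sketch of that calculation, using the same names as the clean module in this change::

    import datetime

    from oslo_utils import timeutils

    min_num_days = 90  # value passed via --min-days
    current_time = timeutils.utcnow()
    # Soft deletions whose deleted_at is older than this threshold are removed.
    threshold_date = current_time - datetime.timedelta(days=min_num_days)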

```barbican-manage db clean --clean-unassociated-projects``` (```-p```) will go
through the database and remove projects that have no associated resources.
The default value is False.

```barbican-manage db clean --soft-delete-expired-secrets``` (```-e```) will go
through the database and soft delete any secrets that are past
their expiration date. The default value is False. If ```-e``` is used along
with ```--min-days 0``` then all the expired secrets will be hard deleted.

```barbican-manage db clean --verbose``` (```-V```) will print more information
out into the terminal.

```barbican-manage db clean --log-file``` (```-L```) will set the log file
location. The creation of the log may fail if the user running the command
does not have access to the log file location or if the target directory
does not exist. The default value for log_file can be found in
```/etc/barbican/barbican.conf```. The log will contain the verbose
output from the command.
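Taken together, the flags map onto the clean up steps in barbican.model.clean. A condensed sketch of the order of operations inside clean_command, assuming the helper names introduced by this change::

    if do_clean_unassociated_projects:
        cleanup_unassociated_projects()

    if do_soft_delete_expired_secrets:
        # Expired secrets and their children are marked deleted first;
        # cleanup_all() below then removes them once they fall outside the
        # retention window (immediately when --min-days is 0).
        soft_delete_expired_secrets(threshold_date=current_time)

    threshold_date = current_time - datetime.timedelta(days=min_num_days)
    cleanup_all(threshold_date=threshold_date)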

Cron Job
--------

A cron job can be created on Linux systems to run at a given interval to
clean the barbican database.

Crontab
'''''''

1. Start the crontab editor ```crontab -e``` as the user that runs the clean up
   command.
2. Edit the crontab section to run the command at a given interval.
   ```<minute 0-59> <hour 0-23, 0=midnight> <day 1-31> <month 1-12> <weekday 0-6, 0=Sunday> clean up command```

Crontab Examples
''''''''''''''''

```00 00 * * * barbican-manage db clean -p -e``` - Runs a job every day at midnight
which will remove soft deleted entries that are at least 90 days old since soft
deletion, will clean unassociated projects, and will soft delete secrets that
are expired.

```00 03 01 * * barbican-manage db clean -m 30``` - Runs a job every month at 3AM
which will remove soft deleted entries that are at least 30 days old since
deletion.

```05 01 07 * 6 barbican-manage db clean -m 180 -p -e -L /tmp/barbican-clean-command.log```
- Runs a job every month at 1:05AM on the 7th day of the month and every Saturday.
Entries that are at least 180 days old since soft deletion will be removed from
the database. Unassociated projects will be removed. Expired secrets will be
soft deleted. The log file will be saved to ```/tmp/barbican-clean-command.log```.

@@ -10,5 +10,5 @@ management of secrets.
   :maxdepth: 1

   access_control.rst
   barbican_manage.rst
   database_cleaning.rst