Fixed bug 1415865 for old backup removal

Old backups are correctly removed, provided that
no backup that must be kept depends upon them.

Incremental backups correctly start from level 0 when
there is no tar_metadata available for the last backup

Command line option --no-incremental is now incompatible
with --max-level and with --always-level

Add the following command-line option, which allows specifying
a datetime for old backup removal.

  --remove-from-date YYYY-MM-DDThh:mm:ss

Closes-Bug: #1415865
Change-Id: Ie779ee354b67e9a1a012d28801b17dbd5cd353b6
LAUNCHPAD: https://bugs.launchpad.net/freezer/+bug/1415865
This commit is contained in:
Fausto Marzi 2015-01-29 14:44:53 +00:00 committed by Fabrizio Vanni
parent d2a26b6f91
commit 5c060a9185
12 changed files with 287 additions and 170 deletions

1
.gitignore vendored
View File

@ -1,6 +1,7 @@
__pycache__
dist
build
.venv
.idea
.autogenerated
.coverage

View File

@ -126,11 +126,21 @@ def backup_arguments():
Default False (Disabled)",
dest='restart_always_backup', type=float, default=False)
arg_parser.add_argument(
'-R', '--remove-older-then', action='store',
help="Checks in the specified container for object older then the \
specified days. If i.e. 30 is specified, it will remove the remote \
object older than 30 days. Default False (Disabled)",
dest='remove_older_than', type=float, default=False)
'-R', '--remove-older-then', '--remove-older-than', action='store',
help=('Checks in the specified container for object older than the '
'specified days.'
'If i.e. 30 is specified, it will remove the remote object '
'older than 30 days. Default False (Disabled) '
'The option --remove-older-then is deprecated '
'and will be removed soon'),
dest='remove_older_than', type=float, default=None)
arg_parser.add_argument(
'--remove-from-date', action='store',
help=('Checks the specified container and removes objects older than '
'the provided datetime in the form "YYYY-MM-DDThh:mm:ss '
'i.e. "1974-03-25T23:23:23". '
'Make sure the "T" is between date and time '),
dest='remove_from_date', default=False)
arg_parser.add_argument(
'--no-incremental', action='store_true',
help='''Disable incremantal feature. By default freezer build the
@ -284,6 +294,6 @@ def backup_arguments():
backup_args.__dict__['mysql_db_inst'] = ''
# Freezer version
backup_args.__dict__['__version__'] = '1.1.1'
backup_args.__dict__['__version__'] = '1.1.2'
return backup_args, arg_parser

View File

@ -30,6 +30,7 @@ import logging
def freezer_main(backup_args):
"""Freezer Main execution function.
This main function is a wrapper for most
of the other functions. By calling main() the program execution start
@ -49,7 +50,6 @@ def freezer_main(backup_args):
# Get the list of the containers
backup_args = swift.get_containers_list(backup_args)
if backup_args.action == 'info' or backup_args.list_container or \
backup_args.list_objects:
if backup_args.list_container:
@ -89,24 +89,31 @@ def freezer_main(backup_args):
if backup_args.action == 'backup':
# Check if the provided container already exists in swift.
containers = swift.check_container_existance(backup_args)
if containers['main_container'] is not True:
swift.create_containers(backup_args)
# Get the object list of the remote containers and store it in the
# same dict passes as argument under the dict.remote_obj_list namespace
backup_args = swift.get_container_content(backup_args)
if backup_args.no_incremental:
if backup_args.max_backup_level or \
backup_args.always_backup_level:
raise Exception(
'no-incremental option not compatible '
'with backup level options')
manifest_meta_dict = {}
else:
# Get the object list of the remote containers and store it in
# backup_args.remote_obj_list
backup_args = swift.get_container_content(backup_args)
# Check if a backup exist in swift with same name. If not, set
# backup level to 0
manifest_meta_dict = utils.check_backup_existance(backup_args)
# Check if a backup exist in swift with same name. If not, set
# backup level to 0
manifest_meta_dict =\
utils.check_backup_and_tar_meta_existence(backup_args)
# Set the right backup level for incremental backup
(backup_args, manifest_meta_dict) = utils.set_backup_level(
backup_args, manifest_meta_dict)
backup_args.manifest_meta_dict = manifest_meta_dict
# File system backup mode selected
if backup_args.mode == 'fs':
backup.backup_mode_fs(
backup_args, time_stamp, manifest_meta_dict)
@ -120,7 +127,7 @@ def freezer_main(backup_args):
raise ValueError('Please provide a valid backup mode')
# Admin tasks code should go here, before moving it on a dedicated module
if backup_args.action == 'admin' or backup_args.remove_older_than:
if backup_args.action == 'admin':
# Remove backups older if set.
backup_args = swift.get_container_content(backup_args)
swift.remove_obj_older_than(backup_args)

View File

@ -23,8 +23,9 @@ Freezer functions to interact with OpenStack Swift client and server
from freezer.utils import (
validate_all_args, get_match_backup,
sort_backup_list)
sort_backup_list, date_string_to_timestamp)
import datetime
import os
import swiftclient
import json
@ -110,95 +111,102 @@ def show_objects(backup_opt_dict):
return True
def remove_object(backup_opt_dict, obj):
    """Delete a single object from the remote Swift container, with retries.

    :param backup_opt_dict: options object; must provide ``sw_connector``
        (a swiftclient-like connection exposing ``delete_object``) and
        ``container`` (the container name).
    :param obj: name of the remote object to delete.
    :return: None on success.
    :raises Exception: when the object still cannot be removed after
        ``retry_max_count`` attempts.
    """
    sw_connector = backup_opt_dict.sw_connector
    logging.info('[*] Removing backup object: {0}'.format(obj))
    sleep_time = 120
    retry_max_count = 60
    curr_count = 0
    while True:
        try:
            sw_connector.delete_object(
                backup_opt_dict.container, obj)
            logging.info(
                '[*] Remote object {0} removed'.format(obj))
            break
        except Exception as error:
            curr_count += 1
            if curr_count >= retry_max_count:
                # Retry budget exhausted: raise immediately instead of
                # sleeping one more time (the original slept 120s before
                # checking the counter, delaying the failure for nothing).
                err_msg = (
                    '[*] Remote Object {0} failed to be removed.'
                    ' Retrying intent '
                    '{1} out of {2} totals'.format(
                        obj, curr_count,
                        retry_max_count))
                error_message = \
                    '[*] Error: {0}: {1}'.format(err_msg, error)
                raise Exception(error_message)
            logging.warning(
                ('[*] Remote object {0} failed to be removed'
                 ' Retrying intent n. {1} out of {2} totals'.format(
                     obj, curr_count, retry_max_count)))
            # Back off before the next attempt; only reached when at least
            # one retry remains.
            time.sleep(sleep_time)
# NOTE(review): this span is rendered from a commit diff; removed (old) and
# added (new) lines are interleaved with no +/- markers and indentation was
# stripped, so the text below is NOT valid Python as shown. The comments
# flag issues to confirm against the real post-commit source.
def remove_obj_older_than(backup_opt_dict):
"""
Remove object in remote swift server older more tqhen days
Remove object in remote swift server which are
older than the specified days or timestamp
"""
# NOTE(review): the next four lines mix the old guard (which also checked
# remove_older_than) with the new guard that only checks remote_obj_list;
# likewise 'return False' (old) vs bare 'return' (new) -- confirm upstream.
if not backup_opt_dict.remote_obj_list \
or backup_opt_dict.remove_older_than is False:
if not backup_opt_dict.remote_obj_list:
logging.warning('[*] No remote objects will be removed')
return False
return
remove_from_timestamp = False
# Exactly one of remove_older_than / remove_from_date may be set;
# both set or both unset raises.
if backup_opt_dict.remove_older_than is not None:
if backup_opt_dict.remove_from_date:
raise Exception("Please specify remove date unambiguously")
current_timestamp = backup_opt_dict.time_stamp
max_time = backup_opt_dict.remove_older_than * 86400
remove_from_timestamp = current_timestamp - max_time
else:
if not backup_opt_dict.remove_from_date:
raise Exception("Remove date/age not specified")
remove_from_timestamp = date_string_to_timestamp(
backup_opt_dict.remove_from_date)
logging.info('[*] Removing objects older than {0} ({1})'.format(
datetime.datetime.fromtimestamp(remove_from_timestamp),
remove_from_timestamp))
# NOTE(review): the following block (int conversion, day-based logging and
# max_time/current_timestamp recomputation) appears to be the REMOVED old
# implementation superseded by remove_from_timestamp above -- confirm.
backup_opt_dict.remove_older_than = int(
float(backup_opt_dict.remove_older_than))
logging.info('[*] Removing object older {0} day(s)'.format(
backup_opt_dict.remove_older_than))
# Compute the amount of seconds from the number of days provided by
# remove_older_than and compare it with the remote backup timestamp
max_time = backup_opt_dict.remove_older_than * 86400
current_timestamp = backup_opt_dict.time_stamp
backup_name = backup_opt_dict.backup_name
hostname = backup_opt_dict.hostname
backup_opt_dict = get_match_backup(backup_opt_dict)
sorted_remote_list = sort_backup_list(backup_opt_dict)
sw_connector = backup_opt_dict.sw_connector
level_0_flag = None
tar_meta_0_flag = None
# Dependency flags: while True, the most recent kept backup chain still
# depends on older (incremental) objects, so those must not be deleted.
tar_meta_incremental_dep_flag = False
incremental_dep_flag = False
for match_object in sorted_remote_list:
# Object names are expected as <hostname>_<backup_name>_<timestamp>_<level>
obj_name_match = re.search(r'{0}_({1})_(\d+)_(\d+?)$'.format(
hostname, backup_name), match_object, re.I)
if obj_name_match:
remote_obj_timestamp = int(obj_name_match.group(2))
time_delta = current_timestamp - remote_obj_timestamp
# If the difference between current_timestamp and the backup
# timestamp is smaller then max_time, then the backup is valid
if time_delta > max_time:
# If the time_delta is bigger then max_time, then we verify
# if the level of the backup is 0. In case is not 0,
# the backup is not removed as is part of a backup where the
# levels cross the max_time. In this case we don't remove the
# backup till its level 0.
# Both tar_meta data and backup objects names are handled
if remote_obj_timestamp >= remove_from_timestamp:
if match_object.startswith('tar_meta'):
if tar_meta_0_flag is None:
# NOTE(review): BUG -- "group(3) is '0'" compares object
# identity, not equality; it only appears to work because
# CPython interns short string literals. Should be
# "group(3) == '0'" here and at every occurrence below.
if obj_name_match.group(3) is '0':
tar_meta_0_flag = True
else:
continue
elif level_0_flag is None:
if obj_name_match.group(3) is '0':
level_0_flag = True
else:
continue
# Same identity-comparison issue: should be != '0'.
tar_meta_incremental_dep_flag = \
(obj_name_match.group(3) is not '0')
else:
incremental_dep_flag = \
(obj_name_match.group(3) is not '0')
# NOTE(review): this inline retry loop duplicates remove_object();
# presumably the new version delegates to remove_object instead --
# confirm against the post-commit source.
logging.info('[*] Removing backup object: {0}'.format(
match_object))
sleep_time = 120
retry_max_count = 60
curr_count = 0
while True:
try:
sw_connector.delete_object(
backup_opt_dict.container, match_object)
logging.info(
'[*] Remote object {0} removed'.format(
match_object))
break
except Exception as error:
curr_count += 1
time.sleep(sleep_time)
if curr_count >= retry_max_count:
err_msg = (
'[*] Remote Object {0} failed to be removed.'
' Retrying intent '
'{1} out of {2} totals'.format(
match_object, curr_count,
retry_max_count))
error_message = '[*] Error: {0}: {1}'.format(
err_msg, error)
raise Exception(error_message)
else:
logging.warning(
('[*] Remote object {0} failed to be removed'
' Retrying intent n. '
'{1} out of {2} totals'.format(
match_object, curr_count,
retry_max_count)))
else:
# Object is old enough to remove, but only if nothing newer in the
# kept chain still depends on it (flags cleared at the next level-0).
if match_object.startswith('tar_meta'):
if not tar_meta_incremental_dep_flag:
remove_object(backup_opt_dict, match_object)
else:
# NOTE(review): same "is '0'" identity-comparison bug.
if obj_name_match.group(3) is '0':
tar_meta_incremental_dep_flag = False
else:
if not incremental_dep_flag:
remove_object(backup_opt_dict, match_object)
else:
if obj_name_match.group(3) is '0':
incremental_dep_flag = False
def get_container_content(backup_opt_dict):
@ -224,8 +232,6 @@ def check_container_existance(backup_opt_dict):
Check if the provided container is already available on Swift.
The verification is done by exact matching between the provided container
name and the whole list of container available for the swift account.
If the container is not found, it will be automatically create and used
to execute the backup
"""
required_list = [

View File

@ -42,7 +42,7 @@ def tar_restore_args_valid(backup_opt_dict):
'argument: --restore-abs-path'))
except Exception as err:
valid_args = False
logging.crititcal('[*] Critical Error: {0}'.format(err))
logging.critical('[*] Critical Error: {0}'.format(err))
return valid_args
@ -156,10 +156,8 @@ def gen_tar_command(
os.path.exists(opt_dict.src_file)]
if not validate_all_args(required_list):
logging.critical(
('Error: Please ALL the following options: '
'--path-to-backup, --backup-name'))
raise Exception
raise Exception('Error: Please ALL the following options: '
'--path-to-backup, --backup-name')
# Change che current working directory to op_dict.src_file
os.chdir(os.path.normpath(opt_dict.src_file.strip()))

View File

@ -196,10 +196,8 @@ def get_match_backup(backup_opt_dict):
if not backup_opt_dict.backup_name or not backup_opt_dict.container \
or not backup_opt_dict.remote_obj_list:
err = "[*] Error: please provide a valid Swift container,\
backup name and the container contents"
logging.exception(err)
raise Exception(err)
raise Exception("[*] Error: please provide a valid Swift container,\
backup name and the container contents")
backup_name = backup_opt_dict.backup_name.lower()
if backup_opt_dict.remote_obj_list:
@ -222,6 +220,7 @@ def get_newest_backup(backup_opt_dict):
Return from backup_opt_dict.remote_match_backup, the newest backup
matching the provided backup name and hostname of the node where
freezer is executed. It correspond to the previous backup executed.
NOTE: If backup has no tar_metadata, no newest backup is returned.
'''
if not backup_opt_dict.remote_match_backup:
@ -240,9 +239,12 @@ def get_newest_backup(backup_opt_dict):
remote_obj_timestamp = int(obj_name_match.group(2))
if remote_obj_timestamp > backup_timestamp:
backup_timestamp = remote_obj_timestamp
backup_opt_dict.remote_newest_backup = remote_obj
break
tar_metadata_obj = 'tar_metadata_{0}'.format(remote_obj)
if tar_metadata_obj in sorted_backups_list:
backup_opt_dict.remote_newest_backup = remote_obj
return backup_opt_dict
@ -472,7 +474,7 @@ def get_vol_fs_type(backup_opt_dict):
return filesys_type.lower().strip()
def check_backup_existance(backup_opt_dict):
def check_backup_and_tar_meta_existence(backup_opt_dict):
'''
Check if any backup is already available on Swift.
The verification is done by backup_name, which needs to be unique
@ -545,3 +547,9 @@ def get_mount_from_path(path):
mount_point_path = os.path.dirname(mount_point_path)
return mount_point_path
def date_string_to_timestamp(date_string):
    """Convert a 'YYYY-MM-DDThh:mm:ss' string into a Unix timestamp.

    The value is interpreted as local time (via time.mktime).
    """
    parsed = datetime.datetime.strptime(date_string, '%Y-%m-%dT%H:%M:%S')
    local_struct = parsed.timetuple()
    return int(time.mktime(local_struct))

View File

@ -39,7 +39,7 @@ def read(*filenames, **kwargs):
setup(
name='freezer',
version='1.1.1',
version='1.1.2',
url='https://github.com/stackforge/freezer',
license='Apache Software License',
author='Fausto Marzi, Ryszard Chojnacki, Emil Dimitrov',

View File

@ -493,6 +493,7 @@ class FakeSwiftClient:
class Connection:
def __init__(self, key=True, os_options=True, auth_version=True, user=True, authurl=True, tenant_name=True, retries=True, insecure=True):
self.num_try = 0
return None
def put_object(self, opt1=True, opt2=True, opt3=True, opt4=True, opt5=True, headers=True, content_length=True, content_type=True):
@ -504,8 +505,12 @@ class FakeSwiftClient:
def put_container(self, container=True):
return True
def delete_object(self, *args, **kwargs):
return True
def delete_object(self, container_name='', object_name=''):
if self.num_try > 0:
self.num_try -= 1
raise Exception("test num_try {0}".format(self.num_try))
else:
return True
def get_container(self, *args, **kwargs):
return [True, True]
@ -608,7 +613,7 @@ class BackupOpt1:
self.hostname = 'test-hostname'
self.curr_backup_level = 0
self.src_file = '/tmp'
self.tar_path= 'true'
self.tar_path = 'true'
self.dereference_symlink = 'true'
self.no_incremental = 'true'
self.exclude = 'true'
@ -627,12 +632,12 @@ class BackupOpt1:
self.max_backup_level = '20'
self.encrypt_pass_file = '/dev/random'
self.always_backup_level = '20'
self.remove_older_than = '20'
self.remove_from_date = '2014-12-03T23:23:23'
self.restart_always_backup = 100000
self.remote_match_backup = [
'test-hostname_test-backup-name_1234567_0',
'test-hostname_test-backup-name_aaaaa__a',
'test-hostname_test-backup-name_9999999999999999999999999999999_0',
#'test-hostname_test-backup-name_9999999999999999999999999999999_0',
'test-hostname_test-backup-name_1234568_1',
'test-hostname_test-backup-name_1234569_2',
'tar_metadata_test-hostname_test-backup-name_1234569_2',
@ -654,15 +659,14 @@ class BackupOpt1:
self.containers_list = [
{'name' : 'testcontainer1', 'bytes' : 123423, 'count' : 10}
]
self.list_container = True
self.list_objects = True
self.list_container = False
self.list_objects = False
self.restore_from_date = '2014-12-03T23:23:23'
self.restore_from_host = 'test-hostname'
self.action = 'info'
self.insecure = True
self.auth_version = 2
class FakeMySQLdb:
def __init__(self):
@ -829,6 +833,12 @@ class Os:
return '/tmp/testdir'
class Os1(Os):
@classmethod
def exists(cls, directory=True):
return False
class Fake_get_vol_fs_type:
def __init__(self):
@ -839,8 +849,8 @@ class Fake_get_vol_fs_type:
return 'xfs'
def fake_get_match_backup(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_get_match_backup(self, backup_opt):
#backup_opt = BackupOpt1()
backup_opt.remote_match_backup = None
return backup_opt
@ -854,54 +864,40 @@ class FakeSwift:
def __init__(self):
return None
def fake_get_containers_list(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_get_containers_list(self, backup_opt):
return backup_opt
def fake_get_containers_list1(self, *args, **kwargs):
backup_opt = BackupOpt1()
backup_opt.list_container = None
def fake_get_containers_list1(self, backup_opt):
return backup_opt
def fake_get_containers_list2(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_get_containers_list2(self, backup_opt):
backup_opt.list_container = None
backup_opt.list_objects = None
return backup_opt
def fake_get_client(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_get_client(self, backup_opt):
return backup_opt
def fake_show_containers(self, *args, **kwargs):
def fake_show_containers(self, backup_opt):
return True
def fake_show_objects(self, *args, **kwargs):
def fake_show_objects(self, backup_opt):
return True
def fake_check_container_existance(self, *args, **kwargs):
return {'main_container': True}
return {'main_container': True, 'container_segments': True}
def fake_check_container_existance1(self, *args, **kwargs):
return {'main_container': False}
return {'main_container': False, 'container_segments': False}
def fake_get_containers_list3(self, *args, **kwargs):
backup_opt = BackupOpt1()
backup_opt.action = 'restore'
backup_opt.list_container = None
backup_opt.list_objects = None
def fake_get_containers_list3(self, backup_opt):
return backup_opt
def fake_get_containers_list4(self, *args, **kwargs):
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.list_container = None
backup_opt.list_objects = None
def fake_get_containers_list4(self, backup_opt):
backup_opt.containers_list = []
return backup_opt
def fake_get_container_content(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_get_container_content(self, backup_opt):
return backup_opt
@ -919,26 +915,26 @@ class FakeUtils:
def __init__(self):
return None
def fake_set_backup_level_fs(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_set_backup_level_fs(self, backup_opt, manifest_meta):
#backup_opt = BackupOpt1()
manifest_meta = {}
backup_opt.mode = 'fs'
return backup_opt, manifest_meta
def fake_set_backup_level_mongo(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_set_backup_level_mongo(self, backup_opt, manifest_meta):
#backup_opt = BackupOpt1()
manifest_meta = {}
backup_opt.mode = 'mongo'
return backup_opt, manifest_meta
def fake_set_backup_level_mysql(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_set_backup_level_mysql(self, backup_opt, manifest_meta):
#backup_opt = BackupOpt1()
manifest_meta = {}
backup_opt.mode = 'mysql'
return backup_opt, manifest_meta
def fake_set_backup_level_none(self, *args, **kwargs):
backup_opt = BackupOpt1()
def fake_set_backup_level_none(self, backup_opt, manifest_meta):
#backup_opt = BackupOpt1()
manifest_meta = {}
backup_opt.mode = None
return backup_opt, manifest_meta

View File

@ -34,7 +34,6 @@ class TestMain:
def test_freezer_main(self, monkeypatch):
backup_opt = BackupOpt1()
fakelogging = FakeLogging()
fakeswift = FakeSwift()
@ -46,26 +45,19 @@ class TestMain:
monkeypatch.setattr(swift, 'get_client', fakeswift.fake_get_client)
monkeypatch.setattr(swift, 'show_containers', fakeswift.fake_show_containers)
monkeypatch.setattr(swift, 'show_objects', fakeswift.fake_show_objects)
monkeypatch.setattr(swift, 'get_containers_list', fakeswift.fake_get_containers_list1)
assert freezer_main(backup_opt) is True
fakeswift = FakeSwift()
monkeypatch.setattr(swift, 'check_container_existance', fakeswift.fake_check_container_existance)
monkeypatch.setattr(swift, 'get_containers_list', fakeswift.fake_get_containers_list)
backup_opt = BackupOpt1()
assert freezer_main(backup_opt) is True
fakeswift = FakeSwift()
monkeypatch.setattr(swift, 'check_container_existance', fakeswift.fake_check_container_existance1)
monkeypatch.setattr(swift, 'get_containers_list', fakeswift.fake_get_containers_list1)
backup_opt = BackupOpt1()
backup_opt.action = 'info'
assert freezer_main(backup_opt) is False
fakeswift = FakeSwift()
backup_opt.list_container = True
assert freezer_main(backup_opt) is True
backup_opt.list_container = False
backup_opt.list_objects = True
assert freezer_main(backup_opt) is True
monkeypatch.setattr(swift, 'check_container_existance', fakeswift.fake_check_container_existance1)
monkeypatch.setattr(swift, 'get_containers_list', fakeswift.fake_get_containers_list2)
backup_opt = BackupOpt1()
assert freezer_main(backup_opt) is False
fakeswift = FakeSwift()
@ -94,6 +86,8 @@ class TestMain:
monkeypatch.setattr(backup, 'backup_mode_fs', fakebackup.fake_backup_mode_fs)
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.no_incremental = False
assert freezer_main(backup_opt) is None
fakeswift = FakeSwift()
@ -105,6 +99,8 @@ class TestMain:
monkeypatch.setattr(backup, 'backup_mode_mongo', fakebackup.fake_backup_mode_mongo)
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.no_incremental = False
assert freezer_main(backup_opt) is None
fakeswift = FakeSwift()
@ -116,6 +112,8 @@ class TestMain:
monkeypatch.setattr(backup, 'backup_mode_mysql', fakebackup.fake_backup_mode_mysql)
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.no_incremental = False
assert freezer_main(backup_opt) is None
fakeswift = FakeSwift()
@ -125,4 +123,26 @@ class TestMain:
monkeypatch.setattr(utils, 'set_backup_level', fakeutils.fake_set_backup_level_none)
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.no_incremental = False
pytest.raises(ValueError, freezer_main, backup_opt)
fakeswift = FakeSwift()
fakeutils = FakeUtils()
fakebackup = FakeBackup()
monkeypatch.setattr(swift, 'check_container_existance', fakeswift.fake_check_container_existance)
monkeypatch.setattr(swift, 'get_containers_list', fakeswift.fake_get_containers_list4)
monkeypatch.setattr(utils, 'set_backup_level', fakeutils.fake_set_backup_level_fs)
monkeypatch.setattr(backup, 'backup_mode_fs', fakebackup.fake_backup_mode_fs)
backup_opt = BackupOpt1()
backup_opt.action = 'backup'
backup_opt.no_incremental = True
backup_opt.max_backup_level = True
pytest.raises(Exception, freezer_main, backup_opt)
backup_opt.max_backup_level = False
backup_opt.always_backup_level = False
assert freezer_main(backup_opt) is None

View File

@ -26,7 +26,7 @@ from freezer.swift import (create_containers, show_containers,
show_objects, remove_obj_older_than, get_container_content,
check_container_existance, SwiftOptions,
get_client, manifest_upload, add_object, get_containers_list,
object_to_file, object_to_stream)
object_to_file, object_to_stream, remove_object)
import os
import logging
import subprocess
@ -58,6 +58,7 @@ class TestSwift:
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
backup_opt.__dict__['list_container'] = True
assert show_containers(backup_opt) is True
backup_opt.__dict__['list_container'] = False
@ -73,6 +74,7 @@ class TestSwift:
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
backup_opt.__dict__['list_objects'] = True
assert show_objects(backup_opt) is True
backup_opt.__dict__['remote_obj_list'] = None
@ -81,6 +83,31 @@ class TestSwift:
backup_opt.__dict__['list_objects'] = False
assert show_objects(backup_opt) is False
def test_remove_object(self, monkeypatch):
backup_opt = BackupOpt1()
fakelogging = FakeLogging()
fakeclient = FakeSwiftClient()
fakeconnector = fakeclient.client()
fakeswclient = fakeconnector.Connection()
backup_opt.sw_connector = fakeswclient
faketime = FakeTime()
monkeypatch.setattr(logging, 'critical', fakelogging.critical)
monkeypatch.setattr(logging, 'warning', fakelogging.warning)
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
monkeypatch.setattr(time, 'sleep', faketime.sleep)
assert remove_object(backup_opt, 'obj_name') is None
fakeswclient.num_try = 59
assert remove_object(backup_opt, 'obj_name') is None
fakeswclient.num_try = 60
pytest.raises(Exception, remove_object, backup_opt, 'obj_name')
def test_remove_obj_older_than(self, monkeypatch):
backup_opt = BackupOpt1()
@ -97,12 +124,27 @@ class TestSwift:
monkeypatch.setattr(logging, 'error', fakelogging.error)
monkeypatch.setattr(time, 'sleep', faketime.sleep)
backup_opt.__dict__['remove_older_than'] = None
backup_opt.__dict__['remove_from_date'] = None
pytest.raises(Exception, remove_obj_older_than, backup_opt)
backup_opt.__dict__['remove_older_than'] = False
assert remove_obj_older_than(backup_opt) is False
backup_opt = BackupOpt1()
backup_opt.__dict__['remove_older_than'] = 0
backup_opt.__dict__['remove_from_date'] = '2014-12-03T23:23:23'
pytest.raises(Exception, remove_obj_older_than, backup_opt)
backup_opt = BackupOpt1()
backup_opt.__dict__['remove_older_than'] = None
backup_opt.__dict__['remove_from_date'] = '2014-12-03T23:23:23'
assert remove_obj_older_than(backup_opt) is None
backup_opt = BackupOpt1()
backup_opt.__dict__['remove_older_than'] = 0
backup_opt.__dict__['remove_from_date'] = None
assert remove_obj_older_than(backup_opt) is None
backup_opt = BackupOpt1()
backup_opt.__dict__['remote_obj_list'] = []
assert remove_obj_older_than(backup_opt) is None
def test_get_container_content(self, monkeypatch):

View File

@ -23,8 +23,8 @@ Hudson (tjh@cryptsoft.com).
from commons import *
from freezer.tar import (tar_restore, tar_incremental, tar_backup,
gen_tar_command)
from freezer import utils
gen_tar_command, tar_restore_args_valid)
import os
import logging
import subprocess
@ -146,3 +146,20 @@ class TestTar:
backup_opt.__dict__['max_seg_size'] = 1
assert tar_backup(backup_opt, 'tar_command', fakebackup_queue) is not False
def test_tar_restore_args_valid(self, monkeypatch):
backup_opt = BackupOpt1()
fakelogging = FakeLogging()
monkeypatch.setattr(logging, 'critical', fakelogging.critical)
monkeypatch.setattr(logging, 'warning', fakelogging.warning)
monkeypatch.setattr(logging, 'exception', fakelogging.exception)
monkeypatch.setattr(logging, 'error', fakelogging.error)
fakeos = Os()
monkeypatch.setattr(os.path, 'exists', fakeos.exists)
assert tar_restore_args_valid(backup_opt) is True
fakeos1 = Os1()
monkeypatch.setattr(os.path, 'exists', fakeos1.exists)
assert tar_restore_args_valid(backup_opt) is False

View File

@ -5,7 +5,7 @@ from freezer.utils import (
sort_backup_list, create_dir, get_match_backup,
get_newest_backup, get_rel_oldest_backup, get_abs_oldest_backup,
eval_restart_backup, start_time, elapsed_time, set_backup_level,
get_vol_fs_type, check_backup_existance, add_host_name_ts_level,
get_vol_fs_type, check_backup_and_tar_meta_existence, add_host_name_ts_level,
get_mount_from_path)
from freezer import utils
@ -170,6 +170,7 @@ class TestUtils:
manifest_meta = dict()
backup_opt = BackupOpt1()
backup_opt.__dict__['no_incremental'] = False
manifest_meta['x-object-meta-backup-name'] = True
manifest_meta['x-object-meta-backup-current-level'] = 1
manifest_meta['x-object-meta-always-backup-level'] = 3
@ -180,12 +181,14 @@ class TestUtils:
assert manifest_meta['x-object-meta-backup-current-level'] is not False
backup_opt = BackupOpt1()
backup_opt.__dict__['no_incremental'] = False
manifest_meta['x-object-meta-maximum-backup-level'] = 2
(backup_opt, manifest_meta_dict) = set_backup_level(
backup_opt, manifest_meta)
assert manifest_meta['x-object-meta-backup-current-level'] is not False
backup_opt = BackupOpt1()
backup_opt.__dict__['no_incremental'] = False
backup_opt.__dict__['curr_backup_level'] = 1
(backup_opt, manifest_meta_dict) = set_backup_level(
backup_opt, manifest_meta)
@ -193,6 +196,7 @@ class TestUtils:
manifest_meta = dict()
backup_opt = BackupOpt1()
backup_opt.__dict__['no_incremental'] = False
manifest_meta['x-object-meta-backup-name'] = False
manifest_meta['x-object-meta-maximum-backup-level'] = 0
manifest_meta['x-object-meta-backup-current-level'] = 1
@ -200,6 +204,14 @@ class TestUtils:
backup_opt, manifest_meta)
assert manifest_meta['x-object-meta-backup-current-level'] == '0'
manifest_meta = dict()
backup_opt = BackupOpt1()
backup_opt.__dict__['max_backup_level'] = False
backup_opt.__dict__['always_backup_level'] = False
(backup_opt, manifest_meta) = set_backup_level(
backup_opt, manifest_meta)
assert manifest_meta['x-object-meta-backup-current-level'] == '0'
def test_get_vol_fs_type(self, monkeypatch):
backup_opt = BackupOpt1()
@ -218,15 +230,15 @@ class TestUtils:
backup_opt = BackupOpt1()
backup_opt.__dict__['backup_name'] = None
assert type(check_backup_existance(backup_opt)) is dict
assert type(check_backup_and_tar_meta_existence(backup_opt)) is dict
fakeswiftclient = FakeSwiftClient()
backup_opt = BackupOpt1()
assert check_backup_existance(backup_opt) is True
assert check_backup_and_tar_meta_existence(backup_opt) is True
fake_get_newest_backup = Fakeget_newest_backup()
monkeypatch.setattr(utils, 'get_newest_backup', fake_get_newest_backup)
assert type(check_backup_existance(backup_opt)) is dict
assert type(check_backup_and_tar_meta_existence(backup_opt)) is dict
def test_add_host_name_ts_level(self):