Added unittest for utils.py
A basic unit test suite for utils.py is now available under tests/test_utils.py. While writing the tests, two bugs were encountered and fixed in this same commit. tox.ini has also been modified so that runtests.py can be used.

Change-Id: Ieecd9549f358566ba5d4c7fcf7ad5780b9311dcd
parent fb27cb7b85
commit 3302161358
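Nearly every hunk below applies the same error-handling change: instead of logging a message and then raising a bare Exception, the message is built once, logged, and attached to the exception. A minimal sketch of the before/after pattern, using the umount error from the first hunk as the example:

import logging

def before(mount_point, mount_err):
    # old style: the message is logged, but the raised exception carries no detail
    logging.critical('[*] Error: impossible to umount {0} {1}'.format(
        mount_point, mount_err))
    raise Exception

def after(mount_point, mount_err):
    # new style: build the message once, log it, and attach it to the exception
    err = '[*] Error: impossible to umount {0} {1}'.format(
        mount_point, mount_err)
    logging.critical(err)
    raise Exception(err)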
@@ -82,9 +82,10 @@ def lvm_snap_remove(backup_opt_dict):
         shell=True, executable=backup_opt_dict.bash_path)
     (umount_out, mount_err) = umount_proc.communicate()
     if re.search(r'\S+', umount_out):
-        logging.critical('[*] Error: impossible to umount {0} {1}\
-            '.format(mount_point, mount_err))
-        raise Exception
+        err = '[*] Error: impossible to umount {0} {1}'.format(
+            mount_point, mount_err)
+        logging.critical(err)
+        raise Exception(err)
     else:
         # Change working directory to be able to unmount
         os.chdir(backup_opt_dict.workdir)
@@ -101,10 +102,10 @@ def lvm_snap_remove(backup_opt_dict):
         logging.info('[*] {0}'.format(lvm_rm_out))
         return True
     else:
-        logging.critical(
-            '[*] Error: lvm_snap_rm {0}'.format(lvm_rm_err))
-        raise Exception
+        err = '[*] Error: lvm_snap_rm {0}'.format(lvm_rm_err)
+        logging.critical(err)
+        raise Exception(err)
-    raise Exception
+    raise Exception('[*] Error: no lvm snap removed')


 def lvm_snap(backup_opt_dict):
@@ -159,9 +160,9 @@ def lvm_snap(backup_opt_dict):
         executable=backup_opt_dict.bash_path)
     (lvm_out, lvm_err) = lvm_process.communicate()
     if lvm_err is False:
-        logging.critical('[*] lvm snapshot creation error: {0}\
-            '.format(lvm_err))
-        raise Exception
+        err = '[*] lvm snapshot creation error: {0}'.format(lvm_err)
+        logging.critical(err)
+        raise Exception(err)
     else:
         logging.warning('[*] {0}'.format(lvm_out))

@@ -197,12 +198,13 @@ def lvm_snap(backup_opt_dict):
             '.format(abs_snap_name, backup_opt_dict.lvm_dirmount))
         return True
     if mount_err:
-        logging.critical('[*] lvm snapshot mounting error: {0}'.format(
-            mount_err))
-        raise Exception
+        err = '[*] lvm snapshot mounting error: {0}'.format(mount_err)
+        logging.critical(err)
+        raise Exception(err)
     else:
-        logging.warning('[*] Volume {0} succesfully mounted on {1}\
-            '.format(abs_snap_name, backup_opt_dict.lvm_dirmount))
+        logging.warning(
+            '[*] Volume {0} succesfully mounted on {1}'.format(
+                abs_snap_name, backup_opt_dict.lvm_dirmount))
         return True


@@ -123,19 +123,20 @@ def validate_all_args(required_list):

     try:
         for element in required_list:
-            if element is False or not element:
+            if not element:
                 return False
     except Exception as error:
-        logging.critical("[*] Error: {0} please provide ALL of the following \
-            arguments: {1}".format(error, ' '.join(required_list)))
-        raise Exception
+        err = "[*] Error: validate_all_args: {0} {1}".format(
+            required_list, error)
+        logging.exception(err)
+        raise Exception(err)

     return True


 def validate_any_args(required_list):
     '''
-    Ensure ANY of the elements of required_list are True. raise ValueError
+    Ensure ANY of the elements of required_list are True. raise Exception
     Exception otherwise
     '''

@@ -143,10 +144,11 @@ def validate_any_args(required_list):
         for element in required_list:
             if element:
                 return True
-    except Exception:
-        logging.critical("[*] Error: please provide ANY of the following \
-            arguments: {0}".format(' '.join(required_list)))
-        raise Exception
+    except Exception as error:
+        err = "[*] Error: validate_any_args: {0} {1}".format(
+            required_list, error)
+        logging.exception(err)
+        raise Exception(err)

     return False

@@ -178,9 +180,10 @@ def create_dir(directory):
         logging.warning('[*] Directory {0} found!'.format(
             os.path.expanduser(directory)))
     except Exception as error:
-        logging.warning('[*] Error while creating directory {0}: {1}\
-            '.format(os.path.expanduser(directory, error)))
-        raise Exception
+        err = '[*] Error while creating directory {0}: {1}\
+            '.format(os.path.expanduser(directory), error)
+        logging.exception(err)
+        raise Exception(err)


 def get_match_backup(backup_opt_dict):
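This hunk contains one of the two bug fixes mentioned in the commit message: in the old code the closing parenthesis was misplaced, so error was passed as a second argument to os.path.expanduser(), which accepts only one argument and therefore raised a TypeError inside the except block. A small illustrative sketch (the sample values are made up):

import os

directory, error = '~/backups', IOError('disk full')

# old code: expanduser() received two arguments and raised a TypeError,
# masking the original error inside the except block
# os.path.expanduser(directory, error)

# fixed code: expand the path first, then interpolate both values
err = '[*] Error while creating directory {0}: {1}'.format(
    os.path.expanduser(directory), error)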
@@ -194,9 +197,10 @@ def get_match_backup(backup_opt_dict):

     if not backup_opt_dict.backup_name or not backup_opt_dict.container \
             or not backup_opt_dict.remote_obj_list:
-        logging.critical("[*] Error: please provide a valid Swift container,\
-            backup name and the container contents")
-        raise Exception
+        err = "[*] Error: please provide a valid Swift container,\
+            backup name and the container contents"
+        logging.exception(err)
+        raise Exception(err)

     backup_name = backup_opt_dict.backup_name.lower()
     if backup_opt_dict.remote_obj_list:
@@ -252,9 +256,10 @@ def get_rel_oldest_backup(backup_opt_dict):
     '''

     if not backup_opt_dict.backup_name:
-        logging.critical("[*] Error: please provide a valid backup name in \
-            backup_opt_dict.backup_name")
-        raise Exception
+        err = "[*] Error: please provide a valid backup name in \
+            backup_opt_dict.backup_name"
+        logging.exception(err)
+        raise Exception(err)

     backup_opt_dict.remote_rel_oldest = u''
     backup_name = backup_opt_dict.backup_name
@@ -286,10 +291,10 @@ def get_abs_oldest_backup(backup_opt_dict):
     The absolute oldest backup correspond the oldest available level 0 backup.
     '''
     if not backup_opt_dict.backup_name:
-
-        logging.critical("[*] Error: please provide a valid backup name in \
-            backup_opt_dict.backup_name")
-        raise Exception
+        err = "[*] Error: please provide a valid backup name in \
+            backup_opt_dict.backup_name"
+        logging.exception(err)
+        raise Exception(err)

     backup_opt_dict.remote_abs_oldest = u''
     if len(backup_opt_dict.remote_match_backup) == 0:
@@ -298,9 +303,9 @@ def get_abs_oldest_backup(backup_opt_dict):
     backup_timestamp = 0
     hostname = backup_opt_dict.hostname
     for remote_obj in backup_opt_dict.remote_match_backup:
-        object_name = remote_obj.get('name', None)
+        object_name = remote_obj.get('name', '')
         obj_name_match = re.search(r'{0}_({1})_(\d+)_(\d+?)$'.format(
-            hostname, backup_opt_dict.backup_name), remote_obj, re.I)
+            hostname, backup_opt_dict.backup_name), object_name.lower(), re.I)
         if not obj_name_match:
             continue
         remote_obj_timestamp = int(obj_name_match.group(2))
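This hunk contains the other bug fix: the old code passed the whole remote_obj dictionary to re.search(), which expects a string, so the lookup could never succeed. A short sketch of the corrected lookup, using an object name that follows the naming convention seen in the tests:

import re

remote_obj = {'name': 'test-hostname_test-backup-name_1234567_0'}
hostname, backup_name = 'test-hostname', 'test-backup-name'

# old code matched against the dict itself and failed:
# re.search(pattern, remote_obj, re.I)  -> TypeError, expected string

object_name = remote_obj.get('name', '')
obj_name_match = re.search(r'{0}_({1})_(\d+)_(\d+?)$'.format(
    hostname, backup_name), object_name.lower(), re.I)
print(obj_name_match.group(2))  # the backup timestamp, '1234567'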
@@ -332,7 +337,6 @@ def eval_restart_backup(backup_opt_dict):
     current_timestamp = backup_opt_dict.time_stamp
     backup_name = backup_opt_dict.backup_name
     hostname = backup_opt_dict.hostname
-    first_backup_ts = 0
     # Get relative oldest backup by calling get_rel_oldes_backup()
     backup_opt_dict = get_rel_oldest_backup(backup_opt_dict)
     if not backup_opt_dict.remote_rel_oldest:
@@ -344,9 +348,10 @@ def eval_restart_backup(backup_opt_dict):
     obj_name_match = re.search(r'{0}_({1})_(\d+)_(\d+?)$'.format(
         hostname, backup_name), backup_opt_dict.remote_rel_oldest, re.I)
     if not obj_name_match:
-        logging.info('[*] No backup match available for backup {0} \
-            and host {1}'.format(backup_name, hostname))
-        return Exception
+        err = ('[*] No backup match available for backup {0} '
+               'and host {1}'.format(backup_name, hostname))
+        logging.info(err)
+        return Exception(err)

     first_backup_ts = int(obj_name_match.group(2))
     if (current_timestamp - first_backup_ts) > max_time:
@@ -445,9 +450,9 @@ def get_vol_fs_type(backup_opt_dict):

     vol_name = backup_opt_dict.lvm_srcvol
     if os.path.exists(vol_name) is False:
-        logging.critical('[*] Provided volume name not found: {0} \
-            '.format(vol_name))
-        raise Exception
+        err = '[*] Provided volume name not found: {0} '.format(vol_name)
+        logging.exception(err)
+        raise Exception(err)

     file_cmd = '{0} -0 -bLs --no-pad --no-buffer --preserve-date \
         {1}'.format(backup_opt_dict.file_path, vol_name)
@@ -458,17 +463,15 @@ def get_vol_fs_type(backup_opt_dict):
     (file_out, file_err) = file_process.communicate()
     file_match = re.search(r'(\S+?) filesystem data', file_out, re.I)
     if file_match is None:
-        logging.critical('[*] File system type not guessable: {0}\
-            '.format(file_err))
-        raise Exception
+        err = '[*] File system type not guessable: {0}'.format(file_err)
+        logging.exception(err)
+        raise (err)
     else:
         filesys_type = file_match.group(1)
         logging.info('[*] File system {0} found for volume {1}'.format(
             filesys_type, vol_name))
         return filesys_type.lower().strip()

-    raise Exception
-

 def check_backup_existance(backup_opt_dict):
     '''
@@ -512,9 +515,10 @@ def add_host_name_ts_level(backup_opt_dict, time_stamp=int(time.time())):
     '''

     if backup_opt_dict.backup_name is False:
-        logging.critical('[*] Error: Please specify the backup name with\
-            --backup-name option')
-        raise Exception
+        err = ('[*] Error: Please specify the backup name with '
+               '--backup-name option')
+        logging.exception(err)
+        raise Exception(err)

     backup_name = u'{0}_{1}_{2}_{3}'.format(
         backup_opt_dict.hostname,
0    tests/__init__.py (new file)
172  tests/commons.py
@@ -21,6 +21,72 @@ os.environ['OS_USERNAME'] = 'testusername'
 os.environ['OS_TENANT_NAME'] = 'testtenantename'


+class FakeLogging:
+
+    def __init__(self):
+        return None
+
+    def __call__(self, *args, **kwargs):
+        return True
+
+    @classmethod
+    def logging(cls):
+        return True
+
+    @classmethod
+    def info(cls):
+        return True
+
+
+class Fakeget_newest_backup:
+
+    def __init__(self, opt1=True):
+        return None
+
+    def __call__(self, *args, **kwargs):
+        backup_opt = BackupOpt1()
+        backup_opt.__dict__['remote_newest_backup'] = False
+        return backup_opt
+
+
+class Fakeget_rel_oldest_backup:
+
+    def __init__(self, opt1=True):
+        return None
+
+    def __call__(self, *args, **kwargs):
+        backup_opt = BackupOpt1()
+        backup_opt.__dict__['remote_rel_oldest'] = False
+        return backup_opt
+
+
+class Fakeget_rel_oldest_backup2:
+
+    def __init__(self, opt1=True):
+        return None
+
+    def __call__(self, *args, **kwargs):
+        backup_opt = BackupOpt1()
+        backup_opt.__dict__['remote_rel_oldest'] = True
+        return backup_opt
+
+
+class FakeDistutils:
+
+    def __init__(self):
+        return None
+
+    class spawn:
+        def __init__(self, *args, **kwargs):
+            return None
+
+        def __call__(self, *args, **kwargs):
+            return self
+
+        def find_executable(self, *args, **kwargs):
+            return True
+
+
 class FakeArgparse:

     def __init__(self):
@@ -45,6 +111,7 @@ class FakeArgparse:

     @classmethod
     def parse_args(self):
+        self.hostname = None
         return self


@@ -171,23 +238,42 @@ class FakeSwiftClient:
         def put_object(self, opt1=True, opt2=True, opt3=True, opt4=True, opt5=True, headers=True, content_length=True, content_type=True):
             return True

+        def head_object(self, opt1=True, opt2=True):
+            return True
+

 class FakeRe:

     def __init__(self):
         return None

-    class search:
-        def __init__(self, opt1=True, opt2=True, opt3=True):
-            self.opt1 = opt1
-            self.opt2 = opt2
-            self.opt3 = opt3
-            return None
+    @classmethod
+    def search(self, opt1=True, opt2=True, opt3=True):
+        return self

-        def group(self, opt1=True, opt2=True):
-            self.opt1 = opt1
-            self.opt2 = opt2
+    @classmethod
+    def group(self, opt1=True, opt2=True):
+        if opt1 == 1:
+            return 'testgroup'
+        else:
+            return '10'
+
+
+class FakeRe2:
+
+    def __init__(self):
+        return None
+
+    def __call__(self, *args, **kwargs):
+        return None
+
+    @classmethod
+    def search(cls, opt1=True, opt2=True, opt3=True):
+        return None
+
+    @classmethod
+    def group(cls, opt1=True, opt2=True):
+        return None


 class BackupOpt1:
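These fake classes let the tests replace module-level dependencies without touching the real libraries: FakeRe always reports a match and returns canned group values, while FakeRe2 simulates no match. A minimal sketch of how they pair with pytest's monkeypatch fixture, mirroring the setattr calls that appear in the test files below:

import re
from commons import FakeRe, FakeRe2

def test_with_fake_match(monkeypatch):
    # every re.search() call now "succeeds"; group(1)/group(2) return
    # the canned values 'testgroup' and '10'
    fakere = FakeRe()
    monkeypatch.setattr(re, 'search', fakere.search)
    assert re.search('pattern', 'anything') is not None

def test_with_no_match(monkeypatch):
    # every re.search() call now returns None, exercising the error paths
    fakere2 = FakeRe2()
    monkeypatch.setattr(re, 'search', fakere2.search)
    assert re.search('pattern', 'anything') is None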
@@ -196,7 +282,7 @@ class BackupOpt1:
         fakeclient = FakeSwiftClient()
         fakeconnector = fakeclient.client()
         fakeswclient = fakeconnector.Connection()
-        self.mysql_conf_file = '/dev/null'
+        self.mysql_conf_file = '/tmp/freezer-test-conf-file'
         self.lvm_auto_snap = '/dev/null'
         self.lvm_volgroup = 'testgroup'
         self.lvm_srcvol = 'testvol'
@@ -212,7 +298,7 @@ class BackupOpt1:
         self.umount_path = 'true'
         self.backup_name = 'test-backup-name'
         self.hostname = 'test-hostname'
-        self.curr_backup_level = '0'
+        self.curr_backup_level = 0
         self.src_file = '/tmp'
         self.tar_path= 'true'
         self.dereference_symlink = 'true'
@@ -224,13 +310,29 @@ class BackupOpt1:
         self.max_backup_level = '0'
         self.remove_older_than = '0'
         self.max_seg_size = '0'
-        self.time_stamp = '0'
-        self.container_segments = 'test-container-segements'
+        self.time_stamp = 123456789
         self.container = 'test-container'
         self.restart_always_backup = '0'
         self.workdir = '/tmp'
         self.upload = 'true'
         self.sw_connector = fakeswclient
+        self.max_backup_level = '20'
+        self.encrypt_pass_file = '/dev/random'
+        self.always_backup_level = '20'
+        self.remove_older_than = '20'
+        self.restart_always_backup = 100000
+        self.container_segments = 'testcontainerseg'
+        self.remote_match_backup = [
+            'test-hostname_test-backup-name_1234567_0',
+            'test-hostname_test-backup-name_1234567_1',
+            'test-hostname_test-backup-name_1234567_2']
+        self.remote_obj_list = [
+            {'name' : 'test-hostname_test-backup-name_1234567_0'},
+            {'name' : 'test-hostname_test-backup-name_1234567_1'},
+            {'name' : 'test-hostname_test-backup-name_1234567_2'},
+            {'fakename' : 'test-hostname_test-backup-name_1234567_2'},
+            {'name' : 'test-hostname-test-backup-name-asdfa-asdfasdf'}]
+        self.remote_objects = []


 class FakeMySQLdb:
@@ -262,6 +364,16 @@ class FakeMySQLdb:
         return True


+class FakeMySQLdb2:
+
+    def __init__(self):
+        return None
+
+    @classmethod
+    def connect(self, host=True, user=True, passwd=True):
+        raise Exception
+
+
 class FakeMongoDB:

     def __init__(self, opt1=True):
@@ -279,18 +391,38 @@ class FakeMongoDB:
         return {'me': 'testnode', 'primary': 'testnode'}


-class Os:
-    def __init__(self):
+class FakeMongoDB2:
+
+    def __init__(self, opt1=True):
         return None

-    def expanduser(self, directory=True):
-        return True
+    def __call__(self, opt1=True):
+        return self
+
+    class admin:
+        def __init__(self):
+            return None
+
+        @classmethod
+        def command(cls, opt1=True):
+            return {'me': 'testnode', 'primary': 'testanothernode'}
+
+
+class Os:
+    def __init__(self, directory=True):
+        return None
+
+    def expanduser(self, directory=True, opt2=True):
+        return 'testdir'

     def makedirs(self, directory=True):
-        return True
+        return 'testdir'

     def isdir(self, directory=True):
-        return True
+        return 'testdir'

     def exists(self, directory=True):
-        return True
+        return 'testdir'
+
+    def makedirs2(self, directory=True):
+        raise Exception
@@ -3,7 +3,9 @@
 from freezer.arguments import backup_arguments
 import argparse
 from commons import *
+import sys
 import pytest
+import distutils.spawn as distspawn
 import __builtin__


@@ -13,8 +15,18 @@ class TestArguments:

         fakeargparse = FakeArgparse()
         fakeargparse = fakeargparse.ArgumentParser()
+        fakedistutils = FakeDistutils()
+        fakedistutilsspawn = fakedistutils.spawn()

         monkeypatch.setattr(
             argparse, 'ArgumentParser', fakeargparse)

-        assert backup_arguments() is not Exception or not False
+        platform = sys.platform
+        assert backup_arguments() is not False
+
+        sys.__dict__['platform'] = 'darwin'
+        pytest.raises(Exception, backup_arguments)
+        monkeypatch.setattr(
+            distspawn, 'find_executable', fakedistutilsspawn.find_executable)
+        assert backup_arguments() is not False
+        sys.__dict__['platform'] = platform
@@ -10,6 +10,7 @@ import os
 import MySQLdb
 import pymongo
 import re
+import pytest
 from commons import *

 import __builtin__
@@ -28,7 +29,6 @@ class TestBackUP:
         fakelvm = Lvm()
         fakesubprocess = FakeSubProcess()
         fakesubprocesspopen = fakesubprocess.Popen()
-        fakeopen = FakeOpen()
         fakemultiprocessing = FakeMultiProcessing()
         fakemultiprocessingqueue = fakemultiprocessing.Queue()
         fakemultiprocessingpipe = fakemultiprocessing.Pipe()
@@ -42,7 +42,7 @@ class TestBackUP:
             multiprocessing, 'Process', fakemultiprocessing.Process)
         monkeypatch.setattr(
             multiprocessing, '__init__', fakemultiprocessinginit)
-        monkeypatch.setattr(__builtin__, 'open', fakeopen.open)
+        #monkeypatch.setattr(__builtin__, 'open', fakeopen.open)
         monkeypatch.setattr(
             subprocess.Popen, 'communicate', fakesubprocesspopen.communicate)
         monkeypatch.setattr(
@@ -56,9 +56,21 @@ class TestBackUP:
         monkeypatch.setattr(os.path, 'exists', expanduser.exists)
         monkeypatch.setattr(swiftclient, 'client', fakeswiftclient.client)

-        assert backup_mode_mysql(
-            backup_opt, int(time.time()), test_meta) is None
+        mysql_conf_file = backup_opt.mysql_conf_file
+        backup_opt.__dict__['mysql_conf_file'] = None
+        pytest.raises(Exception, backup_mode_mysql, backup_opt, 123456789, test_meta)
+
+        # Generate mysql conf test file
+        backup_opt.__dict__['mysql_conf_file'] = mysql_conf_file
+        with open(backup_opt.mysql_conf_file, 'w') as mysql_conf_fd:
+            mysql_conf_fd.write('host=abcd\nuser=abcd\npassword=abcd\n')
+        assert backup_mode_mysql(
+            backup_opt, 123456789, test_meta) is None
+        #os.unlink(backup_opt.mysql_conf_file)
+
+        fakemysql2 = FakeMySQLdb2()
+        monkeypatch.setattr(MySQLdb, 'connect', fakemysql2.connect)
+        pytest.raises(Exception, backup_mode_mysql, backup_opt, 123456789, test_meta)

     def test_backup_mode_fs(self, monkeypatch):

@@ -89,7 +101,14 @@ class TestBackUP:
         monkeypatch.setattr(os.path, 'exists', expanduser.exists)

         assert backup_mode_fs(
-            backup_opt, int(time.time()), test_meta) is None
+            backup_opt, 123456789, test_meta) is None
+
+        backup_opt.__dict__['no_incremental'] = False
+        with open(
+            '/tmp/tar_metadata_test-hostname_test-backup-name_123456789_0', 'w') as fd:
+            fd.write('testcontent\n')
+        assert backup_mode_fs(
+            backup_opt, 123456789, test_meta) is None


     def test_backup_mode_mongo(self, monkeypatch):
@@ -102,7 +121,7 @@ class TestBackUP:
         fakeos = Os()
         fakere = FakeRe()
         fakeswiftclient = FakeSwiftClient()
-        fakeopen = FakeOpen()
+        #fakeopen = FakeOpen()
         fakelvm = Lvm()
         fakemultiprocessing = FakeMultiProcessing()
         fakemultiprocessingqueue = fakemultiprocessing.Queue()
@@ -122,7 +141,12 @@ class TestBackUP:
         monkeypatch.setattr(os.path, 'exists', fakeos.exists)
         monkeypatch.setattr(re, 'search', fakere.search)
         monkeypatch.setattr(swiftclient, 'client', fakeswiftclient.client)
-        monkeypatch.setattr(__builtin__, 'open', fakeopen.open)
+        #monkeypatch.setattr(__builtin__, 'open', fakeopen.open)

         assert backup_mode_mongo(
-            backup_opt, int(time.time()), test_meta) is None
+            backup_opt, 123456789, test_meta) is None
+
+        fakemongo2 = FakeMongoDB2()
+        monkeypatch.setattr(pymongo, 'MongoClient', fakemongo2)
+        assert backup_mode_mongo(
+            backup_opt, 123456789, test_meta) is True
245  tests/test_utils.py (new file)
@@ -0,0 +1,245 @@
#!/usr/bin/env python

from freezer.utils import (
    gen_manifest_meta, validate_all_args, validate_any_args,
    sort_backup_list, create_dir, get_match_backup,
    get_newest_backup, get_rel_oldest_backup, get_abs_oldest_backup,
    eval_restart_backup, start_time, elapsed_time, set_backup_level,
    get_vol_fs_type, check_backup_existance, add_host_name_ts_level,
    get_mount_from_path)

from freezer import utils
import pytest
import argparse
import os
import datetime
import re
from commons import *


class TestUtils:

    def test_gen_manifest_meta(self):

        backup_opt = BackupOpt1()
        manifest_meta = {}

        gen_manifest_meta(
            backup_opt, manifest_meta, meta_data_backup_file='testfile')

        manifest_meta['x-object-meta-tar-meta-obj-name'] = 'testtar'
        gen_manifest_meta(
            backup_opt, manifest_meta, meta_data_backup_file='testfile')
        del manifest_meta['x-object-meta-tar-meta-obj-name']

        manifest_meta['x-object-meta-tar-prev-meta-obj-name'] = 'testtar'
        gen_manifest_meta(
            backup_opt, manifest_meta, meta_data_backup_file='testfile')
        del manifest_meta['x-object-meta-tar-prev-meta-obj-name']

        backup_opt.__dict__['encrypt_pass_file'] = False
        gen_manifest_meta(
            backup_opt, manifest_meta, meta_data_backup_file='testfile')

    def test_validate_all_args(self):

        elements1 = ['test1', 'test2', 'test3']
        elements2 = ['test1', '', False, None]
        elements3 = None

        assert validate_all_args(elements1) is True
        assert validate_all_args(elements2) is False
        pytest.raises(Exception, validate_all_args, elements3)

    def test_validate_any_args(self):

        elements1 = ['test1', 'test2', 'test3']
        elements2 = [None, None, False, None]
        elements3 = None

        assert validate_any_args(elements1) is True
        assert validate_any_args(elements2) is False
        pytest.raises(Exception, validate_any_args, elements3)

    def test_sort_backup_list(self):

        backup_opt = BackupOpt1()

        assert type(sort_backup_list(backup_opt)) is list

    def test_create_dir(self, monkeypatch):

        dir1 = '/tmp'
        dir2 = '/tmp/testnoexistent1234'
        dir3 = '~'
        fakeos = Os()

        assert create_dir(dir1) is None
        assert create_dir(dir2) is None
        os.rmdir(dir2)
        assert create_dir(dir3) is None
        monkeypatch.setattr(os, 'makedirs', fakeos.makedirs2)
        pytest.raises(Exception, create_dir, dir2)

    def test_get_match_backup(self):

        backup_opt = BackupOpt1()

        backup_opt = get_match_backup(backup_opt)
        assert len(backup_opt.remote_match_backup) > 0

        backup_opt.__dict__['backup_name'] = ''
        pytest.raises(Exception, get_match_backup, backup_opt)

    def test_get_newest_backup(self, monkeypatch):

        backup_opt = BackupOpt1()
        backup_opt = get_newest_backup(backup_opt)
        assert len(backup_opt.remote_newest_backup) > 0

        backup_opt = BackupOpt1()
        backup_opt.__dict__['remote_match_backup'] = ''
        backup_opt = get_newest_backup(backup_opt)
        assert backup_opt.remote_match_backup is not True

        backup_opt = BackupOpt1()
        fakere2 = FakeRe2()
        monkeypatch.setattr(re, 'search', fakere2.search)
        backup_opt = get_newest_backup(backup_opt)
        assert backup_opt.remote_match_backup is not True

    def test_get_rel_oldest_backup(self):

        backup_opt = BackupOpt1()
        backup_opt = get_rel_oldest_backup(backup_opt)
        assert len(backup_opt.remote_rel_oldest) > 0

        backup_opt.__dict__['backup_name'] = ''
        pytest.raises(Exception, get_rel_oldest_backup, backup_opt)

    def test_get_abs_oldest_backup(self):

        backup_opt = BackupOpt1()
        backup_opt.__dict__['remote_match_backup'] = []
        backup_opt = get_abs_oldest_backup(backup_opt)
        assert len(backup_opt.remote_abs_oldest) == 0

        backup_opt = BackupOpt1()
        backup_opt.__dict__['remote_match_backup'] = backup_opt.remote_obj_list
        backup_opt = get_abs_oldest_backup(backup_opt)
        assert len(backup_opt.remote_abs_oldest) > 0

        backup_opt = BackupOpt1()
        backup_opt.__dict__['backup_name'] = ''
        pytest.raises(Exception, get_abs_oldest_backup, backup_opt)

    def test_eval_restart_backup(self, monkeypatch):

        backup_opt = BackupOpt1()
        assert eval_restart_backup(backup_opt) is False

        backup_opt.__dict__['restart_always_backup'] = None
        assert eval_restart_backup(backup_opt) is False

        backup_opt = BackupOpt1()
        fake_get_rel_oldest_backup = Fakeget_rel_oldest_backup()
        monkeypatch.setattr(utils, 'get_rel_oldest_backup', fake_get_rel_oldest_backup)
        assert eval_restart_backup(backup_opt) is False

        backup_opt = BackupOpt1()
        fake_get_rel_oldest_backup2 = Fakeget_rel_oldest_backup2()
        monkeypatch.setattr(utils, 'get_rel_oldest_backup', fake_get_rel_oldest_backup2)
        fakere2 = FakeRe2()
        monkeypatch.setattr(re, 'search', fakere2.search)
        assert eval_restart_backup(backup_opt) is not None
        #pytest.raises(Exception, eval_restart_backup, backup_opt)


    def test_start_time(self):

        (time_stamp, day_time) = start_time()
        assert type(day_time) is datetime.datetime
        assert type(time_stamp) is int

    def test_elapsed_time(self):

        (time_stamp, day_time) = start_time()
        assert elapsed_time(day_time) is None

    def test_set_backup_level(self):

        manifest_meta = dict()
        backup_opt = BackupOpt1()
        manifest_meta['x-object-meta-backup-name'] = True
        manifest_meta['x-object-meta-backup-current-level'] = 1
        manifest_meta['x-object-meta-always-backup-level'] = 3
        manifest_meta['x-object-meta-restart-always-backup'] = 3

        (backup_opt, manifest_meta_dict) = set_backup_level(
            backup_opt, manifest_meta)
        assert manifest_meta['x-object-meta-backup-current-level'] is not False

        backup_opt = BackupOpt1()
        manifest_meta['x-object-meta-maximum-backup-level'] = 2
        (backup_opt, manifest_meta_dict) = set_backup_level(
            backup_opt, manifest_meta)
        assert manifest_meta['x-object-meta-backup-current-level'] is not False

        backup_opt = BackupOpt1()
        backup_opt.__dict__['curr_backup_level'] = 1
        (backup_opt, manifest_meta_dict) = set_backup_level(
            backup_opt, manifest_meta)
        assert manifest_meta['x-object-meta-backup-current-level'] is not False

        manifest_meta = dict()
        backup_opt = BackupOpt1()
        manifest_meta['x-object-meta-backup-name'] = False
        manifest_meta['x-object-meta-maximum-backup-level'] = 0
        manifest_meta['x-object-meta-backup-current-level'] = 1
        (backup_opt, manifest_meta) = set_backup_level(
            backup_opt, manifest_meta)
        assert manifest_meta['x-object-meta-backup-current-level'] == '0'

    def test_get_vol_fs_type(self, monkeypatch):

        backup_opt = BackupOpt1()
        pytest.raises(Exception, get_vol_fs_type, backup_opt)

        fakeos = Os()
        monkeypatch.setattr(os.path, 'exists', fakeos.exists)
        #fakesubprocess = FakeSubProcess()
        pytest.raises(Exception, get_vol_fs_type, backup_opt)

        fakere = FakeRe()
        monkeypatch.setattr(re, 'search', fakere.search)
        assert type(get_vol_fs_type(backup_opt)) is str

    def test_check_backup_existance(self, monkeypatch):

        backup_opt = BackupOpt1()
        backup_opt.__dict__['backup_name'] = None
        assert type(check_backup_existance(backup_opt)) is dict

        fakeswiftclient = FakeSwiftClient()
        backup_opt = BackupOpt1()
        assert check_backup_existance(backup_opt) is True

        fake_get_newest_backup = Fakeget_newest_backup()
        monkeypatch.setattr(utils, 'get_newest_backup', fake_get_newest_backup)
        assert type(check_backup_existance(backup_opt)) is dict

    def test_add_host_name_ts_level(self):

        backup_opt = BackupOpt1()
        backup_opt.__dict__['backup_name'] = False
        pytest.raises(Exception, add_host_name_ts_level, backup_opt)

        backup_opt = BackupOpt1()
        assert type(add_host_name_ts_level(backup_opt)) is unicode

    def test_get_mount_from_path(self):

        dir1 = '/tmp'
        dir2 = '/tmp/nonexistentpathasdf'
        assert type(get_mount_from_path(dir1)) is str
        pytest.raises(Exception, get_mount_from_path, dir2)
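The commit message notes that tox.ini was adjusted for runtests.py, but that change is not part of this diff, so the exact tox invocation is unknown. Assuming pytest is installed, the new module can also be run directly; the helper below is hypothetical and only illustrates one way to do that:

# run_utils_tests.py -- hypothetical helper, not part of this commit
import pytest

if __name__ == '__main__':
    # equivalent to running "py.test tests/test_utils.py" from the shell
    raise SystemExit(pytest.main(['tests/test_utils.py']))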