basic integration tests for freezer-agent
The tests provided in the tests/integration directory execute the command-line version of the freezer-agent. The tests run backups and restores, passing the parameters on the command line, and check the results.

The types of integration tests executed depend on the environment variables defined. Local storage tests are enabled by default and use temporary files and directories, while SSH and Swift storage tests also need access to existing external resources.

To enable the SSH storage tests, the following environment variables need to be defined:
- FREEZER_TEST_SSH_USERNAME
- FREEZER_TEST_SSH_KEY
- FREEZER_TEST_SSH_HOST
- FREEZER_TEST_CONTAINER

To enable the Swift storage tests, the following environment variables need to be defined:
- FREEZER_TEST_OS_USERNAME
- FREEZER_TEST_OS_TENANT_NAME
- FREEZER_TEST_OS_REGION_NAME
- FREEZER_TEST_OS_PASSWORD
- FREEZER_TEST_OS_AUTH_URL

Tests involving LVM snapshots are executed only with superuser privileges and only if the following env variable is not defined:
- FREEZER_TEST_NO_LVM

Change-Id: I39f0c9bf62f7c82f81ad95f19cf0f7aefb059633
This commit is contained in:
parent
a2a07344d1
commit
cea7fb2702
@@ -2,6 +2,15 @@
|
||||
Test Scenario
|
||||
=============
|
||||
|
||||
Summary
|
||||
=======
|
||||
|
||||
* Intro
|
||||
* 1. Setup Devstack machine with swift and elasticsearch
|
||||
* 2. Setup the client machine
|
||||
* 3. Freezer test scenarios
|
||||
* 4. Automated Integration Tests
|
||||
|
||||
Intro
|
||||
=====
|
||||
|
||||
@@ -573,3 +582,73 @@ The restore job is the same as in 3.4.3
|
||||
|
||||
freezer-scheduler job-create -c client_node_1 --file job-restore.conf
|
||||
|
||||
|
||||
4. Automated Integration Tests
|
||||
==============================
|
||||
|
||||
Automated integration tests are provided in the directory
|
||||
|
||||
freezer/tests/integration
|
||||
|
||||
Since they require external resources - such as swift or ssh storage -
|
||||
they are executed only when some environment variables are defined.
|
||||
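
For example, assuming the usual tox setup (the environment name may differ),
the whole test suite, including the integration tests, can be run with:
::

tox -e py27
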
|
||||
4.1 local storage tests
|
||||
-----------------------
|
||||
These tests are always executed automatically, using temporary local directories under /tmp
|
||||
(or whatever temporary path is available)
|
||||
|
||||
4.2 ssh storage
|
||||
---------------
|
||||
SSH storage tests need the following environment variables to be defined:
|
||||
::
|
||||
|
||||
* FREEZER_TEST_SSH_KEY
|
||||
* FREEZER_TEST_SSH_USERNAME
|
||||
* FREEZER_TEST_SSH_HOST
|
||||
* FREEZER_TEST_CONTAINER (directory on the remote machine used to store the backups)
|
||||
|
||||
For example:
|
||||
::
|
||||
|
||||
export FREEZER_TEST_SSH_KEY=/home/myuser/.ssh/id_rsa
|
||||
export FREEZER_TEST_SSH_USERNAME=myuser
|
||||
export FREEZER_TEST_SSH_HOST=127.0.0.1
|
||||
export FREEZER_TEST_CONTAINER=/home/myuser/freezer_test_backup_storage_ssh
|
||||
|
||||
4.3 swift storage
|
||||
-----------------
|
||||
To enable the swift integration tests - besides having a working swift node -
|
||||
the following variables need to be defined accordingly:
|
||||
::
|
||||
|
||||
* FREEZER_TEST_OS_TENANT_NAME
|
||||
* FREEZER_TEST_OS_USERNAME
|
||||
* FREEZER_TEST_OS_REGION_NAME
|
||||
* FREEZER_TEST_OS_PASSWORD
|
||||
* FREEZER_TEST_OS_AUTH_URL
|
||||
|
||||
For example:
|
||||
::
|
||||
|
||||
export FREEZER_TEST_OS_TENANT_NAME=fproject
|
||||
export FREEZER_TEST_OS_USERNAME=fuser
|
||||
export FREEZER_TEST_OS_REGION_NAME=RegionOne
|
||||
export FREEZER_TEST_OS_PASSWORD=freezer
|
||||
export FREEZER_TEST_OS_AUTH_URL=http://192.168.56.223:5000/v2.0
|
||||
|
||||
The cloud user/tenant must already have been created.
|
||||
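
For example, a matching user and tenant can be created beforehand with the
openstack client (the names and the role below are only placeholders matching
the variables above):
::

openstack project create fproject
openstack user create fuser --password freezer --project fproject
openstack role add --user fuser --project fproject Member
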
|
||||
4.4 LVM and MySQL
|
||||
-----------------
|
||||
Some tests, such as those using LVM snapshots or accessing privileged files, need
|
||||
to be executed with superuser privileges.
|
||||
Tests involving such requirements are not executed when run
|
||||
with normal-user privileges.
|
||||
In cases where LVM snapshot capability is not available (for example
|
||||
the filesystem does not make use of logical volumes, or there is not enough space
|
||||
available), the LVM tests can be skipped by defining the following
|
||||
env variable:
|
||||
::
|
||||
|
||||
* FREEZER_TEST_NO_LVM
|
||||
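
For example:
::

export FREEZER_TEST_NO_LVM=1

Only the presence of the variable is checked, so any value will do.
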
|
272 tests/integration/common.py (new file)
@@ -0,0 +1,272 @@
|
||||
# Copyright 2015 Hewlett-Packard
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# This product includes cryptographic software written by Eric Young
|
||||
# (eay@cryptsoft.com). This product includes software written by Tim
|
||||
# Hudson (tjh@cryptsoft.com).
|
||||
# ========================================================================
|
||||
|
||||
import distutils.spawn
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
import paramiko
|
||||
|
||||
class CommandFailed(Exception):
|
||||
|
||||
def __init__(self, returncode, cmd, output, stderr):
|
||||
super(CommandFailed, self).__init__()
|
||||
self.returncode = returncode
|
||||
self.cmd = cmd
|
||||
self.stdout = output
|
||||
self.stderr = stderr
|
||||
|
||||
def __str__(self):
|
||||
return ("Command '%s' returned unexpected exit status %d.\n"
|
||||
"stdout:\n%s\n"
|
||||
"stderr:\n%s" % (self.cmd, self.returncode,
|
||||
self.stdout, self.stderr))
|
||||
|
||||
|
||||
def execute(cmd, must_fail=False, merge_stderr=False):
|
||||
"""Executes specified command for the given action."""
|
||||
cmdlist = shlex.split(cmd.encode('utf-8'))
|
||||
result = ''
|
||||
result_err = ''
|
||||
stdout = subprocess.PIPE
|
||||
stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
|
||||
proc = subprocess.Popen(cmdlist, stdout=stdout, stderr=stderr)
|
||||
result, result_err = proc.communicate()
|
||||
|
||||
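# must_fail inverts the expectation: the command is then required to exit non-zero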
if not must_fail and proc.returncode != 0:
|
||||
raise CommandFailed(proc.returncode, cmd, result, result_err)
|
||||
if must_fail and proc.returncode == 0:
|
||||
raise CommandFailed(proc.returncode, cmd, result, result_err)
|
||||
return result
|
||||
|
||||
|
||||
class Temp_Tree(object):
|
||||
|
||||
def __init__(self, suffix='', dir=None, create=True):
|
||||
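# with create=False an existing directory is wrapped and is not removed on cleanup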
self.create = create
|
||||
if create:
|
||||
self.path = tempfile.mkdtemp(dir=dir, prefix='__freezer_', suffix=suffix)
|
||||
else:
|
||||
self.path = dir
|
||||
self.files = []
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def cleanup(self):
|
||||
if self.create and self.path:
|
||||
shutil.rmtree(self.path)
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.cleanup()
|
||||
|
||||
def add_random_data(self, ndir=5, nfile=5, size=1024):
|
||||
"""
|
||||
Add some files containing random data.
|
||||
|
||||
:param ndir: number of dirs to create
|
||||
:param nfile: number of files to create in each dir
|
||||
:param size: size of files
|
||||
:return: None
|
||||
"""
|
||||
for x in xrange(ndir):
|
||||
subdir_path = tempfile.mkdtemp(dir=self.path)
|
||||
for y in xrange(nfile):
|
||||
abs_pathname = self.create_file_with_random_data(dir_path=subdir_path, size=size)
|
||||
rel_path_name = abs_pathname[len(self.path)+1:]
|
||||
self.files.append(rel_path_name)
|
||||
|
||||
def create_file_with_random_data(self, dir_path, size=1024):
|
||||
handle, abs_pathname = tempfile.mkstemp(dir=dir_path)
|
||||
with open(abs_pathname, 'wb') as fd:
|
||||
fd.write(os.urandom(size))
|
||||
return abs_pathname
|
||||
|
||||
def get_file_hash(self, rel_filepath):
|
||||
filepath = os.path.join(self.path, rel_filepath)
|
||||
if os.path.isfile(filepath):
|
||||
return self._filehash(filepath)
|
||||
else:
|
||||
return ''
|
||||
|
||||
def _filehash(self, filepath):
|
||||
"""
|
||||
Get a git-style SHA1 hash of a file
|
||||
|
||||
:param filepath: path of file to hash
|
||||
:return: hash of the file
|
||||
"""
|
||||
filesize_bytes = os.path.getsize(filepath)
|
||||
hash_obj = hashlib.sha1()
|
||||
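# git-style blob hash: sha1 over a "blob <size>\0" header followed by the file content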
hash_obj.update(("blob %u\0" % filesize_bytes).encode('utf-8'))
|
||||
with open(filepath, 'rb') as handle:
|
||||
hash_obj.update(handle.read())
|
||||
return hash_obj.hexdigest()
|
||||
|
||||
def get_file_list(self):
|
||||
"""
|
||||
walks the dir tree and creates a list of relative pathnames
|
||||
:return: list of relative file paths
|
||||
"""
|
||||
self.files = []
|
||||
for root, dirs, files in os.walk(self.path):
|
||||
rel_base = root[len(self.path)+1:]
|
||||
self.files.extend([os.path.join(rel_base, x) for x in files])
|
||||
return self.files
|
||||
|
||||
def is_equal(self, other_tree):
|
||||
"""
|
||||
Checks whether two dir trees contain the same files.
|
||||
It checks the number of files and the hash of each file.
|
||||
|
||||
NOTE: tox puts .coverage files in the temp folder (?)
|
||||
|
||||
:param other_tree: dir tree to compare with
|
||||
:return: true if the dir trees contain the same files
|
||||
"""
|
||||
lh_files = [x for x in sorted(self.get_file_list())
|
||||
if not x.startswith('.coverage')]
|
||||
rh_files = [x for x in sorted(other_tree.get_file_list())
|
||||
if not x.startswith('.coverage')]
|
||||
if lh_files != rh_files:
|
||||
return False
|
||||
for fname in lh_files:
|
||||
if os.path.isfile(fname):
|
||||
if self.get_file_hash(fname) != other_tree.get_file_hash(fname):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class TestFS(unittest.TestCase):
|
||||
"""
|
||||
Utility class for setting up the tests.
|
||||
|
||||
The types of tests executed depend (also) on the environment variables defined.
|
||||
|
||||
To enable the ssh storage testing, the following environment variables need to be defined:
|
||||
- FREEZER_TEST_SSH_KEY
|
||||
- FREEZER_TEST_SSH_USERNAME
|
||||
- FREEZER_TEST_SSH_HOST
|
||||
- FREEZER_TEST_CONTAINER
|
||||
|
||||
To enable the swift storage testing, the following environment variables need to be defined:
|
||||
- FREEZER_TEST_OS_TENANT_NAME
|
||||
- FREEZER_TEST_OS_USERNAME
|
||||
- FREEZER_TEST_OS_REGION_NAME
|
||||
- FREEZER_TEST_OS_PASSWORD
|
||||
- FREEZER_TEST_OS_AUTH_URL
|
||||
|
||||
Tests involving LVM snapshots are skipped if:
|
||||
- user is not root
|
||||
- FREEZER_TEST_NO_LVM is set
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.ssh_key = os.environ.get('FREEZER_TEST_SSH_KEY')
|
||||
self.ssh_username = os.environ.get('FREEZER_TEST_SSH_USERNAME')
|
||||
self.ssh_host = os.environ.get('FREEZER_TEST_SSH_HOST')
|
||||
self.container = os.environ.get('FREEZER_TEST_CONTAINER')
|
||||
self.use_ssh = self.ssh_key and self.ssh_username and self.ssh_host and self.container
|
||||
if self.use_ssh:
|
||||
self.ssh_client = paramiko.SSHClient()
|
||||
self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
self.ssh_client.connect(self.ssh_host,
|
||||
username=self.ssh_username,
|
||||
key_filename=self.ssh_key)
|
||||
|
||||
self.os_tenant_name = os.environ.get('FREEZER_TEST_OS_TENANT_NAME')
|
||||
self.os_user_name = os.environ.get('FREEZER_TEST_OS_USERNAME')
|
||||
self.os_region = os.environ.get('FREEZER_TEST_OS_REGION_NAME')
|
||||
self.os_password = os.environ.get('FREEZER_TEST_OS_PASSWORD')
|
||||
self.os_auth_url = os.environ.get('FREEZER_TEST_OS_AUTH_URL')
|
||||
self.use_os = (self.os_tenant_name and self.os_user_name and self.os_region
|
||||
and self.os_password and self.os_auth_url)
|
||||
if self.use_os:
|
||||
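# export the standard OS_* variables so the spawned openstack/swift clients can authenticate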
os.environ['OS_USERNAME'] = self.os_user_name
|
||||
os.environ['OS_TENANT_NAME'] = self.os_tenant_name
|
||||
os.environ['OS_AUTH_URL'] = self.os_auth_url
|
||||
os.environ['OS_PASSWORD'] = self.os_password
|
||||
os.environ['OS_REGION_NAME'] = self.os_region
|
||||
os.environ['OS_TENANT_ID'] = ''
|
||||
|
||||
self.openstack_executable = distutils.spawn.find_executable('openstack')
|
||||
self.swift_executable = distutils.spawn.find_executable('swift')
|
||||
|
||||
self.use_lvm = (os.getuid() == 0 and 'FREEZER_TEST_NO_LVM' not in os.environ)
|
||||
|
||||
self.source_tree = Temp_Tree()
|
||||
self.dest_tree = Temp_Tree()
|
||||
|
||||
self.ssh_executable = distutils.spawn.find_executable('ssh')
|
||||
|
||||
def tearDown(self):
|
||||
self.source_tree.cleanup()
|
||||
self.dest_tree.cleanup()
|
||||
|
||||
def assertTreesMatch(self):
|
||||
self.assertTrue(self.source_tree.is_equal(self.dest_tree))
|
||||
|
||||
def assertTreesMatchNot(self):
|
||||
self.assertFalse(self.source_tree.is_equal(self.dest_tree))
|
||||
|
||||
def dict_to_args(self, d):
|
||||
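# e.g. {'max_level': '6'} -> ' --max-level 6' (underscores become dashes)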
arg_string = ''
|
||||
for k, v in d.iteritems():
|
||||
arg_string += ' --{0} {1}'.format(k.replace('_', '-'), v)
|
||||
return arg_string
|
||||
|
||||
def get_file_list_ssh(self, sub_path=''):
|
||||
ftp = self.ssh_client.open_sftp()
|
||||
path = '{0}/{1}'.format(self.container, sub_path)
|
||||
return ftp.listdir(path)
|
||||
|
||||
def remove_ssh_directory(self, sub_path=''):
|
||||
cmd = 'rm -rf {0}/{1}'.format(self.container, sub_path)
|
||||
self.ssh_client.exec_command(cmd)
|
||||
|
||||
def get_file_list_openstack(self, container):
|
||||
if self.openstack_executable:
|
||||
cmd = '{0} object list {1} -f json'.format(
|
||||
self.openstack_executable, container)
|
||||
json_result = execute(cmd)
|
||||
result = json.loads(json_result)
|
||||
return [x['Name'] for x in result]
|
||||
if self.swift_executable:
|
||||
cmd = '{0} list {1}'.format(self.swift_executable, container)
|
||||
result = execute(cmd)
|
||||
return result.split()
|
||||
raise Exception("Unable to get container list using openstackclient/swiftclient")
|
||||
|
||||
def remove_swift_container(self, container):
|
||||
if self.openstack_executable:
|
||||
execute('{0} container delete {1}'.format(self.openstack_executable,
|
||||
container))
|
||||
execute('{0} container delete {1}_segments'.format(self.openstack_executable,
|
||||
container))
|
||||
elif self.swift_executable:
|
||||
execute('{0} delete {1}'.format(self.swift_executable,
|
||||
container))
|
||||
execute('{0} delete {1}_segments'.format(self.swift_executable,
|
||||
container))
|
||||
return True
|
472 tests/integration/test_agent.py (new file)
@@ -0,0 +1,472 @@
|
||||
# Copyright 2015 Hewlett-Packard
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ========================================================================
|
||||
|
||||
from copy import copy
|
||||
import json
|
||||
import os
|
||||
|
||||
import common
|
||||
import uuid
|
||||
|
||||
|
||||
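# the tests invoke the freezerc script from the source tree's bin/ directory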
INTEGRATION_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
TEST_DIR = os.path.normpath(os.path.join(INTEGRATION_DIR, '..'))
|
||||
COMMON_DIR = os.path.normpath(os.path.join(TEST_DIR, '..'))
|
||||
FREEZER_BIN_DIR = os.path.normpath(os.path.join(COMMON_DIR, 'bin'))
|
||||
FREEZERC = os.path.normpath(os.path.join(FREEZER_BIN_DIR, 'freezerc '))
|
||||
|
||||
|
||||
class TestSimpleExecution(common.TestFS):
|
||||
|
||||
def test_freezerc_executes(self):
|
||||
result = common.execute(FREEZERC + ' -h')
|
||||
self.assertIsNotNone(result)
|
||||
|
||||
def test_freezerc_fails_with_wrong_params(self):
|
||||
result = common.execute(FREEZERC + ' --blabla', must_fail=True, merge_stderr=True)
|
||||
self.assertIn('unrecognized arguments', result)
|
||||
|
||||
|
||||
class TestBackupFSLocalstorage(common.TestFS):
|
||||
|
||||
def test_trees(self):
|
||||
self.assertTreesMatch()
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
def test_backup_single_level(self):
|
||||
"""
|
||||
- use the default source and destination trees in /tmp (see common.TestFS)
|
||||
- use temporary directory for backup storage
|
||||
- add some random data
|
||||
- check that trees don't match anymore
|
||||
- execute backup of source tree
|
||||
- execute restore into destination tree
|
||||
- check that source and destination trees match
|
||||
|
||||
:return: None on success
|
||||
"""
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
with common.Temp_Tree() as storage_dir:
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': self.source_tree.path,
|
||||
'container': storage_dir.path,
|
||||
'storage': 'local',
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': uuid.uuid4().hex
|
||||
}
|
||||
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'local',
|
||||
'container': storage_dir.path
|
||||
}
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
def test_backup_preexisting_dir(self):
|
||||
"""
|
||||
Use external pre-defined directory for tests.
|
||||
If the directory does not exist, the test is skipped.
|
||||
|
||||
Restore to temporary folder (removed on exit)
|
||||
:return:
|
||||
"""
|
||||
workdir = os.path.expanduser('~/test_dir')
|
||||
if not os.path.isdir(workdir):
|
||||
return
|
||||
self.source_tree = common.Temp_Tree(dir=workdir, create=False)
|
||||
|
||||
with common.Temp_Tree() as storage_dir:
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': self.source_tree.path,
|
||||
'container': storage_dir.path,
|
||||
'storage': 'local',
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': uuid.uuid4().hex
|
||||
}
|
||||
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'local',
|
||||
'container': storage_dir.path
|
||||
}
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
def test_backup_local_storage_lvm(self):
|
||||
if not self.use_lvm:
|
||||
return
|
||||
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
backup_name = uuid.uuid4().hex
|
||||
lvm_auto_snap = self.source_tree.path
|
||||
lvm_snapsize = '1G'
|
||||
lvm_snapname = 'freezer-snap_{0}'.format(backup_name)
|
||||
lvm_dirmount = '/var/freezer/freezer-{0}'.format(backup_name)
|
||||
path_to_backup = os.path.join(lvm_dirmount, self.source_tree.path)
|
||||
|
||||
with common.Temp_Tree() as storage_dir:
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': path_to_backup,
|
||||
'lvm_auto_snap': lvm_auto_snap,
|
||||
'lvm_dirmount': lvm_dirmount,
|
||||
'lvm_snapsize': lvm_snapsize,
|
||||
'lvm_snapname': lvm_snapname,
|
||||
'container': storage_dir.path,
|
||||
'storage': 'local',
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': backup_name
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'local',
|
||||
'container': storage_dir.path
|
||||
}
|
||||
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
|
||||
class TestBackupSSH(common.TestFS):
|
||||
"""
|
||||
Tests are executed if the following env vars are defined:
|
||||
- FREEZER_TEST_SSH_KEY
|
||||
- FREEZER_TEST_SSH_USERNAME
|
||||
- FREEZER_TEST_SSH_HOST
|
||||
- FREEZER_TEST_CONTAINER (directory on the remote machine used to store backups)
|
||||
"""
|
||||
|
||||
def test_backup_ssh(self):
|
||||
if not self.use_ssh:
|
||||
return
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': self.source_tree.path,
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': uuid.uuid4().hex,
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host,
|
||||
'metadata_out': '-'
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host
|
||||
}
|
||||
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
|
||||
result = json.loads(result)
|
||||
sub_path = '_'.join([result['hostname'], result['backup_name']])
|
||||
# It may be reasonable to insert a check of the files in the
|
||||
# storage directory
|
||||
# file_list = self.get_file_list_ssh(sub_path)
|
||||
|
||||
self.assertIn('backup_name', result)
|
||||
self.assertEquals(result['backup_name'], backup_args['backup_name'])
|
||||
self.assertIn('container', result)
|
||||
self.assertEquals(result['container'], self.container)
|
||||
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
self.remove_ssh_directory(sub_path)
|
||||
|
||||
def test_backup_ssh_incremental(self):
|
||||
if not self.use_ssh:
|
||||
return
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': self.source_tree.path,
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': uuid.uuid4().hex,
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host,
|
||||
'metadata_out': '-'
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host
|
||||
}
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
|
||||
result = json.loads(result)
|
||||
sub_path = '_'.join([result['hostname'], result['backup_name']])
|
||||
# It may be reasonable to insert a check of the files in the
|
||||
# storage directory
|
||||
# file_list = self.get_file_list_ssh(sub_path)
|
||||
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
# -- level 1
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
# -- level 2
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
self.remove_ssh_directory(sub_path)
|
||||
|
||||
def test_backup_ssh_incremental_with_lvm(self):
|
||||
if not self.use_ssh:
|
||||
return
|
||||
if not self.use_lvm:
|
||||
return
|
||||
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
backup_name = uuid.uuid4().hex
|
||||
lvm_auto_snap = self.source_tree.path
|
||||
lvm_snapsize = '1G'
|
||||
lvm_snapname = 'freezer-snap_{0}'.format(backup_name)
|
||||
lvm_dirmount = '/var/freezer/freezer-{0}'.format(backup_name)
|
||||
path_to_backup = os.path.join(lvm_dirmount, self.source_tree.path)
|
||||
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': path_to_backup,
|
||||
'lvm_auto_snap': lvm_auto_snap,
|
||||
'lvm_dirmount': lvm_dirmount,
|
||||
'lvm_snapsize': lvm_snapsize,
|
||||
'lvm_snapname': lvm_snapname,
|
||||
'backup_name': backup_name,
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'ssh',
|
||||
'container': self.container,
|
||||
'ssh_key': self.ssh_key,
|
||||
'ssh_username': self.ssh_username,
|
||||
'ssh_host': self.ssh_host
|
||||
}
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
# -- level 1
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
# -- level 2
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
|
||||
class TestBackupUsingSwiftStorage(common.TestFS):
|
||||
"""
|
||||
Tests are executed if the following env vars are defined:
|
||||
|
||||
- FREEZER_TEST_OS_TENANT_NAME
|
||||
- FREEZER_TEST_OS_USERNAME
|
||||
- FREEZER_TEST_OS_REGION_NAME
|
||||
- FREEZER_TEST_OS_PASSWORD
|
||||
- FREEZER_TEST_OS_AUTH_URL
|
||||
"""
|
||||
|
||||
def test_backup_os_simple(self):
|
||||
if not self.use_os:
|
||||
return
|
||||
self.source_tree.add_random_data()
|
||||
self.assertTreesMatchNot()
|
||||
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'fs',
|
||||
'path_to_backup': self.source_tree.path,
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': uuid.uuid4().hex,
|
||||
'storage': 'swift',
|
||||
'container': 'freezer_test_backups_{0}'.format(uuid.uuid4().hex),
|
||||
'metadata_out': '-'
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'swift',
|
||||
'container': copy(backup_args['container']),
|
||||
}
|
||||
remove_args = {
|
||||
'action': 'admin',
|
||||
'remove_older_than': 0,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'swift',
|
||||
'container': copy(backup_args['container']),
|
||||
}
|
||||
# --- backup
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = json.loads(result)
|
||||
self.assertIn('backup_name', result)
|
||||
self.assertEquals(result['backup_name'], backup_args['backup_name'])
|
||||
self.assertIn('container', result)
|
||||
self.assertEquals(result['container'], backup_args['container'])
|
||||
|
||||
# It may be reasonable to insert a check of the files in the
|
||||
# swift container
|
||||
# file_list = self.get_file_list_openstack(result['container'])
|
||||
|
||||
# --- restore
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
self.assertTreesMatch()
|
||||
|
||||
# --- remove backups and container
|
||||
result = common.execute(FREEZERC + self.dict_to_args(remove_args))
|
||||
self.assertIsNotNone(result)
|
||||
|
||||
result = self.remove_swift_container(backup_args['container'])
|
||||
self.assertIsNotNone(result)
|
||||
|
||||
def test_backup_swift_mysql(self):
|
||||
if not self.use_os:
|
||||
return
|
||||
if not self.use_lvm:
|
||||
return
|
||||
if not os.path.isdir('/var/lib/mysql'):
|
||||
return
|
||||
self.source_tree = common.Temp_Tree(dir='/var/lib/mysql', create=False)
|
||||
|
||||
backup_name = uuid.uuid4().hex
|
||||
lvm_auto_snap = self.source_tree.path
|
||||
lvm_snapsize = '1G'
|
||||
lvm_snapname = 'freezer-snap_{0}'.format(backup_name)
|
||||
lvm_dirmount = '/var/freezer/freezer-{0}'.format(backup_name)
|
||||
path_to_backup = os.path.join(lvm_dirmount, self.source_tree.path)
|
||||
|
||||
backup_args = {
|
||||
'action': 'backup',
|
||||
'mode': 'mysql',
|
||||
'mysql_conf': '/etc/mysql/debian.cnf',
|
||||
'path_to_backup': path_to_backup,
|
||||
'lvm_auto_snap': lvm_auto_snap,
|
||||
'lvm_dirmount': lvm_dirmount,
|
||||
'lvm_snapsize': lvm_snapsize,
|
||||
'lvm_snapname': lvm_snapname,
|
||||
'container': 'freezer_test_container_{0}'.format(backup_name),
|
||||
'storage': 'swift',
|
||||
'max_level': '6',
|
||||
'max_segment_size': '67108864',
|
||||
'backup_name': backup_name
|
||||
}
|
||||
restore_args = {
|
||||
'action': 'restore',
|
||||
'restore_abs_path': self.dest_tree.path,
|
||||
'backup_name': copy(backup_args['backup_name']),
|
||||
'storage': 'swift',
|
||||
'container': copy(backup_args['container'])
|
||||
}
|
||||
|
||||
result = common.execute(FREEZERC + self.dict_to_args(backup_args))
|
||||
self.assertIsNotNone(result)
|
||||
result = common.execute(FREEZERC + self.dict_to_args(restore_args))
|
||||
self.assertIsNotNone(result)
|
||||
# we cannot check that the trees match, as a running mysql instance will modify the files
|
12 tox.ini
@@ -16,6 +16,17 @@ deps =
|
||||
pep3143daemon
|
||||
apscheduler
|
||||
pylint>=1.3.1
|
||||
passenv =
|
||||
FREEZER_TEST_SSH_KEY
|
||||
FREEZER_TEST_SSH_USERNAME
|
||||
FREEZER_TEST_SSH_HOST
|
||||
FREEZER_TEST_CONTAINER
|
||||
FREEZER_TEST_OS_TENANT_NAME
|
||||
FREEZER_TEST_OS_USERNAME
|
||||
FREEZER_TEST_OS_REGION_NAME
|
||||
FREEZER_TEST_OS_PASSWORD
|
||||
FREEZER_TEST_OS_AUTH_URL
|
||||
FREEZER_TEST_NO_LVM
|
||||
|
||||
install_command = pip install -U {opts} {packages}
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
@@ -34,4 +45,3 @@ commands = pylint --rcfile .pylintrc freezer bin/freezerc
|
||||
[flake8]
|
||||
show-source = True
|
||||
exclude = .venv,.tox,dist,doc,test,*egg,tests
|
||||
|
||||
|