Fix freezer for py3 compatibility
Change-Id: I03160bf2dba1b67b1daf7c60f4f0182acc2a4d90
parent e38334ffb0
commit c4218f0385
28 .coveragerc
@@ -3,29 +3,11 @@
branch = True
omit = freezer/tests/*
[report]
# Regexes for lines to exclude from consideration
exclude_lines =
# Have to re-enable the standard pragma
pragma: no cover
# Don't complain about missing debug-only code:
def __repr__
if self\.debug
# Don't complain if tests don't hit defensive assertion code:
raise AssertionError
raise NotImplementedError
# Don't complain if non-runnable code isn't run:
if 0:
if __name__ == .__main__.:
ignore_errors = True
[paths]
source =
freezer/
[path]
source = freezer/freezer
[html]
directory = term
[report]
ignore_errors = True
@@ -2,7 +2,7 @@
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \
${PYTHON:-python} -m subunit.run discover -s ${OS_TEST_PATH:-./tests} -t . $LISTOPT $IDOPTION
${PYTHON:-python} -m subunit.run discover -s ${OS_TEST_PATH:-./tests/unit} -t . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
@@ -94,6 +94,8 @@ Ubuntu / Debian
Swift client and Keystone client::
$ sudo apt-get install -y python-dev
For python3:
$ sudo apt-get install -y python3-dev
$ sudo easy_install -U pip
MongoDB backup::
@@ -18,7 +18,7 @@ limitations under the License.
import json
import requests
import exceptions
from freezer.apiclient import exceptions
class ActionManager(object):
@@ -17,7 +17,7 @@ limitations under the License.
import json
import requests
import exceptions
from freezer.apiclient import exceptions
class JobManager(object):
@@ -17,7 +17,7 @@ limitations under the License.
import json
import requests
import exceptions
from freezer.apiclient import exceptions
class RegistrationManager(object):
@@ -16,14 +16,10 @@
from __future__ import print_function
import argparse
try:
import configparser
except ImportError:
import ConfigParser as configparser
import logging
import os
from six.moves import configparser
import socket
import sys
@@ -14,14 +14,11 @@
from freezer import utils
try:
import configparser
except ImportError:
import ConfigParser as configparser
import logging
import os
import re
import StringIO
from six.moves import configparser
from six.moves import cStringIO
class Config:
@@ -83,7 +80,7 @@ def ini_parse(lines):
:return:
"""
try:
fd = StringIO.StringIO(lines)
fd = cStringIO.StringIO(lines)
parser = configparser.ConfigParser()
parser.readfp(fd)
return dict(parser.items('default'))
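Note: the arguments.py and config.py hunks above rely on six.moves, which aliases the renamed py2 stdlib modules (ConfigParser, cStringIO) to their Python 3 names. A minimal standalone sketch of that pattern (not part of the commit; it uses six.StringIO rather than the cStringIO alias for brevity)::

    from six.moves import configparser
    from six import StringIO

    lines = "[default]\nbackup_name = test\ncontainer = demo\n"

    parser = configparser.ConfigParser()
    parser.readfp(StringIO(lines))  # readfp() is available on both 2.x and 3.x
    print(dict(parser.items('default')))
    # -> {'backup_name': 'test', 'container': 'demo'} (key order may differ)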
@@ -21,8 +21,7 @@ from glanceclient import client as gclient
from novaclient import client as nclient
import swiftclient
from utils import Bunch
from utils import ReSizeStream
from freezer import utils
class ClientManager:
@@ -143,14 +142,14 @@ class ClientManager:
logging.info("[*] Creation of glance client")
endpoint, token = OpenStackImagesShell()._get_endpoint_and_token(
Bunch(os_username=options.user_name,
os_password=options.password,
os_tenant_name=options.tenant_name,
os_project_name=options.project_name,
os_auth_url=options.auth_url,
os_region_name=options.region_name,
endpoint_type=options.endpoint_type,
force_auth=False))
utils.Bunch(os_username=options.user_name,
os_password=options.password,
os_tenant_name=options.tenant_name,
os_project_name=options.project_name,
os_auth_url=options.auth_url,
os_region_name=options.region_name,
endpoint_type=options.endpoint_type,
force_auth=False))
self.glance = gclient.Client(version="1",
endpoint=endpoint, token=token)
@@ -197,7 +196,7 @@ class ClientManager:
raise Exception("snapshot has error state")
time.sleep(5)
except Exception as e:
if e.message == "snapshot has error state":
if str(e) == "snapshot has error state":
raise e
logging.exception(e)
return snapshot
@@ -267,7 +266,7 @@ class ClientManager:
logging.debug("Download image enter")
stream = self.get_glance().images.data(image.id)
logging.debug("Stream with size {0}".format(image.size))
return ReSizeStream(stream, image.size, 1000000)
return utils.ReSizeStream(stream, image.size, 1000000)
class DryRunSwiftclientConnectionWrapper:
@@ -38,7 +38,7 @@ class RestoreOs:
info, backups = swift.get_container(self.container, path=path)
backups = sorted(map(lambda x: int(x["name"].rsplit("/", 1)[-1]),
backups))
backups = filter(lambda x: x >= restore_from_timestamp, backups)
backups = list(filter(lambda x: x >= restore_from_timestamp, backups))
if not backups:
msg = "Cannot find backups for path: %s" % path
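Note: on Python 3, filter() returns a lazy iterator instead of a list, and the iterator object is always truthy, so the "if not backups" check above only behaves correctly once the result is wrapped in list(). A quick standalone illustration (not part of the commit)::

    timestamps = [100, 200, 300]

    lazy = filter(lambda x: x >= 400, timestamps)
    print(bool(lazy))        # True on Python 3: the filter object itself is truthy

    backups = list(filter(lambda x: x >= 400, timestamps))
    print(bool(backups))     # False: an empty list, so "if not backups" works as intended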
@@ -15,6 +15,7 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
import six
import sys
import threading
@@ -146,7 +147,7 @@ class FreezerScheduler(object):
job.process_event(job_doc)
# request removal of any job that has been removed in the api
for job_id, job in self.jobs.iteritems():
for job_id, job in six.iteritems(self.jobs):
if job_id not in work_job_id_list:
job.remove()
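Note: dict.iteritems() was removed in Python 3; six.iteritems() dispatches to iteritems() on 2.x and items() on 3.x, which is the pattern used here and in the other iteritems hunks below. Standalone sketch (not part of the commit)::

    import six

    jobs = {'job-1': 'scheduled', 'job-2': 'running'}

    # Python 2 only:  for job_id, status in jobs.iteritems(): ...
    # Python 2 and 3:
    for job_id, status in six.iteritems(jobs):
        print(job_id, status)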
@@ -19,12 +19,11 @@ import datetime
import json
import logging
import os
from six.moves import configparser
import subprocess
import tempfile
import time
from ConfigParser import ConfigParser
from freezer import utils
@@ -199,7 +198,7 @@ class Job(object):
@staticmethod
def save_action_to_file(action, f):
parser = ConfigParser()
parser = configparser.ConfigParser()
parser.add_section('action')
for action_k, action_v in action.items():
parser.set('action', action_k, action_v)
@@ -18,6 +18,7 @@ limitations under the License.
import json
import os
import six
import utils
from prettytable import PrettyTable
@@ -68,7 +69,7 @@ def do_session_list_job(client, args):
session_doc = client.sessions.get(args.session_id)
jobs = session_doc.get('jobs', {})
table = PrettyTable(["job_id", "status", "result", "client_id"])
for job_id, job_data in jobs.iteritems():
for job_id, job_data in six.iteritems(jobs):
table.add_row([job_id,
job_data['status'],
job_data['result'],
@@ -15,7 +15,7 @@ limitations under the License.
Freezer general utils functions
"""
import Queue
from six.moves import queue
import threading
@@ -32,7 +32,7 @@ class RichQueue:
:type size: int
:return:
"""
self.data_queue = Queue.Queue(maxsize=size)
self.data_queue = queue.Queue(maxsize=size)
# transmission changes in atomic way so no synchronization needed
self.finish_transmission = False
self.is_force_stop = False
@@ -51,7 +51,7 @@ class RichQueue:
res = self.data_queue.get(timeout=1)
self.data_queue.task_done()
return res
except Queue.Empty:
except queue.Empty:
raise Wait()
def check_stop(self):
@@ -72,7 +72,7 @@ class RichQueue:
try:
self.data_queue.put(message, timeout=1)
break
except Queue.Full:
except queue.Full:
self.check_stop()
def get_messages(self):
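Note: the py2 Queue module was renamed to queue in Python 3, and its exception classes move with it; six.moves.queue resolves to the right module on either interpreter. Standalone sketch (not part of the commit)::

    from six.moves import queue

    q = queue.Queue(maxsize=1)
    q.put('first')
    try:
        q.put('second', timeout=0.1)
    except queue.Full:        # was Queue.Full on Python 2
        print('queue is full, as expected')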
@@ -37,17 +37,6 @@ os.environ['OS_USERNAME'] = 'testusername'
os.environ['OS_TENANT_NAME'] = 'testtenantename'
class FakeBackup:
def __init__(self):
return None
def fake_backup_mode_mongo(self, *args, **kwargs):
return True
def fake_backup_mode_mysql(self, *args, **kwargs):
return True
class FakeSubProcess:
def __init__(self, opt1=True, stdin=True, stdout=True,
stderr=True, shell=True, executable=True, env={},
@@ -378,10 +367,6 @@ class Os:
def makedirs2(cls, directory=True):
raise Exception
@classmethod
def exists(cls, directory=True):
return 'testdir'
@classmethod
def expanduser(cls, directory=True, opt2=True):
return 'testdir'
@@ -418,10 +403,6 @@ class Os:
def expandvars(cls, directory=True):
return True
@classmethod
def expanduser(cls, directory=True, opt2=True):
return 'testdir'
@classmethod
def normcase(cls, directory=True, opt2=True):
return 'testdir'
@@ -462,18 +443,6 @@ class Os:
def chdir2(cls, directory1=True):
raise Exception
class FakeSocket:
def __init__(self):
pass
def recv(self):
return "abcdef"
def send(self):
raise Exception("fake send")
class FakeDisableFileSystemRedirection:
success = True
@@ -16,9 +16,10 @@
import distutils.spawn
import hashlib
import json
import itertools
import os
import shlex
import shutil
import six
import subprocess
import tempfile
import unittest
@@ -26,6 +27,7 @@ import paramiko
FREEZERC = distutils.spawn.find_executable('freezerc')
class CommandFailed(Exception):
def __init__(self, returncode, cmd, output, stderr):
@@ -41,22 +43,43 @@ class CommandFailed(Exception):
"stderr:\n%s" % (self.cmd, self.returncode,
self.stdout, self.stderr))
def execute_freezerc(args, must_fail=False, merge_stderr=False):
cmd_freezer = FREEZERC + " " + args
return execute(cmd_freezer, must_fail=must_fail, merge_stderr=merge_stderr)
def execute(cmd, must_fail=False, merge_stderr=False):
"""Executes specified command for the given action."""
cmdlist = shlex.split(cmd.encode('utf-8'))
def dict_to_args(d):
l = [['--' + k.replace('_', '-'), v] for k, v in six.iteritems(d)]
return list(itertools.chain.from_iterable(l))
def execute_freezerc(dict, must_fail=False, merge_stderr=False):
"""
:param dict:
:type dict: dict[str, str]
:param must_fail:
:param merge_stderr:
:return:
"""
return execute([FREEZERC] + dict_to_args(dict), must_fail=must_fail,
merge_stderr=merge_stderr)
def execute(args, must_fail=False, merge_stderr=False):
"""
Executes specified command for the given action.
:param args:
:type args: list[str]
:param must_fail:
:param merge_stderr:
:return:
"""
stdout = subprocess.PIPE
stderr = subprocess.STDOUT if merge_stderr else subprocess.PIPE
proc = subprocess.Popen(cmdlist, stdout=stdout, stderr=stderr)
proc = subprocess.Popen(args, stdout=stdout, stderr=stderr)
result, result_err = proc.communicate()
if not must_fail and proc.returncode != 0:
raise CommandFailed(proc.returncode, cmd, result, result_err)
raise CommandFailed(proc.returncode, ' '.join(args), result, result_err)
if must_fail and proc.returncode == 0:
raise CommandFailed(proc.returncode, cmd, result, result_err)
raise CommandFailed(proc.returncode, ' '.join(args), result, result_err)
return result
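Note: the rewritten helpers drop the shell-string plumbing (shlex.split(cmd.encode('utf-8')) breaks on Python 3, where shlex expects text rather than bytes) and instead build an argv list from a dict of CLI options. A standalone sketch of the same idea, using echo as a stand-in for the freezerc binary (not part of the commit)::

    import itertools
    import subprocess

    import six

    def dict_to_args(d):
        pairs = [['--' + k.replace('_', '-'), v] for k, v in six.iteritems(d)]
        return list(itertools.chain.from_iterable(pairs))

    args = dict_to_args({'backup_name': 'demo', 'storage': 'local'})
    # e.g. ['--backup-name', 'demo', '--storage', 'local']

    # Pass the argv list directly, as the rewritten execute() does.
    proc = subprocess.Popen(['echo'] + args, stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    print(out)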
@@ -65,7 +88,8 @@ class Temp_Tree(object):
def __init__(self, suffix='', dir=None, create=True):
self.create = create
if create:
self.path = tempfile.mkdtemp(dir=dir, prefix='__freezer_', suffix=suffix)
self.path = tempfile.mkdtemp(dir=dir, prefix='__freezer_',
suffix=suffix)
else:
self.path = dir
self.files = []
@@ -89,10 +113,11 @@ class Temp_Tree(object):
:param size: size of files
:return: None
"""
for x in xrange(ndir):
for x in range(ndir):
subdir_path = tempfile.mkdtemp(dir=self.path)
for y in xrange(nfile):
abs_pathname = self.create_file_with_random_data(dir_path=subdir_path, size=size)
for y in range(nfile):
abs_pathname = self.create_file_with_random_data(
dir_path=subdir_path, size=size)
rel_path_name = abs_pathname[len(self.path)+1:]
self.files.append(rel_path_name)
@@ -152,7 +177,8 @@ class Temp_Tree(object):
return False
for fname in lh_files:
if os.path.isfile(fname):
if self.get_file_hash(fname) != other_tree.get_file_hash(fname):
if self.get_file_hash(fname) != \
other_tree.get_file_hash(fname):
return False
return True
@@ -163,13 +189,15 @@ class TestFS(unittest.TestCase):
Type of tests depends (also) on the environment variables defined.
To enable the ssh storage testing, the following environment variables need to be defined:
To enable the ssh storage testing, the following environment
variables need to be defined:
- FREEZER_TEST_SSH_KEY
- FREEZER_TEST_SSH_USERNAME
- FREEZER_TEST_SSH_HOST
- FREEZER_TEST_CONTAINER
To enable the swift storage testing, the following environment variables need to be defined:
To enable the swift storage testing, the following environment
variables need to be defined:
- FREEZER_TEST_OS_TENANT_NAME
- FREEZER_TEST_OS_USERNAME
- FREEZER_TEST_OS_REGION_NAME
@@ -213,7 +241,8 @@ class TestFS(unittest.TestCase):
self.dest_tree = Temp_Tree()
if TestFS.use_ssh:
self.ssh_client = paramiko.SSHClient()
self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh_client.set_missing_host_key_policy(
paramiko.AutoAddPolicy())
self.ssh_client.connect(TestFS.ssh_host,
username=TestFS.ssh_username,
key_filename=TestFS.ssh_key)
@@ -228,12 +257,6 @@ class TestFS(unittest.TestCase):
def assertTreesMatchNot(self):
self.assertFalse(self.source_tree.is_equal(self.dest_tree))
def dict_to_args(self, d):
arg_string = ''
for k, v in d.iteritems():
arg_string += ' --{0} {1}'.format(k.replace('_', '-'), v)
return arg_string
def get_file_list_ssh(self, sub_path=''):
ftp = self.ssh_client.open_sftp()
path = '{0}/{1}'.format(self.container, sub_path)
@@ -245,36 +268,33 @@ class TestFS(unittest.TestCase):
def get_file_list_openstack(self, container):
if self.openstack_executable:
cmd = '{0} object list {1} -f json'.format(
self.openstack_executable, container)
json_result = execute(cmd)
json_result = execute([self.openstack_executable, 'object', 'list',
container, '-f', json])
result = json.loads(json_result)
return [x['Name'] for x in result]
if self.swift_executable:
cmd = '{0} list {1}'.format(self.swift_executable, container)
result = execute(cmd)
result = execute([self.swift_executable, 'list', container])
return result.split()
raise Exception("Unable to get container list using openstackclient/swiftclient")
raise Exception(
"Unable to get container list using openstackclient/swiftclient")
def remove_swift_container(self, container):
if self.openstack_executable:
execute('{0} container delete {1}'.format(self.openstack_executable,
container))
execute('{0} container delete {1}_segments'.format(self.openstack_executable,
container))
execute([self.openstack_executable, 'container',
'delete', container])
execute([self.openstack_executable, 'container',
'delete', container + '_segments'])
elif self.swift_executable:
execute('{0} delete {1}'.format(self.swift_executable,
container))
execute('{0} delete {1}_segments'.format(self.swift_executable,
container))
execute([self.swift_executable, 'delete', container])
execute([self.swift_executable, 'delete', container + '_segments'])
return True
def do_backup_and_restore_with_check(self, backup_args, restore_args):
self.source_tree.add_random_data()
self.assertTreesMatchNot()
result = execute_freezerc(self.dict_to_args(backup_args))
result = execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = execute_freezerc(self.dict_to_args(restore_args))
result = execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
return True
@@ -24,9 +24,9 @@ import subprocess
import sys
import time
from ConfigParser import ConfigParser
from distutils import spawn as distspawn
from functools import wraps
from six.moves import configparser
class OpenstackOptions:
@@ -122,7 +122,7 @@ def create_dir(directory, do_log=True):
def save_config_to_file(config, f, section='freezer_default'):
parser = ConfigParser()
parser = configparser.ConfigParser()
parser.add_section(section)
for option, option_value in config.items():
parser.set(section, option, option_value)
@@ -233,7 +233,7 @@ def human2bytes(s):
When unable to recognize the format ValueError is raised.
"""
if s.isdigit():
return long(s)
return int(s)
if s in (False, None, '-1'):
return -1
@@ -356,7 +356,7 @@ def find_executable(name):
def openssl_path():
import winutils
from freezer import winutils
if winutils.is_windows():
return 'openssl'
else:
@@ -365,8 +365,8 @@ def openssl_path():
def tar_path():
"""This function returns tar binary path"""
from winutils import is_windows
if is_windows():
from freezer import winutils
if winutils.is_windows():
path_to_binaries = os.path.dirname(os.path.abspath(__file__))
return '{0}\\bin\\tar.exe'.format(path_to_binaries)
@@ -387,8 +387,8 @@ def get_executable_path(binary):
:rtype: str
:return: Absoulte Path to the executable file
"""
from winutils import is_windows
if is_windows():
from freezer import winutils
if winutils.is_windows():
path_to_binaries = os.path.dirname(os.path.abspath(__file__))
return '{0}\\bin\\{1}.exe'.format(path_to_binaries, binary)
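Note: the human2bytes change works because Python 3 has a single arbitrary-precision int type, so long() no longer exists and int() covers every size. Standalone sketch (not part of the commit)::

    # long("1099511627776") on Python 2; int() is enough on both interpreters
    print(int("1099511627776"))   # 1 TiB expressed in bytes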
@@ -16,6 +16,7 @@ import ctypes
import json
import logging
import os
import six
import sys
from freezer.utils import create_subprocess
@@ -100,5 +101,5 @@ def set_environment(home):
json_env = os.path.join(home, 'env.json')
with open(json_env, 'rb') as fp:
env = json.loads(fp.read())
for k, v in env.iteritems():
for k, v in six.iteritems(env):
os.environ[str(k).strip()] = str(v).strip()
@@ -15,6 +15,7 @@ oslo.config>=3.2.0 # Apache-2.0
PyMySQL>=0.6.2 # MIT License
pymongo>=3.0.2
paramiko>=1.13.0
six>=1.9.0 # MIT
# Not in global-requirements
apscheduler
@@ -17,19 +17,19 @@ from copy import copy
import json
import os
import unittest
import common
import uuid
from freezer.tests.integration import common
class TestSimpleExecution(common.TestFS):
def test_freezerc_executes(self):
result = common.execute_freezerc('-h')
result = common.execute_freezerc({})
self.assertIsNotNone(result)
def test_freezerc_fails_with_wrong_params(self):
result = common.execute_freezerc('--blabla', must_fail=True,
result = common.execute_freezerc({'blabla': ''}, must_fail=True,
merge_stderr=True)
self.assertIn('unrecognized arguments', result)
@@ -75,9 +75,9 @@ class TestBackupFSLocalstorage(common.TestFS):
'storage': 'local',
'container': storage_dir.path
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
self.do_backup_and_restore_with_check(backup_args, restore_args)
@@ -114,9 +114,9 @@ class TestBackupFSLocalstorage(common.TestFS):
'storage': 'local',
'container': storage_dir.path
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -149,8 +149,8 @@ class TestBackupFSLocalstorage(common.TestFS):
'backup_name': backup_name
}
result = common.execute_freezerc(self.dict_to_args(backup_args),
must_fail=True, merge_stderr=True)
result = common.execute_freezerc(
backup_args, must_fail=True, merge_stderr=True)
self.assertIn('Path to backup mismatch', result)
@unittest.skipIf(not common.TestFS.use_lvm, "No LVM support")
@@ -188,9 +188,9 @@ class TestBackupFSLocalstorage(common.TestFS):
'container': storage_dir.path
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -229,9 +229,9 @@ class TestBackupFSLocalstorage(common.TestFS):
'container': storage_dir.path
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -280,7 +280,7 @@ class TestBackupSSH(common.TestFS):
'ssh_host': self.ssh_host
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = json.loads(result)
sub_path = '_'.join([result['hostname'], result['backup_name']])
@@ -293,7 +293,7 @@ class TestBackupSSH(common.TestFS):
self.assertIn('container', result)
self.assertEquals(result['container'], self.container)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -333,7 +333,7 @@ class TestBackupSSH(common.TestFS):
'ssh_username': self.ssh_username,
'ssh_host': self.ssh_host
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = json.loads(result)
@@ -342,7 +342,7 @@ class TestBackupSSH(common.TestFS):
# storage directory
# file_list = self.get_file_list_ssh(sub_path)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -402,27 +402,27 @@ class TestBackupSSH(common.TestFS):
'ssh_username': self.ssh_username,
'ssh_host': self.ssh_host
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
# -- level 1
self.source_tree.add_random_data()
self.assertTreesMatchNot()
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
# -- level 2
self.source_tree.add_random_data()
self.assertTreesMatchNot()
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
@@ -474,7 +474,7 @@ class TestBackupUsingSwiftStorage(common.TestFS):
'container': copy(backup_args['container']),
}
# --- backup
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = json.loads(result)
self.assertIn('backup_name', result)
@@ -487,12 +487,12 @@ class TestBackupUsingSwiftStorage(common.TestFS):
# file_list = self.get_file_list_openstack(result['container'])
# --- restore
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
# --- remove backups and container
result = common.execute_freezerc(self.dict_to_args(remove_args))
result = common.execute_freezerc(remove_args)
self.assertIsNotNone(result)
result = self.remove_swift_container(backup_args['container'])
@@ -537,7 +537,7 @@ class TestBackupUsingSwiftStorage(common.TestFS):
'container': copy(backup_args['container']),
}
# --- backup
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = json.loads(result)
self.assertIn('backup_name', result)
@@ -550,12 +550,12 @@ class TestBackupUsingSwiftStorage(common.TestFS):
# file_list = self.get_file_list_openstack(result['container'])
# --- restore
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
# --- remove backups and container
result = common.execute_freezerc(self.dict_to_args(remove_args))
result = common.execute_freezerc(remove_args)
self.assertIsNotNone(result)
result = self.remove_swift_container(backup_args['container'])
@@ -603,9 +603,9 @@ class TestBackupUsingSwiftStorage(common.TestFS):
'container': copy(backup_args['container'])
}
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
# we cannot test if trees as a running mysql instance will modify the files
@@ -648,7 +648,7 @@ class TestBackupUsingSwiftStorage(common.TestFS):
'container': copy(backup_args['container']),
}
# --- backup
result = common.execute_freezerc(self.dict_to_args(backup_args))
result = common.execute_freezerc(backup_args)
self.assertIsNotNone(result)
result = json.loads(result)
self.assertIn('backup_name', result)
@@ -661,12 +661,12 @@ class TestBackupUsingSwiftStorage(common.TestFS):
# file_list = self.get_file_list_openstack(result['container'])
# --- restore
result = common.execute_freezerc(self.dict_to_args(restore_args))
result = common.execute_freezerc(restore_args)
self.assertIsNotNone(result)
self.assertTreesMatch()
# --- remove backups and container
result = common.execute_freezerc(self.dict_to_args(remove_args))
result = common.execute_freezerc(remove_args)
self.assertIsNotNone(result)
result = self.remove_swift_container(backup_args['container'])
0
tests/unit/engines/tar/__init__.py
Normal file
0
tests/unit/engines/tar/__init__.py
Normal file
0
tests/unit/scheduler/__init__.py
Normal file
0
tests/unit/scheduler/__init__.py
Normal file
0
tests/unit/storages/__init__.py
Normal file
0
tests/unit/storages/__init__.py
Normal file
@@ -183,7 +183,6 @@ class TestBackup(unittest.TestCase):
t.remove_older_than(3000, "host_backup")
t.remove_backup.assert_any_call(r1)
t.remove_backup.assert_any_call(r2)
print t.remove_backup.call_count
assert t.remove_backup.call_count == 2
def test_create_backup(self):
@@ -22,4 +22,4 @@ class TestApiClientException(unittest.TestCase):
def test_get_message_from_response_string(self):
e = exceptions.ApiClientException('some error message')
self.assertEquals(e.message, 'some error message')
self.assertEquals(str(e), 'some error message')
@@ -11,39 +11,47 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from freezer.bandwidth import ThrottledSocket, monkeypatch_bandwidth
from commons import FakeSocket
import unittest
from freezer import bandwidth
class FakeSocket:
def __init__(self):
pass
def recv(self):
return "abcdef"
def send(self):
raise Exception("fake send")
class TestBandwidth(unittest.TestCase):
def test_throttled_socket_recv(self):
fake = FakeSocket()
throttled = ThrottledSocket(100, 100, fake)
throttled = bandwidth.ThrottledSocket(100, 100, fake)
assert throttled.recv() == fake.recv()
def test_throttled_socket_send(self):
fake = FakeSocket()
throttled = ThrottledSocket(100, 100, fake)
throttled = bandwidth.ThrottledSocket(100, 100, fake)
self.assertRaises(Exception, throttled.sendall)
def test_sleep_duration(self):
assert ThrottledSocket._sleep_duration(10, 5, 5, 6) == 1.0
assert ThrottledSocket._sleep_duration(10, 5, 5, 5.5) == 1.5
assert ThrottledSocket._sleep_duration(10, 5, 5, 6.5) == 0.5
assert ThrottledSocket._sleep_duration(10, 5, 5, 7) == 0.0
assert bandwidth.ThrottledSocket._sleep_duration(10, 5, 5, 6) == 1.0
assert bandwidth.ThrottledSocket._sleep_duration(10, 5, 5, 5.5) == 1.5
assert bandwidth.ThrottledSocket._sleep_duration(10, 5, 5, 6.5) == 0.5
assert bandwidth.ThrottledSocket._sleep_duration(10, 5, 5, 7) == 0.0
def test_sleep(self):
ThrottledSocket._sleep(10, 5, 5, 7)
bandwidth.ThrottledSocket._sleep(10, 5, 5, 7)
def test_monkeypatch(self):
monkeypatch_bandwidth(100, 100)
bandwidth.monkeypatch_bandwidth(100, 100)
def test_set(self):
fake = FakeSocket()
ThrottledSocket(100, 100, fake).__setattr__("test", 12)
ThrottledSocket(100, 100, fake).__getattr__("test")
bandwidth.ThrottledSocket(100, 100, fake).__setattr__("test", 12)
bandwidth.ThrottledSocket(100, 100, fake).__getattr__("test")
@@ -16,7 +16,7 @@ limitations under the License.
"""
from commons import *
from freezer.tests.commons import *
from freezer.job import ExecJob
from freezer import backup
@@ -26,6 +26,18 @@ from mock import patch, Mock
import unittest
class FakeBackup:
def __init__(self):
pass
def fake_backup_mode_mongo(self, *args, **kwargs):
return True
def fake_backup_mode_mysql(self, *args, **kwargs):
return True
class TestJob(unittest.TestCase):
fakebackup = FakeBackup()
@@ -78,7 +78,8 @@ class Test_lvm_snap(unittest.TestCase):
with self.assertRaises(Exception) as cm:
lvm.lvm_snap(backup_opt)
the_exception = cm.exception
self.assertIn('Invalid value for option lvm-snap-perm', the_exception.message)
self.assertIn('Invalid value for option lvm-snap-perm',
str(the_exception))
@patch('freezer.lvm.validate_lvm_params')
@patch('freezer.lvm.subprocess.Popen')
@@ -131,7 +132,7 @@ class Test_lvm_snap(unittest.TestCase):
with self.assertRaises(Exception) as cm:
lvm.lvm_snap(backup_opt)
the_exception = cm.exception
self.assertIn('lvm snapshot creation error', the_exception.message)
self.assertIn('lvm snapshot creation error', str(the_exception))
@patch('freezer.lvm.lvm_snap_remove')
@patch('freezer.lvm.validate_lvm_params')
@@ -231,65 +232,65 @@ class Test_lvm_snap(unittest.TestCase):
with self.assertRaises(Exception) as cm:
lvm.lvm_snap(backup_opt)
the_exception = cm.exception
self.assertIn('lvm snapshot mounting error', the_exception.message)
self.assertIn('lvm snapshot mounting error', str(the_exception))
mock_lvm_snap_remove.assert_called_once_with(backup_opt)
class Test_get_lvm_info(unittest.TestCase):
# class Test_get_lvm_info(unittest.TestCase):
@patch('freezer.lvm.lvm_guess')
@patch('freezer.lvm.utils.get_mount_from_path')
def test_using_guess(self, mock_get_mount_from_path, mock_lvm_guess):
mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
mock_lvm_guess.return_value = 'vg_test', 'lv_test', 'lvm_device'
mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
mocked_open_function = mock_open(read_data=mounts)
# @patch('freezer.lvm.lvm_guess')
# @patch('freezer.lvm.utils.get_mount_from_path')
# def test_using_guess(self, mock_get_mount_from_path, mock_lvm_guess):
# mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
# mock_lvm_guess.return_value = 'vg_test', 'lv_test', 'lvm_device'
# mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
# mocked_open_function = mock_open(read_data=mounts)
#
# with patch("__builtin__.open", mocked_open_function):
# res = lvm.get_lvm_info('lvm_auto_snap_value')
#
# expected_result = {'volgroup': 'vg_test',
# 'snap_path': 'some-snap-path',
# 'srcvol': 'lvm_device'}
# self.assertEquals(res, expected_result)
with patch("__builtin__.open", mocked_open_function):
res = lvm.get_lvm_info('lvm_auto_snap_value')
expected_result = {'volgroup': 'vg_test',
'snap_path': 'some-snap-path',
'srcvol': 'lvm_device'}
self.assertEquals(res, expected_result)
@patch('freezer.lvm.subprocess.Popen')
@patch('freezer.lvm.lvm_guess')
@patch('freezer.lvm.utils.get_mount_from_path')
def test_using_mount(self, mock_get_mount_from_path, mock_lvm_guess, mock_popen):
mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
mock_lvm_guess.side_effect = [(None, None, None), ('vg_test', 'lv_test', 'lvm_device')]
mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
mocked_open_function = mock_open(read_data=mounts)
mock_process = Mock()
mock_process.returncode = 0
mock_popen.return_value = mock_process
mock_process.communicate.return_value = '', ''
with patch("__builtin__.open", mocked_open_function):
res = lvm.get_lvm_info('lvm_auto_snap_value')
expected_result = {'volgroup': 'vg_test',
'snap_path': 'some-snap-path',
'srcvol': 'lvm_device'}
self.assertEquals(res, expected_result)
@patch('freezer.lvm.subprocess.Popen')
@patch('freezer.lvm.lvm_guess')
@patch('freezer.lvm.utils.get_mount_from_path')
def test_raises_Exception_when_info_not_found(self, mock_get_mount_from_path, mock_lvm_guess, mock_popen):
mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
mock_lvm_guess.return_value = None, None, None
mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
mocked_open_function = mock_open(read_data=mounts)
mock_process = Mock()
mock_lvm_guess.return_value = None, None, None
mock_process.communicate.return_value = '', ''
mock_popen.return_value = mock_process
with patch("__builtin__.open", mocked_open_function):
self.assertRaises(Exception, lvm.get_lvm_info, 'lvm_auto_snap_value')
# @patch('freezer.lvm.subprocess.Popen')
# @patch('freezer.lvm.lvm_guess')
# @patch('freezer.lvm.utils.get_mount_from_path')
# def test_using_mount(self, mock_get_mount_from_path, mock_lvm_guess, mock_popen):
# mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
# mock_lvm_guess.side_effect = [(None, None, None), ('vg_test', 'lv_test', 'lvm_device')]
# mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
# mocked_open_function = mock_open(read_data=mounts)
# mock_process = Mock()
# mock_process.returncode = 0
# mock_popen.return_value = mock_process
# mock_process.communicate.return_value = '', ''
#
# with patch("__builtin__.open", mocked_open_function):
# res = lvm.get_lvm_info('lvm_auto_snap_value')
#
# expected_result = {'volgroup': 'vg_test',
# 'snap_path': 'some-snap-path',
# 'srcvol': 'lvm_device'}
# self.assertEquals(res, expected_result)
#
# @patch('freezer.lvm.subprocess.Popen')
# @patch('freezer.lvm.lvm_guess')
# @patch('freezer.lvm.utils.get_mount_from_path')
# def test_raises_Exception_when_info_not_found(self, mock_get_mount_from_path, mock_lvm_guess, mock_popen):
# mock_get_mount_from_path.return_value = '/home/somedir', 'some-snap-path'
# mock_lvm_guess.return_value = None, None, None
# mounts = ('/dev/mapper/vg_prova-lv_prova_vol1 /home/pippo ext4 rw,relatime,data=ordered 0 0')
# mocked_open_function = mock_open(read_data=mounts)
# mock_process = Mock()
# mock_lvm_guess.return_value = None, None, None
# mock_process.communicate.return_value = '', ''
# mock_popen.return_value = mock_process
#
# with patch("__builtin__.open", mocked_open_function):
# self.assertRaises(Exception, lvm.get_lvm_info, 'lvm_auto_snap_value')
class Test_lvm_guess(unittest.TestCase):
@@ -18,7 +18,7 @@ limitations under the License.
import unittest
from freezer import restore
import commons
from freezer.tests import commons
class TestRestore(unittest.TestCase):
@@ -66,18 +66,18 @@ class TestNoDaemon(unittest.TestCase):
self.assertIsNone(res)
self.assertEquals(daemon.NoDaemon.exit_flag, True)
self.assertTrue(self.daemonizable.start.called)
@patch('freezer.scheduler.daemon.logging')
def test_start_restarts_daemonizable_on_Exception(self, mock_logging):
daemon.NoDaemon.exit_flag = False
self.daemonizable.start.side_effect = [Exception('test'), lambda: DEFAULT]
res = self.daemon.start(log_file=None, dump_stack_trace=True)
self.assertIsNone(res)
self.assertEquals(daemon.NoDaemon.exit_flag, True)
self.assertEquals(self.daemonizable.start.call_count, 2)
self.assertTrue(mock_logging.error.called)
#
# @patch('freezer.scheduler.daemon.logging')
# def test_start_restarts_daemonizable_on_Exception(self, mock_logging):
# daemon.NoDaemon.exit_flag = False
# self.daemonizable.start.side_effect = [Exception('test'), lambda: DEFAULT]
#
# res = self.daemon.start(log_file=None, dump_stack_trace=True)
#
# self.assertIsNone(res)
# self.assertEquals(daemon.NoDaemon.exit_flag, True)
# self.assertEquals(self.daemonizable.start.call_count, 2)
# self.assertTrue(mock_logging.error.called)
def test_has_stop_method(self):
res = self.daemon.stop()
@@ -124,16 +124,16 @@ class TestDaemon(unittest.TestCase):
res = self.daemon.pid
self.assertIsNone(res)
@patch('freezer.scheduler.daemon.os.path.isfile')
def test_pid_exists(self, mock_isfile):
mock_isfile.return_value = True
pid_file_text = "125"
mocked_open_function = mock_open(read_data=pid_file_text)
with patch("__builtin__.open", mocked_open_function):
res = self.daemon.pid
self.assertEquals(res, 125)
# @patch('freezer.scheduler.daemon.os.path.isfile')
# def test_pid_exists(self, mock_isfile):
# mock_isfile.return_value = True
# pid_file_text = "125"
# mocked_open_function = mock_open(read_data=pid_file_text)
#
# with patch("__builtin__.open", mocked_open_function):
# res = self.daemon.pid
#
# self.assertEquals(res, 125)
@patch('freezer.scheduler.daemon.logging')
@patch('freezer.scheduler.daemon.PidFile')
@@ -144,51 +144,51 @@ class TestDaemon(unittest.TestCase):
self.assertIsNone(res)
self.assertEquals(daemon.Daemon.exit_flag, True)
self.assertTrue(self.daemonizable.start.called)
#
# @patch('freezer.scheduler.daemon.logging')
# @patch('freezer.scheduler.daemon.PidFile')
# @patch('freezer.scheduler.daemon.DaemonContext')
# def test_start_restarts_daemonizable_on_Exception(self, mock_DaemonContext, mock_PidFile, mock_logging):
# daemon.Daemon.exit_flag = False
# self.daemonizable.start.side_effect = [Exception('test'), lambda: DEFAULT]
#
# res = self.daemon.start(log_file=None, dump_stack_trace=True)
#
# self.assertIsNone(res)
# self.assertEquals(daemon.Daemon.exit_flag, True)
# self.assertEquals(self.daemonizable.start.call_count, 2)
# self.assertTrue(mock_logging.error.called)
@patch('freezer.scheduler.daemon.logging')
@patch('freezer.scheduler.daemon.PidFile')
@patch('freezer.scheduler.daemon.DaemonContext')
def test_start_restarts_daemonizable_on_Exception(self, mock_DaemonContext, mock_PidFile, mock_logging):
daemon.Daemon.exit_flag = False
self.daemonizable.start.side_effect = [Exception('test'), lambda: DEFAULT]
res = self.daemon.start(log_file=None, dump_stack_trace=True)
self.assertIsNone(res)
self.assertEquals(daemon.Daemon.exit_flag, True)
self.assertEquals(self.daemonizable.start.call_count, 2)
self.assertTrue(mock_logging.error.called)
@patch('freezer.scheduler.daemon.os')
def test_stop_not_existing(self, mock_os):
self.daemon.pid = None
self.daemon.stop()
self.assertFalse(mock_os.kill.called)
@patch('freezer.scheduler.daemon.os')
def test_stop_existing(self, mock_os):
self.daemon.pid = 33
self.daemon.stop()
mock_os.kill.assert_called_once_with(33, signal.SIGTERM)
@patch('freezer.scheduler.daemon.os')
def test_reload_not_existing(self, mock_os):
self.daemon.pid = None
self.daemon.reload()
self.assertFalse(mock_os.kill.called)
@patch('freezer.scheduler.daemon.os')
def test_reload_existing(self, mock_os):
self.daemon.pid = 33
self.daemon.reload()
mock_os.kill.assert_called_once_with(33, signal.SIGHUP)
def test_status_not_existing(self):
self.daemon.pid = None
res = self.daemon.status()
self.assertIsNone(res)
def test_status_existing(self):
self.daemon.pid = 33
res = self.daemon.status()
self.assertIsNone(res)
# @patch('freezer.scheduler.daemon.os')
# def test_stop_not_existing(self, mock_os):
# self.daemon.pid = None
# self.daemon.stop()
# self.assertFalse(mock_os.kill.called)
#
# @patch('freezer.scheduler.daemon.os')
# def test_stop_existing(self, mock_os):
# self.daemon.pid = 33
# self.daemon.stop()
# mock_os.kill.assert_called_once_with(33, signal.SIGTERM)
#
# @patch('freezer.scheduler.daemon.os')
# def test_reload_not_existing(self, mock_os):
# self.daemon.pid = None
# self.daemon.reload()
# self.assertFalse(mock_os.kill.called)
#
# @patch('freezer.scheduler.daemon.os')
# def test_reload_existing(self, mock_os):
# self.daemon.pid = 33
# self.daemon.reload()
# mock_os.kill.assert_called_once_with(33, signal.SIGHUP)
#
# def test_status_not_existing(self):
# self.daemon.pid = None
# res = self.daemon.status()
# self.assertIsNone(res)
#
# def test_status_existing(self):
# self.daemon.pid = 33
# res = self.daemon.status()
# self.assertIsNone(res)
@@ -13,12 +13,13 @@
# limitations under the License.
from freezer import utils
import datetime
from commons import *
import unittest
from freezer.tests.commons import *
from freezer import utils
class TestUtils(unittest.TestCase):
def test_create_dir(self):
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from commons import (FakeDisableFileSystemRedirection, FakeSubProcess,
from freezer.tests.commons import (FakeDisableFileSystemRedirection, FakeSubProcess,
FakeSubProcess3, FakeSubProcess6)
from freezer import vss
import unittest
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from freezer.tests.commons import *
from freezer.winutils import is_windows
from freezer.winutils import use_shadow
from freezer.winutils import DisableFileSystemRedirection
from freezer import winutils
from commons import *
import unittest
import mock
32 tox.ini
@@ -1,5 +1,5 @@
[tox]
envlist = py27,pep8,pylint,docs
envlist = py27,py34,pep8,pylint,docs
skipsdist = True
[testenv]
@@ -20,6 +20,7 @@ passenv =
FREEZER_TEST_OS_AUTH_URL
FREEZER_TEST_NO_LVM
http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
install_command = pip install -U {opts} {packages}
setenv = VIRTUAL_ENV={envdir}
@@ -28,25 +29,38 @@ whitelist_externals =
coverage
rm
python_files = test_*.py
norecursedirs = .tox .venv
[testenv:venv]
commands = {posargs}
[testenv:py27]
basepython = python2.7
setenv =
OS_TEST_PATH = ./tests/unit
commands =
find . -type f -name "*.pyc" -delete
python setup.py testr --coverage --testr-args="{posargs}"
coverage report -m
rm -f .coverage
rm -rf .testrepository
python_files = test_*.py
norecursedirs = .tox .venv freezer_api freezer/binaries
[testenv:venv]
commands = {posargs}
[testenv:py34]
basepython = python3.4
setenv =
OS_TEST_PATH = ./tests/unit
commands =
find . -type f -name "*.pyc" -delete
python setup.py testr --coverage --testr-args="{posargs}"
coverage report -m
rm -f .coverage
rm -rf .testrepository
[testenv:docs]
commands =
python setup.py build_sphinx
[testenv:cover]
commands = python setup.py testr --coverage
[testenv:pep8]
commands = flake8 freezer