Disables shell=True in backup and restore

This PR disables shell=True in backup and restore.

Co-Authored-By: wu.chunyang <wchy1001@gmail.com>
Story: 2010004
Task: 45128
Change-Id: I13405e8bc93586441da28898e99dce936fc6d68a

commit 1126317f87 (parent 895e59237e)
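
Note on the overall pattern: the drivers used to build one shell string per backup (with `2>logfile` redirection and `| gzip` appended) and hand it to `subprocess.Popen(..., shell=True)`. The change splits the command into an argv list, sends stderr to an open log file, and wires gzip in as an explicit second process. The snippet below is only an illustrative sketch of that pattern; the command and log path are placeholders, not the real driver attributes:

```python
import subprocess

# Illustrative sketch of the shell=True -> shell=False pattern in this PR.
# "backup_cmd" and "backup_log" are placeholders, not real driver attributes.
backup_cmd = "tar -cf - /etc/hostname"   # stand-in for a real backup command
backup_log = "/tmp/backup_sketch.log"

with open(backup_log, "w+") as fp:
    # Before: Popen(backup_cmd + " 2>" + backup_log + " | gzip", shell=True)
    # After: an argv list per process and an explicit pipe between them.
    backup = subprocess.Popen(backup_cmd.split(), shell=False,
                              stdout=subprocess.PIPE, stderr=fp)
    gzip = subprocess.Popen(["gzip"], shell=False,
                            stdin=backup.stdout,
                            stdout=subprocess.PIPE, stderr=fp)
    backup.stdout.close()  # let the backup process see SIGPIPE if gzip dies
    compressed, _ = gzip.communicate()
```
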
@@ -13,6 +13,7 @@
 # limitations under the License.

 import os
+import re
 import signal
 import subprocess

@@ -30,6 +31,7 @@ class BaseRunner(object):
     cmd = ''
     restore_cmd = ''
     prepare_cmd = ''
+    backup_log = ''

     encrypt_key = CONF.backup_encryption_key

@@ -40,6 +42,7 @@ class BaseRunner(object):
         self.storage = kwargs.pop('storage', None)
         self.location = kwargs.pop('location', '')
         self.checksum = kwargs.pop('checksum', '')
+        self._gzip = False

         if 'restore_location' not in kwargs:
             kwargs['restore_location'] = self.datadir
@@ -56,9 +59,7 @@ class BaseRunner(object):
         # Only decrypt if the object name ends with .enc
         if self.location.endswith('.enc'):
             self.restore_command = self.decrypt_cmd
-        self.restore_command = (self.restore_command +
-                                self.unzip_cmd +
-                                (self.restore_cmd % kwargs))
+        self.restore_command = self.restore_cmd % kwargs
         self.prepare_command = self.prepare_cmd % kwargs

     @property
@@ -75,11 +76,11 @@ class BaseRunner(object):

     @property
     def zip_cmd(self):
-        return ' | gzip'
+        return self._gzip

     @property
     def unzip_cmd(self):
-        return 'gzip -d -c | '
+        return self._gzip

     @property
     def zip_manifest(self):
@@ -114,12 +115,26 @@ class BaseRunner(object):
         return '.enc' if self.encrypt_key else ''

     def _run(self):
+        """Running backup cmd"""
         LOG.info("Running backup cmd: %s", self.command)
-        self.process = subprocess.Popen(self.command, shell=True,
-                                        stdout=subprocess.PIPE,
-                                        stderr=subprocess.PIPE,
-                                        preexec_fn=os.setsid)
-        self.pid = self.process.pid
+        with open(self.backup_log, "w+") as fp:
+            if not self._gzip:
+                self.process = subprocess.Popen(self.command.split(),
+                                                shell=False,
+                                                stdout=subprocess.PIPE,
+                                                stderr=fp,
+                                                preexec_fn=os.setsid)
+            else:
+                bkup_process = subprocess.Popen(self.command.split(),
+                                                shell=False,
+                                                stdout=subprocess.PIPE,
+                                                stderr=fp)
+                self.process = subprocess.Popen(["gzip"], shell=False,
+                                                stdin=bkup_process.stdout,
+                                                stdout=subprocess.PIPE,
+                                                stderr=fp)
+                bkup_process.stdout.close()
+            self.pid = self.process.pid

     def __enter__(self):
         """Start up the process."""
@@ -141,14 +156,11 @@ class BaseRunner(object):
         if exc_type is not None:
             return False

-        try:
-            err = self.process.stderr.read()
-            if err:
-                raise Exception(err)
-        except OSError:
-            pass
-
         if not self.check_process():
+            with open(self.backup_log, "r") as fp:
+                err = fp.read()
+                if err:
+                    raise Exception(err)
             raise Exception()

         self.post_backup()
@@ -190,23 +202,42 @@ class BaseRunner(object):
         stream = self.storage.load(location, checksum)

         LOG.info('Running restore from stream, command: %s', command)
-        self.process = subprocess.Popen(command, shell=True,
-                                        stdin=subprocess.PIPE,
-                                        stderr=subprocess.PIPE)
         content_length = 0
-        for chunk in stream:
-            self.process.stdin.write(chunk)
-            content_length += len(chunk)
-        self.process.stdin.close()
-        try:
-            err = self.process.stderr.read()
-            if err:
-                raise Exception(err)
-        except OSError:
-            pass
+        if not re.match(r'.*.gz', location) or not self._gzip:
+            LOG.info('gz processor without gz file or with gzip disabled')
+            self.process = subprocess.Popen(command.split(), shell=False,
+                                            stdin=subprocess.PIPE,
+                                            stdout=subprocess.PIPE,
+                                            stderr=subprocess.PIPE)
+            for chunk in stream:
+                self.process.stdin.write(chunk)  # write data to mbstream
+                content_length += len(chunk)
+            stdout, stderr = self.process.communicate()
+        else:
+            LOG.info('gz processor with gz file')
+            gunzip = subprocess.Popen(["gzip", "-d", "-c"], shell=False,
+                                      stdin=subprocess.PIPE,
+                                      stdout=subprocess.PIPE,
+                                      stderr=subprocess.PIPE)
+            self.process = subprocess.Popen(command.split(), shell=False,
+                                            stdin=gunzip.stdout,
+                                            stdout=subprocess.PIPE,
+                                            stderr=subprocess.PIPE)
+            for chunk in stream:
+                gunzip.stdin.write(chunk)  # write data to mbstream
+                content_length += len(chunk)
+            gunzip.stdin.close()
+            gunzip.stdout.close()
+            stdout, stderr = self.process.communicate()
+        stdout_str = stdout.decode()
+        stderr_str = stderr.decode()
+        LOG.info("command: %s, stdout: %s, stderr: %s",
+                 command, stdout_str, stderr_str)

         if not self.check_restore_process():
+            LOG.info('self.check_restore_process() False')
+            if stderr_str:
+                raise Exception(stderr_str)
             raise Exception()

         return content_length
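
A side effect of the new unpack() flow above: communicate() is always called on self.process, so the child has already been waited on by the time the drivers check it. That is why the per-driver check_restore_process() implementations further down can read self.process.returncode instead of calling self.process.wait(). A minimal sketch of that behaviour:

```python
import subprocess

# Minimal sketch: communicate() waits for the child, so returncode is
# already populated and a separate wait() call is unnecessary.
proc = subprocess.Popen(["python3", "--version"], shell=False,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
assert proc.returncode == 0
print((stdout or stderr).decode())
```
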
@@ -26,63 +26,48 @@ CONF = cfg.CONF

 class InnoBackupEx(mysql_base.MySQLBaseRunner):
     """Implementation of Backup and Restore for InnoBackupEx."""
-    backup_log = '/tmp/innobackupex.log'
-    prepare_log = '/tmp/prepare.log'
-    restore_cmd = ('xbstream -x -C %(restore_location)s --parallel=2'
-                   ' 2>/tmp/xbstream_extract.log')
+    restore_cmd = ('xbstream -x -C %(restore_location)s --parallel=2')
     prepare_cmd = ('innobackupex'
                    ' --defaults-file=%(restore_location)s/backup-my.cnf'
                    ' --ibbackup=xtrabackup'
                    ' --apply-log'
-                   ' %(restore_location)s'
-                   ' 2>' + prepare_log)
+                   ' %(restore_location)s')
+
+    def __init__(self, *args, **kwargs):
+        super(InnoBackupEx, self).__init__(*args, **kwargs)
+        self.backup_log = '/tmp/innobackupex.log'
+        self._gzip = True

     @property
     def cmd(self):
         cmd = ('innobackupex'
                ' --stream=xbstream'
                ' --parallel=2 ' +
-               self.user_and_pass + ' %s' % self.datadir +
-               ' 2>' + self.backup_log
-               )
-        return cmd + self.zip_cmd + self.encrypt_cmd
+               self.user_and_pass + ' %s' % self.datadir)
+        return cmd

     def check_restore_process(self):
         """Check whether xbstream restore is successful."""
         LOG.info('Checking return code of xbstream restore process.')
-        return_code = self.process.wait()
+        return_code = self.process.returncode
         if return_code != 0:
             LOG.error('xbstream exited with %s', return_code)
             return False

-        with open('/tmp/xbstream_extract.log', 'r') as xbstream_log:
-            for line in xbstream_log:
-                # Ignore empty lines
-                if not line.strip():
-                    continue
-
-                LOG.error('xbstream restore failed with: %s',
-                          line.rstrip('\n'))
-                return False
-
         return True

     def post_restore(self):
         """Hook that is called after the restore command."""
         LOG.info("Running innobackupex prepare: %s.", self.prepare_command)
-        processutils.execute(self.prepare_command, shell=True)
-
-        LOG.info("Checking innobackupex prepare log")
-        with open(self.prepare_log, 'r') as prepare_log:
-            output = prepare_log.read()
-            if not output:
-                msg = "innobackupex prepare log file empty"
-                raise Exception(msg)
-
-            last_line = output.splitlines()[-1].strip()
-            if not re.search('completed OK!', last_line):
-                msg = "innobackupex prepare did not complete successfully"
-                raise Exception(msg)
+        stdout, stderr = processutils.execute(*self.prepare_command.split())
+        LOG.info("the prepare command stdout: %s, stderr: %s", stdout, stderr)
+        if not stderr:
+            msg = "innobackupex prepare log file empty"
+            raise Exception(msg)
+
+        last_line = stderr.splitlines()[-1].strip()
+        if not re.search('completed OK!', last_line):
+            msg = "innobackupex prepare did not complete successfully"
+            raise Exception(msg)


 class InnoBackupExIncremental(InnoBackupEx):
@@ -94,8 +79,7 @@ class InnoBackupExIncremental(InnoBackupEx):
                    ' --apply-log'
                    ' --redo-only'
                    ' %(restore_location)s'
-                   ' %(incremental_args)s'
-                   ' 2>/tmp/innoprepare.log')
+                   ' %(incremental_args)s')

     def __init__(self, *args, **kwargs):
         if not kwargs.get('lsn'):
@@ -111,9 +95,8 @@ class InnoBackupExIncremental(InnoBackupEx):
                ' --stream=xbstream'
                ' --incremental'
                ' --incremental-lsn=%(lsn)s ' +
-               self.user_and_pass + ' %s' % self.datadir +
-               ' 2>' + self.backup_log)
-        return cmd + self.zip_cmd + self.encrypt_cmd
+               self.user_and_pass + ' %s' % self.datadir)
+        return cmd

     def get_metadata(self):
         _meta = super(InnoBackupExIncremental, self).get_metadata()
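
The prepare step follows the same idea with oslo.concurrency: instead of passing a full shell line with shell=True, the command is split into arguments and the returned (stdout, stderr) pair is checked for 'completed OK!'. A small example of that call style, with a placeholder command rather than the real innobackupex invocation:

```python
from oslo_concurrency import processutils

# processutils.execute() takes the command as separate arguments and
# returns a (stdout, stderr) tuple; "ls -l /tmp" is only a placeholder.
prepare_command = "ls -l /tmp"
stdout, stderr = processutils.execute(*prepare_command.split())
print(stdout)
```
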
@@ -23,20 +23,23 @@ CONF = cfg.CONF

 class MariaBackup(mysql_base.MySQLBaseRunner):
     """Implementation of Backup and Restore using mariabackup."""
-    backup_log = '/tmp/mariabackup.log'
-    restore_log = '/tmp/mbstream_extract.log'
-    restore_cmd = ('mbstream -x -C %(restore_location)s 2>' + restore_log)
+    restore_cmd = ('mbstream -x -C %(restore_location)s')
     prepare_cmd = ''

+    def __init__(self, *args, **kwargs):
+        super(MariaBackup, self).__init__(*args, **kwargs)
+        self.backup_log = '/tmp/mariabackup.log'
+        self._gzip = True
+
     @property
     def cmd(self):
         cmd = ('mariabackup --backup --stream=xbstream ' +
-               self.user_and_pass + ' 2>' + self.backup_log)
-        return cmd + self.zip_cmd + self.encrypt_cmd
+               self.user_and_pass)
+        return cmd

     def check_restore_process(self):
-        LOG.debug('Checking return code of mbstream restore process.')
-        return_code = self.process.wait()
+        LOG.info('Checking return code of mbstream restore process.')
+        return_code = self.process.returncode
         if return_code != 0:
             LOG.error('mbstream exited with %s', return_code)
             return False
@@ -48,8 +51,7 @@ class MariaBackupIncremental(MariaBackup):
     """Incremental backup and restore using mariabackup."""
     incremental_prep = ('mariabackup --prepare '
                         '--target-dir=%(restore_location)s '
-                        '%(incremental_args)s '
-                        '2>/tmp/innoprepare.log')
+                        '%(incremental_args)s')

     def __init__(self, *args, **kwargs):
         if not kwargs.get('lsn'):
@@ -64,11 +66,10 @@ class MariaBackupIncremental(MariaBackup):
         cmd = (
             'mariabackup --backup --stream=xbstream'
             ' --incremental-lsn=%(lsn)s ' +
-            self.user_and_pass +
-            ' 2>' +
-            self.backup_log
+            self.user_and_pass
         )
-        return cmd + self.zip_cmd + self.encrypt_cmd
+        LOG.info('cmd:{}'.format(cmd))
+        return cmd

     def get_metadata(self):
         meta = super(MariaBackupIncremental, self).get_metadata()
@@ -81,6 +82,6 @@ class MariaBackupIncremental(MariaBackup):

     def run_restore(self):
         """Run incremental restore."""
-        LOG.debug('Running incremental restore')
+        LOG.info('Running incremental restore')
         self.incremental_restore(self.location, self.checksum)
         return self.restore_content_length
@@ -44,7 +44,7 @@ class MySQLBaseRunner(base.BaseRunner):

     def check_process(self):
         """Check the backup output for 'completed OK!'."""
-        LOG.debug('Checking backup process output.')
+        LOG.info('Checking backup process output.')
         with open(self.backup_log, 'r') as backup_log:
             output = backup_log.read()
             if not output:
@@ -78,7 +78,7 @@ class MySQLBaseRunner(base.BaseRunner):
     def incremental_restore_cmd(self, incremental_dir):
         """Return a command for a restore with a incremental location."""
         args = {'restore_location': incremental_dir}
-        return (self.decrypt_cmd + self.unzip_cmd + self.restore_cmd % args)
+        return self.restore_cmd % args

     def incremental_prepare_cmd(self, incremental_dir):
         if incremental_dir is not None:
@@ -97,7 +97,9 @@ class MySQLBaseRunner(base.BaseRunner):
         prepare_cmd = self.incremental_prepare_cmd(incremental_dir)

         LOG.info("Running restore prepare command: %s.", prepare_cmd)
-        processutils.execute(prepare_cmd, shell=True)
+        stdout, stderr = processutils.execute(*prepare_cmd.split())
+        LOG.info("The command: %s, stdout: %s, stderr: %s",
+                 prepare_cmd, stdout, stderr)

     def incremental_restore(self, location, checksum):
         """Recursively apply backups from all parents.
@@ -135,6 +137,8 @@ class MySQLBaseRunner(base.BaseRunner):
         LOG.info("Restoring back to full backup.")
         command = self.restore_command

+        LOG.debug("command:{}".format(command))
+
         self.restore_content_length += self.unpack(location, checksum, command)
         self.incremental_prepare(incremental_dir)

@@ -27,6 +27,8 @@ class PgBasebackup(base.BaseRunner):
     _is_read_only = None

     def __init__(self, *args, **kwargs):
+        self.backup_log = '/tmp/pgbackup.log'
+        self._gzip = False
         if not kwargs.get('wal_archive_dir'):
             raise AttributeError('wal_archive_dir attribute missing')
         self.wal_archive_dir = kwargs.pop('wal_archive_dir')
@@ -191,6 +193,14 @@ class PgBasebackup(base.BaseRunner):
                 return False
         return True

+    def check_restore_process(self):
+        LOG.info('Checking return code of postgres restore process.')
+        return_code = self.process.returncode
+        if return_code != 0:
+            LOG.error('postgres process exited with %s', return_code)
+            return False
+        return True
+

 class PgBasebackupIncremental(PgBasebackup):
     """Incremental backup/restore for PostgreSQL.
@@ -17,6 +17,7 @@ import re
 from oslo_concurrency import processutils
 from oslo_config import cfg
 from oslo_log import log as logging
+import semantic_version

 from backup.drivers import mysql_base

@@ -24,6 +25,10 @@ LOG = logging.getLogger(__name__)
 CONF = cfg.CONF


+class XtraBackupException(Exception):
+    """Exception class for XtraBackup."""
+
+
 class XtraBackup(mysql_base.MySQLBaseRunner):
     """Implementation of Backup and Restore for XtraBackup 8.0.

@@ -37,57 +42,50 @@ class XtraBackup(mysql_base.MySQLBaseRunner):

     innobackupex was removed in Percona XtraBackup 8.0.
     """
-    backup_log = '/tmp/xtrabackup.log'
-    prepare_log = '/tmp/prepare.log'
-    restore_cmd = ('xbstream -x -C %(restore_location)s --parallel=2'
-                   ' 2>/tmp/xbstream_extract.log')
-    prepare_cmd = (f'xtrabackup '
-                   f'--target-dir=%(restore_location)s '
-                   f'--prepare 2>{prepare_log}')
+    restore_cmd = 'xbstream -x -C %(restore_location)s --parallel=2'
+    prepare_cmd = 'xtrabackup --target-dir=%(restore_location)s --prepare'
+
+    def __init__(self, *args, **kwargs):
+        super(XtraBackup, self).__init__(*args, **kwargs)
+        self.backup_log = '/tmp/xtrabackup.log'
+        self._gzip = True

     @property
     def cmd(self):
         cmd = (f'xtrabackup --backup --stream=xbstream --parallel=2 '
-               f'--datadir={self.datadir} {self.user_and_pass} '
-               f'2>{self.backup_log}')
-        return cmd + self.zip_cmd + self.encrypt_cmd
+               f'--datadir=%(datadir)s --user=%(user)s '
+               f'--password=%(password)s --host=%(host)s'
+               % {
+                   'datadir': self.datadir,
+                   'user': CONF.db_user,
+                   'password': CONF.db_password,
+                   'host': CONF.db_host}
+               )
+        return cmd

     def check_restore_process(self):
         """Check whether xbstream restore is successful."""
         LOG.info('Checking return code of xbstream restore process.')
-        return_code = self.process.wait()
+        return_code = self.process.returncode
         if return_code != 0:
             LOG.error('xbstream exited with %s', return_code)
             return False

-        with open('/tmp/xbstream_extract.log', 'r') as xbstream_log:
-            for line in xbstream_log:
-                # Ignore empty lines
-                if not line.strip():
-                    continue
-
-                LOG.error('xbstream restore failed with: %s',
-                          line.rstrip('\n'))
-                return False
-
         return True

     def post_restore(self):
         """Prepare after data restore."""
         LOG.info("Running prepare command: %s.", self.prepare_command)
-        processutils.execute(self.prepare_command, shell=True)
+        stdout, stderr = processutils.execute(*self.prepare_command.split())
+        LOG.info("The command: %s : stdout: %s, stderr: %s",
+                 self.prepare_command, stdout, stderr)
         LOG.info("Checking prepare log")
-        with open(self.prepare_log, 'r') as prepare_log:
-            output = prepare_log.read()
-            if not output:
-                msg = "Empty prepare log file"
-                raise Exception(msg)
-
-            last_line = output.splitlines()[-1].strip()
-            if not re.search('completed OK!', last_line):
-                msg = "Prepare did not complete successfully"
-                raise Exception(msg)
+        if not stderr:
+            msg = "Empty prepare log file"
+            raise Exception(msg)
+        last_line = stderr.splitlines()[-1].strip()
+        if not re.search('completed OK!', last_line):
+            msg = "Prepare did not complete successfully"
+            raise Exception(msg)


 class XtraBackupIncremental(XtraBackup):
@@ -95,8 +93,7 @@ class XtraBackupIncremental(XtraBackup):
     prepare_log = '/tmp/prepare.log'
     incremental_prep = (f'xtrabackup --prepare --apply-log-only'
                         f' --target-dir=%(restore_location)s'
-                        f' %(incremental_args)s'
-                        f' 2>{prepare_log}')
+                        f' %(incremental_args)s')

     def __init__(self, *args, **kwargs):
         if not kwargs.get('lsn'):
@@ -106,13 +103,24 @@ class XtraBackupIncremental(XtraBackup):

         super(XtraBackupIncremental, self).__init__(*args, **kwargs)

+    # NOTE: Since 8.0.27, xtrabackup enables strict mode by default.
+    @property
+    def add_incremental_opts(self) -> bool:
+        cmd = ["xtrabackup", "--version"]
+        _, stderr = processutils.execute(*cmd)
+        xbackup_version = semantic_version.Version.coerce(
+            str(stderr).split()[2])
+        strict_mode_version = semantic_version.Version("8.0.27")
+        return xbackup_version < strict_mode_version
+
     @property
     def cmd(self):
         cmd = (f'xtrabackup --backup --stream=xbstream '
-               f'--incremental --incremental-lsn=%(lsn)s '
-               f'--datadir={self.datadir} {self.user_and_pass} '
-               f'2>{self.backup_log}')
-        return cmd + self.zip_cmd + self.encrypt_cmd
+               f'--incremental-lsn=%(lsn)s '
+               f'--datadir={self.datadir} {self.user_and_pass}')
+        if self.add_incremental_opts:
+            return '{} --incremental'.format(cmd)
+        return cmd

     def get_metadata(self):
         _meta = super(XtraBackupIncremental, self).get_metadata()
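
The new add_incremental_opts property above compares the installed xtrabackup version against 8.0.27, where strict mode starts rejecting the legacy --incremental flag. A short sketch of that comparison with semantic_version; the version string here is made up, the real one comes from `xtrabackup --version` on stderr:

```python
import semantic_version

# "8.0.26-18" is a made-up example version string.
version_output = "xtrabackup version 8.0.26-18 based on MySQL server 8.0.26"
xbackup_version = semantic_version.Version.coerce(version_output.split()[2])
strict_mode_version = semantic_version.Version("8.0.27")
print(xbackup_version < strict_mode_version)  # True -> still pass --incremental
```
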
@@ -7,3 +7,4 @@ keystoneauth1 # Apache-2.0
 python-swiftclient # Apache-2.0
 psycopg2-binary>=2.6.2 # LGPL/ZPL
 cryptography>=2.1.4 # BSD/Apache-2.0
+semantic-version>=2.7.0 # BSD
new file: backup/tests/unittests/__init__.py (0 lines)
new file: backup/tests/unittests/drivers/__init__.py (0 lines)
new file: backup/tests/unittests/drivers/test_innobackupex.py (144 lines)
@@ -0,0 +1,144 @@
|
|||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
# implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock, PropertyMock
|
||||||
|
|
||||||
|
from oslo_config import cfg
|
||||||
|
from oslo_log import log as logging
|
||||||
|
from oslo_utils import importutils
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
CONF = cfg.CONF
|
||||||
|
CONF.backup_encryption_key = None
|
||||||
|
CONF.backup_id = "backup_unittest"
|
||||||
|
CONF.db_user = "db_user"
|
||||||
|
CONF.db_password = "db_password"
|
||||||
|
CONF.db_host = "db_host"
|
||||||
|
|
||||||
|
driver_mapping = {
|
||||||
|
'innobackupex': 'backup.drivers.innobackupex.InnoBackupEx',
|
||||||
|
'innobackupex_inc': 'backup.drivers.innobackupex.InnoBackupExIncremental',
|
||||||
|
'mariabackup': 'backup.drivers.mariabackup.MariaBackup',
|
||||||
|
'mariabackup_inc': 'backup.drivers.mariabackup.MariaBackupIncremental',
|
||||||
|
'pg_basebackup': 'backup.drivers.postgres.PgBasebackup',
|
||||||
|
'pg_basebackup_inc': 'backup.drivers.postgres.PgBasebackupIncremental',
|
||||||
|
'xtrabackup': 'backup.drivers.xtrabackup.XtraBackup',
|
||||||
|
'xtrabackup_inc': 'backup.drivers.xtrabackup.XtraBackupIncremental'
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TestInnoBackupEx(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['innobackupex'])
|
||||||
|
self.params = {}
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(self.runner_cls)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
cmd = ('innobackupex'
|
||||||
|
' --stream=xbstream'
|
||||||
|
' --parallel=2 ' +
|
||||||
|
runner.user_and_pass + ' %s' % runner.datadir)
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_check_restore_process(self):
|
||||||
|
'''Check manifest'''
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.process = MagicMock()
|
||||||
|
returncode = PropertyMock(return_value=0)
|
||||||
|
type(runner.process).returncode = returncode
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
self.assertEqual(runner.check_restore_process(), True)
|
||||||
|
|
||||||
|
|
||||||
|
class TestInnoBackupExIncremental(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['innobackupex_inc'])
|
||||||
|
self.params = {
|
||||||
|
'lsn': '1234567890',
|
||||||
|
}
|
||||||
|
self.metadata = {}
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
cmd = ('innobackupex'
|
||||||
|
' --stream=xbstream'
|
||||||
|
' --incremental'
|
||||||
|
' --incremental-lsn=%(lsn)s ' +
|
||||||
|
runner.user_and_pass + ' %s' % runner.datadir)
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_get_metadata(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.get_metadata = MagicMock(return_value=self.metadata)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.get_metadata()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, self.metadata)
|
||||||
|
|
||||||
|
def test_run_restore(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
length = 10
|
||||||
|
runner.incremental_restore = MagicMock(return_value=length)
|
||||||
|
runner.restore_content_length = length
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.run_restore()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, length)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
new file: backup/tests/unittests/drivers/test_mariadb.py (135 lines)
@@ -0,0 +1,135 @@
|
|||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
# implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock, PropertyMock
|
||||||
|
|
||||||
|
from oslo_config import cfg
|
||||||
|
from oslo_log import log as logging
|
||||||
|
from oslo_utils import importutils
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
CONF = cfg.CONF
|
||||||
|
CONF.backup_encryption_key = None
|
||||||
|
CONF.backup_id = "backup_unittest"
|
||||||
|
CONF.db_user = "db_user"
|
||||||
|
CONF.db_password = "db_password"
|
||||||
|
CONF.db_host = "db_host"
|
||||||
|
|
||||||
|
driver_mapping = {
|
||||||
|
'innobackupex': 'backup.drivers.innobackupex.InnoBackupEx',
|
||||||
|
'innobackupex_inc': 'backup.drivers.innobackupex.InnoBackupExIncremental',
|
||||||
|
'mariabackup': 'backup.drivers.mariabackup.MariaBackup',
|
||||||
|
'mariabackup_inc': 'backup.drivers.mariabackup.MariaBackupIncremental',
|
||||||
|
'pg_basebackup': 'backup.drivers.postgres.PgBasebackup',
|
||||||
|
'pg_basebackup_inc': 'backup.drivers.postgres.PgBasebackupIncremental',
|
||||||
|
'xtrabackup': 'backup.drivers.xtrabackup.XtraBackup',
|
||||||
|
'xtrabackup_inc': 'backup.drivers.xtrabackup.XtraBackupIncremental'
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TestMariaBackup(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['mariabackup'])
|
||||||
|
self.params = {}
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(self.runner_cls)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
cmd = ("mariabackup --backup --stream=xbstream {}".format(
|
||||||
|
runner.user_and_pass))
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_check_restore_process(self):
|
||||||
|
'''Check manifest'''
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.process = MagicMock()
|
||||||
|
returncode = PropertyMock(return_value=0)
|
||||||
|
type(runner.process).returncode = returncode
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
self.assertEqual(runner.check_restore_process(), True)
|
||||||
|
|
||||||
|
|
||||||
|
class TestMariaBackupIncremental(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['mariabackup_inc'])
|
||||||
|
self.params = {
|
||||||
|
'lsn': '1234567890',
|
||||||
|
'incremental_dir': './'
|
||||||
|
}
|
||||||
|
self.metadata = {}
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
cmd = (
|
||||||
|
'mariabackup --backup --stream=xbstream'
|
||||||
|
' --incremental-lsn=%(lsn)s ' +
|
||||||
|
runner.user_and_pass
|
||||||
|
)
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_get_metadata(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.get_metadata = MagicMock(return_value=self.metadata)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.get_metadata()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, self.metadata)
|
||||||
|
|
||||||
|
def test_run_restore(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
length = 10
|
||||||
|
runner.incremental_restore = MagicMock(return_value=length)
|
||||||
|
runner.restore_content_length = length
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.run_restore()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, length)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
new file: backup/tests/unittests/drivers/test_postgres.py (367 lines)
@@ -0,0 +1,367 @@
|
|||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
# implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock
|
||||||
|
|
||||||
|
from oslo_config import cfg
|
||||||
|
from oslo_log import log as logging
|
||||||
|
from oslo_utils import importutils
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
CONF = cfg.CONF
|
||||||
|
CONF.backup_encryption_key = None
|
||||||
|
CONF.backup_id = "backup_unittest"
|
||||||
|
|
||||||
|
driver_mapping = {
|
||||||
|
'innobackupex': 'backup.drivers.innobackupex.InnoBackupEx',
|
||||||
|
'innobackupex_inc': 'backup.drivers.innobackupex.InnoBackupExIncremental',
|
||||||
|
'mariabackup': 'backup.drivers.mariabackup.MariaBackup',
|
||||||
|
'mariabackup_inc': 'backup.drivers.mariabackup.MariaBackupIncremental',
|
||||||
|
'pg_basebackup': 'backup.drivers.postgres.PgBasebackup',
|
||||||
|
'pg_basebackup_inc': 'backup.drivers.postgres.PgBasebackupIncremental',
|
||||||
|
'xtrabackup': 'backup.drivers.xtrabackup.XtraBackup',
|
||||||
|
'xtrabackup_inc': 'backup.drivers.xtrabackup.XtraBackupIncremental'
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TestPgBasebackup(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['pg_basebackup'])
|
||||||
|
self.params = {
|
||||||
|
'wal_archive_dir': './',
|
||||||
|
'filename': '000000010000000000000006.00000168.backup'
|
||||||
|
}
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(self.runner_cls)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
if os.path.exists(self.params.get('filename')):
|
||||||
|
os.remove(self.params.get('filename'))
|
||||||
|
|
||||||
|
def _create_test_data(self):
|
||||||
|
with open(self.params.get('filename'), 'w') as file:
|
||||||
|
file.write("START WAL LOCATION: -1/3000028 "
|
||||||
|
"(file 000000010000000000000003)\n")
|
||||||
|
file.write("STOP WAL LOCATION: 0/3000028 "
|
||||||
|
"(file 000000010000000000000003)\n")
|
||||||
|
file.write("CHECKPOINT LOCATION: 0/3000098\n")
|
||||||
|
file.write("BACKUP METHOD: streamed\n")
|
||||||
|
file.write("BACKUP FROM: master\n")
|
||||||
|
file.write("START TIME: 2023-05-01 06:53:41 UTC\n")
|
||||||
|
file.write("LABEL: 3070d460-1e67-4fbd-92ca-97c1d0101077\n")
|
||||||
|
file.write("START TIMELINE: 1\n")
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(runner.cmd,
|
||||||
|
"pg_basebackup -U postgres -Ft -z "
|
||||||
|
"--wal-method=fetch --label={} "
|
||||||
|
"--pgdata=-".format(self.params.get('filename')))
|
||||||
|
|
||||||
|
def test_manifest(self):
|
||||||
|
'''Check manifest'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(runner.manifest,
|
||||||
|
"{}.tar.gz".format(self.params.get('filename')))
|
||||||
|
|
||||||
|
def test_is_read_only(self):
|
||||||
|
'''Check is_read_only'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
runner._is_read_only = True
|
||||||
|
self.assertEqual(runner.is_read_only, True)
|
||||||
|
|
||||||
|
def test_get_wal_files(self):
|
||||||
|
'''Check get_wal_file'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
recent_backup_file = "000000010000000000000006.00000168.backup"
|
||||||
|
last_wal = "000000010000000000000007"
|
||||||
|
self._create_test_data()
|
||||||
|
|
||||||
|
runner.get_backup_files = MagicMock(
|
||||||
|
return_value=[recent_backup_file])
|
||||||
|
with open(last_wal, "w") as file:
|
||||||
|
file.write("test")
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.get_wal_files()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, [last_wal])
|
||||||
|
|
||||||
|
if os.path.exists(last_wal):
|
||||||
|
os.remove(last_wal)
|
||||||
|
|
||||||
|
def test_get_backup_files(self):
|
||||||
|
'''Check get_backup_file'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
recent_backup_file = "000000010000000000000006.00000168.backup"
|
||||||
|
runner.get_backup_files = MagicMock(
|
||||||
|
return_value=[recent_backup_file])
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.get_backup_files()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, [recent_backup_file])
|
||||||
|
|
||||||
|
def test_get_backup_metadata(self):
|
||||||
|
'''Check get_backup_metadata'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.label = self.params.get('filename')
|
||||||
|
self._create_test_data()
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
backup_metadata = runner.get_backup_metadata(
|
||||||
|
self.params.get('filename')
|
||||||
|
)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(backup_metadata['start-segment'], '-1/3000028')
|
||||||
|
self.assertEqual(
|
||||||
|
backup_metadata['start-wal-file'], '000000010000000000000003'
|
||||||
|
)
|
||||||
|
self.assertEqual(backup_metadata['stop-segment'], '0/3000028')
|
||||||
|
self.assertEqual(
|
||||||
|
backup_metadata['stop-wal-file'], '000000010000000000000003')
|
||||||
|
self.assertEqual(
|
||||||
|
backup_metadata['checkpoint-location'], '0/3000098'
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
backup_metadata['label'], '3070d460-1e67-4fbd-92ca-97c1d0101077'
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_get_metadata(self):
|
||||||
|
'''Check get_metadata'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.get_metadata = MagicMock(
|
||||||
|
return_value={'start-segment': '0/3000028'}
|
||||||
|
)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
metadata = runner.get_metadata()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(metadata['start-segment'], '0/3000028')
|
||||||
|
|
||||||
|
def test_context(self):
|
||||||
|
'''Check context methods'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner._is_read_only = True
|
||||||
|
runner.pre_backup = MagicMock()
|
||||||
|
runner._run = MagicMock()
|
||||||
|
runner.post_backup = MagicMock()
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
with runner:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
runner.pre_backup.assert_called_once_with()
|
||||||
|
runner._run.assert_called_once_with()
|
||||||
|
runner.post_backup.assert_called_once_with()
|
||||||
|
|
||||||
|
def test_check_process(self):
|
||||||
|
'''Check check_process'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner._is_read_only = True
|
||||||
|
runner.start_segment = True
|
||||||
|
runner.start_wal_file = True
|
||||||
|
runner.stop_segment = True
|
||||||
|
runner.stop_wal_file = True
|
||||||
|
runner.label = True
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.check_process()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertTrue(ret)
|
||||||
|
|
||||||
|
def test_check_restore_process(self):
|
||||||
|
'''Check check_restore_process'''
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner._is_read_only = True
|
||||||
|
runner.start_segment = True
|
||||||
|
runner.start_wal_file = True
|
||||||
|
runner.stop_segment = True
|
||||||
|
runner.stop_wal_file = True
|
||||||
|
runner.label = True
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.check_process()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertTrue(ret)
|
||||||
|
|
||||||
|
|
||||||
|
class TestPgBasebackupIncremental(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['pg_basebackup_inc'])
|
||||||
|
self.params = {
|
||||||
|
'wal_archive_dir': './',
|
||||||
|
'filename': '000000010000000000000006.00000168.backup',
|
||||||
|
'parent_location': 'http://example.com/example.tar.gz',
|
||||||
|
'parent_checksum': '63e696c5eb85550fed0a7a1a6411eb7d'
|
||||||
|
}
|
||||||
|
self.metadata = {
|
||||||
|
'start-segment': '0/3000028',
|
||||||
|
'start-wal-file': '000000010000000000000003',
|
||||||
|
'stop-segment': '0/3000028',
|
||||||
|
'stop-wal-file': '000000010000000000000003',
|
||||||
|
'checkpoint-location': '0/3000098',
|
||||||
|
'label': '000000010000000000000006.00000168.backup',
|
||||||
|
'parent_location': self.params.get('parent_location'),
|
||||||
|
'parent_checksum': self.params.get('parent_checksum'),
|
||||||
|
}
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
if os.path.exists(self.params.get('filename')):
|
||||||
|
os.remove(self.params.get('filename'))
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_pre_backup(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.pre_backup = MagicMock(return_value=None)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
runner.pre_backup()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
runner.pre_backup.assert_called_once_with()
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
wal_file_list = [
|
||||||
|
'000000010000000000000005',
|
||||||
|
'000000010000000000000003',
|
||||||
|
'000000010000000000000004'
|
||||||
|
]
|
||||||
|
wal_archive_dir = self.params.get('wal_archive_dir')
|
||||||
|
cmd = (f'tar -czf - -C {wal_archive_dir} '
|
||||||
|
f'{" ".join(wal_file_list)}')
|
||||||
|
|
||||||
|
runner.get_wal_files = MagicMock(return_value=wal_file_list)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner._cmd()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, cmd)
|
||||||
|
|
||||||
|
def test_get_metadata(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.get_metadata = MagicMock(return_value=self.metadata)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.get_metadata()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, self.metadata)
|
||||||
|
|
||||||
|
def test_incremental_restore_cmd(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
cmd = f'tar xzf - -C /var/lib/postgresql/data/pgdata'
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.incremental_restore_cmd()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, cmd)
|
||||||
|
|
||||||
|
def test_incremental_restore(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
wal_file_list = [
|
||||||
|
'000000010000000000000005',
|
||||||
|
'000000010000000000000003',
|
||||||
|
'000000010000000000000004'
|
||||||
|
]
|
||||||
|
runner.get_wal_files = MagicMock(return_value=wal_file_list)
|
||||||
|
metadata = {
|
||||||
|
'parent_location': 'https://example.com/',
|
||||||
|
'parent_checksum': 'cc39f022c5d10f38e963062ca40c95bd',
|
||||||
|
}
|
||||||
|
runner.storage = MagicMock(return_value=metadata)
|
||||||
|
command = "testcommand"
|
||||||
|
length = 10
|
||||||
|
runner.incremental_restore = MagicMock(return_value=length)
|
||||||
|
runner.incremental_restore_cmd = MagicMock(return_value=command)
|
||||||
|
runner.unpack = MagicMock(return_value=length)
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.incremental_restore({
|
||||||
|
'location': metadata['parent_location'],
|
||||||
|
'checksum': metadata['parent_checksum']
|
||||||
|
})
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, length)
|
||||||
|
|
||||||
|
def test_run_restore(self):
|
||||||
|
# prepare the test
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
length = 10
|
||||||
|
runner.incremental_restore = MagicMock(return_value=length)
|
||||||
|
runner.restore_content_length = length
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
ret = runner.run_restore()
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, length)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
new file: backup/tests/unittests/drivers/test_xtrabackup.py (196 lines)
@@ -0,0 +1,196 @@
|
|||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
# implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
from unittest.mock import MagicMock, Mock, PropertyMock, patch
|
||||||
|
|
||||||
|
from oslo_config import cfg
|
||||||
|
from oslo_log import log as logging
|
||||||
|
from oslo_utils import importutils
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
CONF = cfg.CONF
|
||||||
|
CONF.backup_encryption_key = None
|
||||||
|
CONF.backup_id = "backup_unittest"
|
||||||
|
CONF.db_user = "db_user"
|
||||||
|
CONF.db_password = "db_password"
|
||||||
|
CONF.db_host = "db_host"
|
||||||
|
|
||||||
|
driver_mapping = {
|
||||||
|
'innobackupex': 'backup.drivers.innobackupex.InnoBackupEx',
|
||||||
|
'innobackupex_inc': 'backup.drivers.innobackupex.InnoBackupExIncremental',
|
||||||
|
'mariabackup': 'backup.drivers.mariabackup.MariaBackup',
|
||||||
|
'mariabackup_inc': 'backup.drivers.mariabackup.MariaBackupIncremental',
|
||||||
|
'pg_basebackup': 'backup.drivers.postgres.PgBasebackup',
|
||||||
|
'pg_basebackup_inc': 'backup.drivers.postgres.PgBasebackupIncremental',
|
||||||
|
'xtrabackup': 'backup.drivers.xtrabackup.XtraBackup',
|
||||||
|
'xtrabackup_inc': 'backup.drivers.xtrabackup.XtraBackupIncremental'
|
||||||
|
}
|
||||||
|
|
||||||
|
from backup.drivers.xtrabackup import XtraBackupException
|
||||||
|
|
||||||
|
|
||||||
|
class TestXtraBackup(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['xtrabackup'])
|
||||||
|
self.params = {
|
||||||
|
'db_datadir': '/var/lib/mysql/data'
|
||||||
|
}
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(self.runner_cls)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
cmd = (f'xtrabackup --backup --stream=xbstream --parallel=2 '
|
||||||
|
f'--datadir=%(datadir)s --user=%(user)s '
|
||||||
|
f'--password=%(password)s --host=%(host)s'
|
||||||
|
% {
|
||||||
|
'datadir': runner.datadir,
|
||||||
|
'user': CONF.db_user,
|
||||||
|
'password': CONF.db_password,
|
||||||
|
'host': CONF.db_host}
|
||||||
|
)
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_check_restore_process(self):
|
||||||
|
'''Check manifest'''
|
||||||
|
# call the method
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
runner.process = MagicMock()
|
||||||
|
returncode = PropertyMock(return_value=0)
|
||||||
|
type(runner.process).returncode = returncode
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(runner.check_restore_process(), True)
|
||||||
|
|
||||||
|
def test_post_restore(self):
|
||||||
|
'''Check manifest'''
|
||||||
|
runner = self.runner_cls(**self.params)
|
||||||
|
mock = Mock(side_effect=XtraBackupException(
|
||||||
|
'Prepare did not complete successfully'))
|
||||||
|
runner.post_restore = mock
|
||||||
|
|
||||||
|
# call the method
|
||||||
|
with self.assertRaises(
|
||||||
|
XtraBackupException,
|
||||||
|
msg="Prepare did not complete successfully"):
|
||||||
|
runner.post_restore()
|
||||||
|
|
||||||
|
|
||||||
|
# Manually import XtraBackupIncremental to prevent from running
|
||||||
|
# xtrabackup --version when calling the TestXtraBackupIncremental
|
||||||
|
# constructor
|
||||||
|
from backup.drivers.xtrabackup import XtraBackupIncremental
|
||||||
|
|
||||||
|
|
||||||
|
class TestXtraBackupIncremental(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.runner_cls = importutils.import_class(
|
||||||
|
driver_mapping['xtrabackup_inc'])
|
||||||
|
self.params = {
|
||||||
|
'lsn': '1234567890',
|
||||||
|
'parent_location': '',
|
||||||
|
'parent_checksum': '',
|
||||||
|
}
|
||||||
|
self.metadata = {
|
||||||
|
'lsn': '1234567890',
|
||||||
|
'parent_location': 'https://example.com/location',
|
||||||
|
'parent_checksum': 'f1508ecf362a364c5aae008b4b5a9cb9',
|
||||||
|
}
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def test_instance(self):
|
||||||
|
'''Check instance and add_incremental_opts'''
|
||||||
|
# call the method
|
||||||
|
with patch(
|
||||||
|
'backup.drivers.xtrabackup.XtraBackupIncremental.'
|
||||||
|
'add_incremental_opts', new_callable=PropertyMock
|
||||||
|
) as XtraBackupIncremental_add_incremental_opts:
|
||||||
|
XtraBackupIncremental_add_incremental_opts.return_value = True
|
||||||
|
runner = XtraBackupIncremental(**self.params)
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
|
||||||
|
def test_cmd(self):
|
||||||
|
'''Check cmd property'''
|
||||||
|
# call the method
|
||||||
|
with patch(
|
||||||
|
'backup.drivers.xtrabackup.XtraBackupIncremental.'
|
||||||
|
'add_incremental_opts', new_callable=PropertyMock
|
||||||
|
) as XtraBackupIncremental_add_incremental_opts:
|
||||||
|
XtraBackupIncremental_add_incremental_opts.return_value = True
|
||||||
|
runner = XtraBackupIncremental(**self.params)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
# assertions
|
||||||
|
cmd = (f'xtrabackup --backup --stream=xbstream '
|
||||||
|
f'--incremental-lsn=%(lsn)s '
|
||||||
|
f'--datadir={runner.datadir} {runner.user_and_pass}')
|
||||||
|
if runner.add_incremental_opts:
|
||||||
|
cmd = '{} --incremental'.format(cmd)
|
||||||
|
self.assertEqual(runner.cmd, cmd)
|
||||||
|
|
||||||
|
def test_get_metadata(self):
|
||||||
|
'''Check get_metadata'''
|
||||||
|
with patch(
|
||||||
|
'backup.drivers.xtrabackup.XtraBackupIncremental.'
|
||||||
|
'add_incremental_opts', new_callable=PropertyMock
|
||||||
|
) as XtraBackupIncremental_add_incremental_opts:
|
||||||
|
XtraBackupIncremental_add_incremental_opts.return_value = True
|
||||||
|
runner = XtraBackupIncremental(**self.params)
|
||||||
|
runner.get_metadata = MagicMock(return_value=self.metadata)
|
||||||
|
|
||||||
|
# assertions
|
||||||
|
self.assertIsNotNone(runner)
|
||||||
|
ret = runner.get_metadata()
|
||||||
|
self.assertEqual(ret, self.metadata)
|
||||||
|
|
||||||
|
def test_run_restore(self):
|
||||||
|
'''Check run_restore'''
|
||||||
|
with patch(
|
||||||
|
'backup.drivers.xtrabackup.XtraBackupIncremental.'
|
||||||
|
'add_incremental_opts', new_callable=PropertyMock
|
||||||
|
) as XtraBackupIncremental_add_incremental_opts:
|
||||||
|
XtraBackupIncremental_add_incremental_opts.return_value = True
|
||||||
|
runner = XtraBackupIncremental(**self.params)
|
||||||
|
length = 10
|
||||||
|
runner.incremental_restore = MagicMock(return_value=length)
|
||||||
|
runner.restore_content_length = length
|
||||||
|
# call the method
|
||||||
|
ret = runner.run_restore()
|
||||||
|
# assertions
|
||||||
|
self.assertEqual(ret, length)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
tox.ini (4 lines changed)
@@ -39,12 +39,14 @@ commands =
 commands = oslo_debug_helper {posargs}

 [testenv:cover]
+allowlist_externals = sh
 setenv =
     {[testenv]setenv}
     PYTHON=coverage run --source trove
 commands =
     coverage erase
-    stestr run --serial {posargs}
+    sh -c 'OS_TEST_PATH={toxinidir}/backup/tests/unittests stestr run --serial {posargs}'
+    sh -c 'OS_TEST_PATH={toxinidir}/trove/tests/unittests stestr run --serial {posargs}'
     #coverage run -a run_tests.py
     coverage html -d cover
     coverage xml -o cover/coverage.xml