Merge "Add decorator to check snapshot logs"

Jenkins 2016-04-06 04:43:23 +00:00 committed by Gerrit Code Review
commit 4287a4e354
4 changed files with 1171 additions and 0 deletions

File diff suppressed because it is too large


@@ -26,6 +26,7 @@ from devops.helpers.helpers import wait
from netaddr import IPAddress
from netaddr import IPNetwork
from proboscis.asserts import assert_equal
from proboscis.asserts import assert_false
from proboscis.asserts import assert_true
# pylint: disable=import-error
from six.moves.urllib.error import HTTPError
@@ -37,8 +38,10 @@ from fuelweb_test import logger
from fuelweb_test import logwrap
from fuelweb_test.helpers.ssh_manager import SSHManager
from fuelweb_test.helpers.utils import get_mongo_partitions
from fuelweb_test.settings import DNS_SUFFIX
from fuelweb_test.settings import EXTERNAL_DNS
from fuelweb_test.settings import EXTERNAL_NTP
from fuelweb_test.settings import FUEL_MASTER_HOSTNAME
from fuelweb_test.settings import OPENSTACK_RELEASE
from fuelweb_test.settings import OPENSTACK_RELEASE_UBUNTU
from fuelweb_test.settings import POOLS
@@ -1196,3 +1199,103 @@ def check_plugin_path_env(var_name, plugin_path):
        'File {plugin_path:s} (variable: {var_name:s}) does not exists!'
        ''.format(plugin_path=plugin_path, var_name=var_name)
    )


def check_snapshot_logs(ip, snapshot_name, controller_fqdns, compute_fqdns):
    snapshot_path_master = "/var/dump/{}".format(snapshot_name)
    master_hostname = ''.join((FUEL_MASTER_HOSTNAME, DNS_SUFFIX))
    snapshot_logs_path = os.path.join(
        '{0}/fuelweb_test/config_templates/'.format(os.environ.get(
            "WORKSPACE", "./")), 'snapshot_logs.yaml')
    if os.path.exists(snapshot_logs_path):
        with open(snapshot_logs_path, 'r') as f:
            snapshot_logs = yaml.load(f)
    logger.debug("snapshot logs are {}".format(snapshot_logs))
    assert_true(snapshot_logs, "Failed to get expected snapshot"
                               " logs from {}".format(snapshot_logs_path))
    absent_logs = []
    logger.debug("checking master logs...")
    for log in snapshot_logs['master']['master_node_logs'].split():
        logger.debug("checking {} log file".format(log))
        log_path = "{dump_path}/{hostname}/{log}".format(
            dump_path=snapshot_path_master, hostname=master_hostname, log=log)
        cmd = "ls {}".format(log_path)
        result = ssh_manager.execute_on_remote(
            ip=ip,
            cmd=cmd,
            err_msg="Couldn't find {} log on master node".format(log),
            raise_on_assert=False)
        if not result['exit_code'] == 0:
            absent_logs.append(log_path)
    for controller_fqdn in controller_fqdns:
        logger.debug("checking controller logs from remote directory...")
        for log in snapshot_logs['master']['remote']['controller'].split():
            logger.debug("checking {} log file".format(log))
            log_path = "{dump_path}/{hostname}/var/log/remote" \
                       "/{fqdn}/{log}".format(dump_path=snapshot_path_master,
                                              hostname=master_hostname,
                                              fqdn=controller_fqdn, log=log)
            cmd = "ls {}".format(log_path)
            result = ssh_manager.execute_on_remote(
                ip=ip,
                cmd=cmd,
                err_msg="Couldn't find {0} log in controller remote directory"
                        " for node {1}".format(log, controller_fqdn),
                raise_on_assert=False)
            if not result['exit_code'] == 0:
                absent_logs.append(log_path)
        logger.debug("checking controller logs...")
        for log in snapshot_logs['controller'].split():
            logger.debug("checking {} log file".format(log))
            log_path = "{dump_path}/{fqdn}/{log}".format(
                dump_path=snapshot_path_master,
                fqdn=controller_fqdn.replace(DNS_SUFFIX, ""), log=log)
            cmd = "ls {}".format(log_path)
            result = ssh_manager.execute_on_remote(
                ip=ip,
                cmd=cmd,
                err_msg="Couldn't find {0} log for"
                        " node {1}".format(log, controller_fqdn),
                raise_on_assert=False)
            if not result['exit_code'] == 0:
                absent_logs.append(log_path)
    for compute_fqdn in compute_fqdns:
        logger.debug("checking compute logs from remote directory...")
        for log in snapshot_logs['master']['remote']['compute'].split():
            logger.debug("checking {} log file".format(log))
            log_path = "{dump_path}/{hostname}/var/log/remote" \
                       "/{fqdn}/{log}".format(dump_path=snapshot_path_master,
                                              hostname=master_hostname,
                                              fqdn=compute_fqdn, log=log)
            cmd = "ls {}".format(log_path)
            result = ssh_manager.execute_on_remote(
                ip=ip,
                cmd=cmd,
                err_msg="Couldn't find {0} log in compute remote directory"
                        " for node {1}".format(log, compute_fqdn),
                raise_on_assert=False)
            if not result['exit_code'] == 0:
                absent_logs.append(log_path)
        logger.debug("checking compute logs...")
        for log in snapshot_logs['compute'].split():
            logger.debug("checking {} log file".format(log))
            log_path = "{dump_path}/{fqdn}/{log}".format(
                dump_path=snapshot_path_master,
                fqdn=compute_fqdn.replace(DNS_SUFFIX, ""), log=log)
            cmd = "ls {}".format(log_path)
            result = ssh_manager.execute_on_remote(
                ip=ip,
                cmd=cmd,
                err_msg="Couldn't find {0} log"
                        " for node {1}".format(log, compute_fqdn),
                raise_on_assert=False)
            if not result['exit_code'] == 0:
                absent_logs.append(log_path)
    logger.debug("missed logs are {}".format(absent_logs))
    assert_false(absent_logs, "Next logs aren't present"
                              " in snapshot logs {}".format(absent_logs))


@@ -33,6 +33,7 @@ from fuelweb_test import logger
from fuelweb_test import settings
from fuelweb_test.helpers.checkers import check_action_logs
from fuelweb_test.helpers.checkers import check_repo_managment
from fuelweb_test.helpers.checkers import check_snapshot_logs
from fuelweb_test.helpers.checkers import check_stats_on_collector
from fuelweb_test.helpers.checkers import check_stats_private_info
from fuelweb_test.helpers.checkers import count_stats_on_collector
@@ -832,3 +833,39 @@ def token(func):
            args[0].login()
        return func(*args, **kwargs)
    return wrapper


def check_fuel_snapshot(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        try:
            cluster_id = args[0].env.fuel_web.get_last_created_cluster()
            logger.info("start checking snapshot logs")
            controllers = \
                args[0].env.fuel_web.get_nailgun_cluster_nodes_by_roles(
                    cluster_id, ['controller'])
            computes = \
                args[0].env.fuel_web.get_nailgun_cluster_nodes_by_roles(
                    cluster_id, ['compute'])
            logger.debug("controller nodes are {}".format(controllers))
            logger.debug("compute nodes are {}".format(computes))
            controllers_fqdns = [controller['fqdn']
                                 for controller in controllers]
            compute_fqdns = [compute['fqdn'] for compute in computes]
            logger.debug("controller fqdns are {}".format(controllers_fqdns))
            logger.debug("compute fqdns are {}".format(compute_fqdns))
            args[0].env.fuel_web.task_wait(
                args[0].env.fuel_web.client.generate_logs(), 60 * 10)
            snapshot_name = args[0].env.ssh_manager.execute_on_remote(
                args[0].env.ssh_manager.admin_ip,
                cmd="ls -I *.tar.xz /var/dump/")['stdout_str']
            logger.debug("snapshot name is {}".format(snapshot_name))
            check_snapshot_logs(args[0].env.ssh_manager.admin_ip,
                                snapshot_name, controllers_fqdns,
                                compute_fqdns)
            return result
        except Exception:
            logger.error(traceback.format_exc())
            raise
    return wrapper
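
A note on how the wrapper locates the snapshot: after waiting (up to ten minutes) for the generate_logs task, it runs "ls -I *.tar.xz /var/dump/" on the admin node, i.e. it lists /var/dump/ while ignoring the compressed archives and assumes the remaining entry is the unpacked snapshot directory. A rough sketch of that handoff, using a made-up directory name:

# Illustration only: assumes /var/dump/ holds one unpacked snapshot
# plus its archive after the generate_logs task finishes.
listing = ['fuel-snapshot-2016-04-06_04-43-23',         # kept by ls
           'fuel-snapshot-2016-04-06_04-43-23.tar.xz']  # skipped via -I
snapshot_name = listing[0]                              # what stdout_str holds
dump_root = "/var/dump/{}".format(snapshot_name)
# check_snapshot_logs() then asserts that paths such as
# <dump_root>/<master_fqdn>/<log> exist on the admin node.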


@@ -21,6 +21,7 @@ from proboscis.asserts import assert_equal
from proboscis.asserts import assert_true
from proboscis import test
from fuelweb_test.helpers.decorators import check_fuel_snapshot
from fuelweb_test.helpers.decorators import log_snapshot_after_test
from fuelweb_test.helpers.eb_tables import Ebtables
from fuelweb_test.helpers import os_actions
@@ -336,6 +337,7 @@ class MultiroleControllerCinder(TestBasic):
    @test(depends_on=[SetupEnvironment.prepare_slaves_3],
          groups=["deploy_multirole_controller_cinder"])
    @log_snapshot_after_test
    @check_fuel_snapshot
    def deploy_multirole_controller_cinder(self):
        """Deploy cluster in HA mode with multi-role controller and cinder