diff --git a/mini-mon.yml b/mini-mon.yml index 5939f07..8afcf9d 100644 --- a/mini-mon.yml +++ b/mini-mon.yml @@ -83,3 +83,4 @@ - {role: monasca-default-alarms, tags: [alarms]} - include: smoke.yml +- include: smoke2.yml diff --git a/smoke2.yml b/smoke2.yml new file mode 100644 index 0000000..e20c6ce --- /dev/null +++ b/smoke2.yml @@ -0,0 +1,12 @@ +- hosts: mini-mon + tasks: + - name: Populate vars + template: src=tests/smoke2.py.j2 dest=/vagrant/tests/smoke2.py mode=0775 + - name: Copy config + copy: src=tests/smoke2_configs.py dest=/vagrant/tests/smoke2_configs.py + - name: Run the smoke2 test + command: /vagrant/tests/smoke2.py + environment: + PATH: "{{ansible_env.PATH}}:{{monasca_virtualenv_dir}}/bin" + register: smoke2 + - debug: var=smoke2.stdout_lines \ No newline at end of file diff --git a/tests/smoke2.py.j2 b/tests/smoke2.py.j2 new file mode 100755 index 0000000..e7cdf2d --- /dev/null +++ b/tests/smoke2.py.j2 @@ -0,0 +1,431 @@ +#!/opt/monasca/bin/python +# +from __future__ import print_function +import argparse +import kafka +import glob +import MySQLdb +from monascaclient import ksclient +import psutil +import requests +import shlex +import smoke2_configs +import socket +import subprocess +import sys +import utils + +config = smoke2_configs.test_config +args = 0 + +# successfully = '\033[5;40;32mSuccessfully\033[0m' +# successful = '\033[5;40;32mSuccessful.\033[0m' +# error = '\033[5;41;37m[ERROR]:\033[0m' +successfully = 'Successfully' +successful = 'Successful.' +error = '[ERROR]' + +# parse command line arguments +def parse_commandline_args(): + parser = argparse.ArgumentParser() + parser.add_argument('-dbtype', '--dbtype', + default="{{database_type|default('influxdb')}}", + help='specify which database (influxdb or vertica)') + parser.add_argument('-k', '--kafka', + default="{{kafka_hosts|default(0)}}", + help='will check kafka on listed node(s). ' + 'ex. 
-k "192.168.10.4 192.168.10.7"') + parser.add_argument('-z', '--zoo', + default="{{zookeeper_hosts|default(0)}}", + help='will check zookeeper on listed node(s). ' + 'ex. -z "192.168.10.4 192.168.10.7"') + parser.add_argument('-m', '--mysql', + default="{{mysql_host|default(0)}}", + help='will check mysql on listed node. ' + 'ex. -m "192.168.10.4"') + parser.add_argument('-db', '--db', + help='will check database on listed node. ' + 'ex. -db "192.168.10.4"') + parser.add_argument('-s', '--single', + help='will check all services on single node. ' + 'ex. -s "192.168.10.4"') + parser.add_argument('-api', '--monapi', + default="{{mini_mon|default(0)}}", + help='will check url api access on node. ' + 'ex. -api "192.168.10.4"') + parser.add_argument('-v', '--verbose', action='store_true', default=0, + help='will display all checking info') + return parser.parse_args() + + +def find_processes(): + """Find_process is meant to validate that all the required processes + are running""" + process_missing = [] + process_list = config['check']['expected_processes'] + + for process in process_list: + process_found_flag = False + + for item in psutil.process_iter(): + for cmd in item.cmdline(): + if process in cmd: + process_found_flag = True + break + + if not process_found_flag: + process_missing.append(process) + + if len(process_missing) > 0: # if processes were not found + print (error + ' Process = {} Not Found' + .format(process_missing)) + debug_missing_process(process_missing[0]) + return False + print(successful + ' All Processes are running.') + return True + + +def debug_missing_process(process_missing): + """A tool to output potential remedies for missing processes""" + # msg = config['help'][process_missing] + msg = config['help']['test'] + print(msg) + + +def check_port(node, port): + """Returns False if port is open (for fail check)""" + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = sock.connect_ex((node, port)) + if result == 0: + if 
def check_port(node, port):
    """Probe a TCP port; return a *failure* flag.

    Returns False when the port is open (no failure) and True when it is
    closed/unreachable, so callers can accumulate the result as 'fail'.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        result = sock.connect_ex((node, port))
    finally:
        sock.close()  # fix: the probe socket was never closed (fd leak)
    if result == 0:
        if args.verbose:
            print(successful + " Port {} is open".format(port))
        return False
    if args.verbose:
        print(error + " Port {} is not open".format(port))
    return True


def debug_kafka(node):
    """Check the kafka broker port and topic connectivity on each node.

    node: space-separated '<ip>' or '<ip>:<port>' entries.
    Returns True only when every node/topic check passed.
    """
    print('********VERIFYING KAFKA NODE(S)********')
    topics = config['default']['kafka']['topics']
    fail = False
    for nodeip in node.split(' '):
        if nodeip[-5:-4] == ':':
            nodeip = nodeip[:-5]  # strip a trailing ':<port>' suffix
        # fix: accumulate instead of overwriting 'fail' on every node
        fail = check_port(nodeip, 9092) or fail
        if args.verbose:
            print('Checking topics on node {}:'.format(nodeip))
        kafka_client = kafka.client.KafkaClient(nodeip + ':9092')
        for topic in topics:
            try:
                kafka.consumer.SimpleConsumer(
                    kafka_client,
                    'Foo',
                    topic,
                    auto_commit=True,
                    max_buffer_size=None)
                if args.verbose:
                    print('\t' + successfully + ' connected '
                          'to topic {}'.format(topic))
            except KeyError:
                print('\t' + error + ' Could not connect '
                      'to topic {}'.format(topic))
                fail = True
    if fail:
        return False
    if not args.verbose:
        print(successful)
    return True


def debug_zookeeper(node):
    """Check the zookeeper client port and 'ruok' health on each node.

    node: space-separated '<ip>' or '<ip>:<port>' entries.
    Returns True only when every node answered 'imok'.
    """
    print('*******VERIFYING ZOOKEEPER NODE(S)*******')
    fail = False
    for nodeip in node.split(' '):
        if nodeip[-5:-4] == ':':
            nodeip = nodeip[:-5]
        fail = check_port(nodeip, 2181) or fail
        cmd = "nc " + nodeip + ' 2181'
        ps = subprocess.Popen(('echo', 'ruok'), stdout=subprocess.PIPE)
        try:
            output = subprocess.check_output(shlex.split(cmd),
                                             stdin=ps.stdout)
            if output.strip() == 'imok':
                if args.verbose:
                    print("cmd: echo ruok | " + cmd + " Response: {}"
                          .format(output) + " " + successful)
                else:
                    print(successful)
            else:
                # fix: a non-'imok' reply used to pass silently
                print(error + ' Node {} is not healthy'.format(nodeip))
                fail = True
        except subprocess.CalledProcessError:
            print(error + ' Node {} is not responding'.format(nodeip))
            return False
    return not fail
def debug_mysql(node, mysql_user, mysql_pass):
    """Check the mysql port, connect, and diff the table list vs config.

    Returns True when the connection succeeded and every table name is in
    config['default']['mysql_schema'].
    """
    print('********VERIFYING MYSQL NODE********')
    fail = check_port(node, 3306)
    schema = config['default']['mysql_schema']
    try:
        conn = MySQLdb.connect(
            host=node,
            user=mysql_user,
            passwd=mysql_pass,
            db='mon')
        if args.verbose:
            print(successfully + ' connected to node {}'.format(node))
        conn.query('show tables')
        result = conn.store_result()
        if args.verbose:
            print('Checking MYSQL Table Schema on node {}:'.format(node))
        for x in range(0, result.num_rows()):
            row = result.fetch_row()[0][0]
            if row in schema:
                if args.verbose:
                    print('\t' + successfully +
                          ' matched table {}'.format(row))
            else:
                print('\t' + error + ' Table {} does not '
                      'match config'.format(row))
                fail = True
        if fail:
            print('\033[5;41;37m[ERROR]: MySQL test failed\033[0m')
            return False
        if not args.verbose:
            print(successful)
        return True
    except MySQLdb.OperationalError as e:  # fix: Py2-only 'except X, e'
        print(error + ' MySQL connection failed: {}'.format(e))
        return False


def debug_influx(node, influx_user, influx_pass):
    """Check the influxdb ports and run a trivial query against 'mon'.

    Returns True when both ports are open and the query succeeded.
    """
    print('********VERIFYING INFLUXDB NODE********')
    try:
        from influxdb import client
    except ImportError:
        print("[ERROR]: InfluxDB Python Package is not installed!")
        # fix: was 'return 1' (truthy), inconsistent with the bool API
        return False
    # fix: the second check_port used to overwrite the first result
    fail = check_port(node, 8086)
    fail = check_port(node, 8090) or fail
    try:
        conn = client.InfluxDBClient(
            node,
            8086,
            influx_user,
            influx_pass,
            'mon'
        )
        conn.query('show series;')
        if args.verbose:
            print(successfully + ' connected to node {}'.format(node))
        else:
            print(successful)
    except Exception as e:
        print('{}'.format(e))
        return False
    return not fail


def debug_vertica(node, vuser, vpass):
    """Check the vertica ports and run a count via the vsql CLI.

    Returns True when the ports are open and the vsql query succeeded.
    """
    print('********VERIFYING VERTICA NODE********')
    fail = check_port(node, 5433)
    fail = check_port(node, 5434) or fail
    cmd = "/opt/vertica/bin/vsql -U " + vuser + " -w " + vpass + " " \
          "-c \"select count(*) from MonMetrics.Measurements\""
    try:
        output = subprocess.check_output(shlex.split(cmd))
        if args.verbose:
            print("Running cmd: select count(*) from MonMetrics.Measurements")
            output = [int(s) for s in output.split() if s.isdigit()]
            print("Response: " + str(output[0]) + " " + successful)
        else:
            print(successful)
    except subprocess.CalledProcessError:
        print(error + " Cannot connect to vertica")
        fail = True  # fix: a failed vsql call used to still return True
    return not fail
def debug_keystone(key_user, key_pass, project, auth_url):
    """Authenticate against keystone and return the auth token.

    Raises on failure; the caller prints the exception and aborts.
    """
    keystone = {
        'username': key_user,
        'password': key_pass,
        'project': project,
        'auth_url': auth_url
    }
    ks_client = ksclient.KSClient(**keystone)
    if args.verbose:
        print(successfully + ' connected to keystone with '
              'token {}'.format(ks_client.token))
    else:
        print(successful)
    return ks_client.token


def debug_rest_urls(node, token):
    """Check the monasca-api port and fetch the version document.

    Returns True when the port is open and the API answered 200.
    """
    print('********VERIFYING REST API********')
    url = 'http://' + node + ":8080/"
    fail = check_port(node, 8080)
    try:
        r = requests.get(url, headers={'X-Auth-Token': token})
        if r.status_code == 200:
            version_id = r.json()['elements'][0]['id']
            if args.verbose:
                print(successfully + ' connected to REST API on '
                      'node {}. Response (version id): {}'
                      .format(node, version_id))
            else:
                print(successful)
        else:
            # fix: non-200 responses used to pass silently
            print(error + ' REST API on node {} returned status {}'
                  .format(node, r.status_code))
            fail = True
    except requests.ConnectionError:
        print(error + ' incorrect response from REST '
              'API on node {}'.format(node))
        return False
    return not fail


def debug_storm(node):
    """Check the storm worker/nimbus ports and look for an ACTIVE topology.

    Returns True when the ports are open and 'storm list' shows ACTIVE.
    """
    print('********VERIFYING STORM********')
    # fix: accumulate instead of overwriting 'fail' on each port
    fail = check_port(node, 6701)
    fail = check_port(node, 6702) or fail
    fail = check_port(node, 6627) or fail
    cmd = glob.glob("/opt/storm/apache*")[0] + "/bin/storm list"
    grep = "grep 'ACTIVE'"
    try:
        ps = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)
        output = subprocess.check_output(shlex.split(grep), stdin=ps.stdout)
        if output:
            output = " ".join(output[:27].split())
            if args.verbose:
                print(successful + " Storm status: {}".format(output))
            else:
                print(successful)
    except Exception as e:
        print(error + " {}".format(e))
        return False
    return not fail


def main():
    """Run every smoke check in sequence.

    Returns 0 when the test ran to completion, 1 on a fatal failure.
    NOTE(review): most individual check results are printed but (as in the
    original) not folded into the exit code.

    Ansible renders undefined template variables as the string "0"
    (|default(0)), so every sentinel comparison below is against "0" --
    the previous integer comparisons (!= 0) could never detect an unset
    variable.
    """
    utils.setup_cli()

    # parse the command line arguments into the module-level namespace
    global args
    args = parse_commandline_args()

    if args.single:
        debug_zookeeper(args.single)
    elif args.zoo != "0":
        debug_zookeeper(args.zoo)
    else:
        print(error + ' Could not parse zookeeper node!')

    if args.single:
        debug_kafka(args.single)
    elif args.kafka != "0":
        debug_kafka(args.kafka)
    else:
        print(error + ' Could not parse kafka node!')

    print('*****VERIFYING HOST SERVICES/PROCESSES*****')
    if not find_processes():
        print('*****TEST FAILED*****')
        return 1

    print('*****VERIFYING KEYSTONE*****')
    token = None  # fix: stays None (was unbound -> NameError) when skipped
    key_user = "{{keystone_user|default(0)}}"
    key_pass = "{{keystone_password|default(0)}}"
    key_host = "{{keystone_host|default(0)}}"
    if key_host != "0":
        auth_url = "http://" + key_host + ':35357/v3'
        if key_user != "0" and key_pass != "0":
            try:
                token = debug_keystone(key_user, key_pass, 'test', auth_url)
            except Exception as e:
                print(error + ' {}'.format(e))
                print('*****TEST FAILED*****')
                return 1
        else:
            print(error + ' Could not parse keystone user/pass')
    else:
        print(error + ' Could not parse keystone node')

    if token is None:
        # no token means the REST call cannot authenticate at all
        print(error + ' Skipping REST API check (no keystone token)')
    elif args.single:
        debug_rest_urls(args.single, token)
    elif args.monapi != "0":
        debug_rest_urls(args.monapi, token)
    else:
        print(error + ' Could not parse node for REST API')

    storm_node = "{{nimbus_host|default(0)}}"
    if storm_node != "0":
        debug_storm(storm_node)
    else:
        print(error + ' Could not parse storm node')

    mysql_user = "{{mon_mysql_users[1]['username']|default(0)}}"
    mysql_pass = "{{mon_mysql_users[1]['password']|default(0)}}"
    if mysql_user != "0" and mysql_pass != "0":
        if args.single:
            debug_mysql(args.single, mysql_user, mysql_pass)
        elif args.mysql != "0":
            debug_mysql(args.mysql, mysql_user, mysql_pass)
        else:
            print(error + ' Could not parse node for mysql')
    else:
        print(error + ' Could not parse mysql user/pass')

    if args.dbtype == 'vertica':
        vertica_user = "{{vertica_users[1]['username']|default(0)}}"
        vertica_pass = "{{vertica_users[1]['password']|default(0)}}"
        vertica_node = "{{mini_mon}}"
        if vertica_user != "0" and vertica_pass != "0":
            if args.single:
                debug_vertica(args.single, vertica_user, vertica_pass)
            elif args.db:
                debug_vertica(args.db, vertica_user, vertica_pass)
            else:
                debug_vertica(vertica_node, vertica_user, vertica_pass)
        else:
            # fix: typo 'Cloud not parse'
            print(error + ' Could not parse vertica user/pass')
    else:
        influx_user = "{{mon_influxdb_users[0]['username']|default(0)}}"
        influx_pass = "{{mon_influxdb_users[0]['password']|default(0)}}"
        influx_node = "{{influxdb_url|default(0)}}"
        if args.single:
            debug_influx(args.single, influx_user, influx_pass)
        elif args.db:
            debug_influx(args.db, influx_user, influx_pass)
        elif influx_node != "0":
            # influxdb_url looks like http://<host>:8086 -- strip the
            # scheme prefix and ':8086' suffix to get the bare host
            if influx_node[0] == 'h':
                influx_node = influx_node[7:-5]
            debug_influx(influx_node, influx_user, influx_pass)
        else:
            print(error + " Could not parse influxdb node")
            return 1

    print('*****TEST FINISHED*****')
    return 0


if __name__ == "__main__":
    sys.exit(main())
# -*- encoding: utf-8 -*-

"""Configurations for the smoke2 test.

'default' holds the expected kafka topics and mysql table list for a stock
monasca-vagrant deployment, 'check' lists the processes that must be
running, and 'help' maps to remediation hints printed on failure.
"""

test_config = {
    'default': {  # the default configuration,
        # simple test of each component of monasca-vagrant
        'kafka': {
            'topics': [
                'metrics', 'events', 'raw-events', 'transformed-events',
                'stream-definitions', 'transform-definitions',
                'alarm-state-transitions', 'alarm-notifications',
                'retry-notifications'
            ]
        },
        'mysql_schema': [
            'alarm', 'alarm_action', 'alarm_definition', 'alarm_metric',
            'metric_definition', 'metric_definition_dimensions',
            'metric_dimension', 'notification_method', 'schema_migrations',
            'stream_actions', 'stream_definition', 'sub_alarm',
            'sub_alarm_definition', 'sub_alarm_definition_dimension',
            'event_transform'
        ],
    },

    'check': {
        # Names are matched as substrings of running command lines.
        # fix: exact duplicates removed ('monasca-statsd' and
        # 'monasca-forward' each appeared twice in the original list).
        # NOTE(review): 'monasca-collect' also substring-matches
        # 'monasca-collector' -- confirm whether a distinct process
        # was intended here.
        'expected_processes': [
            'apache-storm', 'monasca-api', 'monasca-statsd',
            'monasca-collector', 'monasca-forward',
            'monasca-notification', 'monasca-persister',
            'monasca-collect'
        ]
    },
    'help': {
        'test': 'wiki link for help with specific process'
    }
}