2022-04-08 08:48:49 -07:00
|
|
|
#!/usr/bin/python3
|
|
|
|
|
|
|
|
import argparse
|
2022-04-22 07:58:29 -07:00
|
|
|
import csv
|
2022-04-08 08:48:49 -07:00
|
|
|
import datetime
|
|
|
|
import glob
|
|
|
|
import itertools
|
|
|
|
import json
|
2022-04-22 07:58:29 -07:00
|
|
|
import logging
|
2022-04-08 08:48:49 -07:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import socket
|
|
|
|
import subprocess
|
|
|
|
import sys
|
2022-04-25 07:47:56 -07:00
|
|
|
|
|
|
|
# psutil and pymysql are optional dependencies: when either is missing the
# corresponding report section is simply omitted (see the `data` dict in
# __main__), with a note on stderr rather than a hard failure.
try:
    import psutil
except ImportError:
    psutil = None
    print('No psutil, process information will not be included',
          file=sys.stderr)

try:
    import pymysql
except ImportError:
    pymysql = None
    print('No pymysql, database information will not be included',
          file=sys.stderr)
|
2022-04-08 08:48:49 -07:00
|
|
|
|
2022-04-22 07:58:29 -07:00
|
|
|
# Module-level logger for warnings emitted while collecting stats.
LOG = logging.getLogger('perf')

# NOTE(review): presumably why the stats are flattened before being indexed
# (see "Flatten this for ES" below) — unbounded distinct keys blow up the
# Elasticsearch mapping:
# https://www.elastic.co/blog/found-crash-elasticsearch#mapping-explosion
|
|
|
|
|
|
|
|
|
|
|
|
def tryint(value):
    """Return *value* coerced to int when it parses; otherwise pass it through.

    Non-numeric strings and None are returned unchanged.
    """
    try:
        converted = int(value)
    except (ValueError, TypeError):
        return value
    return converted
|
|
|
|
|
|
|
|
|
|
|
|
def get_service_stats(service):
    """Return systemd accounting stats for *service*.

    Runs ``systemctl show`` asking for each property named in the seed
    dict and int-coerces the reported values where possible.
    """
    # Seed with the properties we request; 0 stands in if a property is
    # never reported back.
    stats = {'MemoryCurrent': 0}
    output = subprocess.check_output(['/usr/bin/systemctl', 'show', service] +
                                     ['-p%s' % stat for stat in stats])
    for line in output.decode().split('\n'):
        if not line:
            continue
        # Split on the first '=' only: a property's value may itself
        # contain '=' characters, which would make a bare split() raise
        # ValueError (too many values to unpack).
        stat, val = line.split('=', 1)
        stats[stat] = tryint(val)

    return stats
|
|
|
|
|
|
|
|
|
|
|
|
def get_services_stats():
    """Collect stats for every devstack systemd unit, plus apache2.

    Returns a list of dicts, one per service, each tagged with the
    service name alongside its systemd stats.
    """
    unit_paths = glob.glob('/etc/systemd/system/devstack@*.service')
    services = [os.path.basename(path) for path in unit_paths]
    services.append('apache2.service')
    return [{'service': svc, **get_service_stats(svc)} for svc in services]
|
|
|
|
|
|
|
|
|
|
|
|
def get_process_stats(proc):
    """Summarize one process: command, pid, joined args, and RSS bytes.

    For interpreted processes the interpreter itself is dropped so the
    actual program shows up as 'cmd'.
    """
    argv = proc.cmdline()
    if 'python' in argv[0]:
        argv = argv[1:]
    return {
        'cmd': argv[0],
        'pid': proc.pid,
        'args': ' '.join(argv[1:]),
        'rss': proc.memory_info().rss,
    }
|
|
|
|
|
|
|
|
|
|
|
|
def get_processes_stats(matches):
    """Return stats dicts for every other process whose command line
    matches any regex in *matches*.

    The current process is excluded.  Processes can exit (or become
    unreadable) between enumeration and inspection, so psutil access
    errors skip that process instead of crashing the whole report.
    """
    me = os.getpid()
    # Compile once instead of re-compiling each pattern per process.
    patterns = [re.compile(match) for match in matches]

    stats = []
    for proc in psutil.process_iter():
        try:
            if proc.pid == me:
                continue
            cmdline = ' '.join(proc.cmdline())
            if any(pattern.search(cmdline) for pattern in patterns):
                stats.append(get_process_stats(proc))
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process vanished mid-scan or is off-limits; nothing to report.
            continue
    return stats
|
|
|
|
|
|
|
|
|
|
|
|
def get_db_stats(host, user, passwd):
    """Read per-database query counters from the 'stats' database.

    Returns a list of row dicts from the ``queries`` table with numeric
    strings coerced to int.  A missing stats database is treated as a
    failed devstack run and yields an empty list; any other connection
    error propagates.
    """
    try:
        conn = pymysql.connect(host=host, user=user, password=passwd,
                               database='stats',
                               cursorclass=pymysql.cursors.DictCursor)
    except pymysql.err.OperationalError as e:
        if 'Unknown database' not in str(e):
            raise
        print('No stats database; assuming devstack failed',
              file=sys.stderr)
        return []

    with conn:
        with conn.cursor() as cur:
            cur.execute('SELECT db,op,count FROM queries')
            return [{key: tryint(val) for key, val in row.items()}
                    for row in cur]
|
|
|
|
|
|
|
|
|
|
|
|
def _normalize_agent(agent_field):
    """Reduce a raw User-Agent header to a short client name.

    Tempest's User-Agent is unchanged, but client libraries and
    inter-service API calls use proper strings.  So assume
    'python-urllib' is tempest so we can tell it apart.
    """
    if 'python-urllib' in agent_field.lower():
        return 'tempest'
    agent = agent_field.split(' ')[0]
    if agent.startswith('python-'):
        agent = agent.replace('python-', '')
    if '/' in agent:
        agent = agent.split('/')[0]
    return agent


def get_http_stats_for_log(logfile):
    """Aggregate per-service API call stats from one apache combined log.

    Returns a list of flat dicts (one per service: request counts keyed
    by 'agent-METHOD', plus the largest response length) suitable for
    indexing; returns [] as soon as the file looks like it is not a
    combined access log.
    """
    stats = {}
    apache_fields = ('host', 'a', 'b', 'date', 'tz', 'request', 'status',
                     'length', 'c', 'agent')
    ignore_agents = ('curl', 'uwsgi', 'nova-status')
    ignored_services = set()
    # Use a context manager so the log file handle is closed promptly
    # rather than leaked until GC (the original open() was never closed).
    with open(logfile) as log_fp:
        for line in csv.reader(log_fp, delimiter=' '):
            fields = dict(zip(apache_fields, line))
            if len(fields) != len(apache_fields):
                # Not a combined access log, so we can bail completely
                return []
            try:
                method, url, http = fields['request'].split(' ')
            except ValueError:
                method = url = http = ''
            if 'HTTP' not in http:
                # Not a combined access log, so we can bail completely
                return []

            agent = _normalize_agent(fields['agent'])
            if agent in ignore_agents:
                continue

            try:
                service, rest = url.strip('/').split('/', 1)
            except ValueError:
                # Root calls like "GET /identity"
                service = url.strip('/')
                rest = ''

            # Non-alphabetic first path segments are noise (static files,
            # probes); collect them for a single summary warning.
            if not service.isalpha():
                ignored_services.add(service)
                continue

            method_key = '%s-%s' % (agent, method)
            try:
                length = int(fields['length'])
            except ValueError:
                LOG.warning('[%s] Failed to parse length %r from line %r' % (
                    logfile, fields['length'], line))
                length = 0
            stats.setdefault(service, {'largest': 0})
            stats[service].setdefault(method_key, 0)
            stats[service][method_key] += 1
            stats[service]['largest'] = max(stats[service]['largest'],
                                            length)

    if ignored_services:
        LOG.warning('Ignored services: %s' % ','.join(
            sorted(ignored_services)))

    # Flatten this for ES
    return [{'service': service, 'log': os.path.basename(logfile),
             **vals}
            for service, vals in stats.items()]
|
|
|
|
|
|
|
|
|
|
|
|
def get_http_stats(logfiles):
    """Merge the per-log API stats of every apache log into one flat list."""
    merged = []
    for logfile in logfiles:
        merged.extend(get_http_stats_for_log(logfile))
    return merged
|
|
|
|
|
|
|
|
|
|
|
|
def get_report_info():
    """Metadata stamped onto each report: when, where, and schema version."""
    now = datetime.datetime.now()
    return {
        'timestamp': now.isoformat(),
        'hostname': socket.gethostname(),
        'version': 2,
    }
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Default cmdline regexes for processes worth reporting on.
    process_defaults = ['privsep', 'mysqld', 'erlang', 'etcd']
    parser = argparse.ArgumentParser()
    parser.add_argument('--db-user', default='root',
                        help=('MySQL user for collecting stats '
                              '(default: "root")'))
    parser.add_argument('--db-pass', default=None,
                        help='MySQL password for db-user')
    parser.add_argument('--db-host', default='localhost',
                        help='MySQL hostname')
    parser.add_argument('--apache-log', action='append', default=[],
                        help='Collect API call stats from this apache log')
    parser.add_argument('--process', action='append',
                        default=process_defaults,
                        help=('Include process stats for this cmdline regex '
                              '(default is %s)' % ','.join(process_defaults)))
    args = parser.parse_args()

    logging.basicConfig(level=logging.WARNING)

    # Each optional section degrades to [] when its prerequisite is missing
    # (pymysql/psutil import failed, or no --db-pass given), so a partial
    # report is still emitted rather than nothing.
    data = {
        'services': get_services_stats(),
        'db': pymysql and args.db_pass and get_db_stats(args.db_host,
                                                        args.db_user,
                                                        args.db_pass) or [],
        'processes': psutil and get_processes_stats(args.process) or [],
        'api': get_http_stats(args.apache_log),
        'report': get_report_info(),
    }

    # The report is written to stdout as pretty-printed JSON.
    print(json.dumps(data, indent=2))
|