Fixing flake8 errors now that tox is working again
Change-Id: Id34991711c57f20922f003928008143032723af3
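The diff below repeats a handful of mechanical cleanups across many files. A minimal sketch of those recurring patterns, with module names and values invented purely for illustration:

    # One import per line, modules ordered alphabetically
    import logging
    import os

    log = logging.getLogger(__name__)

    # Python-3-compatible octal literals (rather than os.umask(022))
    os.umask(0o22)

    # Identity comparison against None (rather than "if value == None:")
    value = None
    if value is None:
        value = 0.0

    # "except ... as e" syntax, and drop the binding entirely when it is unused
    try:
        raise ValueError('example')
    except ValueError:
        log.exception('handled example error')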
@@ -636,32 +636,3 @@ class AgentCheck(util.Dimensions):
return val
else:
return cast(val)


def run_check(name, path=None):
import tests.common

# Read the config file
config = Config()
confd_path = path or os.path.join(config.get_confd_path(),
'{0}.yaml'.format(name))

try:
f = open(confd_path)
except IOError:
raise Exception('Unable to open configuration at %s' % confd_path)

config_str = f.read()
f.close()

# Run the check
check, instances = tests.common.get_check(name, config_str)
if not instances:
raise Exception('YAML configuration returned no instances.')
for instance in instances:
check.check(instance)
if check.has_events():
print("Events:\n")
pprint.pprint(check.get_events(), indent=4)
print("Metrics:\n")
pprint.pprint(check.get_metrics(), indent=4)

@@ -1,13 +1,13 @@
# Core modules
import logging
import socket
import system.win32 as w32
import threading
import time

import monasca_agent.common.check_status as check_status
import monasca_agent.common.metrics as metrics
import monasca_agent.common.util as util
import system.win32 as w32


log = logging.getLogger(__name__)

@@ -74,8 +74,7 @@ class PoolWorker(threading.Thread):

class Pool(object):

"""
The Pool class represents a pool of worker threads.
"""The Pool class represents a pool of worker threads.

It has methods which allows tasks to be offloaded to the
worker processes in a few different ways.

@@ -170,6 +170,7 @@ class ServicesCheck(monasca_agent.collector.checks.AgentCheck):

def _check(self, instance):
"""This function should be implemented by inherited classes.

"""
raise NotImplementedError
@@ -5,16 +5,20 @@ import urllib2
import urlparse

# project
import monasca_agent.common.util as util
import monasca_agent.collector.checks as checks
import monasca_agent.collector.checks.services_checks as services_checks
import monasca_agent.collector.checks.utils as utils
import monasca_agent.common.util as util

log = logging.getLogger(__name__)


class Apache(checks.AgentCheck):
"""Tracks basic connection/requests/workers metrics

See http://httpd.apache.org/docs/2.2/mod/mod_status.html for more details
"""

GAUGES = {'IdleWorkers': 'apache.performance.idle_worker_count',
'BusyWorkers': 'apache.performance.busy_worker_count',
'CPULoad': 'apache.performance.cpu_load_perc',

@@ -32,7 +32,6 @@ class CheckMK(AgentCheck):
def __init__(self, name, init_config, agent_config, instances=None):
AgentCheck.__init__(self, name, init_config, agent_config, instances)


def check(self, instance):
"""Run check_mk_agent and process the '<<<local>>>' results.
"""

@@ -1,7 +1,7 @@
import json
import urllib2
import re
import sys
import urllib2

from monasca_agent.collector.checks import AgentCheck
from monasca_agent.collector.checks.utils import add_basic_auth

@@ -1,5 +1,5 @@
import psutil
import logging
import psutil

import monasca_agent.collector.checks as checks

@@ -34,7 +34,6 @@ class Cpu(checks.AgentCheck):
num_of_metrics += 1
log.debug('Collected {0} cpu metrics'.format(num_of_metrics))


def _format_results(self, us, sy, wa, idle, st, dimensions):
data = {'cpu.user_perc': us,
'cpu.system_perc': sy,

@@ -16,8 +16,7 @@ class Crash(checks.AgentCheck):
log.debug('crash dir: %s', self.crash_dir)

def check(self, instance):
"""
Capture crash dump statistics
"""Capture crash dump statistics
"""
dimensions = self._set_dimensions(None, instance)
dump_count = 0

@@ -1,9 +1,9 @@
from fnmatch import fnmatch
from os import stat
from os import walk
from os.path import abspath
from os.path import exists
from os.path import join
from os import stat
from os import walk
import time

from monasca_agent.collector.checks import AgentCheck

@@ -1,6 +1,7 @@
import psutil
import logging
import os
import psutil
import re

log = logging.getLogger(__name__)

@@ -41,7 +42,7 @@ class Disk(checks.AgentCheck):
total_used = 0
for partition in partitions:
if partition.fstype not in fs_types_to_ignore \
or (device_blacklist_re \
or (device_blacklist_re
and not device_blacklist_re.match(partition.device)):
device_name = self._get_device_name(partition.device)
disk_usage = psutil.disk_usage(partition.mountpoint)

@@ -87,7 +88,6 @@ class Disk(checks.AgentCheck):
dimensions=rollup_dimensions)
log.debug('Collected 2 rolled-up disk usage metrics')


def _get_re_exclusions(self, instance):
"""Parse device blacklist regular expression"""
filter = None

@@ -95,7 +95,7 @@ class Disk(checks.AgentCheck):
filter_device_re = instance.get('device_blacklist_re', None)
if filter_device_re:
filter = re.compile(filter_device_re)
except re.error as err:
except re.error:
log.error('Error processing regular expression {0}'.format(filter_device_re))

return filter

@@ -3,8 +3,8 @@ import json
import os
import re
import socket
import urllib2
import urllib
import urllib2
from urlparse import urlsplit

from monasca_agent.collector.checks import AgentCheck

@@ -206,7 +206,6 @@ class ElasticSearch(AgentCheck):
self.STATS_URL = "/_cluster/nodes/stats?all=true"
self.NODES_URL = "/_cluster/nodes?network=true"


additional_metrics = {
"elasticsearch.cache.field.evictions": ("gauge", "indices.cache.field_evictions"),
"elasticsearch.cache.field.size": ("gauge", "indices.cache.field_size_in_bytes"),

@@ -232,8 +232,7 @@ class HAProxy(AgentCheck):
# Store this host status so we can check against it later
self.host_status[url][key] = data['status']

@staticmethod
def _create_event(status, hostname, lastchg, service_name):
def _create_event(self, status, hostname, lastchg, service_name):
if status == "DOWN":
alert_type = "error"
title = "HAProxy %s front-end reported %s %s" % (service_name, hostname, status)
@@ -12,8 +12,9 @@ from httplib2 import httplib
from httplib2 import HttpLib2Error

import monasca_agent.collector.checks.services_checks as services_checks
import monasca_agent.common.keystone as keystone
import monasca_agent.common.config as cfg
import monasca_agent.common.keystone as keystone


class HTTPCheck(services_checks.ServicesCheck):

@@ -24,8 +24,8 @@ import yaml
from calendar import timegm
from datetime import datetime
from distutils.version import LooseVersion
from monasca_agent.collector.virt import inspector
from monasca_agent.collector.checks import AgentCheck
from monasca_agent.collector.virt import inspector


class LibvirtCheck(AgentCheck):

@@ -93,8 +93,8 @@ class LibvirtCheck(AgentCheck):
try:
with open(self.instance_cache_file, 'w') as cache_yaml:
yaml.safe_dump(id_cache, cache_yaml)
if stat.S_IMODE(os.stat(self.instance_cache_file).st_mode) != 0600:
os.chmod(self.instance_cache_file, 0600)
if stat.S_IMODE(os.stat(self.instance_cache_file).st_mode) != 0o600:
os.chmod(self.instance_cache_file, 0o600)
except IOError as e:
self.log.error("Cannot write to {0}: {1}".format(self.instance_cache_file, e))

@@ -139,8 +139,8 @@ class LibvirtCheck(AgentCheck):
try:
with open(self.metric_cache_file, 'w') as cache_yaml:
yaml.safe_dump(metric_cache, cache_yaml)
if stat.S_IMODE(os.stat(self.metric_cache_file).st_mode) != 0600:
os.chmod(self.metric_cache_file, 0600)
if stat.S_IMODE(os.stat(self.metric_cache_file).st_mode) != 0o600:
os.chmod(self.metric_cache_file, 0o600)
except IOError as e:
self.log.error("Cannot write to {0}: {1}".format(self.metric_cache_file, e))

@@ -1,7 +1,8 @@
import psutil
import logging
import psutil
import re
import subprocess
import sys

import monasca_agent.collector.checks as checks
import monasca_agent.common.util as util

@@ -36,7 +37,7 @@ class Load(checks.AgentCheck):
# Get output from uptime
try:
uptime = subprocess.Popen(['uptime'],
stdout=sp.PIPE,
stdout=subprocess.PIPE,
close_fds=True).communicate()[0]
except Exception:
log.exception('Cannot extract load using uptime')

@@ -1,5 +1,5 @@
import psutil
import logging
import psutil

log = logging.getLogger(__name__)

@@ -11,7 +11,8 @@ import socket
import subprocess
import time

from monasca_agent.collector.checks.services_checks import ServicesCheck, Status
from monasca_agent.collector.checks.services_checks import ServicesCheck
from monasca_agent.collector.checks.services_checks import Status


class WrapNagios(ServicesCheck):

@@ -1,7 +1,7 @@
# stdlib
import logging
import re
import psutil
import re

# project
import monasca_agent.collector.checks as checks

@@ -32,7 +32,6 @@ class Network(checks.AgentCheck):
exclude_iface_re = None

nics = psutil.net_io_counters(pernic=True)
count = 0
for nic_name in nics.keys():
if self._is_nic_monitored(nic_name, excluded_ifaces, exclude_iface_re):
nic = nics[nic_name]

@@ -32,4 +32,3 @@ class NtpCheck(AgentCheck):
# case the agent host's clock is messed up.
ntp_ts = ntp_stats.recv_time
self.gauge('ntp.offset', ntp_offset, timestamp=ntp_ts, dimensions=dimensions)

@@ -226,7 +226,7 @@ class RabbitMQ(checks.AgentCheck):
root = root.get(path, {})

value = root.get(keys[-1], None)
if value == None:
if value is None:
value = 0.0
try:
self.log.debug("Collected data for %s: metric name: %s: value: %f dimensions: %s" % (object_type, metric_name, float(value), str(dimensions)))

@@ -29,19 +29,18 @@ def run_command(command, input=None):
stdout,
stderr))
return errcode, stdout, stderr
except Exception as e:
except Exception:
log.error("Failure while executing command - {0}".format(command))


def process_command(command):
"""
Runs the command and returns json output
"""Runs the command and returns json output
"""
try:
errcode, stdout, stderr = run_command(command)
json_output = json.loads(stdout)
return json_output
except Exception as e:
except Exception:
log.error('Failure while processing output - {0}'.format(stdout))
@@ -50,11 +49,9 @@ class SwiftDiags(checks.AgentCheck):
super(SwiftDiags, self).__init__(name, init_config, agent_config)

def check(self, instance):
"""
Get swift checks and propagate.
"""Get swift checks and propagate.
The checks are part of HP swift-diags package and checks are
are run only if the package exists.

"""
if not (os.path.exists(DIAG_COMMAND) and
os.path.exists(CHECKER_COMMAND)):

@@ -20,7 +20,7 @@ import monasca_agent.common.util as util

# set up logging before importing any other components
util.initialize_logging('collector')
os.umask(022)
os.umask(0o22)

# Check we're not using an old version of Python. We need 2.4 above because
# some modules (like subprocess) were only introduced in 2.4.

@@ -264,7 +264,6 @@ def main():
run_check(check)

elif 'configcheck' == command or 'configtest' == command:
osname = util.get_os()
all_valid = True
paths = util.Paths()
for conf_path in glob.glob(os.path.join(paths.get_confd_path(), "*.yaml")):

@@ -317,6 +316,7 @@ def main():

return 0


def run_check(check):

is_multi_threaded = False

@@ -20,7 +20,7 @@ from monasca_agent.collector.virt.hyperv import utilsv2
from monasca_agent.collector.virt import inspector as virt_inspector


def instance_name(instance):
def get_instance_name(instance):
"""Shortcut to get instance name."""
return getattr(instance, 'OS-EXT-SRV-ATTR:instance_name', None)

@@ -279,5 +279,5 @@ def get_hypervisor_inspector():
cfg.CONF.hypervisor_inspector,
invoke_on_load=True)
return mgr.driver
except ImportError as e:
except ImportError:
return Inspector()

@@ -90,7 +90,7 @@ class VsphereInspector(virt_inspector.Inspector):
vm_moid = self._ops.get_vm_moid(instance.id)
if vm_moid is None:
raise virt_inspector.InstanceNotFoundException(
_('VM %s not found in VMware Vsphere') % instance.id)
'VM %s not found in VMware Vsphere' % instance.id)
cpu_util_counter_id = self._ops.get_perf_counter_id(
VC_AVERAGE_CPU_CONSUMED_CNTR)
cpu_util = self._ops.query_vm_aggregate_stats(

@@ -107,7 +107,7 @@ class VsphereInspector(virt_inspector.Inspector):
vm_moid = self._ops.get_vm_moid(instance.id)
if not vm_moid:
raise virt_inspector.InstanceNotFoundException(
_('VM %s not found in VMware Vsphere') % instance.id)
'VM %s not found in VMware Vsphere' % instance.id)

vnic_stats = {}
vnic_ids = set()

@@ -139,7 +139,7 @@ class VsphereInspector(virt_inspector.Inspector):
vm_moid = self._ops.get_vm_moid(instance.id)
if vm_moid is None:
raise virt_inspector.InstanceNotFoundException(
_('VM %s not found in VMware Vsphere') % instance.id)
'VM %s not found in VMware Vsphere' % instance.id)
mem_counter_id = self._ops.get_perf_counter_id(
VC_AVERAGE_MEMORY_CONSUMED_CNTR)
memory = self._ops.query_vm_aggregate_stats(

@@ -152,7 +152,7 @@ class VsphereInspector(virt_inspector.Inspector):
vm_moid = self._ops.get_vm_moid(instance.id)
if not vm_moid:
raise virt_inspector.InstanceNotFoundException(
_('VM %s not found in VMware Vsphere') % instance.id)
'VM %s not found in VMware Vsphere' % instance.id)

disk_stats = {}
disk_ids = set()

@@ -57,23 +57,23 @@ class XenapiException(virt_inspector.InspectorException):

def get_api_session():
if not api:
raise ImportError(_('XenAPI not installed'))
raise ImportError('XenAPI not installed')

url = CONF.xenapi.connection_url
username = CONF.xenapi.connection_username
password = CONF.xenapi.connection_password
if not url or password is None:
raise XenapiException(_('Must specify connection_url, and '
'connection_password to use'))
raise XenapiException('Must specify connection_url, and '
'connection_password to use')

exception = api.Failure(_("Unable to log in to XenAPI "
"(is the Dom0 disk full?)"))
exception = api.Failure("Unable to log in to XenAPI "
"(is the Dom0 disk full?)")
try:
session = api.Session(url)
with timeout.Timeout(CONF.xenapi.login_timeout, exception):
session.login_with_password(username, password)
except api.Failure as e:
msg = _("Could not connect to XenAPI: %s") % e.details[0]
msg = "Could not connect to XenAPI: %s" % e.details[0]
raise XenapiException(msg)
return session

@@ -96,10 +96,10 @@ class XenapiInspector(virt_inspector.Inspector):
n = len(vm_refs)
if n == 0:
raise virt_inspector.InstanceNotFoundException(
_('VM %s not found in XenServer') % instance_name)
'VM %s not found in XenServer' % instance_name)
elif n > 1:
raise XenapiException(
_('Multiple VM %s found in XenServer') % instance_name)
'Multiple VM %s found in XenServer' % instance_name)
else:
return vm_refs[0]
@@ -112,7 +112,7 @@ class XenapiInspector(virt_inspector.Inspector):
vcpus_number = metrics_rec['VCPUs_number']
vcpus_utils = metrics_rec['VCPUs_utilisation']
if len(vcpus_utils) == 0:
msg = _("Could not get VM %s CPU Utilization") % instance_name
msg = "Could not get VM %s CPU Utilization" % instance_name
raise XenapiException(msg)

utils = 0.0

@@ -34,10 +34,7 @@ class InvalidDimensionValue(Exception):


class MetricsAggregator(object):

"""
A metric aggregator class.
"""
"""A metric aggregator class."""

def __init__(self, hostname, recent_point_threshold=None):
self.events = []

@@ -63,10 +60,8 @@ class MetricsAggregator(object):
priority=None,
dimensions=None,
hostname=None):
event = {
'msg_title': title,
'msg_text': text,
}
event = {'msg_title': title,
'msg_text': text}
if date_happened is not None:
event['timestamp'] = date_happened
else:

@@ -97,7 +92,7 @@ class MetricsAggregator(object):
for context, metric in self.metrics.items():
try:
metrics.extend(metric.flush(timestamp))
except Exception as e:
except Exception:
log.exception('Error flushing {0} metrics.'.format(metric.name))

# Log a warning regarding metrics with old timestamps being submitted

@@ -31,6 +31,7 @@ NTP_OFFSET_THRESHOLD = 600

log = logging.getLogger(__name__)


class Stylizer(object):

STYLES = {

@@ -102,9 +103,7 @@ def get_ntp_info():


class AgentStatus(object):

"""
A small class used to load and save status messages to the filesystem.
"""A small class used to load and save status messages to the filesystem.
"""

NAME = None

@@ -497,8 +496,6 @@ class CollectorStatus(AgentStatus):
check_status['error'] = es.error
status_info['emitter'].append(check_status)

osname = config.get_os()

paths = util.Paths()
try:
status_info['confd_path'] = paths.get_confd_path()

@@ -9,6 +9,7 @@ try:
except ImportError:
from yaml import Loader

from monasca_agent.common.exceptions import PathNotFound
import monasca_agent.common.singleton as singleton

DEFAULT_CONFIG_FILE = '/etc/monasca/agent/agent.yaml'

@@ -17,10 +18,11 @@ LOGGING_MAX_BYTES = 5 * 1024 * 1024

log = logging.getLogger(__name__)


class Config(object):
# Make this a singleton class so we don't get the config every time
# the class is created
__metaclass__ = singleton.Singleton
six.add_metaclass(singleton.Singleton)

def __init__(self, configFile=None):
if configFile is not None:

@@ -146,10 +148,10 @@ def main():
api_config = configuration.get_config('Api')
statsd_config = configuration.get_config('Statsd')
logging_config = configuration.get_config('Logging')
print "Main Configuration: \n {0}".format(config)
print "\nApi Configuration: \n {0}".format(api_config)
print "\nStatsd Configuration: \n {0}".format(statsd_config)
print "\nLogging Configuration: \n {0}".format(logging_config)
print("Main Configuration: \n {0}".format(config))
print("\nApi Configuration: \n {0}".format(api_config))
print("\nStatsd Configuration: \n {0}".format(statsd_config))
print("\nLogging Configuration: \n {0}".format(logging_config))


if __name__ == "__main__":

@@ -12,29 +12,28 @@

# Core modules
import atexit
import errno
import logging
import os
import signal
import sys
import time
import logging
import errno
import signal

log = logging.getLogger(__name__)


class AgentSupervisor(object):

''' A simple supervisor to keep a restart a child on expected auto-restarts
'''
"""A simple supervisor to keep a restart a child on expected auto-restarts
"""
RESTART_EXIT_STATUS = 5

@classmethod
def start(cls, parent_func, child_func=None):
''' `parent_func` is a function that's called every time the child
"""`parent_func` is a function that's called every time the child
process dies.
`child_func` is a function that should be run by the forked child
that will auto-restart with the RESTART_EXIT_STATUS.
'''
"""

# Allow the child process to die on SIGTERM
signal.signal(signal.SIGTERM, cls._handle_sigterm)

@@ -70,9 +69,7 @@ class AgentSupervisor(object):


class Daemon(object):

"""
A generic daemon class.
"""A generic daemon class.

Usage: subclass the Daemon class and override the run() method
"""

@@ -86,8 +83,7 @@ class Daemon(object):
self.pidfile = pidfile

def daemonize(self):
"""
Do the UNIX double-fork magic, see Stevens' "Advanced
"""Do the UNIX double-fork magic, see Stevens' "Advanced
Programming in the UNIX Environment" for details (ISBN 0201563177)
http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
"""
@@ -208,22 +204,19 @@ class Daemon(object):
self.start()

def run(self):
"""
You should override this method when you subclass Daemon. It will be called after the process has been
daemonized by start() or restart().
"""You should override this method when you subclass Daemon.
It will be called after the process has been daemonized by start() or restart().
"""
raise NotImplementedError

def info(self):
"""
You should override this method when you subclass Daemon. It will be
called to provide information about the status of the process
"""You should override this method when you subclass Daemon.
It will be called to provide information about the status of the process
"""
raise NotImplementedError

def status(self):
"""
Get the status of the daemon. Exits with 0 if running, 1 if not.
"""Get the status of the daemon. Exits with 0 if running, 1 if not.
"""
pid = self.pid()

@@ -12,3 +12,7 @@ class CheckException(Exception):

class NaN(CheckException):
pass


class PathNotFound(Exception):
pass

@@ -1,4 +1,5 @@
import logging
import six

from monascaclient import ksclient

@@ -10,7 +11,7 @@ log = logging.getLogger(__name__)
class Keystone(object):
# Make this a singleton class so we don't get the token every time
# the class is created
__metaclass__ = singleton.Singleton
six.add_metaclass(singleton.Singleton)

def __init__(self, config):
self.config = config

@@ -4,7 +4,8 @@ from collections import namedtuple
import logging
from time import time

from monasca_agent.common.exceptions import Infinity, UnknownValue
from monasca_agent.common.exceptions import Infinity
from monasca_agent.common.exceptions import UnknownValue


log = logging.getLogger(__name__)

@@ -30,9 +31,7 @@ class MetricTypes(object):


class Metric(object):

"""
A base metric class that accepts points, slices them into time intervals
"""A base metric class that accepts points, slices them into time intervals
and performs roll-ups within those intervals.
"""

@@ -46,7 +45,6 @@ class Metric(object):


class Gauge(Metric):

"""A metric that tracks a value at particular points in time. """

def __init__(self, formatter, name, dimensions,

@@ -87,7 +85,6 @@ class Gauge(Metric):


class Counter(Metric):

"""A metric that tracks a counter value. """

def __init__(self, formatter, name, dimensions,

@@ -129,7 +126,6 @@ class Counter(Metric):


class Histogram(Metric):

"""A metric to track the distribution of a set of values. """

def __init__(self, formatter, name, dimensions,

@@ -202,7 +198,6 @@ class Histogram(Metric):


class Set(Metric):

"""A metric to track the number of unique elements in a set. """

def __init__(self, formatter, name, dimensions,

@@ -224,7 +219,6 @@ class Set(Metric):
self.values.add(value)

def flush(self, timestamp):
metrics = []
if not self.values:
return []
else:

@@ -243,7 +237,6 @@ class Set(Metric):


class Rate(Metric):

"""Track the rate of metrics over each flush interval """

def __init__(self, formatter, name, dimensions,

@@ -1,9 +1,9 @@
import inspect
import imp
import itertools
import glob
import math
import hashlib
import imp
import inspect
import itertools
import math
import optparse
import os
import platform

@@ -23,7 +23,8 @@ log = logging.getLogger(__name__)

# Tornado
try:
from tornado import ioloop, version_info as tornado_version
from tornado import ioloop
from tornado import version_info as tornado_version
except ImportError:
# We are likely running the agent without the forwarder and tornado is not installed
# Generate a warning

@@ -37,10 +38,7 @@ LOGGING_MAX_BYTES = 5 * 1024 * 1024
NumericTypes = (float, int, long)

import monasca_agent.common.config as configuration


class PathNotFound(Exception):
pass
from monasca_agent.common.exceptions import PathNotFound


class Watchdog(object):

@@ -88,7 +86,6 @@ class Watchdog(object):


class PidFile(object):

"""A small helper class for pidfiles. """

PID_DIR = '/var/run/monasca-agent'
@@ -147,9 +144,7 @@ class PidFile(object):


class LaconicFilter(logging.Filter):

"""
Filters messages, only print them once while keeping memory under control
"""Filters messages, only print them once while keeping memory under control
"""
LACONIC_MEM_LIMIT = 1024

@@ -177,7 +172,6 @@ class LaconicFilter(logging.Filter):


class Timer(object):

"""Helper class """

def __init__(self):

@@ -203,9 +197,7 @@ class Timer(object):


class Platform(object):

"""
Return information about the given platform.
"""Return information about the given platform.
"""
@staticmethod
def is_darwin(name=None):

@@ -249,9 +241,7 @@ class Platform(object):


class Dimensions(object):

"""
Class to update the default dimensions.
"""Class to update the default dimensions.
"""

def __init__(self, agent_config):

@@ -276,9 +266,7 @@ class Dimensions(object):


class Paths(object):

"""
Return information about system paths.
"""Return information about system paths.
"""
def __init__(self):
self.osname = get_os()

@@ -316,7 +304,7 @@ class Paths(object):
raise PathNotFound(path)

def _windows_confd_path(self):
common_data = _windows_commondata_path()
common_data = self._windows_commondata_path()
path = os.path.join(common_data, 'Datadog', 'conf.d')
if os.path.exists(path):
return path

@@ -357,7 +345,8 @@ class Paths(object):
how-do-i-find-the-windows-common-application-data-folder-using-python
"""
import ctypes
from ctypes import wintypes, windll
from ctypes import windll
from ctypes import wintypes

_SHGetFolderPath = windll.shell32.SHGetFolderPathW
_SHGetFolderPath.argtypes = [wintypes.HWND,

@@ -385,6 +374,7 @@ class Paths(object):
log.info("Windows certificate path: %s" % crt_path)
tornado.simple_httpclient._DEFAULT_CA_CERTS = crt_path


def plural(count):
if count == 1:
return ""

@@ -488,8 +478,7 @@ def is_valid_hostname(hostname):


def get_hostname():
"""
Get the canonical host name this agent should identify as. This is
"""Get the canonical host name this agent should identify as. This is
the authoritative source of the host name for the agent.

Tries, in order:

@@ -558,9 +547,11 @@ def get_parsed_args():


def load_check_directory():
''' Return the initialized checks from checks_d, and a mapping of checks that failed to
"""Return the initialized checks from checks_d, and a mapping of checks that failed to
initialize. Only checks that have a configuration
file in conf.d will be returned. '''
file in conf.d will be returned.
"""

from monasca_agent.collector.checks import AgentCheck

config = configuration.Config()

@@ -1,11 +1,11 @@
import collections
import copy
import logging
import time
import random
import time

import monascaclient.client
import monasca_agent.common.keystone as keystone
import monascaclient.client

log = logging.getLogger(__name__)

@@ -22,9 +22,7 @@ class MonascaAPI(object):
MAX_BACKOFF = 60 # seconds

def __init__(self, config):
"""
Initialize Mon api client connection.
"""
"""Initialize Mon api client connection."""
self.config = config
self.url = config['url']
self.api_version = '2_0'

@@ -6,11 +6,11 @@
"""

# Standard imports
import socket
import datetime
import logging
import signal
import socket
import sys
import datetime

# set up logging before importing any other components
import monasca_agent.common.util as util
@@ -18,19 +18,19 @@ import monasca_agent.common.util as util
util.initialize_logging('forwarder')

import os
os.umask(022)
os.umask(0o22)

# Tornado
import tornado.escape
import tornado.httpclient
import tornado.httpserver
import tornado.ioloop
import tornado.web
import tornado.escape
import tornado.options
import tornado.web

# agent import
import monasca_agent.common.config as cfg
import monasca_agent.common.check_status as check_status
import monasca_agent.common.config as cfg
import monasca_agent.common.metrics as metrics
import monasca_agent.common.util as util
import monasca_agent.forwarder.api.monasca_api as mon

@@ -134,7 +134,8 @@ class Forwarder(tornado.web.Application):
def log_request(self, handler):
"""Override the tornado logging method.
If everything goes well, log level is DEBUG.
Otherwise it's WARNING or ERROR depending on the response code. """
Otherwise it's WARNING or ERROR depending on the response code.
"""
if handler.get_status() < 400:
log_method = log.debug
elif handler.get_status() < 500:

@@ -1,9 +1,10 @@
# stdlib
import sys
import time
from datetime import datetime, timedelta
from datetime import datetime
from datetime import timedelta
import logging
from operator import attrgetter
import sys
import time

# project
import monasca_agent.common.check_status as check_status

@@ -116,9 +117,9 @@ class MetricTransaction(Transaction):


class TransactionManager(util.Dimensions):

"""Holds any transaction derived object list and make sure they
are all commited, without exceeding parameters (throttling, memory consumption) """
are all commited, without exceeding parameters (throttling, memory consumption)
"""

def __init__(self, max_wait_for_replay, max_queue_size, throttling_delay, agent_config):
super(TransactionManager, self).__init__(agent_config)

@@ -1,6 +1,7 @@
import json
import logging
import threading

import monasca_agent.common.check_status as check_status
import monasca_agent.common.emitter as emitter
import monasca_agent.common.util as util

@@ -18,8 +19,7 @@ EVENT_CHUNK_SIZE = 50


class Reporter(threading.Thread):
"""
The reporter periodically sends the aggregated metrics to the
"""The reporter periodically sends the aggregated metrics to the
server.
"""

@@ -21,10 +21,7 @@ metric_class = {


class Server(object):

"""
A statsd udp server.
"""
"""A statsd udp server."""

def __init__(self, aggregator, host, port, forward_to_host=None, forward_to_port=None):
self.host = host

@@ -1,27 +1,30 @@
# set up logging before importing any other components
from collector import modules
from config import initialize_logging
from monasca_agent.pup import pup
from collector import modules
from monasca_agent.statsd import daemon

initialize_logging('collector')

import win32serviceutil
import win32service
import win32event
import sys
import logging
import time
import multiprocessing
import sys
import time
import win32event
import win32service
import win32serviceutil

from optparse import Values
from collector.checks.collector import Collector
from emitter import http_emitter
from ddagent import Application
from win32.common import handle_exe_click
from collector.jmxfetch import JMXFetch
from ddagent import Application
from emitter import http_emitter
from optparse import Values
from win32.common import handle_exe_click

from monasca_agent.common.config import get_config, load_check_directory, set_win32_cert_path
from monasca_agent.common.config import get_config
from monasca_agent.common.config import load_check_directory
from monasca_agent.common.config import set_win32_cert_path
from monasca_agent.statsd.daemon import MonascaStatsd

log = logging.getLogger(__name__)
RESTART_INTERVAL = 24 * 60 * 60 # Defaults to 1 day

@@ -2,9 +2,10 @@ import ctypes


def handle_exe_click(name):
''' When the executables are clicked directly in the UI, we must let the
"""When the executables are clicked directly in the UI, we must let the
user know that they have to install the program as a service instead of
running it directly. '''
running it directly.
"""
message = """To use %(name)s, you must install it as a service.

To install %(name)s as a service, you must run the following in the console:
@@ -3,34 +3,54 @@
# Licensed under the terms of the CECILL License
# Modified for Datadog

import yaml
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
import sys
import os
import os.path as osp
import sys
import threading as thread
import webbrowser
import thread # To manage the windows process asynchronously
import yaml

import win32serviceutil
import win32service
import win32serviceutil

# GUI Imports
from guidata.qt.QtCore import SIGNAL, Qt, QSize, QPoint, QTimer
from guidata.qt.QtGui import QInputDialog, QWidget, QFont, QLabel, QGroupBox, QHBoxLayout, QSystemTrayIcon
from guidata.qt.QtGui import QVBoxLayout, QPushButton, QSplitter, QListWidget, QMenu, QMessageBox
from guidata.qt.QtCore import QPoint
from guidata.qt.QtCore import QSize
from guidata.qt.QtCore import Qt
from guidata.qt.QtCore import QTimer
from guidata.qt.QtCore import SIGNAL

from guidata.configtools import get_icon, get_family, MONOSPACE
from guidata.qt.QtGui import QFont
from guidata.qt.QtGui import QGroupBox
from guidata.qt.QtGui import QHBoxLayout
from guidata.qt.QtGui import QInputDialog
from guidata.qt.QtGui import QLabel
from guidata.qt.QtGui import QListWidget
from guidata.qt.QtGui import QMenu
from guidata.qt.QtGui import QMessageBox
from guidata.qt.QtGui import QPushButton
from guidata.qt.QtGui import QSplitter
from guidata.qt.QtGui import QSystemTrayIcon
from guidata.qt.QtGui import QVBoxLayout
from guidata.qt.QtGui import QWidget

from guidata.configtools import get_family
from guidata.configtools import get_icon
from guidata.configtools import MONOSPACE
from guidata.qthelpers import get_std_icon
from spyderlib.widgets.sourcecode.codeeditor import CodeEditor


# Datadog
from common.util import get_os
from config import (get_confd_path, get_config_path, get_config,
_windows_commondata_path)
from config import _windows_commondata_path
from config import get_confd_path
from config import get_config
from config import get_config_path

EXCLUDED_WINDOWS_CHECKS = [
'cacti', 'directory', 'gearmand',

@@ -88,5 +88,5 @@ def save_plugin_config(config_dir, plugin_name, user, conf):
allow_unicode=True,
default_flow_style=False))
gid = pwd.getpwnam(user).pw_gid
os.chmod(config_path, 0640)
os.chmod(config_path, 0o640)
os.chown(config_path, 0, gid)

@@ -1,7 +1,7 @@
from args_plugin import ArgsPlugin
from plugin import Plugin
from service_plugin import ServicePlugin
from utils import find_process_cmdline
from utils import find_process_name
from utils import watch_process
from utils import service_api_check
from args_plugin import ArgsPlugin
from service_plugin import ServicePlugin
from utils import watch_process

@@ -10,8 +10,7 @@ class ArgsPlugin(Plugin):
"""Base plugin for detection plugins that take arguments for configuration rather than do detection."""

def _build_instance(self, arg_list):
""" If a value for each arg in the arg_list was specified build it into an instance dictionary. Also check
for dimensions and add if they were specified.
"""If a value for each arg in the arg_list was specified build it into an instance dictionary. Also check for dimensions and add if they were specified.
:param arg_list: Arguments to include
:return: instance dictionary
"""

@@ -2,8 +2,8 @@

Detection classes should be platform independent
"""
import sys
import logging
import sys

log = logging.getLogger(__name__)

@@ -1 +0,0 @@

@@ -102,11 +102,11 @@ class Apache(monasca_setup.detection.Plugin):
log.info("\tSuccessfully setup Apache plugin.")
else:
log.warn('Unable to access the Apache server-status URL;' + error_msg)
except urllib2.URLError, e:
except urllib2.URLError as e:
log.error('\tError {0} received when accessing url {1}.'.format(e.reason, apache_url) +
'\n\tPlease ensure the Apache web server is running and your configuration ' +
'information in /root/.apache.cnf is correct.' + error_msg)
except urllib2.HTTPError, e:
except urllib2.HTTPError as e:
log.error('\tError code {0} received when accessing {1}'.format(e.code, apache_url) + error_msg)
else:
log.error('\tThe dependencies for Apache Web Server are not installed or unavailable.' + error_msg)

@@ -8,14 +8,12 @@ log = logging.getLogger(__name__)


class Crash(Plugin):
"""
Detect if kdump is installed and enabled and setup configuration to
"""Detect if kdump is installed and enabled and setup configuration to
monitor for crash dumps.
"""

def _detect(self):
"""
Run detection, set self.available True if a crash kernel is loaded.
"""Run detection, set self.available True if a crash kernel is loaded.
"""
loaded = '/sys/kernel/kexec_crash_loaded'
if os.path.isfile(loaded):

@@ -24,8 +22,7 @@ class Crash(Plugin):
self.available = True

def build_config(self):
"""
Build the config as a Plugin object and return it.
"""Build the config as a Plugin object and return it.
"""
log.info('\tEnabling the Monasca crash dump healthcheck')
config = agent_config.Plugins()
@@ -1,12 +1,17 @@
import logging
import re
from subprocess import CalledProcessError, STDOUT
from subprocess import CalledProcessError
from subprocess import STDOUT

from monasca_setup.detection import Plugin, find_process_cmdline, watch_process
from monasca_setup.detection.utils import find_addr_listening_on_port
from monasca_setup.detection.utils import check_output
from monasca_setup import agent_config

from monasca_setup.detection import find_process_cmdline
from monasca_setup.detection import Plugin
from monasca_setup.detection import watch_process

from monasca_setup.detection.utils import check_output
from monasca_setup.detection.utils import find_addr_listening_on_port

log = logging.getLogger(__name__)

@@ -1,9 +1,10 @@
import ConfigParser
import logging
import os
import subprocess
import ConfigParser
import monasca_setup.detection

import monasca_setup.agent_config
import monasca_setup.detection

from distutils.version import LooseVersion

@@ -101,9 +102,9 @@ class Libvirt(monasca_setup.detection.Plugin):

def dependencies_installed(self):
try:
import novaclient
import time
import yaml
import novaclient
# novaclient module versions were renamed in version 2.22
if novaclient.__version__ < LooseVersion("2.22"):
import novaclient.v1_1.client

@@ -37,7 +37,6 @@ class MKLivestatus(monasca_setup.detection.Plugin):
"""Search common Nagios/Icinga config file locations for mk_livestatus
broker module socket path
"""
socket_path = None
# Search likely Nagios/Icinga config file locations
for cfg_file in nagios_cfg_files:
if os.path.isfile(cfg_file):

@@ -9,7 +9,8 @@ import yaml

import monasca_setup.agent_config
import monasca_setup.detection
from monasca_setup.detection import find_process_cmdline, watch_process
from monasca_setup.detection import find_process_cmdline
from monasca_setup.detection import watch_process

log = logging.getLogger(__name__)

@@ -38,7 +39,6 @@ class MonAPI(monasca_setup.detection.Plugin):
def build_config(self):
"""Build the config as a Plugins object and return."""
log.info("\tEnabling the Monasca api healthcheck")
admin_port = self.api_config['server']['adminConnectors'][0]['port']
config = monasca_setup.agent_config.Plugins()
config.merge(dropwizard_health_check('monitoring', 'api', 'http://localhost:8081/healthcheck'))

@@ -38,8 +38,8 @@ class MySQL(monasca_setup.detection.Plugin):
# Attempt login, requires either an empty root password from localhost
# or relying on a configured /root/.my.cnf
if self.dependencies_installed(): # ensures MySQLdb is available
import MySQLdb
import _mysql_exceptions
import MySQLdb
try:
MySQLdb.connect(read_default_file=mysql_conf)
except _mysql_exceptions.MySQLError:

@@ -17,7 +17,7 @@ class Postfix(monasca_setup.detection.Plugin):
"""
if monasca_setup.detection.find_process_cmdline('postfix') is not None:
# Test for sudo access
# TODO: don't hardcode the user. Need to get it from the arguments to monasca_setup
# TODO(craig): don't hardcode the user. Need to get it from the arguments to monasca_setup
test_sudo = os.system('sudo -l -U mon-agent find /var/spool/postfix/incoming -type f > /dev/null')
if test_sudo != 0:
log.info("Postfix found but the required sudo access is not configured.\n\t" +

@@ -2,8 +2,8 @@ import logging
import os
import yaml

from monasca_setup.detection import Plugin
from monasca_setup import agent_config
from monasca_setup.detection import Plugin

log = logging.getLogger(__name__)

@@ -2,7 +2,9 @@
"""
import logging
import subprocess
from subprocess import Popen, PIPE, CalledProcessError
from subprocess import CalledProcessError
from subprocess import PIPE
from subprocess import Popen

import psutil

@@ -29,7 +29,7 @@ class LinuxInit(service.Service):
# todo log dir is hardcoded
for path in (self.log_dir, self.config_dir, '%s/conf.d' % self.config_dir):
if not os.path.exists(path):
os.makedirs(path, 0755)
os.makedirs(path, 0o755)
os.chown(path, 0, user.pw_gid)
# the log dir needs to be writable by the user
os.chown(self.log_dir, user.pw_uid, user.pw_gid)

@@ -66,7 +66,7 @@ class Systemd(LinuxInit):
service_script.write(template.read().format(prefix=self.prefix_dir, monasca_user=self.username,
config_dir=self.config_dir))
os.chown(init_path, 0, 0)
os.chmod(init_path, 0644)
os.chmod(init_path, 0o644)

# Enable the service
subprocess.check_call(['systemctl', 'daemon-reload'])

@@ -128,7 +128,7 @@ class SysV(LinuxInit):
conf.write(template.read().format(prefix=self.prefix_dir, monasca_user=self.username,
config_dir=self.config_dir))
os.chown(self.init_script, 0, 0)
os.chmod(self.init_script, 0755)
os.chmod(self.init_script, 0o755)

for runlevel in ['2', '3', '4', '5']:
link_path = '/etc/rc%s.d/S10monasca-agent' % runlevel

@@ -44,8 +44,7 @@ def discover_plugins(custom_path):


def select_plugins(plugin_names, plugin_list):
"""
:param plugin_names: A list of names
""":param plugin_names: A list of names
:param plugin_list: A list of detection plugins classes
:return: Returns a list of plugins from plugin_list that match plugin_names
"""

@@ -85,4 +84,4 @@ def write_template(template_path, out_path, variables, group, is_yaml=False):
else:
conf.write(contents)
os.chown(out_path, 0, group)
os.chmod(out_path, 0640)
os.chmod(out_path, 0o640)
@@ -1,2 +1,3 @@
hacking>=0.9.2,<0.10
hacking>=0.10.2
flake8>=2.2.4
nose==1.3.0
tox.ini
@@ -21,11 +21,14 @@ commands = {posargs}

[flake8]
max-line-length = 120
max-complexity = 30
# TODO: ignored checks should be enabled in the future
# E501 Line length > 80 characters
# F401 module imported but unused
# H302 import only modules
# H904 Wrap long lines in parentheses instead of a backslash (DEPRECATED)
ignore = E501, F401, H302, H904,
# H405 Multiline docstring separated by empty line
# H105 Don't use author tags
ignore = E501,F401,H302,H904,H803,H405,H105
show-source = True
exclude=.venv,.git,.tox,dist,*egg,build
exclude=.venv,.git,.tox,dist,*egg,build,tests
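For reference, the [flake8] section as it reads after this change, assembled from the lines added in the hunk above:

    [flake8]
    max-line-length = 120
    max-complexity = 30
    # TODO: ignored checks should be enabled in the future
    # E501 Line length > 80 characters
    # F401 module imported but unused
    # H302 import only modules
    # H904 Wrap long lines in parentheses instead of a backslash (DEPRECATED)
    # H405 Multiline docstring separated by empty line
    # H105 Don't use author tags
    ignore = E501,F401,H302,H904,H803,H405,H105
    show-source = True
    exclude=.venv,.git,.tox,dist,*egg,build,tests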