Pulled out unused bits, moved pup to standalone

Tim Kuhlman 2014-04-25 12:31:07 -06:00
parent 174f6a22ef
commit f3275ae606
49 changed files with 4 additions and 2231 deletions

View File

@@ -44,9 +44,6 @@ use_mount: no
# Change port the Agent is listening to
# listen_port: 17123
# Start a graphite listener on this port
# graphite_listen_port: 17124
# Additional directory to look for Datadog checks
# additional_checksd: /etc/dd-agent/checks.d/

View File

@@ -19,7 +19,6 @@ from cStringIO import StringIO
# project
from util import get_os, yaml, yLoader, Platform
from jmxfetch import JMXFetch, JMX_COLLECT_COMMAND
from migration import migrate_old_style_configuration
# CONSTANTS
DATADOG_CONF = "datadog.conf"
@@ -197,7 +196,6 @@ def get_config(parse_args=True, cfg_path=None, options=None):
'dogstatsd_normalize': 'yes',
'dogstatsd_port': 8125,
'dogstatsd_target': 'http://localhost:17123',
'graphite_listen_port': None,
'hostname': None,
'listen_port': None,
'tags': None,
@@ -313,13 +311,6 @@ def get_config(parse_args=True, cfg_path=None, options=None):
if config.get('Main', 'watchdog').lower() in ('no', 'false'):
agentConfig['watchdog'] = False
# Optional graphite listener
if config.has_option('Main', 'graphite_listen_port'):
agentConfig['graphite_listen_port'] = \
int(config.get('Main', 'graphite_listen_port'))
else:
agentConfig['graphite_listen_port'] = None
# Dogstatsd config
dogstatsd_defaults = {
'dogstatsd_port': 8125,
@@ -657,9 +648,6 @@ def load_check_directory(agentConfig):
log.error("No conf.d folder found at '%s' or in the directory where the Agent is currently deployed.\n" % e.args[0])
sys.exit(3)
# Migrate datadog.conf integration configurations that are not supported anymore
migrate_old_style_configuration(agentConfig, confd_path, get_config_path(None, os_name=get_os()))
# Start JMXFetch if needed
JMXFetch.init(confd_path, agentConfig, get_logging_config(), DEFAULT_CHECK_FREQUENCY, JMX_COLLECT_COMMAND)

View File

@@ -3,22 +3,7 @@ import sys
from pprint import pformat as pp
from util import json, md5, get_os
from config import get_ssl_certificate
def get_http_library(proxy_settings, use_forwarder):
#There is a bug in the https proxy connection in urllib2 on python < 2.6.3
if use_forwarder:
# We are using the forwarder, so it's local traffic. We don't use the proxy
import urllib2
elif proxy_settings is None or int(sys.version_info[1]) >= 7\
or (int(sys.version_info[1]) == 6 and int(sys.version_info[2]) >= 3):
# Python version >= 2.6.3
import urllib2
else:
# Python version < 2.6.3
import urllib2proxy as urllib2
return urllib2
import urllib2
def post_headers(agentConfig, payload):
return {
@@ -49,7 +34,6 @@ def http_emitter(message, log, agentConfig):
headers = post_headers(agentConfig, zipped)
proxy_settings = agentConfig.get('proxy_settings', None)
urllib2 = get_http_library(proxy_settings, agentConfig['use_forwarder'])
try:
request = urllib2.Request(url, zipped, headers)
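
For context, a hedged sketch of the upload http_emitter performs around the Request built above; the endpoint URL, payload, and header values are illustrative assumptions, not values taken from this hunk:

# Illustrative sketch only (Python 2): the real http_emitter derives url,
# zipped and headers from agentConfig; the values below are assumptions.
import urllib2
import zlib

payload = '{"series": []}'                      # assumed JSON body
zipped = zlib.compress(payload)                 # compressed payload, as above
headers = {'Content-Type': 'application/json',
           'Content-Encoding': 'deflate'}       # assumed header values
request = urllib2.Request('http://localhost:17123/intake', zipped, headers)
response = urllib2.urlopen(request)
print response.getcode()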

View File

@@ -431,17 +431,6 @@ class Application(tornado.web.Application):
tr_sched = tornado.ioloop.PeriodicCallback(flush_trs,TRANSACTION_FLUSH_INTERVAL,
io_loop = self.mloop)
# Register optional Graphite listener
gport = self._agentConfig.get("graphite_listen_port", None)
if gport is not None:
log.info("Starting graphite listener on port %s" % gport)
from graphite import GraphiteServer
gs = GraphiteServer(self, get_hostname(self._agentConfig), io_loop=self.mloop)
if non_local_traffic is True:
gs.listen(gport)
else:
gs.listen(gport, address = "localhost")
# Start everything
if self._watchdog:
self._watchdog.reset()

View File

@@ -1,122 +0,0 @@
# pylint: disable=no-name-in-module
import struct
import logging
import cPickle as pickle
from tornado.ioloop import IOLoop
log = logging.getLogger(__name__)
try:
from tornado.netutil import TCPServer
except Exception, e:
try:
from tornado.tcpserver import TCPServer
except Exception:
log.warn("Tornado < 2.1.1 detected, using compatibility TCPServer")
from compat.tornadotcpserver import TCPServer
class GraphiteServer(TCPServer):
def __init__(self, app, hostname, io_loop=None, ssl_options=None, **kwargs):
log.warn('Graphite listener is started -- if you do not need graphite, turn it off in datadog.conf.')
log.warn('Graphite relay uses pickle to transport messages. Pickle is not secured against remote execution exploits.')
log.warn('See http://blog.nelhage.com/2011/03/exploiting-pickle/ for more details')
self.app = app
self.hostname = hostname
TCPServer.__init__(self, io_loop=io_loop, ssl_options=ssl_options, **kwargs)
def handle_stream(self, stream, address):
GraphiteConnection(stream, address, self.app, self.hostname)
class GraphiteConnection(object):
def __init__(self, stream, address, app, hostname):
log.debug('received a new connection from %s', address)
self.app = app
self.stream = stream
self.address = address
self.hostname = hostname
self.stream.set_close_callback(self._on_close)
self.stream.read_bytes(4, self._on_read_header)
def _on_read_header(self, data):
try:
size = struct.unpack("!L", data)[0]
log.debug("Receiving a string of size:" + str(size))
self.stream.read_bytes(size, self._on_read_line)
except Exception, e:
log.error(e)
def _on_read_line(self, data):
log.debug('read a new line from %s', self.address)
self._decode(data)
def _on_close(self):
log.debug('client quit %s', self.address)
def _parseMetric(self, metric):
"""Graphite does not impose a particular metric structure.
So this is where you can insert logic to extract various bits
out of the graphite metric name.
For instance, if the hostname is in 4th position,
you could use: host = components[3]
"""
try:
components = metric.split('.')
host = self.hostname
metric = metric
device = "N/A"
return metric, host, device
except Exception, e:
log.exception("Unparsable metric: %s" % metric)
return None, None, None
def _postMetric(self, name, host, device, datapoint):
ts = datapoint[0]
value = datapoint[1]
self.app.appendMetric("graphite", name, host, device, ts, value)
def _processMetric(self, metric, datapoint):
"""Parse the metric name to fetch (host, metric, device) and
send the datapoint to datadog"""
log.debug("New metric: %s, values: %s" % (metric, datapoint))
(metric, host, device) = self._parseMetric(metric)
if metric is not None:
self._postMetric(metric, host, device, datapoint)
log.info("Posted metric: %s, host: %s, device: %s" % (metric, host, device))
def _decode(self, data):
try:
datapoints = pickle.loads(data)
except Exception:
log.exception("Cannot decode grapite points")
return
for (metric, datapoint) in datapoints:
try:
datapoint = (float(datapoint[0]), float(datapoint[1]))
except Exception, e:
log.error(e)
continue
self._processMetric(metric, datapoint)
self.stream.read_bytes(4, self._on_read_header)
def start_graphite_listener(port):
# Note: GraphiteServer.__init__ requires app and hostname arguments, so
# this standalone entry point fails with a TypeError as written.
echo_server = GraphiteServer()
echo_server.listen(port)
IOLoop.instance().start()
if __name__ == '__main__':
start_graphite_listener(17124)
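
The framing _on_read_header and _decode implement above is carbon's pickle protocol: a 4-byte big-endian length prefix, then a pickled list of (metric, (timestamp, value)) tuples. A minimal client sketch (the port matches the commented graphite_listen_port default of 17124; the metric name and value are illustrative):

# Send one datapoint framed the way GraphiteConnection reads it:
# a "!L" length header followed by the pickled body (Python 2).
import socket
import struct
import time
import cPickle as pickle

datapoints = [('web.cpu.user', (time.time(), 0.42))]
payload = pickle.dumps(datapoints)
sock = socket.create_connection(('localhost', 17124))
sock.sendall(struct.pack("!L", len(payload)) + payload)
sock.close()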

View File

@@ -1,382 +0,0 @@
"""
Module that tries to migrate old-style configuration to the checks.d interface
for checks that no longer support old-style configuration.
It also comments out the related lines in datadog.conf.
The point of entry is migrate_old_style_configuration, at the bottom of this file,
which is called when the checks.d directory is loaded as the Agent starts.
"""
# std
import os.path
import logging
import string
# 3rd party
from yaml import dump as dump_to_yaml
try:
from yaml import CDumper as Dumper
except ImportError:
from yaml import Dumper
log = logging.getLogger(__name__)
CASSANDRA_CONFIG = {
'init_config': {'conf': [{'exclude': {'attribute': ['MinimumCompactionThreshold',
'MaximumCompactionThreshold',
'RowCacheKeysToSave',
'KeyCacheSavePeriodInSeconds',
'RowCacheSavePeriodInSeconds',
'PendingTasks',
'Scores',
'RpcTimeout'],
'keyspace': 'system'},
'include': {'attribute': ['BloomFilterDiskSpaceUsed',
'BloomFilterFalsePositives',
'BloomFilterFalseRatio',
'Capacity',
'CompressionRatio',
'CompletedTasks',
'ExceptionCount',
'Hits',
'RecentHitRate',
'LiveDiskSpaceUsed',
'LiveSSTableCount',
'Load',
'MaxRowSize',
'MeanRowSize',
'MemtableColumnsCount',
'MemtableDataSize',
'MemtableSwitchCount',
'MinRowSize',
'ReadCount',
'Requests',
'Size',
'TotalDiskSpaceUsed',
'TotalReadLatencyMicros',
'TotalWriteLatencyMicros',
'UpdateInterval',
'WriteCount',
'PendingTasks'],
'domain': 'org.apache.cassandra.db'}},
{'include': {'attribute': ['ActiveCount',
'CompletedTasks',
'CurrentlyBlockedTasks',
'TotalBlockedTasks'],
'domain': 'org.apache.cassandra.internal'}},
{'include': {'attribute': ['TotalTimeouts'],
'domain': 'org.apache.cassandra.net'}}]},
'instances': [{'host': 'localhost', 'port': 7199}]
}
CASSANDRA_MAPPING = {
'cassandra_host': ('host', str),
'cassandra_port': ('port', int),
}
ACTIVEMQ_INIT_CONFIG = {
'conf': [{'include': {'Type': 'Queue',
'attribute': {'AverageEnqueueTime': {'alias': 'activemq.queue.avg_enqueue_time',
'metric_type': 'gauge'},
'ConsumerCount': {'alias': 'activemq.queue.consumer_count',
'metric_type': 'gauge'},
'DequeueCount': {'alias': 'activemq.queue.dequeue_count',
'metric_type': 'counter'},
'DispatchCount': {'alias': 'activemq.queue.dispatch_count',
'metric_type': 'counter'},
'EnqueueCount': {'alias': 'activemq.queue.enqueue_count',
'metric_type': 'counter'},
'ExpiredCount': {'alias': 'activemq.queue.expired_count',
'type': 'counter'},
'InFlightCount': {'alias': 'activemq.queue.in_flight_count',
'metric_type': 'counter'},
'MaxEnqueueTime': {'alias': 'activemq.queue.max_enqueue_time',
'metric_type': 'gauge'},
'MemoryPercentUsage': {'alias': 'activemq.queue.memory_pct',
'metric_type': 'gauge'},
'MinEnqueueTime': {'alias': 'activemq.queue.min_enqueue_time',
'metric_type': 'gauge'},
'ProducerCount': {'alias': 'activemq.queue.producer_count',
'metric_type': 'gauge'},
'QueueSize': {'alias': 'activemq.queue.size',
'metric_type': 'gauge'}}}},
{'include': {'Type': 'Broker',
'attribute': {'MemoryPercentUsage': {'alias': 'activemq.broker.memory_pct',
'metric_type': 'gauge'},
'StorePercentUsage': {'alias': 'activemq.broker.store_pct',
'metric_type': 'gauge'},
'TempPercentUsage': {'alias': 'activemq.broker.temp_pct',
'metric_type': 'gauge'}}}}]}
SOLR_INIT_CONFIG = {
'conf':
[{'include':
{'attribute':
{'maxDoc':
{'alias': 'solr.searcher.maxdoc',
'metric_type': 'gauge'},
'numDocs':
{'alias': 'solr.searcher.numdocs',
'metric_type': 'gauge'},
'warmupTime':
{'alias': 'solr.searcher.warmup',
'metric_type': 'gauge'}},
'type': 'searcher'}},
{'include':
{'attribute':
{'cumulative_evictions':
{'alias': 'solr.cache.evictions',
'metric_type': 'counter'},
'cumulative_hits': {'alias': 'solr.cache.hits',
'metric_type': 'counter'},
'cumulative_inserts': {'alias': 'solr.cache.inserts',
'metric_type': 'counter'},
'cumulative_lookups': {'alias': 'solr.cache.lookups',
'metric_type': 'counter'}},
'id': 'org.apache.solr.search.FastLRUCache'}},
{'include': {'attribute': {'cumulative_evictions': {'alias': 'solr.cache.evictions',
'metric_type': 'counter'},
'cumulative_hits': {'alias': 'solr.cache.hits',
'metric_type': 'counter'},
'cumulative_inserts': {'alias': 'solr.cache.inserts',
'metric_type': 'counter'},
'cumulative_lookups': {'alias': 'solr.cache.lookups',
'metric_type': 'counter'}},
'id': 'org.apache.solr.search.LRUCache'}},
{'include': {'attribute': {'avgRequestsPerSecond': {'alias': 'solr.search_handler.avg_requests_per_sec',
'metric_type': 'gauge'},
'avgTimePerRequest': {'alias': 'solr.search_handler.avg_time_per_req',
'metric_type': 'gauge'},
'errors': {'alias': 'solr.search_handler.errors',
'metric_type': 'counter'},
'requests': {'alias': 'solr.search_handler.requests',
'metric_type': 'counter'},
'timeouts': {'alias': 'solr.search_handler.timeouts',
'metric_type': 'counter'},
'totalTime': {'alias': 'solr.search_handler.time',
'metric_type': 'counter'}},
'id': 'org.apache.solr.handler.component.SearchHandler'}}]}
TOMCAT_INIT_CONFIG = {'conf': [{'include': {'attribute': {'currentThreadCount': {'alias': 'tomcat.threads.count',
'metric_type': 'gauge'},
'currentThreadsBusy': {'alias': 'tomcat.threads.busy',
'metric_type': 'gauge'},
'maxThreads': {'alias': 'tomcat.threads.max',
'metric_type': 'gauge'}},
'type': 'ThreadPool'}},
{'include': {'attribute': {'bytesReceived': {'alias': 'tomcat.bytes_rcvd',
'metric_type': 'counter'},
'bytesSent': {'alias': 'tomcat.bytes_sent',
'metric_type': 'counter'},
'errorCount': {'alias': 'tomcat.error_count',
'metric_type': 'counter'},
'maxTime': {'alias': 'tomcat.max_time',
'metric_type': 'gauge'},
'processingTime': {'alias': 'tomcat.processing_time',
'metric_type': 'counter'},
'requestCount': {'alias': 'tomcat.request_count',
'metric_type': 'counter'}},
'type': 'GlobalRequestProcessor'}},
{'include': {'attribute': {'errorCount': {'alias': 'tomcat.servlet.error_count',
'metric_type': 'counter'},
'processingTime': {'alias': 'tomcat.servlet.processing_time',
'metric_type': 'counter'},
'requestCount': {'alias': 'tomcat.servlet.request_count',
'metric_type': 'counter'}},
'j2eeType': 'Servlet'}},
{'include': {'accessCount': {'alias': 'tomcat.cache.access_count',
'metric_type': 'counter'},
'hitsCounts': {'alias': 'tomcat.cache.hits_count',
'metric_type': 'counter'},
'type': 'Cache'}},
{'include': {'jspCount': {'alias': 'tomcat.jsp.count',
'metric_type': 'counter'},
'jspReloadCount': {'alias': 'tomcat.jsp.reload_count',
'metric_type': 'counter'},
'type': 'JspMonitor'}}]}
class NoConfigToMigrateException(Exception): pass
def migrate_cassandra(agentConfig):
for old_key, params in CASSANDRA_MAPPING.iteritems():
new_key, param_type = params
if old_key not in agentConfig:
return None
CASSANDRA_CONFIG['instances'][0][new_key] = param_type(agentConfig[old_key])
return CASSANDRA_CONFIG
def migrate_tomcat(agentConfig):
return parse_jmx_agent_config(agentConfig, "tomcat", init_config=TOMCAT_INIT_CONFIG)
def migrate_solr(agentConfig):
return parse_jmx_agent_config(agentConfig, "solr", init_config=SOLR_INIT_CONFIG)
def migrate_activemq(agentConfig):
return parse_jmx_agent_config(agentConfig, 'activemq', init_config=ACTIVEMQ_INIT_CONFIG)
def migrate_java(agentConfig):
return parse_jmx_agent_config(agentConfig, 'java')
def _load_old_config(agentConfig, config_key):
""" Load the configuration according to the previous syntax in datadog.conf"""
connections = []
users = []
passwords = []
# We load the configuration according to the previous config schema
server = agentConfig.get("%s_jmx_server" % config_key, None)
user = agentConfig.get("%s_jmx_user" % config_key, None)
passw = agentConfig.get("%s_jmx_pass" % config_key, None)
if server is not None:
connections.append(server)
users.append(user)
passwords.append(passw)
# We load the configuration according to the current schema
def load_conf(index=1):
instance = agentConfig.get("%s_jmx_instance_%s" % (config_key, index), None)
if instance:
if '@' in instance:
instance = instance.split('@')
auth = "@".join(instance[0:-1]).split(':')
users.append(auth[0])
passwords.append(auth[1])
connections.append(instance[-1])
else:
users.append(None)
passwords.append(None)
connections.append(instance)
load_conf(index+1)
load_conf()
return (connections, users, passwords)
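
To make the two schemas concrete, a hedged example of legacy datadog.conf values and what _load_old_config would return for them (all values are illustrative):

# Illustrative legacy settings for config_key = 'tomcat':
agentConfig = {
    'tomcat_jmx_server': 'localhost:8090',                  # oldest schema
    'tomcat_jmx_user': 'admin',
    'tomcat_jmx_pass': 'secret',
    'tomcat_jmx_instance_1': 'jmx:pw@localhost:7199:prod',  # numbered schema
}
# _load_old_config(agentConfig, 'tomcat') would then return:
# (['localhost:8090', 'localhost:7199:prod'],
#  ['admin', 'jmx'],
#  ['secret', 'pw'])
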
def parse_jmx_agent_config(agentConfig, config_key, init_config=None):
""" Converts the old style config to the checks.d style"""
(connections, users, passwords) = _load_old_config(agentConfig, config_key)
# If there is no old configuration, don't try to run these
# integrations.
if not (connections and users and passwords):
return None
config = {}
instances = []
for i in range(len(connections)):
try:
connect = connections[i].split(':')
instance = {
'host':connect[0],
'port':int(connect[1]),
'user':users[i],
'password':passwords[i]
}
if len(connect) == 3:
instance['name'] = connect[2]
instances.append(instance)
except Exception, e:
log.error("Cannot migrate JMX instance %s" % config_key)
config['instances'] = instances
if init_config is not None:
config['init_config'] = init_config
else:
config['init_config'] = {}
return config
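
Continuing that example, the checks.d-style dict built here for those two instances would look roughly like this (a sketch; instance order follows the connections list):

# parse_jmx_agent_config(agentConfig, 'tomcat', init_config=TOMCAT_INIT_CONFIG)
# would yield approximately:
config = {
    'init_config': TOMCAT_INIT_CONFIG,
    'instances': [
        {'host': 'localhost', 'port': 8090,
         'user': 'admin', 'password': 'secret'},
        {'host': 'localhost', 'port': 7199,
         'user': 'jmx', 'password': 'pw', 'name': 'prod'},
    ],
}
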
def _write_conf(check_name, config, confd_dir):
if config is None:
log.debug("No config for check: %s" % check_name)
raise NoConfigToMigrateException()
try:
yaml_config = dump_to_yaml(config, Dumper=Dumper, default_flow_style=False)
except Exception, e:
log.exception("Couldn't create yaml from config: %s" % config)
return
file_name = "%s.yaml" % check_name
full_path = os.path.join(confd_dir, file_name)
if os.path.exists(full_path):
log.debug("Config already exists for check: %s" % full_path)
return
try:
f = open(full_path, 'w')
f.write(yaml_config)
log.info("Successfully wrote %s" % full_path)
except Exception, e:
log.exception("Cannot write config file %s" % full_path)
CHECKS_TO_MIGRATE = {
# A dictionary of check name, migration function
'cassandra' : migrate_cassandra,
'tomcat': migrate_tomcat,
'solr': migrate_solr,
'activemq': migrate_activemq,
'jmx': migrate_java,
}
TO_COMMENT = [
'java_',
'cassandra_',
'tomcat_',
'solr_',
'activemq_'
]
def _comment_old_config(datadog_conf_path):
"""Tries to comment lines in datadog.conf that shouldn't be used anymore"""
f = open(datadog_conf_path, "r+")
config_lines = map(string.strip, f.readlines())
new_lines = []
for line in config_lines:
should_comment = False
for key in TO_COMMENT:
if line.startswith(key):
should_comment = True
break
if should_comment:
new_lines.append("# %s" % line)
else:
new_lines.append(line)
f.seek(0)
f.write("\n".join(new_lines))
f.truncate()
f.close()
def migrate_old_style_configuration(agentConfig, confd_dir, datadog_conf_path):
"""This will try to migrate some integration configurations configured in datadog._comment_old_conf
to the checks.d format
"""
log.info("Running migration script")
should_comment_datadog_conf = False
for check_name, migrate_fct in CHECKS_TO_MIGRATE.iteritems():
log.debug("Migrating %s integration" % check_name)
try:
_write_conf(check_name, migrate_fct(agentConfig), confd_dir)
should_comment_datadog_conf = True
except NoConfigToMigrateException:
pass
except Exception, e:
log.exception("Error while migrating %s" % check_name)
if should_comment_datadog_conf:
try:
_comment_old_config(datadog_conf_path)
except Exception, e:
log.exception("Error while trying to comment deprecated lines in datadog.conf")

View File

@@ -1,280 +0,0 @@
##############################################################################
##
## minjson.py implements JSON reading and writing in python.
## Copyright (c) 2005 Jim Washington and Contributors.
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
##############################################################################
# minjson.py
# use python's parser to read minimal javascript objects.
# str's objects and fixes the text to write javascript.
# Thanks to Patrick Logan for starting the json-py project and making so many
# good test cases.
# Jim Washington 7 Aug 2005.
from re import compile, sub, search, DOTALL
# set to true if transmission size is much more important than speed
# only affects writing, and makes a minimal difference in output size.
alwaysStripWhiteSpace = False
# add to this string if you wish to exclude additional math operators
# from reading.
badOperators = '*'
#################################
# read JSON object #
#################################
slashstarcomment = compile(r'/\*.*?\*/',DOTALL)
doubleslashcomment = compile(r'//.*\n')
def _Read(aString):
"""Use eval in a 'safe' way to turn javascript expression into
a python expression. Allow only True, False, and None in global
__builtins__, and since those map as true, false, null in
javascript, pass those as locals
"""
try:
result = eval(aString,
{"__builtins__":{'True':True,'False':False,'None':None}},
{'null':None,'true':True,'false':False})
except NameError:
raise ReadException, \
"Strings must be quoted. Could not read '%s'." % aString
except SyntaxError:
raise ReadException, \
"Syntax error. Could not read '%s'." % aString
return result
# badOperators is defined at the top of the module
# generate the regexes for math detection
regexes = {}
for operator in badOperators:
if operator in '+*':
# '+' and '*' need to be escaped with \ in re
regexes[operator,'numeric operation'] \
= compile(r"\d*\s*\%s|\%s\s*\d*" % (operator, operator))
else:
regexes[operator,'numeric operation'] \
= compile(r"\d*\s*%s|%s\s*\d*" % (operator, operator))
def _getStringState(aSequence):
"""return the list of required quote closures if the end of aString needs them
to close quotes.
"""
state = []
for k in aSequence:
if k in ['"',"'"]:
if state and k == state[-1]:
state.pop()
else:
state.append(k)
return state
def _sanityCheckMath(aString):
"""just need to check that, if there is a math operator in the
client's JSON, it is inside a quoted string. This is mainly to
keep client from successfully sending 'D0S'*9**9**9**9...
Return True if OK, False otherwise
"""
for operator in badOperators:
#first check, is it a possible math operation?
if regexes[(operator,'numeric operation')].search(aString) is not None:
# OK. possible math operation. get the operator's locations
getlocs = regexes[(operator,'numeric operation')].finditer(aString)
locs = [item.span() for item in getlocs]
halfStrLen = len(aString) / 2
#fortunately, this should be rare
for loc in locs:
exprStart = loc[0]
exprEnd = loc[1]
# We only need to know the char is within open quote
# status.
if exprStart <= halfStrLen:
teststr = aString[:exprStart]
else:
teststr = list(aString[exprEnd+1:])
teststr.reverse()
if not _getStringState(teststr):
return False
return True
def safeRead(aString):
"""turn the js into happier python and check for bad operations
before sending it to the interpreter
"""
# get rid of trailing null. Konqueror appends this, and the python
# interpreter balks when it is there.
CHR0 = chr(0)
while aString.endswith(CHR0):
aString = aString[:-1]
# strip leading and trailing whitespace
aString = aString.strip()
# zap /* ... */ comments
aString = slashstarcomment.sub('',aString)
# zap // comments
aString = doubleslashcomment.sub('',aString)
# here, we only check for the * operator as a DOS problem by default;
# additional operators may be excluded by editing badOperators
# at the top of the module
if _sanityCheckMath(aString):
return _Read(aString)
else:
raise ReadException, 'Unacceptable JSON expression: %s' % aString
read = safeRead
#################################
# write object as JSON #
#################################
#alwaysStripWhiteSpace is defined at the top of the module
tfnTuple = (('True','true'),('False','false'),('None','null'),)
def _replaceTrueFalseNone(aString):
"""replace True, False, and None with javascript counterparts"""
for k in tfnTuple:
if k[0] in aString:
aString = aString.replace(k[0],k[1])
return aString
def _handleCode(subStr,stripWhiteSpace):
"""replace True, False, and None with javascript counterparts if
appropriate, remove unicode u's, fix long L's, make tuples
lists, and strip white space if requested
"""
if 'e' in subStr:
#True, False, and None have 'e' in them. :)
subStr = (_replaceTrueFalseNone(subStr))
if stripWhiteSpace:
# re.sub might do a better job, but takes longer.
# Spaces are the majority of the whitespace, anyway...
subStr = subStr.replace(' ','')
if subStr[-1] in "uU":
#remove unicode u's
subStr = subStr[:-1]
if "L" in subStr:
#remove Ls from long ints
subStr = subStr.replace("L",'')
#do tuples as lists
if "(" in subStr:
subStr = subStr.replace("(",'[')
if ")" in subStr:
subStr = subStr.replace(")",']')
return subStr
# re for a double-quoted string that has a single-quote in it
# but no double-quotes and python punctuation after:
redoublequotedstring = compile(r'"[^"]*\'[^"]*"[,\]\}:\)]')
escapedSingleQuote = r"\'"
escapedDoubleQuote = r'\"'
def doQuotesSwapping(aString):
"""rewrite doublequoted strings with single quotes as singlequoted strings with
escaped single quotes"""
s = []
foundlocs = redoublequotedstring.finditer(aString)
prevend = 0
for loc in foundlocs:
start,end = loc.span()
s.append(aString[prevend:start])
tempstr = aString[start:end]
endchar = tempstr[-1]
ts1 = tempstr[1:-2]
ts1 = ts1.replace("'",escapedSingleQuote)
ts1 = "'%s'%s" % (ts1,endchar)
s.append(ts1)
prevend = end
s.append(aString[prevend:])
return ''.join(s)
def _pyexpr2jsexpr(aString, stripWhiteSpace):
"""Take advantage of python's formatting of string representations of
objects. Python always uses "'" to delimit strings. Except it doesn't when
there is ' in the string. Fix that, then, if we split
on that delimiter, we have a list that alternates non-string text with
string text. Since string text is already properly escaped, we
only need to replace True, False, and None in non-string text and
remove any unicode 'u's preceding string values.
if stripWhiteSpace is True, remove spaces, etc from the non-string
text.
"""
inSingleQuote = False
inDoubleQuote = False
#python will quote with " when there is a ' in the string,
#so fix that first
if redoublequotedstring.search(aString):
aString = doQuotesSwapping(aString)
marker = None
if escapedSingleQuote in aString:
#replace escaped single quotes with a marker
marker = markerBase = '|'
markerCount = 1
while marker in aString:
#if the marker is already there, make it different
markerCount += 1
marker = markerBase * markerCount
aString = aString.replace(escapedSingleQuote,marker)
#escape double-quotes
aString = aString.replace('"',escapedDoubleQuote)
#split the string on the real single-quotes
splitStr = aString.split("'")
outList = []
alt = True
for subStr in splitStr:
#if alt is True, non-string; do replacements
if alt:
subStr = _handleCode(subStr,stripWhiteSpace)
outList.append(subStr)
alt = not alt
result = '"'.join(outList)
if marker:
#put the escaped single-quotes back as "'"
result = result.replace(marker,"'")
return result
def write(obj, encoding="utf-8",stripWhiteSpace=alwaysStripWhiteSpace):
"""Represent the object as a string. Do any necessary fix-ups
with pyexpr2jsexpr"""
try:
#not really sure encode does anything here
aString = str(obj).encode(encoding)
except UnicodeEncodeError:
aString = obj.encode(encoding)
if isinstance(obj,basestring):
if '"' in aString:
aString = aString.replace(escapedDoubleQuote,'"')
result = '"%s"' % aString.replace('"',escapedDoubleQuote)
else:
result = '"%s"' % aString
else:
result = _pyexpr2jsexpr(aString,stripWhiteSpace).encode(encoding)
return result
class ReadException(Exception):
pass
class WriteException(Exception):
pass
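
A hedged usage sketch of the module being removed; the last call shows the DOS guard from _sanityCheckMath rejecting unquoted math (Python 2):

import minjson

data = minjson.read('{"host": "web-1", "up": true}')
# -> {'host': 'web-1', 'up': True}
text = minjson.write({'count': 3})
# -> '{"count": 3}'
try:
    minjson.read('9**9**9**9')   # unquoted math operators are refused
except minjson.ReadException:
    pass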

File diff suppressed because it is too large.

View File

@@ -25,27 +25,11 @@ except ImportError:
VALID_HOSTNAME_RFC_1123_PATTERN = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
MAX_HOSTNAME_LEN = 255
# Import json for the agent. Try simplejson first, then the stdlib version and
# if all else fails, use minjson which we bundle with the agent.
def generate_minjson_adapter():
import minjson
class json(object):
@staticmethod
def dumps(data):
return minjson.write(data)
@staticmethod
def loads(data):
return minjson.safeRead(data)
return json
# Import json for the agent. Try simplejson first, then the stdlib version
try:
import simplejson as json
except ImportError:
try:
import json
except ImportError:
json = generate_minjson_adapter()
import json
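
For reference, a hedged sketch of how the RFC 1123 pattern and MAX_HOSTNAME_LEN shown above are typically combined; the helper name is an assumption, not part of this hunk:

def is_valid_hostname(hostname):
    # Hypothetical helper: enforce the 255-character cap, then match the
    # RFC 1123 shape, using the module-level names defined above.
    return (len(hostname) <= MAX_HOSTNAME_LEN and
            VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is not None)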

View File

@@ -26,9 +26,6 @@ api_key: APIKEYHERE
# Change port the Agent is listening to
# listen_port: 17123
# Start a graphite listener on this port
# graphite_listen_port: 17124
# Certificate file.
# ca_certs = datadog-cert.pem

View File

Binary image file (40 KiB before and after)

View File

Binary image file (3.4 KiB before and after)

View File

Binary image file (4.0 KiB before and after)

View File

Binary image file (226 B before and after)

View File

Binary image file (277 B before and after)

View File

Binary image file (2.7 KiB before and after)

View File

Binary image file (4.1 KiB before and after)

View File

@@ -19,10 +19,5 @@ use_ec2_instance_id: no
use_mount: no
# Start a graphite listener on this port
# graphite_listen_port: 17124
nagios_log: /var/log/nagios3/nagios.log
nagios_perf_cfg: /var/log/blah.log
graphite_listen_port: 17126

View File

@@ -15,7 +15,6 @@ class TestConfig(unittest.TestCase):
self.assertEquals(agentConfig["dd_url"], "https://app.datadoghq.com")
self.assertEquals(agentConfig["api_key"], "1234")
self.assertEquals(agentConfig["nagios_log"], "/var/log/nagios3/nagios.log")
self.assertEquals(agentConfig["graphite_listen_port"], 17126)
def testGoodPidFile(self):
"""Verify that the pid file succeeds and fails appropriately"""

View File

@@ -90,11 +90,11 @@ class TestUnitDogStatsd(unittest.TestCase):
nt.assert_equal(third['host'], 'myhost')
def test_tags_gh442(self):
import json
import util
import dogstatsd
from aggregator import api_formatter
json = util.generate_minjson_adapter()
dogstatsd.json = json
serialized = dogstatsd.serialize_metrics([api_formatter("foo", 12, 1, ('tag',), 'host')])
assert '"tags": ["tag"]' in serialized
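
With generate_minjson_adapter gone from util.py, this test can no longer inject the minjson adapter; assuming it falls back to the stdlib module this commit standardizes on, the injection plausibly reduces to:

import json
dogstatsd.json = json   # stdlib json instead of the minjson adapter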