Merge "Add python36 support"
commit 813d850605
@@ -81,7 +81,7 @@ class AgentCheck(util.Dimensions):
             # value in white list.
             dim_key_values = {}
             if metrics.get(metric):
-                dim_key_values = metrics.get(metric).values()[0]
+                dim_key_values = list(metrics.get(metric).values())[0]
             else:
                 # If white list has a "dimensions" section, set the key
                 # value dimension pairs to all the metrics. But the
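The hunk above is the recurring dict-view fix in this commit: Python 3's `dict.values()` returns a view that cannot be indexed, so the py2 idiom `d.values()[0]` raises `TypeError` there. A minimal sketch with hypothetical data:

```python
# Hypothetical whitelist entry shaped like the metrics dict above.
metrics = {'cpu.idle_perc': {'hostname': 'node-1'}}

# py2: values() returns a list, so values()[0] works.
# py3: values() returns an unindexable view; wrapping it in list()
# restores indexing and behaves identically on both interpreters.
dim_key_values = list(metrics.values())[0]
print(dim_key_values)  # {'hostname': 'node-1'}
```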
@@ -15,8 +15,8 @@
 
 import logging
 import socket
-import urllib2
-import urlparse
+
+from six.moves import urllib
 
 # project
 import monasca_agent.collector.checks as checks
@@ -52,7 +52,7 @@ class Apache(checks.AgentCheck):
         if not self.url:
             raise Exception("Missing 'apache_status_url' in Apache config")
 
-        req = urllib2.Request(self.url, None, util.headers(self.agent_config))
+        req = urllib.request.Request(self.url, None, util.headers(self.agent_config))
         apache_user = instance.get('apache_user', None)
         apache_password = instance.get('apache_password', None)
         if apache_user and apache_password:
@@ -61,7 +61,7 @@ class Apache(checks.AgentCheck):
             log.debug("Not using authentication for Apache Web Server")
 
         # Submit a service check for status page availability.
-        parsed_url = urlparse.urlparse(self.url)
+        parsed_url = urllib.parse.urlparse(self.url)
         apache_host = parsed_url.hostname
         apache_port = str(parsed_url.port or 80)
         service_check_name = 'apache.status'
@@ -78,7 +78,7 @@ class Apache(checks.AgentCheck):
                                instance)
 
         try:
-            request = urllib2.urlopen(req)
+            request = urllib.request.urlopen(req)
         except Exception as e:
             self.log.info(
                 "%s is DOWN, error: %s. Connection failed." % (service_check_name, str(e)))
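The urllib2/urlparse replacements in this file, and in most hunks below, follow one mapping: `six.moves.urllib` exposes py2's `urllib2`/`urlparse` and py3's `urllib.request`/`urllib.parse` under a single import. A sketch with an illustrative URL (no network I/O happens here):

```python
from six.moves import urllib

# urllib2.Request   -> urllib.request.Request
# urllib2.urlopen   -> urllib.request.urlopen
# urlparse.urlparse -> urllib.parse.urlparse
req = urllib.request.Request('http://localhost/server-status')  # lazy, no I/O yet
parsed = urllib.parse.urlparse('http://localhost:8080/server-status')
print(parsed.hostname, parsed.port or 80)  # localhost 8080
```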
@@ -13,8 +13,8 @@
 
 import requests
 
-from urlparse import urlparse
-from urlparse import urlunparse
+from six.moves.urllib.parse import urlparse
+from six.moves.urllib.parse import urlunparse
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.collector.checks import utils
@@ -63,7 +63,7 @@ class Ceph(checks.AgentCheck):
             return
         ceph_df = self._ceph_cmd('df detail', 'json')
         metrics = self._get_usage_metrics(ceph_df)
-        for metric, value in metrics.iteritems():
+        for metric, value in metrics.items():
             self.gauge(metric, value, dimensions=self.dimensions)
 
     def _collect_stats_metrics(self):
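The many `iteritems()` → `items()` changes in this file and below (and the matching `iterkeys()`/`itervalues()` swaps) share one rationale, sketched here with made-up metric names: the `iter*` methods are gone in Python 3, where the plain methods already return cheap lazy views.

```python
# Hypothetical metrics dict, mirroring the loops above.
metrics = {'ceph.cluster.total_bytes': 1024, 'ceph.cluster.used_bytes': 512}

# py2: items() copies into a list; iteritems() avoided that copy.
# py3: iteritems() does not exist and items() is already a view,
# so plain items() is the portable spelling.
for metric, value in metrics.items():
    print(metric, value)
```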
@@ -72,7 +72,7 @@ class Ceph(checks.AgentCheck):
         ceph_status = self._ceph_cmd('status', 'json')
         ceph_status_plain = self._ceph_cmd('status')
         metrics = self._get_stats_metrics(ceph_status, ceph_status_plain)
-        for metric, value in metrics.iteritems():
+        for metric, value in metrics.items():
             self.gauge(metric, value, dimensions=self.dimensions)
 
     def _collect_mon_metrics(self):
@@ -80,10 +80,10 @@ class Ceph(checks.AgentCheck):
             return
         ceph_status = self._ceph_cmd('status', 'json')
         mon_metrics_dict = self._get_mon_metrics(ceph_status)
-        for monitor, metrics in mon_metrics_dict.iteritems():
+        for monitor, metrics in mon_metrics_dict.items():
             mon_dimensions = self.dimensions.copy()
             mon_dimensions['monitor'] = monitor
-            for metric, value in metrics.iteritems():
+            for metric, value in metrics.items():
                 self.gauge(metric, value, dimensions=mon_dimensions)
 
     def _collect_osd_metrics(self):
@@ -95,14 +95,14 @@ class Ceph(checks.AgentCheck):
         osd_metrics_dict = self._get_osd_metrics(ceph_osd_df,
                                                  ceph_osd_perf,
                                                  ceph_osd_dump)
-        for osd, metrics in osd_metrics_dict.iteritems():
+        for osd, metrics in osd_metrics_dict.items():
             osd_dimensions = self.dimensions.copy()
             osd_dimensions['osd'] = osd
-            for metric, value in metrics.iteritems():
+            for metric, value in metrics.items():
                 self.gauge(metric, value, dimensions=osd_dimensions)
 
         osd_summary_metrics = self._get_osd_summary_metrics(ceph_osd_df)
-        for metric, value in osd_summary_metrics.iteritems():
+        for metric, value in osd_summary_metrics.items():
             self.gauge(metric, value, dimensions=self.dimensions)
 
     def _collect_pool_metrics(self):
@@ -110,20 +110,20 @@ class Ceph(checks.AgentCheck):
             return
         ceph_df = self._ceph_cmd('df detail', 'json')
         pool_metrics_dict = self._get_pool_metrics(ceph_df)
-        for pool, metrics in pool_metrics_dict.iteritems():
+        for pool, metrics in pool_metrics_dict.items():
             pool_dimensions = self.dimensions.copy()
             pool_dimensions['pool'] = pool
-            for metric, value in metrics.iteritems():
+            for metric, value in metrics.items():
                 self.gauge(metric, value, dimensions=pool_dimensions)
         self.gauge('ceph.pools.count', len(pool_metrics_dict.keys()),
                    dimensions=self.dimensions)
 
         ceph_osd_pool_stats = self._ceph_cmd('osd pool stats', 'json')
         pool_stats_dict = self._get_pool_stats_metrics(ceph_osd_pool_stats)
-        for pool, metrics in pool_stats_dict.iteritems():
+        for pool, metrics in pool_stats_dict.items():
             pool_dimensions = self.dimensions.copy()
             pool_dimensions['pool'] = pool
-            for metric, value in metrics.iteritems():
+            for metric, value in metrics.items():
                 self.gauge(metric, value, dimensions=pool_dimensions)
 
     def _ceph_cmd(self, args, format='plain'):
@@ -526,14 +526,14 @@ class Ceph(checks.AgentCheck):
         pool_metrics = {}
         for pool in ceph_osd_pool_stats:
             pool_name = pool['pool_name']
-            for metric, value in pool['client_io_rate'].iteritems():
+            for metric, value in pool['client_io_rate'].items():
                 if pool_name in pool_metrics:
                     pool_metrics[pool_name].update({
                         'ceph.pool.client.' + metric: value})
                 else:
                     pool_metrics[pool_name] = {
                         'ceph.pool.client.' + metric: value}
-            for metric, value in pool['recovery_rate'].iteritems():
+            for metric, value in pool['recovery_rate'].items():
                 if pool_name in pool_metrics:
                     pool_metrics[pool_name].update({
                         'ceph.pool.recovery.' + metric: value})
@@ -14,7 +14,8 @@
 from datetime import datetime
 import socket
 import ssl
-from urlparse import urlparse
+
+from six.moves.urllib.parse import urlparse
 
 from monasca_agent.collector.checks import AgentCheck
 
@@ -12,7 +12,8 @@
 # under the License.
 
 import json
-import urllib2
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.common.util import headers
@@ -46,10 +47,10 @@ class CouchDb(AgentCheck):
 
         """
         self.log.debug('Fetching Couchdb stats at url: %s' % url)
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
 
         # Do the request, log any errors
-        request = urllib2.urlopen(req)
+        request = urllib.request.urlopen(req)
         response = request.read()
         return json.loads(response)
 
@@ -14,7 +14,8 @@
 import json
 import re
 import sys
-import urllib2
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.collector.checks.utils import add_basic_auth
@@ -67,15 +68,15 @@ class Couchbase(AgentCheck):
 
         """
         self.log.debug('Fetching Couchbase stats at url: %s' % url)
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
         if 'user' in instance and 'password' in instance:
             add_basic_auth(req, instance['user'], instance['password'])
 
         if instance['is_recent_python']:
             timeout = instance.get('timeout', DEFAULT_TIMEOUT)
-            request = urllib2.urlopen(req, timeout=timeout)
+            request = urllib.request.urlopen(req, timeout=timeout)
         else:
-            request = urllib2.urlopen(req)
+            request = urllib.request.urlopen(req)
 
         response = request.read()
         return json.loads(response)
@@ -78,7 +78,7 @@ class Docker(checks.AgentCheck):
     def _report_container_metrics(self, container_dict, add_kubernetes_dimensions, dimensions):
         self._curr_system_cpu, self._cpu_count = self._get_system_cpu_ns()
         system_memory = self._get_total_memory()
-        for container in container_dict.itervalues():
+        for container in container_dict.values():
             try:
                 container_dimensions = dimensions.copy()
                 container_id = container['Id']
@@ -222,7 +222,7 @@
 
     def _find_cgroup_filename_pattern(self, container_id):
         # We try with different cgroups so that it works even if only one is properly working
-        for mountpoint in self._mount_points.itervalues():
+        for mountpoint in self._mount_points.values():
             stat_file_path_lxc = os.path.join(mountpoint, "lxc")
             stat_file_path_docker = os.path.join(mountpoint, "docker")
             stat_file_path_coreos = os.path.join(mountpoint, "system.slice")
@@ -15,8 +15,8 @@ import json
 import socket
 import subprocess
 import sys
-import urllib2
-import urlparse
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.collector.checks.utils import add_basic_auth
@@ -186,7 +186,7 @@ class ElasticSearch(AgentCheck):
 
         # Support URLs that have a path in them from the config, for
         # backwards-compatibility.
-        parsed = urlparse.urlparse(config_url)
+        parsed = urllib.parse.urlparse(config_url)
         if parsed[2] != "":
             config_url = "%s://%s" % (parsed[0], parsed[1])
 
@@ -198,12 +198,12 @@ class ElasticSearch(AgentCheck):
         self._define_params(version)
 
         # Load stats data.
-        url = urlparse.urljoin(config_url, self.STATS_URL)
+        url = urllib.parse.urljoin(config_url, self.STATS_URL)
         stats_data = self._get_data(url, auth)
         self._process_stats_data(config_url, stats_data, auth, dimensions=dimensions)
 
         # Load the health data.
-        url = urlparse.urljoin(config_url, self.HEALTH_URL)
+        url = urllib.parse.urljoin(config_url, self.HEALTH_URL)
         health_data = self._get_data(url, auth)
         self._process_health_data(config_url, health_data, dimensions=dimensions)
 
@@ -282,10 +282,10 @@ class ElasticSearch(AgentCheck):
 
         `auth` is a tuple of (username, password) or None
         """
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
         if auth:
             add_basic_auth(req, *auth)
-        request = urllib2.urlopen(req)
+        request = urllib.request.urlopen(req)
         response = request.read()
         return json.loads(response)
 
@@ -318,7 +318,7 @@ class ElasticSearch(AgentCheck):
         # Fetch interface address from ifconfig or ip addr and check
         # against the primary IP from ES
         try:
-            nodes_url = urlparse.urljoin(config_url, self.NODES_URL)
+            nodes_url = urllib.parse.urljoin(config_url, self.NODES_URL)
             primary_addr = self._get_primary_addr(nodes_url, node, auth)
         except NodeNotFound:
             # Skip any nodes that aren't found
@@ -334,11 +334,11 @@ class ElasticSearch(AgentCheck):
 
         Used in ES < 0.19
         """
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
         # Load basic authentication configuration, if available.
         if auth:
             add_basic_auth(req, *auth)
-        request = urllib2.urlopen(req)
+        request = urllib.request.urlopen(req)
         response = request.read()
         data = json.loads(response)
 
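The `urljoin` calls above move the same way as `urlparse`; a minimal sketch, with an illustrative Elasticsearch-style base URL:

```python
from six.moves import urllib

# urlparse.urljoin (py2) -> urllib.parse.urljoin under six.moves.
base = 'http://localhost:9200/'  # illustrative
print(urllib.parse.urljoin(base, '_cluster/health'))
# -> http://localhost:9200/_cluster/health
```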
@@ -12,7 +12,8 @@
 # under the License.
 
 from collections import defaultdict
-import urllib2
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.common.util import headers
@@ -82,17 +83,17 @@ class HAProxy(AgentCheck):
         """
         # Try to fetch data from the stats URL
 
-        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+        passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
         passman.add_password(None, url, username, password)
-        authhandler = urllib2.HTTPBasicAuthHandler(passman)
-        opener = urllib2.build_opener(authhandler)
-        urllib2.install_opener(opener)
+        authhandler = urllib.request.HTTPBasicAuthHandler(passman)
+        opener = urllib.request.build_opener(authhandler)
+        urllib.request.install_opener(opener)
         url = "%s%s" % (url, STATS_URL)
 
         self.log.debug("HAProxy Fetching haproxy search data from: %s" % url)
 
-        req = urllib2.Request(url, None, headers(self.agent_config))
-        request = urllib2.urlopen(req)
+        req = urllib.request.Request(url, None, headers(self.agent_config))
+        request = urllib.request.urlopen(req)
         response = request.read()
         # Split the data by line
         return response.split('\n')
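For reference, the basic-auth opener pattern after the move, as a standalone sketch; the URL and credentials are placeholders, not values from the check:

```python
from six.moves import urllib

url = 'http://localhost:8080/stats'  # placeholder
passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, url, 'user', 'secret')  # placeholder credentials
authhandler = urllib.request.HTTPBasicAuthHandler(passman)
# install_opener makes every subsequent urlopen() call use this handler.
urllib.request.install_opener(urllib.request.build_opener(authhandler))
```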
@@ -13,7 +13,8 @@
 
 from collections import defaultdict
 import re
-import urllib2
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 
@@ -68,7 +69,7 @@ class KyotoTycoonCheck(AgentCheck):
         if name is not None:
             dimensions.update({'instance': name})
 
-        response = urllib2.urlopen(url)
+        response = urllib.request.urlopen(url)
         body = response.read()
 
         totals = defaultdict(lambda: 0)
@@ -11,7 +11,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import urllib2
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.collector.checks.utils import add_basic_auth
@@ -83,10 +83,10 @@ class Lighttpd(AgentCheck):
 
         dimensions = self._set_dimensions(None, instance)
         self.log.debug("Connecting to %s" % url)
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
         if 'user' in instance and 'password' in instance:
             add_basic_auth(req, instance['user'], instance['password'])
-        request = urllib2.urlopen(req)
+        request = urllib.request.urlopen(req)
         headers_resp = request.info().headers
         server_version = self._get_server_version(headers_resp)
         response = request.read()
@@ -64,7 +64,7 @@ class LXC(checks.AgentCheck):
             return
         metrics = self._get_cpu_metrics(container_name)
         cpu_dimensions = self._get_dimensions(container_name)
-        for metric, value in metrics.iteritems():
+        for metric, value in metrics.items():
             self.gauge(metric, value, dimensions=cpu_dimensions)
 
     def _collect_mem_metrics(self, container_name):
@@ -72,7 +72,7 @@ class LXC(checks.AgentCheck):
             return
         metrics = self._get_mem_metrics(container_name)
         mem_dimensions = self._get_dimensions(container_name)
-        for metric, value in metrics.iteritems():
+        for metric, value in metrics.items():
             self.gauge(metric, value, dimensions=mem_dimensions)
 
     def _collect_swap_metrics(self, container_name):
@@ -81,17 +81,17 @@ class LXC(checks.AgentCheck):
         metrics = self._get_swap_metrics(container_name)
         if metrics:
             swap_dimensions = self._get_dimensions(container_name)
-            for metric, value in metrics.iteritems():
+            for metric, value in metrics.items():
                 self.gauge(metric, value, dimensions=swap_dimensions)
 
     def _collect_net_metrics(self, container_name):
         if not self.instance.get('net', True):
             return
         metrics = self._get_net_metrics(container_name)
-        for iface_name, iface_metrics in metrics.iteritems():
+        for iface_name, iface_metrics in metrics.items():
             net_dimensions = self._get_dimensions(container_name,
                                                   {'iface': iface_name})
-            for metric, value in iface_metrics.iteritems():
+            for metric, value in iface_metrics.items():
                 self.gauge(metric, value, dimensions=net_dimensions)
 
     def _collect_disk_metrics(self, container_name):
@@ -99,7 +99,7 @@ class LXC(checks.AgentCheck):
             return
         metrics = self._get_disk_metrics(container_name)
         disk_dimensions = self._get_dimensions(container_name)
-        for metric, value in metrics.iteritems():
+        for metric, value in metrics.items():
             self.gauge(metric, value, dimensions=disk_dimensions)
 
     def _get_cpu_metrics(self, container_name):
@@ -12,7 +12,8 @@
 # under the License.
 
 import re
-import urllib2
+
+from six.moves import urllib
 
 from monasca_agent.collector.checks import AgentCheck
 from monasca_agent.collector.checks.utils import add_basic_auth
@@ -46,10 +47,10 @@ class Nginx(AgentCheck):
 
     def _get_data(self, instance):
         url = instance.get('nginx_status_url')
-        req = urllib2.Request(url, None, headers(self.agent_config))
+        req = urllib.request.Request(url, None, headers(self.agent_config))
         if 'user' in instance and 'password' in instance:
             add_basic_auth(req, instance['user'], instance['password'])
-        request = urllib2.urlopen(req)
+        request = urllib.request.urlopen(req)
         return request.read()
 
     def _get_metrics(self, response, dimensions):
@@ -390,7 +390,7 @@ class OvsCheck(AgentCheck):
         #
         # Grab the first metric name and see if it's in the cache.
         #
-        metric_name = self._get_metrics_map(measure).keys()[0]
+        metric_name = list(self._get_metrics_map(measure).keys())[0]
 
         for ifx in ctr_cache:
             if metric_name not in ctr_cache[ifx]:
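Same view-object issue as `values()[0]` earlier: in Python 3, `keys()` returns an unindexable view. A sketch with a made-up metrics map:

```python
metrics_map = {'ovs.vrouter.in_bytes_sec': 'rx_bytes'}  # hypothetical

# keys()[0] raises TypeError on py3; list() restores indexing on both.
metric_name = list(metrics_map.keys())[0]
print(metric_name)
```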
@@ -13,8 +13,8 @@
 
 import json
 import re
-import urllib2
-import urlparse
+
+from six.moves import urllib
 
 import monasca_agent.collector.checks as checks
 
@@ -161,11 +161,11 @@ class RabbitMQ(checks.AgentCheck):
                 "{0} / {0}_regexes parameter must be a list".format(object_type))
 
         # setup urllib2 for Basic Auth
-        auth_handler = urllib2.HTTPBasicAuthHandler()
+        auth_handler = urllib.request.HTTPBasicAuthHandler()
         auth_handler.add_password(
             realm='RabbitMQ Management', uri=base_url, user=username, passwd=password)
-        opener = urllib2.build_opener(auth_handler)
-        urllib2.install_opener(opener)
+        opener = urllib.request.build_opener(auth_handler)
+        urllib.request.install_opener(opener)
 
         return base_url, max_detailed, specified
 
@@ -191,8 +191,8 @@ class RabbitMQ(checks.AgentCheck):
     @staticmethod
     def _get_data(url):
         try:
-            data = json.loads(urllib2.urlopen(url).read())
-        except urllib2.URLError as e:
+            data = json.loads(urllib.request.urlopen(url).read())
+        except urllib.error.URLError as e:
             raise Exception('Cannot open RabbitMQ API url: %s %s' % (url, str(e)))
         except ValueError as e:
             raise Exception('Cannot parse JSON response from API url: %s %s' % (url, str(e)))
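The exception class moves too: `urllib2.URLError` lives at `urllib.error` under six.moves. The sketch below deliberately targets an unreachable port (an assumption of this example, not something from the check) to trigger it:

```python
from six.moves import urllib

try:
    urllib.request.urlopen('http://localhost:1/')  # nothing listens on port 1
except urllib.error.URLError as e:
    print('Cannot open url: %s' % e)
```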
@@ -207,7 +207,7 @@ class RabbitMQ(checks.AgentCheck):
         filters: explicit or regexes filters of specified queues or nodes
                  (specified in the yaml file)
         """
-        data = self._get_data(urlparse.urljoin(base_url, object_type))
+        data = self._get_data(urllib.parse.urljoin(base_url, object_type))
         # Make a copy of this list as we will remove items from it at each iteration
         explicit_filters = list(filters['explicit'])
         regex_filters = filters['regexes']
@@ -291,7 +291,7 @@ class RabbitMQ(checks.AgentCheck):
         dimensions_list = DIMENSIONS_MAP[object_type].copy()
         dimensions = self._set_dimensions({'component': 'rabbitmq', 'service': 'rabbitmq'},
                                           instance)
-        for d in dimensions_list.iterkeys():
+        for d in dimensions_list:
             dim = data.get(d, None)
             if dim not in [None, ""]:
                 dimensions[dimensions_list[d]] = dim
@@ -101,7 +101,7 @@ class Redis(AgentCheck):
 
         """
         try:
-            for item in string.split(','):
+            for item in ','.split():
                 k, v = item.rsplit('=', 1)
                 if k == key:
                     try:
@@ -18,7 +18,8 @@ import itertools
 import re
 import socket
 import time
-import xmlrpclib
+
+from six.moves import xmlrpc_client
 
 # 3p
 import supervisor.xmlrpc
@@ -95,7 +96,7 @@ class Supervisord(checks.AgentCheck):
         # Gather all process information
         try:
             processes = supe.getAllProcessInfo()
-        except xmlrpclib.Fault as error:
+        except xmlrpc_client.Fault as error:
             raise Exception(
                 'An error occurred while reading process information: %s %s'
                 % (error.faultCode, error.faultString)
@@ -118,7 +119,7 @@ class Supervisord(checks.AgentCheck):
                            dimensions=dimensions, value_meta=server_details)
             raise Exception(msg)
 
-        except xmlrpclib.ProtocolError as error:
+        except xmlrpc_client.ProtocolError as error:
             if error.errcode == 401:  # authorization error
                 msg = 'Username or password to %s are incorrect.' % server_name
             else:
@@ -197,14 +198,14 @@ class Supervisord(checks.AgentCheck):
         if sock is not None:
             host = instance.get('host', DEFAULT_SOCKET_IP)
             transport = supervisor.xmlrpc.SupervisorTransport(None, None, sock)
-            server = xmlrpclib.ServerProxy(host, transport=transport)
+            server = xmlrpc_client.ServerProxy(host, transport=transport)
         else:
             host = instance.get('host', DEFAULT_HOST)
             port = instance.get('port', DEFAULT_PORT)
             user = instance.get('user')
             password = instance.get('pass')
             auth = '%s:%s@' % (user, password) if user and password else ''
-            server = xmlrpclib.Server('http://%s%s:%s/RPC2' % (auth, host, port))
+            server = xmlrpc_client.Server('http://%s%s:%s/RPC2' % (auth, host, port))
         return server.supervisor
 
     @staticmethod
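`six.moves.xmlrpc_client` resolves to `xmlrpclib` on py2 and `xmlrpc.client` on py3. A standalone sketch with a placeholder supervisord endpoint (proxy construction is lazy, so no connection happens until a call):

```python
from six.moves import xmlrpc_client

server = xmlrpc_client.ServerProxy('http://localhost:9001/RPC2')  # placeholder
try:
    print(server.supervisor.getAllProcessInfo())
except (xmlrpc_client.Fault, xmlrpc_client.ProtocolError, IOError) as error:
    # IOError covers connection-refused on both interpreters.
    print('supervisord not reachable: %s' % error)
```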
@@ -61,8 +61,8 @@ class WMICheck(AgentCheck):
         # If there are filters, we need one query per filter.
         if filters:
             for f in filters:
-                prop = f.keys()[0]
-                search = f.values()[0]
+                prop = list(f.keys())[0]
+                search = list(f.values())[0]
                 if SEARCH_WILDCARD in search:
                     search = search.replace(SEARCH_WILDCARD, '%')
                 wql = "SELECT * FROM %s WHERE %s LIKE '%s'" % (wmi_class, prop, search)
@@ -37,9 +37,10 @@ Tested with Zookeeper versions 3.0.0 to 3.4.10
 
 import re
 import socket
-from StringIO import StringIO
 import struct
 
+from six import StringIO
+
 from monasca_agent.collector.checks import AgentCheck
 
 
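`six.StringIO` points at `StringIO.StringIO` on py2 and `io.StringIO` on py3 (the later `cStringIO` hunk uses `six.moves.cStringIO` the same way). A minimal sketch:

```python
from six import StringIO

buf = StringIO()
buf.write(u'ruok\n')  # text, not bytes, on both interpreters
print(buf.getvalue())
```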
@@ -118,7 +118,7 @@ class VsphereInspector(virt_inspector.Inspector):
             vnic_id_to_stats_map = self._ops.query_vm_device_stats(
                 vm_moid, net_counter_id, duration)
             vnic_stats[net_counter] = vnic_id_to_stats_map
-            vnic_ids.update(vnic_id_to_stats_map.iterkeys())
+            vnic_ids.update(vnic_id_to_stats_map.keys())
 
         # Stats provided from vSphere are in KB/s, converting it to B/s.
         for vnic_id in vnic_ids:
@@ -169,7 +169,7 @@ class VsphereInspector(virt_inspector.Inspector):
             disk_id_to_stat_map = self._ops.query_vm_device_stats(
                 vm_moid, disk_counter_id, duration)
             disk_stats[disk_counter] = disk_id_to_stat_map
-            disk_ids.update(disk_id_to_stat_map.iterkeys())
+            disk_ids.update(disk_id_to_stat_map.keys())
 
         for disk_id in disk_ids:
 
@@ -686,7 +686,7 @@ def chunks(iterable, chunk_size):
     count = 0
     try:
         for _ in range(chunk_size):
-            chunk[count] = iterable.next()
+            chunk[count] = next(iterable)
             count += 1
         yield chunk[:count]
     except StopIteration:
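Iterators lost the `.next()` method in Python 3, where it was renamed `__next__()`; the `next()` builtin, available since Python 2.6, dispatches correctly on both:

```python
gen = (n * n for n in range(3))

# gen.next() is py2-only; next(gen) calls next()/__next__() as appropriate.
print(next(gen))  # 0
print(next(gen))  # 1
```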
@@ -11,7 +11,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-import ConfigParser
 import contextlib
 import logging
 import os
@@ -23,6 +22,7 @@ from oslo_config import cfg
 
 from mock import patch
 from mock.mock import MagicMock
+from six.moves import configparser
 
 from monasca_setup.detection import utils
 from monasca_setup.detection.plugins.ovs import Ovs
@@ -80,7 +80,7 @@ class TestOvs(unittest.TestCase):
         ovs_obj.conf = MagicMock()
         ovs_obj.conf.default_config_files = ovs_obj.neutron_conf
         ovs_obj.conf.default_config_dirs = os.path.abspath(os.path.join(ovs_obj.neutron_conf, os.pardir))
-        with patch.object(ConfigParser, 'SafeConfigParser') as mock_config_parser:
+        with patch.object(configparser, 'SafeConfigParser') as mock_config_parser:
             config_parser_obj = mock_config_parser.return_value
             with contextlib.nested(
                     patch.object(cfg, 'CONF'),
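`six.moves.configparser` maps to the py2 `ConfigParser` module and the py3 `configparser` module; a standalone sketch with a made-up section name (`SafeConfigParser` matches what the patched test still uses, though py3 deprecates it in favour of `ConfigParser`):

```python
from six.moves import configparser

parser = configparser.SafeConfigParser()  # deprecated alias on py3; fine for 2.7/3.6
parser.add_section('ovs')
parser.set('ovs', 'enabled', 'true')
print(parser.get('ovs', 'enabled'))  # true
```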
@@ -13,6 +13,8 @@
 
 import unittest
 
+import six
+
 import monasca_agent.common.aggregator as aggregator
 import monasca_agent.common.metrics as metrics_pkg
@@ -64,7 +66,7 @@ class TestMetricsAggregator(unittest.TestCase):
                            value_meta=value_meta)
 
     def testValidMetricUnicodeDimensionValue(self):
-        dimensions = {unichr(2440): 'B', 'B': 'C', 'D': 'E'}
+        dimensions = {six.unichr(2440): 'B', 'B': 'C', 'D': 'E'}
         value_meta = {"This is a test": "test, test, test"}
         self.submit_metric("Foo",
                            5,
@@ -72,7 +74,7 @@ class TestMetricsAggregator(unittest.TestCase):
                            value_meta=value_meta)
 
     def testValidMetricUnicodeDimensionKey(self):
-        dimensions = {'A': 'B', 'B': unichr(920), 'D': 'E'}
+        dimensions = {'A': 'B', 'B': six.unichr(920), 'D': 'E'}
         value_meta = {"This is a test": "test, test, test"}
         self.submit_metric("Foo",
                            5,
@@ -82,7 +84,7 @@ class TestMetricsAggregator(unittest.TestCase):
     def testValidMetricUnicodeMetricName(self):
         dimensions = {'A': 'B', 'B': 'C', 'D': 'E'}
         value_meta = {"This is a test": "test, test, test"}
-        self.submit_metric(unichr(6021),
+        self.submit_metric(six.unichr(6021),
                            5,
                            dimensions=dimensions,
                            value_meta=value_meta)
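`unichr()` does not exist on Python 3, where `chr()` already returns text; `six.unichr` picks the right builtin on each interpreter:

```python
import six

# py2: six.unichr is the built-in unichr; py3: plain chr. Both return a
# one-character text (unicode) string for a non-ASCII code point.
print(repr(six.unichr(2440)))
```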
@@ -161,7 +163,6 @@ class TestMetricsAggregator(unittest.TestCase):
 
     def testInvalidDimensionKeyLength(self):
         dimensions = {'A'*256: 'B', 'B': 'C', 'D': 'E'}
-        print dimensions
         value_meta = {"This is a test": "test, test, test"}
         self.submit_metric("Foo",
                            5,
@@ -14,6 +14,8 @@
 Performance tests for the agent/dogstatsd metrics aggregator.
 """
 
+from six.moves import range
+
 
 from monasca_agent.common.aggregator import MetricsAggregator
 
@@ -27,9 +29,9 @@ class TestAggregatorPerf(object):
     def test_dogstatsd_aggregation_perf(self):
         ma = MetricsAggregator('my.host')
 
-        for _ in xrange(self.FLUSH_COUNT):
-            for i in xrange(self.LOOPS_PER_FLUSH):
-                for j in xrange(self.METRIC_COUNT):
+        for _ in range(self.FLUSH_COUNT):
+            for i in range(self.LOOPS_PER_FLUSH):
+                for j in range(self.METRIC_COUNT):
 
                     # metrics
                     ma.submit_packets('counter.%s:%s|c' % (j, i))
@@ -54,10 +56,10 @@ class TestAggregatorPerf(object):
     def test_checksd_aggregation_perf(self):
         ma = MetricsAggregator('my.host')
 
-        for _ in xrange(self.FLUSH_COUNT):
-            for i in xrange(self.LOOPS_PER_FLUSH):
+        for _ in range(self.FLUSH_COUNT):
+            for i in range(self.LOOPS_PER_FLUSH):
                 # Counters
-                for j in xrange(self.METRIC_COUNT):
+                for j in range(self.METRIC_COUNT):
                     ma.increment('counter.%s' % j, i)
                     ma.gauge('gauge.%s' % j, i)
                     ma.histogram('histogram.%s' % j, i)
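`xrange()` is gone in Python 3; `from six.moves import range` gives the lazy range on both interpreters (the builtin on py3, `xrange` on py2), so large loop counts like these never materialise a full list on py2:

```python
from six.moves import range  # xrange on py2, builtin range on py3

total = 0
for i in range(5):
    total += i
print(total)  # 10
```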
@@ -130,7 +130,7 @@ class TestAggregator(unittest.TestCase):
         self.aggr.increment(
             'test-counter', 1, dimensions={'a': 'avalue', 'b': 'bvalue', 'b': 'bvalue'})
         self.assertEqual(len(self.aggr.metrics), 1, self.aggr.metrics)
-        metric = self.aggr.metrics.values()[0]
+        metric = list(self.aggr.metrics.values())[0]
         self.assertEqual(metric.value, 2)
 
 if __name__ == '__main__':
@@ -15,11 +15,12 @@ import logging
 logging.basicConfig()
 import subprocess
 import time
-import urllib2
-import urlparse
 from nose.plugins.skip import SkipTest
 from tests.common import load_check
 
+from six.moves import urllib
+
+
 PORT = 9200
 MAX_WAIT = 150
 
@@ -30,8 +31,8 @@ class TestElastic(unittest.TestCase):
         loop = 0
         while True:
             try:
-                req = urllib2.Request(url, None)
-                request = urllib2.urlopen(req)
+                req = urllib.request.Request(url, None)
+                request = urllib.request.urlopen(req)
                 break
             except Exception:
                 time.sleep(0.5)
@@ -13,7 +13,6 @@
 import unittest
 import subprocess
 import time
-import urllib2
 import tempfile
 import os
 import logging
@@ -22,6 +21,9 @@ from tests.common import load_check, kill_subprocess
 from nose.plugins.attrib import attr
 from nose.plugins.skip import SkipTest
 
+from six.moves import urllib
+
+
 logging.basicConfig()
 
 MAX_WAIT = 30
@@ -36,14 +38,14 @@ class HaproxyTestCase(unittest.TestCase):
         while True:
             try:
                 STATS_URL = ";csv;norefresh"
-                passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+                passman = urllib.request.HTTPPasswordMgrWithDefaultRealm()
                 passman.add_password(None, url, "datadog", "isdevops")
-                authhandler = urllib2.HTTPBasicAuthHandler(passman)
-                opener = urllib2.build_opener(authhandler)
-                urllib2.install_opener(opener)
+                authhandler = urllib.request.HTTPBasicAuthHandler(passman)
+                opener = urllib.request.build_opener(authhandler)
+                urllib.request.install_opener(opener)
                 url = "%s%s" % (url, STATS_URL)
-                req = urllib2.Request(url)
-                request = urllib2.urlopen(req)
+                req = urllib.request.Request(url)
+                request = urllib.request.urlopen(req)
                 break
             except Exception:
                 time.sleep(0.5)
@@ -12,10 +12,11 @@
 
 import logging
 import unittest
-from cStringIO import StringIO
 
 from collector.checks import LaconicFilter
 
+from six.moves import cStringIO as StringIO
+
 
 class TestLaconic(unittest.TestCase):
 
@@ -15,6 +15,7 @@ import time
 import unittest
 
 import nose.tools as nt
+from six.moves import range
 
 from monasca_agent.common.aggregator import MetricsAggregator
 
@@ -255,7 +256,7 @@ class TestUnitMonascaStatsd(unittest.TestCase):
         percentiles = range(100)
         random.shuffle(percentiles)  # in place
         for i in percentiles:
-            for j in xrange(20):
+            for j in range(20):
                 for type_ in ['h', 'ms']:
                     m = 'my.p:%s|%s' % (i, type_)
                     stats.submit_packets(m)
@@ -362,7 +363,7 @@ class TestUnitMonascaStatsd(unittest.TestCase):
         cnt = 100000
         for run in [1, 2]:
             stats = MetricsAggregator('myhost')
-            for i in xrange(cnt):
+            for i in range(cnt):
                 if run == 2:
                     stats.submit_packets('test.counter:1|c|@0.5')
                     stats.submit_packets('test.hist:1|ms|@0.5')
@@ -19,6 +19,7 @@ import binascii
 import re
 import shutil
 from nose.plugins.skip import SkipTest
+from six.moves import range
 
 class TestPostfix(unittest.TestCase):
     #
@@ -74,7 +75,7 @@ class TestPostfix(unittest.TestCase):
         """ % (self.queue_root))
 
         # stuff 10K msgs in random queues
-        for _ in xrange(1, 10000):
+        for _ in range(1, 10000):
             shuffle(self.queues)
             rand_queue = sample(self.queues, 1)[0]
             queue_file = binascii.b2a_hex(os.urandom(7))
@@ -16,6 +16,8 @@ import platform
 import re
 import unittest
 
+from six.moves import reduce
+
 logging.basicConfig(level=logging.DEBUG)
 logger = logging.getLogger(__file__)
 
@@ -138,8 +140,8 @@ none 985964 1 985963 1% /lib/init/rw
         res = disk.check()
         # Assert we have disk & inode stats
         assert len(res) == 2
-        assert res.keys()[0]
-        assert res.keys()[1]
+        assert list(res.keys())[0]
+        assert list(res.keys())[1]
 
     def testMemory(self):
         global logger
@@ -58,7 +58,7 @@ class TestTail(unittest.TestCase):
 
             # Consume from the tail
             gen = tail.tail(line_by_line=False, move_end=True)
-            gen.next()
+            next(gen)
 
             # Verify that the tail consumed the data I wrote
             self.assertEqual(tail._size, len(init_string))
@@ -73,7 +73,7 @@ class TestTail(unittest.TestCase):
                 self.log_file.flush()
 
                 # Verify that the tail recognized the logrotation
-                gen.next()
+                next(gen)
                 self.assertEqual(self.last_line, new_string[:-1], self.last_line)
             except OSError:
                 "logrotate is not present"
@@ -13,6 +13,8 @@
 import unittest
 from datetime import timedelta, datetime
 
+from six.moves import range
+
 from monasca_agent.forwarder.transaction import Transaction, TransactionManager
 from monasca_agent.forwarder.daemon import MAX_QUEUE_SIZE, THROTTLING_DELAY
 
@@ -50,7 +52,7 @@ class TestTransaction(unittest.TestCase):
 
         step = 10
         oneTrSize = (MAX_QUEUE_SIZE / step) - 1
-        for i in xrange(step):
+        for i in range(step):
             tr = memTransaction(oneTrSize, trManager)
             trManager.append(tr)
 
@@ -94,7 +96,7 @@ class TestTransaction(unittest.TestCase):
 
         # Add 3 transactions, make sure no memory limit is in the way
        oneTrSize = MAX_QUEUE_SIZE / 10
-        for i in xrange(3):
+        for i in range(3):
             tr = memTransaction(oneTrSize, trManager)
             trManager.append(tr)
 
@@ -11,7 +11,8 @@
 # under the License.
 
 import unittest
-from StringIO import StringIO
+
+from six import StringIO
 
 from tests.common import get_check
 