Replace six with python3 code style

Co-Authored-By: Matthias Runge <mrunge@redhat.com>

Change-Id: I85a4d79396874670f1b36cb91cfba5da812c2839
kuangcx 2020-11-02 14:37:26 +08:00 committed by Matthias Runge
parent 71f3d092ac
commit c0632ae9e0
44 changed files with 88 additions and 172 deletions
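
Note: the hunks below apply one consistent set of six-to-native substitutions. A minimal illustrative sketch of the equivalences assumed throughout (not part of the diff):

    import abc

    # six.text_type / six.binary_type      ->  str / bytes
    assert str(1.5) == '1.5' and isinstance(b'raw', bytes)
    # six.string_types / six.integer_types ->  str / int
    assert isinstance('cpu_util', str) and isinstance(10, int)
    # six.iteritems / six.iterkeys         ->  dict.items() / dict.keys()
    assert list({'a': 1}.items()) == [('a', 1)]

    # @six.add_metaclass(abc.ABCMeta)      ->  class C(..., metaclass=abc.ABCMeta)
    class Base(object, metaclass=abc.ABCMeta):
        pass

    # six.PY2 / six.PY3 branches collapse to the plain Python 3 code path.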

@@ -20,7 +20,6 @@ import sys
 from os_win import exceptions as os_win_exc
 from os_win import utilsfactory
 from oslo_utils import units
-import six

 from ceilometer.compute.pollsters import util
 from ceilometer.compute.virt import inspector as virt_inspector
@@ -37,12 +36,8 @@ def convert_exceptions(exception_map, yields=True):
                         break

             exc_info = sys.exc_info()
-            # NOTE(claudiub): Python 3 raises the exception object given as
-            # the second argument in six.reraise.
-            # The original message will be maintained by passing the
-            # original exception.
-            exc = raised_exception(six.text_type(exc_info[1]))
-            six.reraise(raised_exception, exc, exc_info[2])
+            exc = raised_exception(str(exc_info[1]))
+            raise exc.with_traceback(exc_info[2])

     def decorator(function):
         if yields:
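
Note: a small runnable sketch (hypothetical exception names) of how the removed six.reraise call maps onto the raise ... with_traceback form used above:

    import sys

    class UpstreamError(Exception):
        """Stand-in for the os_win exception (hypothetical)."""

    class TranslatedError(Exception):
        """Stand-in for the inspector-level exception (hypothetical)."""

    def translate():
        try:
            raise UpstreamError("disk not found")
        except UpstreamError:
            exc_info = sys.exc_info()
            # Old: six.reraise(TranslatedError, exc, exc_info[2])
            exc = TranslatedError(str(exc_info[1]))
            raise exc.with_traceback(exc_info[2])

    try:
        translate()
    except TranslatedError as e:
        # The original message and traceback survive the translation.
        assert str(e) == "disk not found"
        assert e.__traceback__ is not None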

@@ -17,7 +17,6 @@
 from lxml import etree
 from oslo_log import log as logging
 from oslo_utils import units
-import six

 try:
     import libvirt
@@ -60,7 +59,7 @@ class LibvirtInspector(virt_inspector.Inspector):
                                  'ex': ex}
             raise virt_inspector.InstanceNotFoundException(msg)
         except Exception as ex:
-            raise virt_inspector.InspectorException(six.text_type(ex))
+            raise virt_inspector.InspectorException(str(ex))

     def _get_domain_not_shut_off_or_raise(self, instance):
         instance_name = util.instance_name(instance)

@@ -17,7 +17,6 @@

 from oslo_config import cfg
 from oslo_utils import units
-import six

 from ceilometer.compute.virt import inspector as virt_inspector
 from ceilometer.compute.virt.vmware import vsphere_operations
@@ -132,7 +131,7 @@ class VsphereInspector(virt_inspector.Inspector):
                                     in vnic_id_to_stats_map.items()
                                     if not k.startswith('vmnic')}
             vnic_stats[net_counter] = vnic_id_to_stats_map
-            vnic_ids.update(six.iterkeys(vnic_id_to_stats_map))
+            vnic_ids.update(vnic_id_to_stats_map.keys())

         # Stats provided from vSphere are in KB/s, converting it to B/s.
         for vnic_id in sorted(vnic_ids):
@@ -165,7 +164,7 @@ class VsphereInspector(virt_inspector.Inspector):
             disk_id_to_stat_map = self._ops.query_vm_device_stats(
                 vm_mobj, disk_counter_id, duration)
             disk_stats[disk_counter] = disk_id_to_stat_map
-            disk_ids.update(six.iterkeys(disk_id_to_stat_map))
+            disk_ids.update(disk_id_to_stat_map.keys())

         for disk_id in disk_ids:

@@ -15,7 +15,6 @@ import os

 from jsonpath_rw_ext import parser
 from oslo_log import log
-import six
 import yaml

 from ceilometer.i18n import _
@@ -71,7 +70,7 @@ class Definition(object):
             if 'plugin' in cfg:
                 plugin_cfg = cfg['plugin']
-                if isinstance(plugin_cfg, six.string_types):
+                if isinstance(plugin_cfg, str):
                     plugin_name = plugin_cfg
                     plugin_params = {}
                 else:
@@ -107,7 +106,7 @@ class Definition(object):
             else:
                 fields = '|'.join('(%s)' % path for path in fields)

-        if isinstance(fields, six.integer_types):
+        if isinstance(fields, int):
             self.getter = fields
         else:
             try:

@@ -18,7 +18,6 @@ from oslo_log import log
 from oslo_utils import fnmatch
 from oslo_utils import timeutils
 import pkg_resources
-import six

 from ceilometer import declarative
 from ceilometer.event import models
@@ -97,7 +96,7 @@ class EventDefinition(object):
             raise declarative.EventDefinitionException(
                 _("Required field %s not specified") % err.args[0], self.cfg)

-        if isinstance(event_type, six.string_types):
+        if isinstance(event_type, str):
             event_type = [event_type]

         for t in event_type:
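
Note: the isinstance(event_type, str) test above is the usual "accept a string or a list of strings" normalization; a short sketch under assumed names:

    def normalize_event_types(event_type):
        # A lone string becomes a one-element list; lists pass through unchanged.
        if isinstance(event_type, str):
            event_type = [event_type]
        return event_type

    assert normalize_event_types('compute.instance.*') == ['compute.instance.*']
    assert normalize_event_types(['a.start', 'a.end']) == ['a.start', 'a.end']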

@@ -13,7 +13,6 @@
 """Model classes for use in the events storage API.
 """
 from oslo_utils import timeutils
-import six


 def serialize_dt(value):
@@ -26,7 +25,7 @@ class Model(object):

     def __init__(self, **kwds):
         self.fields = list(kwds)
-        for k, v in six.iteritems(kwds):
+        for k, v in kwds.items():
             setattr(self, k, v)

     def as_dict(self):
@@ -78,7 +77,7 @@ class Event(Model):
     def __repr__(self):
         trait_list = []
         if self.traits:
-            trait_list = [six.text_type(trait) for trait in self.traits]
+            trait_list = [str(trait) for trait in self.traits]
         return ("<Event: %s, %s, %s, %s>" %
                 (self.message_id, self.event_type, self.generated,
                  " ".join(trait_list)))
@@ -146,6 +145,6 @@ class Trait(Model):
         if trait_type is cls.DATETIME_TYPE:
             return timeutils.normalize_time(timeutils.parse_isotime(value))
         # Cropping the text value to match the TraitText value size
-        if isinstance(value, six.binary_type):
+        if isinstance(value, bytes):
             return value.decode('utf-8')[:255]
-        return six.text_type(value)[:255]
+        return str(value)[:255]
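
Note: the bytes/str branch above relies on Python 3 keeping text and binary data as distinct types; a condensed sketch of the conversion done for trait values:

    def to_text(value, limit=255):
        # bytes are decoded first, then everything is cropped like TraitText.
        if isinstance(value, bytes):
            return value.decode('utf-8')[:limit]
        return str(value)[:limit]

    assert to_text(b'caf\xc3\xa9') == 'café'
    assert to_text(12345) == '12345'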

@@ -17,13 +17,11 @@ import abc

 from oslo_log import log
 from oslo_utils import timeutils
-import six


 LOG = log.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class TraitPluginBase(object):
+class TraitPluginBase(object, metaclass=abc.ABCMeta):
     """Base class for plugins.

     It converts notification fields to Trait values.
@@ -132,7 +130,7 @@ class SplitterTraitPlugin(TraitPluginBase):
                 for match in match_list]

     def _trait_value(self, match):
-        value = six.text_type(match[1])
+        value = str(match[1])
         if self.max_split is not None:
             values = value.split(self.separator, self.max_split)
         else:

@@ -16,11 +16,8 @@

 import abc

-import six

-
-@six.add_metaclass(abc.ABCMeta)
-class Inspector(object):
+class Inspector(object, metaclass=abc.ABCMeta):

     @abc.abstractmethod
     def inspect_generic(self, host, cache, extra_metadata, param):
         """A generic inspect function.

@@ -20,7 +20,6 @@ import copy
 from oslo_log import log
 from pysnmp.entity.rfc3413.oneliner import cmdgen
 from pysnmp.proto import rfc1905
-import six
 from urllib import parse as urlparse

@@ -207,7 +206,7 @@ class SNMPInspector(base.Inspector):
     @classmethod
     def construct_metadata(cls, oid_cache, meta_defs, suffix='', host=None):
         metadata = {}
-        for key, oid_def in six.iteritems(meta_defs):
+        for key, oid_def in meta_defs.items():
             metadata[key] = cls.get_oid_value(oid_cache, oid_def, suffix, host)
         return metadata
@@ -291,7 +290,7 @@ class SNMPInspector(base.Inspector):
         # populate the oid into cache
         self._query_oids(host, [_interface_ip_oid], cache, True)
         ip_addr = ''
-        for k, v in six.iteritems(oid_cache):
+        for k, v in oid_cache.items():
             if k.startswith(_interface_ip_oid) and v == int(suffix[1:]):
                 ip_addr = k.replace(_interface_ip_oid + ".", "")
         metadata.update(ip=ip_addr)
@@ -342,6 +341,6 @@ class SNMPInspector(base.Inspector):
         processed['metric_oid'] = (param['oid'], eval(param['type']))
         processed['post_op'] = param.get('post_op', None)
         processed['metadata'] = {}
-        for k, v in six.iteritems(param.get('metadata', {})):
+        for k, v in param.get('metadata', {}).items():
             processed['metadata'][k] = (v['oid'], eval(v['type']))
         return processed

@@ -19,7 +19,6 @@ import pkg_resources
 from oslo_config import cfg
 from oslo_log import log
 from oslo_utils import netutils
-import six

 from ceilometer import declarative
 from ceilometer.hardware import inspector as insloader
@@ -44,7 +43,7 @@ class MeterDefinition(object):
     def __init__(self, definition_cfg):
         self.cfg = definition_cfg
         for fname, fval in self.cfg.items():
-            if (isinstance(fname, six.string_types) and
+            if (isinstance(fname, str) and
                     (fname in self.required_fields or
                      fname.endswith('_inspector'))):
                 setattr(self, fname, fval)

@@ -28,7 +28,6 @@ import threading
 import time

 from oslo_config import cfg
-import six

 from ceilometer.i18n import _
 from ceilometer.ipmi.platform import exception as nmexcept
@@ -178,8 +177,7 @@ class NodeManager(object):
         with open(file_path, 'rb') as bin_fp:
             data_str = binascii.hexlify(bin_fp.read())
-        if six.PY3:
-            data_str = data_str.decode('ascii')
+        data_str = data_str.decode('ascii')

         oem_id_index = data_str.find(prefix)
         if oem_id_index != -1:
             ret = data_str[oem_id_index + len(prefix):

@@ -15,7 +15,6 @@

 import abc

 from oslo_log import log
-import six

 from ceilometer.i18n import _
 from ceilometer.ipmi.platform import exception as nmexcept
@@ -26,8 +25,7 @@ from ceilometer import sample
 LOG = log.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class _Base(plugin_base.PollsterBase):
+class _Base(plugin_base.PollsterBase, metaclass=abc.ABCMeta):

     def setup_environment(self):
         super(_Base, self).setup_environment()
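
Note: the unconditional decode in the NodeManager hunk above works because binascii.hexlify always returns bytes on Python 3, so the six.PY3 guard was redundant; sketch:

    import binascii

    raw = b'\x49\x4e\x54\x43'            # hypothetical BIOS blob contents
    data_str = binascii.hexlify(raw)     # always bytes on Python 3
    data_str = data_str.decode('ascii')  # now a str, so str.find() applies

    assert data_str == '494e5443'
    assert data_str.find('4e54') == 2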

@@ -15,8 +15,6 @@ import itertools
 import os
 import re

-import six
-
 from oslo_config import cfg
 from oslo_log import log
 from stevedore import extension
@@ -60,7 +58,7 @@ class MeterDefinition(object):
                 _("Required fields %s not specified") % missing, self.cfg)

         self._event_type = self.cfg.get('event_type')
-        if isinstance(self._event_type, six.string_types):
+        if isinstance(self._event_type, str):
             self._event_type = [self._event_type]
         self._event_type = [re.compile(etype) for etype in self._event_type]
@@ -93,7 +91,7 @@ class MeterDefinition(object):
         # List of fields we expected when multiple meter are in the payload
         self.lookup = self.cfg.get('lookup')
-        if isinstance(self.lookup, six.string_types):
+        if isinstance(self.lookup, str):
             self.lookup = [self.lookup]

     def match_type(self, meter_name):
@@ -204,7 +202,7 @@ class ProcessMeterNotifications(endpoint.SampleEndpoint):
                 md = MeterDefinition(meter_cfg, self.conf, plugin_manager)
             except declarative.DefinitionException as e:
                 errmsg = "Error loading meter definition: %s"
-                LOG.error(errmsg, six.text_type(e))
+                LOG.error(errmsg, str(e))
             else:
                 definitions[meter_cfg['name']] = md
         return definitions.values()

@@ -17,7 +17,6 @@ import abc
 import collections

 from oslo_log import log
-import six

 from ceilometer.i18n import _
 from ceilometer.network.services import base
@@ -217,8 +216,7 @@ class LBHealthMonitorPollster(base.BaseServicesPollster):
     )


-@six.add_metaclass(abc.ABCMeta)
-class _LBStatsPollster(base.BaseServicesPollster):
+class _LBStatsPollster(base.BaseServicesPollster, metaclass=abc.ABCMeta):
     """Base Statistics pollster.

     It is capturing the statistics info and yielding samples for connections

@@ -16,7 +16,6 @@

 import abc

 from oslo_utils import netutils
-import six
 from stevedore import driver as _driver
 from urllib import parse as urlparse
@@ -24,8 +23,7 @@ from ceilometer.polling import plugin_base
 from ceilometer import sample


-@six.add_metaclass(abc.ABCMeta)
-class _Base(plugin_base.PollsterBase):
+class _Base(plugin_base.PollsterBase, metaclass=abc.ABCMeta):

     NAMESPACE = 'network.statistics.drivers'
     drivers = {}

@@ -15,11 +15,8 @@

 import abc

-import six

-
-@six.add_metaclass(abc.ABCMeta)
-class Driver(object):
+class Driver(object, metaclass=abc.ABCMeta):

     def __init__(self, conf):
         self.conf = conf

@@ -16,7 +16,6 @@ import copy

 from oslo_log import log
 import requests
-import six
 from urllib import parse as urlparse

 from ceilometer.i18n import _
@@ -75,12 +74,12 @@ class AnalyticsAPIBaseClient(object):
         curl_command = ['REQ: curl -i -X GET ']

         params = []
-        for name, value in six.iteritems(req_params['data']):
+        for name, value in req_params['data'].items():
             params.append("%s=%s" % (name, value))

         curl_command.append('"%s?%s" ' % (url, '&'.join(params)))

-        for name, value in six.iteritems(req_params['headers']):
+        for name, value in req_params['headers'].items():
             curl_command.append('-H "%s: %s" ' % (name, value))

         LOG.debug(''.join(curl_command))
@@ -93,7 +92,7 @@ class AnalyticsAPIBaseClient(object):
                                                    resp.status_code,
                                                    resp.reason)]
         dump.extend('%s: %s\n' % (k, v)
-                    for k, v in six.iteritems(resp.headers))
+                    for k, v in resp.headers.items())
         dump.append('\n')
         if resp.content:
             dump.extend([resp.content, '\n'])

@@ -14,11 +14,9 @@
 # under the License.

 import abc

 from oslo_log import log
 import requests
 from requests import auth
-import six
-
 from ceilometer.i18n import _
@@ -26,8 +24,7 @@ from ceilometer.i18n import _
 LOG = log.getLogger(__name__)


-@six.add_metaclass(abc.ABCMeta)
-class _Base(object):
+class _Base(object, metaclass=abc.ABCMeta):
     """Base class of OpenDaylight REST APIs Clients."""

     @abc.abstractproperty
@@ -196,7 +193,7 @@ class Client(object):
             curl_command.append('--user "%s":"***" ' % auth_class.username)

-        for name, value in six.iteritems(self._req_params['headers']):
+        for name, value in self._req_params['headers'].items():
             curl_command.append('-H "%s: %s" ' % (name, value))

         LOG.debug(''.join(curl_command))
@@ -208,7 +205,7 @@ class Client(object):
                                                    resp.status_code,
                                                    resp.reason)]
         dump.extend('%s: %s\n' % (k, v)
-                    for k, v in six.iteritems(resp.headers))
+                    for k, v in resp.headers.items())
         dump.append('\n')
         if resp.content:
             dump.extend([resp.content, '\n'])

@@ -14,7 +14,6 @@
 # under the License.

 from oslo_log import log
-import six
 from urllib import parse as urlparse

 from ceilometer.network.statistics import driver
@@ -27,7 +26,7 @@ LOG = log.getLogger(__name__)
 def _get_properties(properties, prefix='properties'):
     resource_meta = {}
     if properties is not None:
-        for k, v in six.iteritems(properties):
+        for k, v in properties.items():
             value = v['value']
             key = prefix + '_' + k
             if 'name' in v:
@@ -139,7 +138,7 @@ class OpenDayLightDriver(driver.Driver):
             container_data['user_links'] = user_links
             for user_link_row in user_links_raw['userLinks']:
                 user_link = {}
-                for k, v in six.iteritems(user_link_row):
+                for k, v in user_link_row.items():
                     if (k == "dstNodeConnector" or
                             k == "srcNodeConnector"):
                         port_raw, node_raw = v.split('@')
@@ -190,7 +189,7 @@ class OpenDayLightDriver(driver.Driver):
         data = self._prepare_cache(endpoint, params, cache)

         samples = []
-        for name, value in six.iteritems(data):
+        for name, value in data.items():
             for sample in iter(extractor, value):
                 if sample is not None:
                     # set controller name and container name
@@ -412,7 +411,7 @@ class OpenDayLightDriver(driver.Driver):
         """
         val_iter, key_func = None, None
         if isinstance(value, dict):
-            val_iter = six.iteritems(value)
+            val_iter = value.items()
             key_func = lambda k: key_base + '.' + k if key_base else k  # noqa
         elif isinstance(value, (tuple, list)):
             val_iter = enumerate(value)

@@ -19,7 +19,6 @@ import abc
 from oslo_config import cfg
 from oslo_log import log
 import oslo_messaging
-import six

 from ceilometer import agent
 from ceilometer import publisher
@@ -136,8 +135,7 @@ class Sink(object):
         """Flush data after all events have been injected to pipeline."""


-@six.add_metaclass(abc.ABCMeta)
-class Pipeline(object):
+class Pipeline(object, metaclass=abc.ABCMeta):
     """Represents a coupling between a sink and a corresponding source."""

     def __init__(self, conf, source, sink):

@@ -30,7 +30,6 @@ from oslo_config import cfg
 from oslo_log import log
 import oslo_messaging
 from oslo_utils import timeutils
-import six
 from stevedore import extension
 from tooz import coordination
 from urllib import parse as urlparse
@@ -101,7 +100,7 @@ class Resources(object):
                     not self.agent_manager.partition_coordinator or
                     self.agent_manager.hashrings[
                         static_resources_group].belongs_to_self(
-                            six.text_type(v))] + source_discovery
+                            str(v))] + source_discovery

         return source_discovery
@@ -550,7 +549,7 @@ class AgentManager(cotyledon.Service):
                 discovered = [
                     v for v in discovered if self.hashrings[
                         self.construct_group_id(discoverer.group_id)
-                    ].belongs_to_self(six.text_type(v))]
+                    ].belongs_to_self(str(v))]

                 resources.extend(discovered)
                 if discovery_cache is not None:

@@ -17,7 +17,6 @@

 import abc

-import six
 from stevedore import extension

@@ -45,8 +44,7 @@ class PollsterPermanentError(Exception):
         self.fail_res_list = resources


-@six.add_metaclass(abc.ABCMeta)
-class PollsterBase(object):
+class PollsterBase(object, metaclass=abc.ABCMeta):
     """Base class for plugins that support the polling API."""

     def setup_environment(self):
@@ -124,8 +122,7 @@ class PollsterBase(object):
         return extensions


-@six.add_metaclass(abc.ABCMeta)
-class DiscoveryBase(object):
+class DiscoveryBase(object, metaclass=abc.ABCMeta):
     KEYSTONE_REQUIRED_FOR_SERVICE = None
     """Service type required in keystone catalog to works"""

@@ -18,7 +18,6 @@ import abc

 from oslo_log import log
 from oslo_utils import netutils
-import six
 from stevedore import driver

@@ -36,8 +35,7 @@ def get_publisher(conf, url, namespace):
     return loaded_driver.driver(conf, parse_result)


-@six.add_metaclass(abc.ABCMeta)
-class ConfigPublisherBase(object):
+class ConfigPublisherBase(object, metaclass=abc.ABCMeta):
     """Base class for plugins that publish data."""

     def __init__(self, conf, parsed_url):

@@ -26,7 +26,6 @@ from keystoneauth1 import exceptions as ka_exceptions
 from oslo_log import log
 from oslo_utils import fnmatch
 from oslo_utils import timeutils
-import six
 from stevedore import extension
 from urllib import parse as urlparse
@@ -43,8 +42,7 @@ LOG = log.getLogger(__name__)

 def cache_key_mangler(key):
     """Construct an opaque cache key."""
-    if six.PY2:
-        key = key.encode('utf-8')
     return uuid.uuid5(CACHE_NAMESPACE, key).hex
@@ -53,10 +51,10 @@ EVENT_CREATE, EVENT_UPDATE, EVENT_DELETE = ("create", "update", "delete")

 class ResourcesDefinition(object):

-    MANDATORY_FIELDS = {'resource_type': six.string_types,
+    MANDATORY_FIELDS = {'resource_type': str,
                         'metrics': (dict, list)}

-    MANDATORY_EVENT_FIELDS = {'id': six.string_types}
+    MANDATORY_EVENT_FIELDS = {'id': str}

     def __init__(self, definition_cfg, archive_policy_default,
                  archive_policy_override, plugin_manager):
@@ -373,9 +371,9 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
         try:
             self.batch_measures(measures, gnocchi_data)
         except gnocchi_exc.ClientException as e:
-            LOG.error(six.text_type(e))
+            LOG.error(str(e))
         except Exception as e:
-            LOG.error(six.text_type(e), exc_info=True)
+            LOG.error(str(e), exc_info=True)

         for info in gnocchi_data.values():
             resource = info["resource"]
@@ -387,9 +385,9 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
                     self._if_not_cached(resource_type, resource['id'],
                                         resource_extra)
                 except gnocchi_exc.ClientException as e:
-                    LOG.error(six.text_type(e))
+                    LOG.error(str(e))
                 except Exception as e:
-                    LOG.error(six.text_type(e), exc_info=True)
+                    LOG.error(str(e), exc_info=True)

     @staticmethod
     def _extract_resources_from_error(e, resource_infos):
@@ -422,7 +420,7 @@ class GnocchiPublisher(publisher.ConfigPublisherBase):
                 pass
             except gnocchi_exc.ClientException as e:
                 LOG.error('Error creating resource %(id)s: %(err)s',
-                          {'id': resource['id'], 'err': six.text_type(e)})
+                          {'id': resource['id'], 'err': str(e)})
                 # We cannot post measures for this resource
                 # and we can't patch it later
                 del measures[resource['id']]

@@ -25,7 +25,6 @@ from oslo_log import log
 import oslo_messaging
 from oslo_utils import encodeutils
 from oslo_utils import excutils
-import six
 from urllib import parse as urlparse

 from ceilometer.i18n import _
@@ -70,8 +69,7 @@ def raise_delivery_failure(exc):
                                           cause=exc)


-@six.add_metaclass(abc.ABCMeta)
-class MessagingPublisher(publisher.ConfigPublisherBase):
+class MessagingPublisher(publisher.ConfigPublisherBase, metaclass=abc.ABCMeta):

     def __init__(self, conf, parsed_url):
         super(MessagingPublisher, self).__init__(conf, parsed_url)

@@ -19,7 +19,6 @@ import datetime
 from jsonpath_rw_ext import parser
 from oslo_log import log
 from oslo_utils import timeutils
-import six
 import yaml

 from ceilometer import sample as sample_util
@@ -135,7 +134,7 @@ class MonascaDataFilter(object):
             if len(val_matches) > 0:
                 # resolve the find to the first match and get value
                 val = val_matches[0].value
-                if not isinstance(val, (str, six.text_type)) \
+                if not isinstance(val, (str, str)) \
                         and not isinstance(val, int):
                     # Don't support lists or dicts or ...
                     raise CeiloscaMappingDefinitionException(
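
Note: the mechanical substitution above leaves an isinstance tuple of (str, str); the duplicate is harmless because isinstance accepts a tuple, and the check is equivalent to a single str test:

    val = 'vcpus'
    # (str, six.text_type) collapsed to (str, str); both behave like plain str.
    assert isinstance(val, (str, str)) == isinstance(val, str)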

@@ -20,7 +20,6 @@ import hmac

 from oslo_config import cfg
 from oslo_utils import secretutils
-import six


 OPTS = [
@@ -47,7 +46,7 @@ def decode_unicode(input):
         # predictable insertion order to avoid inconsistencies in the
         # message signature computation for equivalent payloads modulo
         # ordering
-        for key, value in sorted(six.iteritems(input)):
+        for key, value in sorted(input.items()):
             temp[decode_unicode(key)] = decode_unicode(value)
         return temp
     elif isinstance(input, (tuple, list)):
@@ -55,9 +54,9 @@ def decode_unicode(input):
         # the tuple would become list. So we have to generate the value as
         # list here.
         return [decode_unicode(element) for element in input]
-    elif isinstance(input, six.text_type):
+    elif isinstance(input, str):
         return input.encode('utf-8')
-    elif six.PY3 and isinstance(input, six.binary_type):
+    elif isinstance(input, bytes):
         return input.decode('utf-8')
     else:
         return input
@@ -65,7 +64,7 @@ def decode_unicode(input):

 def recursive_keypairs(d, separator=':'):
     """Generator that produces sequence of keypairs for nested dictionaries."""
-    for name, value in sorted(six.iteritems(d)):
+    for name, value in sorted(d.items()):
         if isinstance(value, dict):
             for subname, subvalue in recursive_keypairs(value, separator):
                 yield ('%s%s%s' % (name, separator, subname), subvalue)
@@ -80,7 +79,7 @@ def compute_signature(message, secret):
     if not secret:
         return ''

-    if isinstance(secret, six.text_type):
+    if isinstance(secret, str):
         secret = secret.encode('utf-8')
     digest_maker = hmac.new(secret, b'', hashlib.sha256)
     for name, value in recursive_keypairs(message):
@@ -88,8 +87,8 @@ def compute_signature(message, secret):
             # Skip any existing signature value, which would not have
             # been part of the original message.
             continue
-        digest_maker.update(six.text_type(name).encode('utf-8'))
-        digest_maker.update(six.text_type(value).encode('utf-8'))
+        digest_maker.update(str(name).encode('utf-8'))
+        digest_maker.update(str(value).encode('utf-8'))

     return digest_maker.hexdigest()
@@ -105,13 +104,12 @@ def verify_signature(message, secret):
     old_sig = message.get('message_signature', '')
     new_sig = compute_signature(message, secret)

-    if isinstance(old_sig, six.text_type):
+    if isinstance(old_sig, str):
         try:
             old_sig = old_sig.encode('ascii')
         except UnicodeDecodeError:
             return False

-    if six.PY3:
-        new_sig = new_sig.encode('ascii')
+    new_sig = new_sig.encode('ascii')

     return secretutils.constant_time_compare(new_sig, old_sig)
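
Note: with the PY2/PY3 guards gone, the signature helpers always feed bytes to the HMAC and only encode str inputs once; a condensed, self-contained sketch of the same idea (simplified, not the module's full logic):

    import hashlib
    import hmac

    def sign(payload, secret):
        if isinstance(secret, str):
            secret = secret.encode('utf-8')
        digest_maker = hmac.new(secret, b'', hashlib.sha256)
        for name, value in sorted(payload.items()):
            digest_maker.update(str(name).encode('utf-8'))
            digest_maker.update(str(value).encode('utf-8'))
        return digest_maker.hexdigest()

    message = {'counter_name': 'cpu', 'counter_volume': 1}
    # hexdigest() already returns str on Python 3, hence the single
    # encode('ascii') before the constant-time comparison in the hunk above.
    old_sig = sign(message, 'telemetry_secret').encode('ascii')
    new_sig = sign(message, 'telemetry_secret').encode('ascii')
    assert hmac.compare_digest(new_sig, old_sig)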

@@ -25,7 +25,6 @@ import uuid

 from oslo_config import cfg
 from oslo_utils import timeutils
-import six


 OPTS = [
     cfg.StrOpt('sample_source',
@@ -51,7 +50,7 @@ def add_reserved_user_metadata(conf, src_metadata, dest_metadata):
     for prefix in conf.reserved_metadata_namespace:
         md = dict(
             (k[len(prefix):].replace('.', '_'),
-             v[:limit] if isinstance(v, six.string_types) else v)
+             v[:limit] if isinstance(v, str) else v)
             for k, v in src_metadata.items()
             if (k.startswith(prefix) and
                 k[len(prefix):].replace('.', '_') not in dest_metadata)
@@ -61,7 +60,7 @@ def add_reserved_user_metadata(conf, src_metadata, dest_metadata):
     for metadata_key in conf.reserved_metadata_keys:
         md = dict(
             (k.replace('.', '_'),
-             v[:limit] if isinstance(v, six.string_types) else v)
+             v[:limit] if isinstance(v, str) else v)
             for k, v in src_metadata.items()
             if (k == metadata_key and
                 k.replace('.', '_') not in dest_metadata)

@@ -21,7 +21,6 @@ import fixtures
 import oslo_messaging.conffixture
 from oslo_utils import timeutils
 from oslotest import base
-import six
 from testtools import testcase
 import yaml
@@ -96,7 +95,7 @@ def _skip_decorator(func):
         try:
             return func(*args, **kwargs)
         except ceilometer.NotImplementedError as e:
-            raise testcase.TestSkipped(six.text_type(e))
+            raise testcase.TestSkipped(str(e))
     return skip_if_not_implemented

@@ -17,7 +17,6 @@
 """
 from oslotest import base
-import six

 from ceilometer.compute.pollsters import util
 from ceilometer.polling import manager
@@ -83,7 +82,7 @@ class TestLocationMetadata(base.BaseTestCase):

     def test_metadata(self):
         md = util._get_metadata_from_object(self.CONF, self.instance)
-        for prop, value in six.iteritems(self.INSTANCE_PROPERTIES):
+        for prop, value in self.INSTANCE_PROPERTIES.items():
             if prop not in ("metadata"):
                 # Special cases
                 if prop == 'name':

@@ -17,7 +17,6 @@ import datetime
 from unittest import mock

 import jsonpath_rw_ext
-import six

 from ceilometer import declarative
 from ceilometer.event import converter
@@ -65,7 +64,7 @@ class ConverterBase(base.BaseTestCase):
         elif dtype == models.Trait.DATETIME_TYPE:
             self.assertIsInstance(trait.value, datetime.datetime)
         elif dtype == models.Trait.TEXT_TYPE:
-            self.assertIsInstance(trait.value, six.string_types)
+            self.assertIsInstance(trait.value, str)

     def assertDoesNotHaveTrait(self, event, name):
         traits = [trait for trait in event.traits if trait.name == name]

@@ -18,7 +18,6 @@ from unittest import mock
 import fixtures
 import oslo_messaging
 from oslo_utils import fileutils
-import six
 import yaml

 from ceilometer.pipeline import event as event_pipe
@@ -101,8 +100,7 @@ class TestEventEndpoint(tests_base.BaseTestCase):
             }]
         })
-        if six.PY3:
-            ev_pipeline = ev_pipeline.encode('utf-8')
+        ev_pipeline = ev_pipeline.encode('utf-8')

         ev_pipeline_cfg_file = fileutils.write_to_tempfile(
             content=ev_pipeline, prefix="event_pipeline", suffix="yaml")
         self.CONF.set_override('event_pipeline_cfg_file',
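
Note: this and the later test hunks encode the YAML unconditionally because binary-mode file helpers on Python 3 take bytes, not str; a self-contained sketch using the standard tempfile module:

    import tempfile

    import yaml

    pipeline = yaml.dump({'sources': [{'name': 'test_source'}]})  # str on Python 3
    content = pipeline.encode('utf-8')  # binary-mode writes need bytes

    with tempfile.NamedTemporaryFile(suffix='.yaml', delete=False) as f:
        f.write(content)  # writing a plain str here would raise TypeError
        path = f.name

    with open(path, 'rb') as f:
        assert yaml.safe_load(f) == {'sources': [{'name': 'test_source'}]}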

@@ -18,7 +18,6 @@ from unittest import mock
 import fixtures
 from oslo_utils import netutils
 from pysnmp.proto import rfc1905
-import six

 from ceilometer.hardware.inspector import snmp
 from ceilometer.tests import base as test_base
@@ -45,7 +44,7 @@ class FakeCommandGenerator(object):
             for oid in oids
             if oid not in emptyOIDs
         ]
-        for emptyOID, exc in six.iteritems(emptyOIDs):
+        for emptyOID, exc in emptyOIDs.items():
             if emptyOID in oids:
                 varBinds += [(FakeObjectName(emptyOID), exc)]
         return (None, None, 0, varBinds)

@@ -17,7 +17,6 @@ from unittest import mock
 import fixtures
 from oslo_utils import fileutils
-import six
 import yaml

 from ceilometer import declarative
@@ -128,8 +127,7 @@ class TestGenericPollsters(test_base.BaseTestCase):
         self.pollster = generic.GenericHardwareDeclarativePollster(self.conf)

     def _setup_meter_def_file(self, cfg):
-        if six.PY3:
-            cfg = cfg.encode('utf-8')
+        cfg = cfg.encode('utf-8')
         meter_cfg_file = fileutils.write_to_tempfile(content=cfg,
                                                      prefix="snmp",
                                                      suffix="yaml")

@@ -17,7 +17,6 @@ import tempfile
 from unittest import mock

 from oslotest import base
-import six

 from ceilometer.ipmi.platform import intel_node_manager as node_manager
 from ceilometer.privsep import ipmitool
@@ -25,8 +24,7 @@ from ceilometer import service
 from ceilometer.tests.unit.ipmi.platform import fake_utils


-@six.add_metaclass(abc.ABCMeta)
-class _Base(base.BaseTestCase):
+class _Base(base.BaseTestCase, metaclass=abc.ABCMeta):

     @abc.abstractmethod
     def init_test_engine(self):
@@ -42,15 +40,11 @@ class _Base(base.BaseTestCase):

     @staticmethod
     def _new_no_singleton(cls, *args, **kwargs):
-        if six.PY3:
-            # We call init manually due to a py3 bug:
-            # https://bugs.python.org/issue25731
-            obj = super(node_manager.NodeManager, cls).__new__(cls)
-            obj.__init__(*args, **kwargs)
-            return obj
-        else:
-            return super(node_manager.NodeManager, cls).__new__(
-                cls, *args, **kwargs)
+        # We call init manually due to a py3 bug:
+        # https://bugs.python.org/issue25731
+        obj = super(node_manager.NodeManager, cls).__new__(cls)
+        obj.__init__(*args, **kwargs)
+        return obj


 class TestNodeManagerV3(_Base):
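
Note: only the Python 3 path is kept above because object.__new__ no longer accepts extra arguments (https://bugs.python.org/issue25731), so __init__ is run by hand; a reduced sketch with a hypothetical singleton class:

    class Singleton(object):
        """Hypothetical stand-in for the NodeManager singleton."""
        _instance = None

        def __new__(cls, *args, **kwargs):
            if cls._instance is None:
                # Pass only cls to object.__new__ and call __init__ manually.
                obj = super(Singleton, cls).__new__(cls)
                obj.__init__(*args, **kwargs)
                cls._instance = obj
            return cls._instance

        def __init__(self, value=None):
            if getattr(self, '_inited', False):
                return  # ignore re-initialisation of the shared instance
            self._inited = True
            self.value = value

    a = Singleton(value=1)
    b = Singleton(value=2)
    assert a is b and a.value == 1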

@@ -16,15 +16,13 @@ import abc
 from unittest import mock

 import fixtures
-import six

 from ceilometer.polling import manager
 from ceilometer import service
 from ceilometer.tests import base


-@six.add_metaclass(abc.ABCMeta)
-class TestPollsterBase(base.BaseTestCase):
+class TestPollsterBase(base.BaseTestCase, metaclass=abc.ABCMeta):

     def setUp(self):
         super(TestPollsterBase, self).setUp()

@@ -17,7 +17,6 @@ from unittest import mock
 import fixtures
 from oslo_utils import encodeutils
 from oslo_utils import fileutils
-import six
 import yaml

 from ceilometer import declarative
@@ -291,8 +290,7 @@ class TestMeterProcessing(test.BaseTestCase):
             cfgs = [cfgs]
         meter_cfg_files = list()
         for cfg in cfgs:
-            if six.PY3:
-                cfg = cfg.encode('utf-8')
+            cfg = cfg.encode('utf-8')
             meter_cfg_files.append(fileutils.write_to_tempfile(content=cfg,
                                                                path=self.path,
                                                                prefix="meters",

@@ -17,7 +17,6 @@ from unittest import mock
 from oslo_config import fixture as config_fixture
 from oslotest import base
 from requests import auth as req_auth
-import six
 from urllib import parse as urlparse

 from ceilometer.i18n import _
@@ -147,7 +146,7 @@ class TestClientHTTPBasicAuth(base.BaseTestCase):
             _('OpenDaylight API returned %(status)s %(reason)s') %
             {'status': self.resp.status_code,
              'reason': self.resp.reason},
-            six.text_type(e))
+            str(e))

     def test_other_error(self):

@@ -16,15 +16,13 @@ import abc
 from unittest import mock

 from oslotest import base
-import six
 from urllib import parse as urlparse

 from ceilometer.network.statistics.opendaylight import driver
 from ceilometer import service


-@six.add_metaclass(abc.ABCMeta)
-class _Base(base.BaseTestCase):
+class _Base(base.BaseTestCase, metaclass=abc.ABCMeta):

     @abc.abstractproperty
     def flow_data(self):

@@ -20,7 +20,6 @@ from unittest import mock
 import fixtures
 from oslo_utils import timeutils
-import six

 from ceilometer.pipeline import base as pipe_base
 from ceilometer.pipeline import sample as pipeline
@@ -31,8 +30,7 @@ from ceilometer import service
 from ceilometer.tests import base


-@six.add_metaclass(abc.ABCMeta)
-class BasePipelineTestCase(base.BaseTestCase):
+class BasePipelineTestCase(base.BaseTestCase, metaclass=abc.ABCMeta):

     def get_publisher(self, conf, url, namespace=''):
         fake_drivers = {'test://': test_publisher.TestPublisher,

@@ -26,7 +26,6 @@ from oslo_utils import fixture as utils_fixture
 from oslo_utils import netutils
 from oslo_utils import timeutils
 import requests
-import six
 from stevedore import extension
 import testscenarios
@@ -281,8 +280,7 @@ class PublisherTest(base.BaseTestCase):
         ]

         for content in contents:
-            if six.PY3:
-                content = content.encode('utf-8')
+            content = content.encode('utf-8')

             temp = fileutils.write_to_tempfile(content=content,
                                                prefix='gnocchi_resources',

@@ -17,7 +17,6 @@ import subprocess
 import time

 from oslo_utils import fileutils
-import six

 from ceilometer.tests import base
@@ -27,8 +26,7 @@ class BinTestCase(base.BaseTestCase):
         super(BinTestCase, self).setUp()
         content = ("[DEFAULT]\n"
                    "transport_url = fake://\n")
-        if six.PY3:
-            content = content.encode('utf-8')
+        content = content.encode('utf-8')
         self.tempfile = fileutils.write_to_tempfile(content=content,
                                                     prefix='ceilometer',
                                                     suffix='.conf')
@@ -52,8 +50,7 @@ class BinSendSampleTestCase(base.BaseTestCase):
         content = ("[DEFAULT]\n"
                    "transport_url = fake://\n"
                    "pipeline_cfg_file={0}\n".format(pipeline_cfg_file))
-        if six.PY3:
-            content = content.encode('utf-8')
+        content = content.encode('utf-8')

         self.tempfile = fileutils.write_to_tempfile(content=content,
                                                     prefix='ceilometer',
@@ -89,8 +86,7 @@ class BinCeilometerPollingServiceTestCase(base.BaseTestCase):
     def test_starting_with_duplication_namespaces(self):
         content = ("[DEFAULT]\n"
                    "transport_url = fake://\n")
-        if six.PY3:
-            content = content.encode('utf-8')
+        content = content.encode('utf-8')
         self.tempfile = fileutils.write_to_tempfile(content=content,
                                                     prefix='ceilometer',
                                                     suffix='.conf')

@@ -18,7 +18,6 @@ import time
 from unittest import mock

 from oslo_utils import fileutils
-import six
 import yaml

 from ceilometer import messaging
@@ -144,8 +143,7 @@ class BaseRealNotification(BaseNotificationTest):
                 'publishers': ['test://']
             }]
         })
-        if six.PY3:
-            pipeline = pipeline.encode('utf-8')
+        pipeline = pipeline.encode('utf-8')

         pipeline_cfg_file = fileutils.write_to_tempfile(content=pipeline,
                                                         prefix="pipeline",
@@ -164,8 +162,7 @@ class BaseRealNotification(BaseNotificationTest):
                 'publishers': ['test://']
             }]
         })
-        if six.PY3:
-            ev_pipeline = ev_pipeline.encode('utf-8')
+        ev_pipeline = ev_pipeline.encode('utf-8')

         ev_pipeline_cfg_file = fileutils.write_to_tempfile(
             content=ev_pipeline, prefix="event_pipeline", suffix="yaml")

@@ -30,8 +30,7 @@ python-swiftclient>=3.2.0 # Apache-2.0
 python-cinderclient>=3.3.0 # Apache-2.0
 PyYAML>=5.1 # MIT
 requests!=2.9.0,>=2.8.1 # Apache-2.0
-six>=1.10.0 # MIT
-stevedore>=3.0.0 # Apache-2.0
+stevedore>=1.20.0 # Apache-2.0
 tenacity>=4.12.0 # Apache-2.0
 tooz[zake]>=1.47.0 # Apache-2.0
 os-xenapi>=0.3.3 # Apache-2.0