Remove six

Replace the following items with Python 3 style code (a short illustrative sketch of the substitutions follows the list).

- six.moves
- six.add_metaclass
- six.PY3
- six.text_type
- six.binary_type
- six.string_types
- six.iteritems
- six.integer_types
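
For reference, a minimal sketch of what each substitution looks like in plain Python 3. This is illustrative only: MyBase, value and the literals below are placeholder names, not code taken from this tree.

import abc


# six.add_metaclass(abc.ABCMeta) on a class becomes the metaclass keyword.
class MyBase(object, metaclass=abc.ABCMeta):
    pass


# six.text_type, six.string_types and six.integer_types map to str and int.
value = str(42)                          # was six.text_type(42)
is_str = isinstance(value, str)          # was isinstance(value, six.string_types)
is_num = isinstance(42, (int, float))    # was isinstance(42, (six.integer_types, float))

# six.iteritems(d) becomes d.items().
for key, val in {'a': 1}.items():
    print(key, val)

# six.moves entries go back to their stdlib homes, e.g. range and importlib.reload.
squares = [i * i for i in range(5)]

# six.PY3 guards simply disappear once only Python 3 is supported.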

Implements: blueprint six-removal
Change-Id: Ifdb5ffad4203f14c58959b87338d0de34af76674
xuanyandong 2020-12-25 15:59:59 +08:00 committed by Yandong Xuan
parent 213087869a
commit 335aa966f3
34 changed files with 73 additions and 129 deletions

View File

@@ -14,7 +14,6 @@ Cloudkitty Specific Commandments
 - [C312] Use assertTrue(...) rather than assertEqual(True, ...).
 - [C313] Validate that logs are not translated.
 - [C314] str() and unicode() cannot be used on an exception.
-  Remove or use six.text_type().
 - [C315] Translated messages cannot be concatenated. String should be
   included in translated message.
 - [C317] `oslo_` should be used instead of `oslo.`

View File

@@ -16,7 +16,6 @@
 from oslo_log import log as logging
 import pecan
 from pecan import rest
-import six
 from wsme import types as wtypes
 import wsmeext.pecan as wsme_pecan
@@ -47,7 +46,7 @@ class MappingController(rest.RestController):
             return collector_models.ServiceToCollectorMapping(
                 **mapping.as_dict())
         except db_api.NoSuchMapping as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(collector_models.ServiceToCollectorMappingCollection,
                          wtypes.text)
@@ -94,7 +93,7 @@ class MappingController(rest.RestController):
         try:
             self._db.delete_mapping(service)
         except db_api.NoSuchMapping as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])


 class CollectorStateController(rest.RestController):

View File

@@ -17,7 +17,6 @@ from oslo_config import cfg
 from oslo_log import log as logging
 import pecan
 from pecan import rest
-import six
 import voluptuous
 from wsme import types as wtypes
 import wsmeext.pecan as wsme_pecan
@@ -70,7 +69,7 @@ def get_one_metric(metric_name):
     policy.authorize(pecan.request.context, 'info:get_metric_info', {})
     metric = _find_metric(metric_name, metrics_conf)
     if not metric:
-        pecan.abort(404, six.text_type(metric_name))
+        pecan.abort(404, str(metric_name))
     info = metric.copy()
     info['metric_id'] = info['alt_name']
     return info_models.CloudkittyMetricInfo(**info)

View File

@@ -15,11 +15,8 @@
 #
 import abc

-import six
-
-
-@six.add_metaclass(abc.ABCMeta)
-class BaseIOBackend(object):
+
+class BaseIOBackend(object, metaclass=abc.ABCMeta):
     def __init__(self, path):
         self.open(path)

View File

@@ -18,7 +18,6 @@ import fractions
 from oslo_config import cfg
 from oslo_log import log as logging
-import six
 from stevedore import driver
 from voluptuous import All
 from voluptuous import Any
@@ -153,8 +152,7 @@ class NoDataCollected(Exception):
         self.resource = resource


-@six.add_metaclass(abc.ABCMeta)
-class BaseCollector(object):
+class BaseCollector(object, metaclass=abc.ABCMeta):
     collector_name = None

     def __init__(self, **kwargs):

View File

@@ -16,8 +16,6 @@
 from datetime import timedelta

 import requests
-import six
-
 from gnocchiclient import auth as gauth
 from gnocchiclient import client as gclient
 from gnocchiclient import exceptions as gexceptions
@@ -93,7 +91,7 @@ for agg in list(BASIC_AGGREGATION_METHODS):
     BASIC_AGGREGATION_METHODS.add("rate:%s" % agg)

 EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY = set(
-    (str(i) + 'pct' for i in six.moves.range(1, 100)))
+    (str(i) + 'pct' for i in range(1, 100)))

 for agg in list(EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY):
     EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY.add("rate:%s" % agg)
@@ -321,7 +319,7 @@ class GnocchiCollector(collector.BaseCollector):
             # FIXME(peschk_l): gnocchiclient seems to be raising a BadRequest
             # when it should be raising MetricNotFound
             if isinstance(e, gexceptions.BadRequest):
-                if 'Metrics not found' not in six.text_type(e):
+                if 'Metrics not found' not in e.args[0]:
                     raise
             LOG.warning('[{scope}] Skipping this metric for the '
                         'current cycle.'.format(scope=project_id, err=e))

View File

@@ -23,7 +23,6 @@ from oslo_log import log as logging
 from oslo_policy import opts as policy_opts
 from oslo_policy import policy
 from oslo_utils import excutils
-import six

 from cloudkitty.common import policies
@@ -49,7 +48,7 @@ class PolicyNotAuthorized(Exception):
         super(PolicyNotAuthorized, self).__init__(self.msg)

     def __unicode__(self):
-        return six.text_type(self.msg)
+        return str(self.msg)


 def reset():

View File

@@ -17,7 +17,6 @@ import abc
 from oslo_config import cfg
 from oslo_db import api as db_api
-import six

 _BACKEND_MAPPING = {'sqlalchemy': 'cloudkitty.db.sqlalchemy.api'}
 IMPL = db_api.DBAPI.from_config(cfg.CONF,
@@ -30,8 +29,7 @@ def get_instance():
     return IMPL


-@six.add_metaclass(abc.ABCMeta)
-class State(object):
+class State(object, metaclass=abc.ABCMeta):
     """Base class for state tracking."""

     @abc.abstractmethod
@@ -68,8 +66,7 @@ class State(object):
         """


-@six.add_metaclass(abc.ABCMeta)
-class ModuleInfo(object):
+class ModuleInfo(object, metaclass=abc.ABCMeta):
     """Base class for module info management."""

     @abc.abstractmethod
@@ -114,8 +111,7 @@ class NoSuchMapping(Exception):
         self.service = service


-@six.add_metaclass(abc.ABCMeta)
-class ServiceToCollectorMapping(object):
+class ServiceToCollectorMapping(object, metaclass=abc.ABCMeta):
     """Base class for service to collector mapping."""

     @abc.abstractmethod

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import abc
-import six

 from oslo_config import cfg
@@ -27,8 +26,7 @@ fetcher_opts = [
 cfg.CONF.register_opts(fetcher_opts, 'fetcher')


-@six.add_metaclass(abc.ABCMeta)
-class BaseFetcher(object):
+class BaseFetcher(object, metaclass=abc.ABCMeta):
     """CloudKitty tenants fetcher.

     Provides Cloudkitty integration with a backend announcing ratable scopes.

View File

@@ -17,7 +17,6 @@ import ast
 import re

 from hacking import core
-import six

 """
@@ -150,7 +149,7 @@ class CheckLoggingFormatArgs(BaseASTChecker):
             if obj_name is None:
                 return None
             return obj_name + '.' + method_name
-        elif isinstance(node, six.string_types):
+        elif isinstance(node, str):
             return node
         else:  # could be Subscript, Call or many more
             return None
@@ -221,7 +220,7 @@ class CheckForStrUnicodeExc(BaseASTChecker):
     version = "1.0"
     CHECK_DESC = ('C314 str() and unicode() cannot be used on an '
-                  'exception. Remove or use six.text_type()')
+                  'exception. Remove it.')

     def __init__(self, tree, filename):
         super(CheckForStrUnicodeExc, self).__init__(tree, filename)

View File

@@ -17,15 +17,13 @@ import abc

 import pecan
 from pecan import rest
-import six

 from cloudkitty.common import policy
 from cloudkitty.db import api as db_api
 from cloudkitty import messaging


-@six.add_metaclass(abc.ABCMeta)
-class RatingProcessorBase(object):
+class RatingProcessorBase(object, metaclass=abc.ABCMeta):
     """Provides the Cloudkitty integration code to the rating processors.

     Every rating processor should subclass this and override at least
@@ -142,6 +140,6 @@ class RatingRestControllerBase(rest.RestController):
         try:
             policy.authorize(request.context, 'rating:module_config', {})
         except policy.PolicyNotAuthorized as e:
-            pecan.abort(403, six.text_type(e))
+            pecan.abort(403, e.args[0])

         return super(RatingRestControllerBase, self)._route(args, request)

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 import wsmeext.pecan as wsme_pecan

 from cloudkitty.api.v1 import types as ck_types
@@ -60,7 +59,7 @@ class HashMapFieldsController(rating.RatingRestControllerBase):
             field_db = hashmap.get_field(uuid=field_id)
             return field_models.Field(**field_db.export_model())
         except db_api.NoSuchField as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(field_models.Field,
                          body=field_models.Field,
@@ -82,9 +81,9 @@ class HashMapFieldsController(rating.RatingRestControllerBase):
             return field_models.Field(
                 **field_db.export_model())
         except db_api.FieldAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -98,4 +97,4 @@ class HashMapFieldsController(rating.RatingRestControllerBase):
         try:
             hashmap.delete_field(uuid=field_id)
         except db_api.NoSuchField as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 import wsmeext.pecan as wsme_pecan

 from cloudkitty.api.v1 import types as ck_types
@@ -95,7 +94,7 @@ class HashMapGroupsController(rating.RatingRestControllerBase):
             group_db = hashmap.get_group(uuid=group_id)
             return group_models.Group(**group_db.export_model())
         except db_api.NoSuchGroup as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(group_models.Group,
                          body=group_models.Group,
@@ -115,9 +114,9 @@ class HashMapGroupsController(rating.RatingRestControllerBase):
             return group_models.Group(
                 **group_db.export_model())
         except db_api.GroupAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -133,4 +132,4 @@ class HashMapGroupsController(rating.RatingRestControllerBase):
         try:
             hashmap.delete_group(uuid=group_id, recurse=recursive)
         except db_api.NoSuchGroup as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 import wsmeext.pecan as wsme_pecan

 from cloudkitty.api.v1 import types as ck_types
@@ -45,7 +44,7 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
                 uuid=mapping_id)
             return group_models.Group(**group_db.export_model())
         except db_api.MappingHasNoGroup as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(mapping_models.MappingCollection,
                          ck_types.UuidType(),
@@ -105,7 +104,7 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
             return mapping_models.Mapping(
                 **mapping_db.export_model())
         except db_api.NoSuchMapping as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(mapping_models.Mapping,
                          body=mapping_models.Mapping,
@@ -132,9 +131,9 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
             return mapping_models.Mapping(
                 **mapping_db.export_model())
         except db_api.MappingAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -158,11 +157,11 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
                 tenant_id=mapping.tenant_id)
             pecan.response.headers['Location'] = pecan.request.path
         except db_api.MappingAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.NoSuchMapping as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -176,4 +175,4 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
         try:
             hashmap.delete_mapping(uuid=mapping_id)
         except db_api.NoSuchMapping as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 import wsmeext.pecan as wsme_pecan

 from cloudkitty.api.v1 import types as ck_types
@@ -60,7 +59,7 @@ class HashMapServicesController(rating.RatingRestControllerBase):
             service_db = hashmap.get_service(uuid=service_id)
             return service_models.Service(**service_db.export_model())
         except db_api.NoSuchService as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(service_models.Service,
                          body=service_models.Service,
@@ -80,7 +79,7 @@ class HashMapServicesController(rating.RatingRestControllerBase):
             return service_models.Service(
                 **service_db.export_model())
         except db_api.ServiceAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])

     @wsme_pecan.wsexpose(None, ck_types.UuidType(), status_code=204)
     def delete(self, service_id):
@@ -92,4 +91,4 @@ class HashMapServicesController(rating.RatingRestControllerBase):
         try:
             hashmap.delete_service(uuid=service_id)
         except db_api.NoSuchService as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 import wsmeext.pecan as wsme_pecan

 from cloudkitty.api.v1 import types as ck_types
@@ -45,7 +44,7 @@ class HashMapThresholdsController(rating.RatingRestControllerBase):
                 uuid=threshold_id)
             return group_models.Group(**group_db.export_model())
         except db_api.ThresholdHasNoGroup as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(threshold_models.ThresholdCollection,
                          ck_types.UuidType(),
@@ -104,7 +103,7 @@ class HashMapThresholdsController(rating.RatingRestControllerBase):
             return threshold_models.Threshold(
                 **threshold_db.export_model())
         except db_api.NoSuchThreshold as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(threshold_models.Threshold,
                          body=threshold_models.Threshold,
@@ -131,9 +130,9 @@ class HashMapThresholdsController(rating.RatingRestControllerBase):
             return threshold_models.Threshold(
                 **threshold_db.export_model())
         except db_api.ThresholdAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -157,11 +156,11 @@ class HashMapThresholdsController(rating.RatingRestControllerBase):
                 tenant_id=threshold.tenant_id)
             pecan.response.headers['Location'] = pecan.request.path
         except db_api.ThresholdAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])
         except db_api.NoSuchThreshold as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])
         except db_api.ClientHashMapError as e:
-            pecan.abort(400, six.text_type(e))
+            pecan.abort(400, e.args[0])

     @wsme_pecan.wsexpose(None,
                          ck_types.UuidType(),
@@ -175,4 +174,4 @@ class HashMapThresholdsController(rating.RatingRestControllerBase):
         try:
             hashmap.delete_threshold(uuid=threshold_id)
         except db_api.NoSuchThreshold as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -17,7 +17,6 @@ import abc
 from oslo_config import cfg
 from oslo_db import api as db_api
-import six

 from cloudkitty.i18n import _
@@ -195,8 +194,7 @@ class ThresholdHasNoGroup(ClientHashMapError):
         self.uuid = uuid


-@six.add_metaclass(abc.ABCMeta)
-class HashMap(object):
+class HashMap(object, metaclass=abc.ABCMeta):
     """Base class for hashmap configuration."""

     @abc.abstractmethod

View File

@@ -25,7 +25,6 @@ down_revision = '10d2738b67df'
 import copy  # noqa: E402

 from alembic import op  # noqa: E402
-import six  # noqa: E402

 from cloudkitty.rating.hash.db.sqlalchemy.alembic.models import (  # noqa: E402
     f8c799db4aa0_fix_unnamed_constraints as models)
@@ -154,7 +153,7 @@ POST_OPS = {
 def upgrade_sqlite():
     # NOTE(sheeprine): Batch automatically recreates tables,
     # use it as a lazy way to recreate tables and transfer data automagically.
-    for name, table in six.iteritems(models.Base.metadata.tables):
+    for name, table in models.Base.metadata.tables.items():
         with op.batch_alter_table(name, copy_from=table) as batch_op:
             # NOTE(sheeprine): Dummy operation to force recreate.
             # Easier than delete and create.
@@ -168,13 +167,13 @@ def upgrade_mysql():
     tables['hashmap_fields'].constraints = set()
     tables['hashmap_mappings'].constraints = set()
     tables['hashmap_thresholds'].constraints = set()
-    for name, table in six.iteritems(tables):
+    for name, table in tables.items():
         with op.batch_alter_table(name,
                                   copy_from=table,
                                   recreate='always') as batch_op:
             batch_op.alter_column('id')
     # Final copy with constraints
-    for name, table in six.iteritems(models.Base.metadata.tables):
+    for name, table in models.Base.metadata.tables.items():
         with op.batch_alter_table(name,
                                   copy_from=table,
                                   recreate='always') as batch_op:
@@ -208,8 +207,8 @@ def upgrade_postgresql():
     ops_list = [POST_OPS]
     for cur_ops in ops_list:
         for constraint_type in ('foreignkey', 'unique', 'primary'):
-            for table_name, constraints in six.iteritems(
-                    cur_ops.get(constraint_type, dict())):
+            for table_name, constraints in cur_ops.get(constraint_type,
+                                                       dict()).items():
                 for constraint in constraints:
                     old_name = constraint[0]
                     translate_op(
@@ -218,8 +217,8 @@ def upgrade_postgresql():
                         old_name,
                         table_name)
         for constraint_type in ('primary', 'unique', 'foreignkey'):
-            for table_name, constraints in six.iteritems(
-                    cur_ops.get(constraint_type, dict())):
+            for table_name, constraints in cur_ops.get(constraint_type,
+                                                       dict()).items():
                 for constraint in constraints:
                     new_name = constraint[1]
                     params = constraint[2]

View File

@@ -14,7 +14,6 @@
 # under the License.
 #
 import pecan
-import six
 from wsme import types as wtypes
 import wsmeext.pecan as wsme_pecan
@@ -36,7 +35,7 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
         """
         if data == wtypes.Unset:
             return ''
-        if not isinstance(data, six.binary_type):
+        if not isinstance(data, bytes):
             data = data.encode('utf-8')
         return data
@@ -71,7 +70,7 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
             script_db = pyscripts.get_script(uuid=script_id)
             return script_models.Script(**script_db.export_model())
         except db_api.NoSuchScript as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(script_models.Script,
                          body=script_models.Script,
@@ -92,7 +91,7 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
             return script_models.Script(
                 **script_db.export_model())
         except db_api.ScriptAlreadyExists as e:
-            pecan.abort(409, six.text_type(e))
+            pecan.abort(409, e.args[0])

     @wsme_pecan.wsexpose(script_models.Script,
                          ck_types.UuidType(),
@@ -117,7 +116,7 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
             return script_models.Script(
                 **script_db.export_model())
         except db_api.NoSuchScript as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

     @wsme_pecan.wsexpose(None, ck_types.UuidType(), status_code=204)
     def delete(self, script_id):
@@ -129,4 +128,4 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
         try:
             pyscripts.delete_script(uuid=script_id)
         except db_api.NoSuchScript as e:
-            pecan.abort(404, six.text_type(e))
+            pecan.abort(404, e.args[0])

View File

@@ -17,7 +17,6 @@ import abc
 from oslo_config import cfg
 from oslo_db import api as db_api
-import six

 from cloudkitty.i18n import _
@@ -55,8 +54,7 @@ class ScriptAlreadyExists(Exception):
         self.uuid = uuid


-@six.add_metaclass(abc.ABCMeta)
-class PyScripts(object):
+class PyScripts(object, metaclass=abc.ABCMeta):
     """Base class for pyscripts configuration."""

     @abc.abstractmethod

View File

@@ -18,16 +18,13 @@ from datetime import timedelta

 from oslo_config import cfg
 from oslo_log import log as logging
-import six
-
 LOG = logging.getLogger(__name__)

 CONF = cfg.CONF


-@six.add_metaclass(abc.ABCMeta)
-class BaseStorage(object):
+class BaseStorage(object, metaclass=abc.ABCMeta):
     """Base Storage class:

     Handle incoming data from the global orchestrator, and store them.

View File

@@ -15,11 +15,8 @@
 #
 import abc

-import six
-
-
-@six.add_metaclass(abc.ABCMeta)
-class BaseHybridBackend(object):
+
+class BaseHybridBackend(object, metaclass=abc.ABCMeta):
     """Base Backend class for the Hybrid Storage.

     This is the interface that all backends for the hybrid storage

View File

@@ -22,7 +22,6 @@ from keystoneauth1 import loading as ks_loading
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import uuidutils
-import six

 from cloudkitty.collector import validate_conf
 from cloudkitty.storage.v1.hybrid.backends import BaseHybridBackend
@@ -47,7 +46,7 @@ gnocchi_storage_opts = [
     # The archive policy definition MUST include the collect period granularity
     cfg.StrOpt('archive_policy_definition',
                default='[{"granularity": '
-                       + six.text_type(CONF.collect.period) +
+                       + str(CONF.collect.period) +
                        ', "timespan": "90 days"}, '
                        '{"granularity": 86400, "timespan": "360 days"}, '
                        '{"granularity": 2592000, "timespan": "1800 days"}]',
@@ -398,7 +397,7 @@ class GnocchiStorage(BaseHybridBackend):
         price_dict = {'price': float(price)}

         # Getting vol
-        if isinstance(res_type_info['qty_metric'], six.string_types):
+        if isinstance(res_type_info['qty_metric'], str):
             try:
                 qty = self._conn.metric.get_measures(
                     resource['metrics'][res_type_info['qty_metric']],

View File

@@ -17,7 +17,6 @@ import abc
 import datetime

 from oslo_config import cfg
-import six

 from cloudkitty import storage_state
@@ -35,8 +34,7 @@ CONF = cfg.CONF
 CONF.register_opts(storage_opts, 'storage')


-@six.add_metaclass(abc.ABCMeta)
-class BaseStorage(object):
+class BaseStorage(object, metaclass=abc.ABCMeta):
     """Abstract class for v2 storage objects."""

     def __init__(self, *args, **kwargs):

View File

@@ -13,11 +13,10 @@
 # under the License.
 #
 import datetime

 import influxdb
 from oslo_config import cfg
 from oslo_log import log
-import six

 from cloudkitty import dataframe
 from cloudkitty.storage import v2 as v2_storage
@@ -162,9 +161,9 @@ class InfluxClient(object):
     @staticmethod
     def _get_filter(key, value):
         format_string = ''
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             format_string = """"{}"='{}'"""
-        elif isinstance(value, (six.integer_types, float)):
+        elif isinstance(value, (int, float)):
             format_string = """"{}"={}"""
         return format_string.format(key, value)

View File

@@ -20,7 +20,6 @@ Create Date: 2019-05-15 17:02:56.595274
 """
 from alembic import op
-import six

 from cloudkitty.storage_state import models
@@ -32,7 +31,7 @@ depends_on = None


 def upgrade():
-    for name, table in six.iteritems(models.Base.metadata.tables):
+    for name, table in models.Base.metadata.tables.items():
         if name == 'cloudkitty_storage_states':
             with op.batch_alter_table(name,

View File

@@ -28,7 +28,6 @@ from oslo_db.sqlalchemy import utils
 import oslo_messaging
 from oslo_messaging import conffixture
 from oslo_policy import opts as policy_opts
-import six
 from stevedore import driver
 from stevedore import extension
 import webob.dec
@@ -70,8 +69,7 @@ class UUIDFixture(fixture.GabbiFixture):
         self.patcher.stop()


-@six.add_metaclass(abc.ABCMeta)
-class BaseExtensionFixture(fixture.GabbiFixture):
+class BaseExtensionFixture(fixture.GabbiFixture, metaclass=abc.ABCMeta):
     klass = None
     namespace = None
     stevedore_mgr = None

View File

@@ -20,7 +20,6 @@ from unittest import mock
 import zlib

 from oslo_utils import uuidutils
-import six

 from cloudkitty.rating import pyscripts
 from cloudkitty.rating.pyscripts.db import api
@@ -220,7 +219,7 @@ class PyScriptsRatingTest(tests.TestCase):
             '<PyScripts Script[{uuid}]: name={name}>'.format(
                 uuid=script_db.script_id,
                 name=script_db.name),
-            six.text_type(script_db))
+            str(script_db))

     # Checksum tests
     def test_validate_checksum(self):

View File

@@ -17,9 +17,9 @@ import contextlib
 import datetime
 import decimal
 import fractions
+import importlib
 import math
 import shutil
-import six
 from string import Template
 import sys
 import tempfile
@@ -27,7 +27,6 @@ import yaml

 from oslo_log import log as logging
 from oslo_utils import timeutils
-from six import moves
 from stevedore import extension

 from cloudkitty.utils import tz as tzutils
@@ -192,7 +191,7 @@ def refresh_stevedore(namespace=None):
         # python2, do nothing
         pass
     # Force working_set reload
-    moves.reload_module(sys.modules['pkg_resources'])
+    importlib.reload(sys.modules['pkg_resources'])
     # Clear stevedore cache
     cache = extension.ExtensionManager.ENTRY_POINT_CACHE
     if namespace:
@@ -249,8 +248,7 @@ def tempdir(**kwargs):
         try:
             shutil.rmtree(tmpdir)
         except OSError as e:
-            LOG.debug('Could not remove tmpdir: %s',
-                      six.text_type(e))
+            LOG.debug('Could not remove tmpdir: %s', e)


 def mutate(value, mode='NONE'):

View File

@@ -18,9 +18,6 @@ try:
 except ImportError:
     from collections import Iterable
 import functools

-import six
-
-
 import voluptuous
@@ -95,4 +92,4 @@ class IterableValuesDict(DictTypeValidator):

 def get_string_type():
     """Returns ``basestring`` in python2 and ``str`` in python3."""
-    return six.string_types[0]
+    return str

View File

@@ -15,14 +15,11 @@
 #
 import abc

-import six
-
 from cloudkitty import state
 from cloudkitty import utils as ck_utils


-@six.add_metaclass(abc.ABCMeta)
-class BaseReportWriter(object):
+class BaseReportWriter(object, metaclass=abc.ABCMeta):
     """Base report writer."""

     report_type = None

View File

@@ -23,7 +23,6 @@ oslo.utils==4.7.0 # Apache-2.0
 oslo.upgradecheck==1.2.0 # Apache-2.0
 python-dateutil==2.7.0 # BSD
 SQLAlchemy==1.3.20 # MIT
-six==1.15.0 # MIT
 stevedore==3.2.2 # Apache-2.0
 tooz==2.7.1 # Apache-2.0
 voluptuous==0.12.0 # BSD-3

View File

@@ -25,7 +25,6 @@ oslo.utils>=4.7.0 # Apache-2.0
 oslo.upgradecheck>=1.2.0 # Apache-2.0
 python-dateutil>=2.7.0 # BSD
 SQLAlchemy>=1.3.20 # MIT
-six>=1.15.0 # MIT
 stevedore>=3.2.2 # Apache-2.0
 tooz>=2.7.1 # Apache-2.0
 voluptuous>=0.12.0 # BSD License

View File

@@ -19,7 +19,6 @@ oslo.middleware>=2.6.1 # Apache-2.0
 oslo.policy>=0.5.0 # Apache-2.0
 oslo.utils>=2.0.0 # Apache-2.0
 SQLAlchemy<1.1.0,>=0.9.7
-six>=1.9.0
 stevedore>=1.5.0 # Apache-2.0
 hacking<0.10,>=0.9.2
 coverage>=3.6