Update hacking for Python3
The repo is Python 3 now, so update hacking to version 3.0.1, which supports Python 3. Fix the problems it found. Remove hacking and its friends from lower-constraints; they are not needed for installation. Change-Id: I6691687f0cd2fe4c9e7f2a76d333ca9eacb0cbf3
This commit is contained in:
parent
ee26961cfc
commit
0764f5dab1
|
@ -188,7 +188,7 @@ def get_request_url(request):
|
|||
forwarded = headers.get('X-Forwarded-Host')
|
||||
if forwarded:
|
||||
url_parts = list(urllib.parse.urlsplit(url))
|
||||
url_parts[1] = re.split(',\s?', forwarded)[-1]
|
||||
url_parts[1] = re.split(r',\s?', forwarded)[-1]
|
||||
url = urllib.parse.urlunsplit(url_parts).rstrip('/')
|
||||
return url
|
||||
|
||||
|
|
|
@ -23,7 +23,7 @@ import routes
|
|||
|
||||
class APIMapper(routes.Mapper):
|
||||
def routematch(self, url=None, environ=None):
|
||||
if url is "":
|
||||
if url == "":
|
||||
result = self._match("", environ)
|
||||
return result[0], result[1]
|
||||
return routes.Mapper.routematch(self, url, environ)
|
||||
|
|
|
@ -146,7 +146,7 @@ class TriggersController(wsgi.Controller):
|
|||
try:
|
||||
with StartNotification(context, id=id):
|
||||
self.operationengine_api.delete_trigger(context, id)
|
||||
except exception.TriggerNotFound as ex:
|
||||
except exception.TriggerNotFound:
|
||||
pass
|
||||
except (exception.DeleteTriggerNotAllowed,
|
||||
Exception) as ex:
|
||||
|
|
|
@ -16,19 +16,19 @@
|
|||
import eventlet
|
||||
eventlet.monkey_patch()
|
||||
|
||||
import sys
|
||||
import sys # noqa: E402
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_config import cfg # noqa: E402
|
||||
from oslo_log import log as logging # noqa: E402
|
||||
|
||||
# Need to register global_opts
|
||||
from karbor.common import config # noqa
|
||||
from karbor import i18n
|
||||
from karbor import i18n # noqa: E402
|
||||
i18n.enable_lazy()
|
||||
from karbor import objects
|
||||
from karbor import rpc
|
||||
from karbor import service
|
||||
from karbor import version
|
||||
from karbor import objects # noqa: E402
|
||||
from karbor import rpc # noqa: E402
|
||||
from karbor import service # noqa: E402
|
||||
from karbor import version # noqa: E402
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@ -15,9 +15,6 @@
|
|||
CLI interface for karbor management.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
@ -31,14 +28,14 @@ i18n.enable_lazy()
|
|||
|
||||
# Need to register global_opts
|
||||
from karbor.common import config # noqa
|
||||
from karbor import context
|
||||
from karbor import db
|
||||
from karbor.db import migration as db_migration
|
||||
from karbor.db.sqlalchemy import api as db_api
|
||||
from karbor.i18n import _
|
||||
from karbor import objects
|
||||
from karbor import utils
|
||||
from karbor import version
|
||||
from karbor import context # noqa: E402
|
||||
from karbor import db # noqa: E402
|
||||
from karbor.db import migration as db_migration # noqa: E402
|
||||
from karbor.db.sqlalchemy import api as db_api # noqa: E402
|
||||
from karbor.i18n import _ # noqa: E402
|
||||
from karbor import objects # noqa: E402
|
||||
from karbor import utils # noqa: E402
|
||||
from karbor import version # noqa: E402
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@ -16,19 +16,19 @@
|
|||
import eventlet
|
||||
eventlet.monkey_patch()
|
||||
|
||||
import sys
|
||||
import sys # noqa: E402
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_config import cfg # noqa: E402
|
||||
from oslo_log import log as logging # noqa: E402
|
||||
|
||||
from karbor import i18n
|
||||
from karbor import i18n # noqa: E402
|
||||
i18n.enable_lazy()
|
||||
from karbor import objects
|
||||
from karbor import objects # noqa: E402
|
||||
|
||||
# Need to register global_opts
|
||||
from karbor.common import config # noqa
|
||||
from karbor import service
|
||||
from karbor import version
|
||||
from karbor import service # noqa: E402
|
||||
from karbor import version # noqa: E402
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@ -16,19 +16,19 @@
|
|||
import eventlet
|
||||
eventlet.monkey_patch()
|
||||
|
||||
import sys
|
||||
import sys # noqa: E402
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_config import cfg # noqa: E402
|
||||
from oslo_log import log as logging # noqa: E402
|
||||
|
||||
from karbor import i18n
|
||||
from karbor import i18n # noqa: E402
|
||||
i18n.enable_lazy()
|
||||
from karbor import objects
|
||||
from karbor import objects # noqa: E402
|
||||
|
||||
# Need to register global_opts
|
||||
from karbor.common import config # noqa
|
||||
from karbor import service
|
||||
from karbor import version
|
||||
from karbor import service # noqa: E402
|
||||
from karbor import version # noqa: E402
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@ -78,6 +78,7 @@ def get_session(**kwargs):
|
|||
def dispose_engine():
|
||||
get_engine().dispose()
|
||||
|
||||
|
||||
_DEFAULT_QUOTA_NAME = 'default'
|
||||
|
||||
|
||||
|
|
|
@ -187,9 +187,9 @@ class DbQuotaDriver(object):
|
|||
|
||||
# Filter resources
|
||||
if has_sync:
|
||||
sync_filt = lambda x: hasattr(x, 'sync')
|
||||
sync_filt = lambda x: hasattr(x, 'sync') # noqa: E731
|
||||
else:
|
||||
sync_filt = lambda x: not hasattr(x, 'sync')
|
||||
sync_filt = lambda x: not hasattr(x, 'sync') # noqa: E731
|
||||
desired = set(keys)
|
||||
sub_resources = dict((k, v) for k, v in resources.items()
|
||||
if k in desired and sync_filt(v))
|
||||
|
|
|
@ -71,8 +71,8 @@ def validate_dir(key):
|
|||
|
||||
|
||||
class Bank(object):
|
||||
_KEY_VALIDATION = re.compile('^[A-Za-z0-9/_.\-@]+(?<!/)$')
|
||||
_KEY_DOT_VALIDATION = re.compile('/\.{1,2}(/|$)')
|
||||
_KEY_VALIDATION = re.compile(r'^[A-Za-z0-9/_.\-@]+(?<!/)$')
|
||||
_KEY_DOT_VALIDATION = re.compile(r'/\.{1,2}(/|$)')
|
||||
|
||||
def __init__(self, plugin):
|
||||
super(Bank, self).__init__()
|
||||
|
@ -162,8 +162,8 @@ class BankSection(object):
|
|||
a bank to another entity and make sure it is only capable of
|
||||
accessing part of it.
|
||||
"""
|
||||
_SECTION_VALIDATION = re.compile('^/?[A-Za-z0-9/_.\-@]*/?$')
|
||||
_SECTION_DOT_VALIDATION = re.compile('/\.{1,2}(/|$)')
|
||||
_SECTION_VALIDATION = re.compile(r'^/?[A-Za-z0-9/_.\-@]*/?$')
|
||||
_SECTION_DOT_VALIDATION = re.compile(r'/\.{1,2}(/|$)')
|
||||
|
||||
def __init__(self, bank, section, is_writable=True):
|
||||
super(BankSection, self).__init__()
|
||||
|
|
|
@ -75,7 +75,7 @@ def get_flow(context, protectable_registry, workflow_engine, plan, provider,
|
|||
checkpoint_copy.commit()
|
||||
operation_log = utils.create_operation_log(context, checkpoint_copy,
|
||||
constants.OPERATION_COPY)
|
||||
flow_name = "Copy_" + plan.get('id')+checkpoint.id
|
||||
flow_name = "Copy_" + plan.get('id') + checkpoint.id
|
||||
copy_flow = workflow_engine.build_flow(flow_name, 'linear')
|
||||
plugins = provider.load_plugins()
|
||||
parameters = {}
|
||||
|
@ -98,10 +98,10 @@ def get_flow(context, protectable_registry, workflow_engine, plan, provider,
|
|||
}
|
||||
workflow_engine.add_tasks(
|
||||
copy_flow,
|
||||
InitiateCopyTask(name='InitiateCopyTask_'+checkpoint_copy.id,
|
||||
InitiateCopyTask(name='InitiateCopyTask_' + checkpoint_copy.id,
|
||||
inject=store_dict),
|
||||
resources_task_flow,
|
||||
CompleteCopyTask(name='CompleteCopyTask_'+checkpoint_copy.id,
|
||||
CompleteCopyTask(name='CompleteCopyTask_' + checkpoint_copy.id,
|
||||
inject=store_dict),
|
||||
)
|
||||
return copy_flow
|
||||
|
|
|
@ -24,7 +24,7 @@ from oslo_log import log as logging
|
|||
|
||||
image_backup_opts = [
|
||||
cfg.IntOpt('backup_image_object_size',
|
||||
default=65536*10,
|
||||
default=65536 * 10,
|
||||
help='The size in bytes of instance image objects. '
|
||||
'The value must be a multiple of 65536('
|
||||
'the size of image\'s chunk).'),
|
||||
|
|
|
@ -66,7 +66,7 @@ class ProtectOperation(protection_plugin.Operation):
|
|||
'subnets',
|
||||
'tags',
|
||||
'tenant_id'
|
||||
]
|
||||
]
|
||||
|
||||
for network in networks:
|
||||
network_metadata = {
|
||||
|
@ -101,7 +101,7 @@ class ProtectOperation(protection_plugin.Operation):
|
|||
'network_id',
|
||||
'subnetpool_id',
|
||||
'tenant_id'
|
||||
]
|
||||
]
|
||||
|
||||
for subnet in subnets:
|
||||
subnet_metadata = {
|
||||
|
@ -137,7 +137,7 @@ class ProtectOperation(protection_plugin.Operation):
|
|||
'security_groups',
|
||||
'status',
|
||||
'tenant_id'
|
||||
]
|
||||
]
|
||||
|
||||
for port in ports:
|
||||
port_metadata = {
|
||||
|
@ -165,7 +165,7 @@ class ProtectOperation(protection_plugin.Operation):
|
|||
'name',
|
||||
'routes',
|
||||
'status'
|
||||
]
|
||||
]
|
||||
|
||||
for router in routers:
|
||||
router_metadata = {
|
||||
|
@ -191,7 +191,7 @@ class ProtectOperation(protection_plugin.Operation):
|
|||
'name',
|
||||
'security_group_rules',
|
||||
'tenant_id'
|
||||
]
|
||||
]
|
||||
|
||||
for sg in sgs:
|
||||
sg_metadata = {k: sg[k] for k in sg if k in allowed_keys}
|
||||
|
|
|
@ -264,7 +264,7 @@ class RestoreOperation(protection_plugin.Operation):
|
|||
# create volume
|
||||
volume_property = {
|
||||
'name': parameters.get(
|
||||
'restore_name', '%s@%s' % (checkpoint.id, resource_id))
|
||||
'restore_name', '%s@%s' % (checkpoint.id, resource_id))
|
||||
}
|
||||
if 'restore_description' in parameters:
|
||||
volume_property['description'] = parameters['restore_description']
|
||||
|
|
|
@ -37,7 +37,7 @@ volume_glance_opts = [
|
|||
'it. Minimizes the time the volume is unavailable.'
|
||||
),
|
||||
cfg.IntOpt('backup_image_object_size',
|
||||
default=65536*512,
|
||||
default=65536 * 512,
|
||||
help='The size in bytes of temporary image objects. '
|
||||
'The value must be a multiple of 65536('
|
||||
'the size of image\'s chunk).'),
|
||||
|
|
|
@ -126,7 +126,7 @@ class NetworkProtectablePluginTest(base.TestCase):
|
|||
u'description': u'',
|
||||
u'name': u'ext_net',
|
||||
u'tenant_id': u'abcd'}
|
||||
]}
|
||||
]}
|
||||
|
||||
mock_client_list_networks.return_value = fake_network_info
|
||||
self.assertEqual(plugin.list_resources(self._context),
|
||||
|
@ -149,7 +149,7 @@ class NetworkProtectablePluginTest(base.TestCase):
|
|||
u'description': u'',
|
||||
u'name': u'ext_net',
|
||||
u'tenant_id': u'abcd'}
|
||||
]}
|
||||
]}
|
||||
mock_client_list_networks.return_value = fake_network_info
|
||||
self.assertEqual(plugin.get_dependent_resources(self._context,
|
||||
project),
|
||||
|
|
|
@ -85,4 +85,4 @@ class PodProtectablePluginTest(base.TestCase):
|
|||
uuid.uuid5(uuid.NAMESPACE_OID,
|
||||
"default:busybox-test"),
|
||||
{'name': 'default:busybox-test'})
|
||||
)
|
||||
)
|
||||
|
|
|
@ -363,7 +363,7 @@ class CinderProtectionPluginTest(base.TestCase):
|
|||
mocks['volumes'].get.return_value.status = 'available'
|
||||
mocks['restores'].restore = RestoreResponse(volume_id)
|
||||
call_hooks(operation, checkpoint, resource, self.cntxt, parameters,
|
||||
**{'restore': None, 'new_resources': {}})
|
||||
**{'restore': None, 'new_resources': {}})
|
||||
mocks['volumes'].update.assert_called_with(
|
||||
volume_id,
|
||||
**{'name': parameters['restore_name'],
|
||||
|
@ -398,7 +398,7 @@ class CinderProtectionPluginTest(base.TestCase):
|
|||
mocks['backups'].get.return_value = mock.Mock()
|
||||
mocks['backups'].get.return_value.status = 'available'
|
||||
call_hooks(operation, checkpoint, resource, self.cntxt, parameters,
|
||||
**{'verify': None, 'new_resources': {}})
|
||||
**{'verify': None, 'new_resources': {}})
|
||||
mock_update_verify.assert_called_with(
|
||||
None, resource.type, volume_id, 'available')
|
||||
|
||||
|
@ -425,7 +425,7 @@ class CinderProtectionPluginTest(base.TestCase):
|
|||
self.assertRaises(
|
||||
exception.KarborException, call_hooks,
|
||||
operation, checkpoint, resource, self.cntxt,
|
||||
{}, **{'restore': None})
|
||||
{}, **{'restore': None})
|
||||
|
||||
@mock.patch('karbor.services.protection.clients.cinder.create')
|
||||
@mock.patch('karbor.services.protection.protection_plugins.utils.'
|
||||
|
@ -457,7 +457,7 @@ class CinderProtectionPluginTest(base.TestCase):
|
|||
self.assertRaises(
|
||||
exception.RestoreResourceFailed, call_hooks,
|
||||
operation, checkpoint, resource, self.cntxt,
|
||||
{}, **{'restore': None})
|
||||
{}, **{'restore': None})
|
||||
|
||||
mock_update_restore.assert_called_with(
|
||||
None, resource.type, volume_id,
|
||||
|
|
|
@ -227,6 +227,6 @@ class CinderSnapshotProtectionPluginTest(base.TestCase):
|
|||
fake_bank_section.get_object.return_value = {
|
||||
"snapshot_id": "456"}
|
||||
call_hooks(operation, checkpoint, resource, self.cntxt, parameters,
|
||||
**{'verify': None, 'new_resources': {}})
|
||||
**{'verify': None, 'new_resources': {}})
|
||||
mock_update_verify.assert_called_with(
|
||||
None, resource.type, volume_id, 'available')
|
||||
|
|
|
@ -225,7 +225,7 @@ class GlanceProtectionPluginTest(base.TestCase):
|
|||
|
||||
@mock.patch('karbor.services.protection.protection_plugins.utils.'
|
||||
'update_resource_verify_result')
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
resource = Resource(id="123",
|
||||
type=constants.IMAGE_RESOURCE_TYPE,
|
||||
name='fake')
|
||||
|
|
|
@ -28,17 +28,17 @@ class GraphBuilderTest(base.TestCase):
|
|||
"A": ["B"],
|
||||
"B": ["C"],
|
||||
"C": [],
|
||||
}, {"A"}),
|
||||
}, {"A"}),
|
||||
({
|
||||
"A": [],
|
||||
"B": ["C"],
|
||||
"C": [],
|
||||
}, {"A", "B"}),
|
||||
}, {"A", "B"}),
|
||||
({
|
||||
"A": ["C"],
|
||||
"B": ["C"],
|
||||
"C": [],
|
||||
}, {"A", "B"}),
|
||||
}, {"A", "B"}),
|
||||
)
|
||||
|
||||
for g, expected_result in test_matrix:
|
||||
|
@ -53,22 +53,22 @@ class GraphBuilderTest(base.TestCase):
|
|||
"A": ["B"],
|
||||
"B": ["C"],
|
||||
"C": [],
|
||||
}, False),
|
||||
}, False),
|
||||
({
|
||||
"A": [],
|
||||
"B": ["C"],
|
||||
"C": [],
|
||||
}, False),
|
||||
}, False),
|
||||
({
|
||||
"A": ["C"],
|
||||
"B": ["C"],
|
||||
"C": ["A"],
|
||||
}, True),
|
||||
}, True),
|
||||
({
|
||||
"A": ["B"],
|
||||
"B": ["C"],
|
||||
"C": ["A"],
|
||||
}, True),
|
||||
}, True),
|
||||
)
|
||||
|
||||
for g, expected_result in test_matrix:
|
||||
|
|
|
@ -72,7 +72,7 @@ FakeNetworks = {'networks': [
|
|||
u'tenant_id': u'f6f6d0b2591f41acb8257656d70029fc',
|
||||
u'created_at': u'2016-04-25T07:14:53',
|
||||
u'mtu': 1500}
|
||||
]}
|
||||
]}
|
||||
|
||||
FakeSubnets = {'subnets': [
|
||||
{u'description': u'',
|
||||
|
@ -93,7 +93,7 @@ FakeSubnets = {'subnets': [
|
|||
u'id': u'808c3b3f-3d79-4c5b-a5b6-95dd07abeb2d',
|
||||
u'subnetpool_id': None,
|
||||
u'name': u'ext_subnet'},
|
||||
]}
|
||||
]}
|
||||
|
||||
FakePorts = {'ports': [
|
||||
{u'allowed_address_pairs': [],
|
||||
|
@ -121,7 +121,7 @@ FakePorts = {'ports': [
|
|||
u'created_at': u'2016-04-25T07:15:59',
|
||||
u'binding:vnic_type': u'normal',
|
||||
u'tenant_id': u''},
|
||||
]}
|
||||
]}
|
||||
|
||||
FakeRoutes = {'routers': [
|
||||
{u'status': u'ACTIVE',
|
||||
|
@ -142,7 +142,7 @@ FakeRoutes = {'routers': [
|
|||
u'ha': False,
|
||||
u'id': u'7fc86d4b-4c0e-4ed8-8d39-e27b7c1b7ae8',
|
||||
u'name': u'provider_route'}
|
||||
]}
|
||||
]}
|
||||
|
||||
FakeSecGroup = {'security_groups': [
|
||||
{u'tenant_id': u'23b119d06168447c8dbb4483d9567bd8',
|
||||
|
@ -185,7 +185,7 @@ FakeSecGroup = {'security_groups': [
|
|||
u'port_range_min': None,
|
||||
u'tenant_id': u'23b119d06168447c8dbb4483d9567bd8',
|
||||
u'id': u'c24e7148-820c-4147-9032-6fcdb96db6f7'}]},
|
||||
]}
|
||||
]}
|
||||
|
||||
|
||||
def call_hooks(operation, checkpoint, resource, context, parameters, **kwargs):
|
||||
|
@ -253,6 +253,7 @@ class FakeBankPlugin(BankPlugin):
|
|||
def get_owner_id(self, context=None):
|
||||
return
|
||||
|
||||
|
||||
fake_checkpointid = "checkpoint_id"
|
||||
fake_project_id = "abcd"
|
||||
fake_bank = Bank(FakeBankPlugin())
|
||||
|
@ -373,9 +374,8 @@ class NeutronProtectionPluginTest(base.TestCase):
|
|||
"id": "4c0e-4ed8-8d39-e27b7c1b7ae8",
|
||||
"admin_state_up": True,
|
||||
"availability_zone_hints": [],
|
||||
"fixed_ips": {"network_id": "9bb2-4b8f-9eea-e45563efc420",
|
||||
"enable_snat": True
|
||||
}
|
||||
"fixed_ips": {"network_id": "9bb2-4b8f-9eea-e45563efc420",
|
||||
"enable_snat": True}
|
||||
},
|
||||
"security-group_metadata": {
|
||||
"id": "4ccc-44c0-bc50-b7bbfc3508eb",
|
||||
|
@ -392,7 +392,7 @@ class NeutronProtectionPluginTest(base.TestCase):
|
|||
|
||||
@mock.patch('karbor.services.protection.protection_plugins.utils.'
|
||||
'update_resource_verify_result')
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
resource = Resource(id="abcd",
|
||||
type=constants.NETWORK_RESOURCE_TYPE,
|
||||
name="test")
|
||||
|
|
|
@ -392,7 +392,6 @@ class NovaProtectionPluginTest(base.TestCase):
|
|||
)
|
||||
resource_definition = {
|
||||
"resource_id": "vm_id_2",
|
||||
"attach_metadata": {"vol_id_1": "/dev/vdb"},
|
||||
'boot_metadata': {'boot_device_type': 'volume'},
|
||||
"server_metadata": {
|
||||
"availability_zone": "nova",
|
||||
|
@ -512,7 +511,7 @@ class NovaProtectionPluginTest(base.TestCase):
|
|||
|
||||
@mock.patch('karbor.services.protection.protection_plugins.utils.'
|
||||
'update_resource_verify_result')
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
resource = Resource(id="123",
|
||||
type=constants.SERVER_RESOURCE_TYPE,
|
||||
name='fake')
|
||||
|
|
|
@ -160,7 +160,7 @@ class PodProtectionPluginTest(base.TestCase):
|
|||
|
||||
@mock.patch('karbor.services.protection.protection_plugins.utils.'
|
||||
'update_resource_verify_result')
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
def test_verify_backup(self, mock_update_verify):
|
||||
resource = Resource(id="c88b92a8-e8b4-504c-bad4-343d92061871",
|
||||
type=constants.POD_RESOURCE_TYPE,
|
||||
name='default:busybox-test')
|
||||
|
|
|
@ -20,17 +20,17 @@ from karbor import objects
|
|||
|
||||
warnings.simplefilter('once', DeprecationWarning)
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_service import wsgi
|
||||
from oslo_config import cfg # noqa: E402
|
||||
from oslo_log import log as logging # noqa: E402
|
||||
from oslo_service import wsgi # noqa: E402
|
||||
|
||||
from karbor import i18n
|
||||
from karbor import i18n # noqa: E402
|
||||
i18n.enable_lazy()
|
||||
|
||||
# Need to register global_opts
|
||||
from karbor.common import config # noqa
|
||||
from karbor import rpc
|
||||
from karbor import version
|
||||
from karbor import rpc # noqa: E402
|
||||
from karbor import version # noqa: E402
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
|
|
@ -28,11 +28,9 @@ eventlet==0.18.2
|
|||
extras==1.0.0
|
||||
fasteners==0.14.1
|
||||
fixtures==3.0.0
|
||||
flake8==2.5.5
|
||||
futurist==1.8.0
|
||||
google-auth==1.4.1
|
||||
greenlet==0.4.10
|
||||
hacking==0.12.0
|
||||
icalendar==3.10
|
||||
idna==2.6
|
||||
imagesize==1.0.0
|
||||
|
@ -50,7 +48,6 @@ kubernetes==5.0.0
|
|||
linecache2==1.0.0
|
||||
Mako==1.0.7
|
||||
MarkupSafe==1.0
|
||||
mccabe==0.2.1
|
||||
mock==2.0.0
|
||||
monotonic==1.4
|
||||
mox3==0.25.0
|
||||
|
@ -86,7 +83,6 @@ packaging==17.1
|
|||
Paste==2.0.2
|
||||
PasteDeploy==1.5.0
|
||||
pbr==2.0.0
|
||||
pep8==1.5.7
|
||||
pika-pool==0.1.3
|
||||
pika==0.10.0
|
||||
prettytable==0.7.2
|
||||
|
@ -94,7 +90,6 @@ pyasn1-modules==0.2.1
|
|||
pyasn1==0.4.2
|
||||
pycadf==2.7.0
|
||||
pycparser==2.18
|
||||
pyflakes==0.8.1
|
||||
Pygments==2.2.0
|
||||
pyinotify==0.9.6
|
||||
pyOpenSSL==17.5.0
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
|
||||
hacking>=3.0.1,<3.1.0 # Apache-2.0
|
||||
botocore>=1.5.1 # Apache-2.0
|
||||
coverage!=4.4,>=4.0 # Apache-2.0
|
||||
croniter>=0.3.4 # MIT License
|
||||
|
|
|
@ -67,5 +67,6 @@ def main(argv):
|
|||
install.install_dependencies()
|
||||
print_help()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv)
|
||||
|
|
4
tox.ini
4
tox.ini
|
@ -75,7 +75,9 @@ commands = oslopolicy-sample-generator --config-file=etc/karbor-policy-generator
|
|||
|
||||
[flake8]
|
||||
show-source = True
|
||||
ignore =
|
||||
# W503 line break before binary operator
|
||||
# W504 line break after binary operator
|
||||
ignore = W503,W504
|
||||
builtins = _
|
||||
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,releasenotes
|
||||
|
||||
|
|
Loading…
Reference in New Issue