P8: Fix pep8 error in cyborg/tests and add post_mortem_debug.py
1. Add post_mortem_debug.py because accelerator/drivers/gpu/test_base.py
lacks this file[1]. Referenced from Neutron[2,3].
[1]. https://github.com/openstack/cyborg/blob/master/cyborg/tests/base.py#L104
[2]. 43352e67e5/neutron/tests/base.py (L229)
[3]. https://github.com/openstack/neutron/blob/master/neutron/tests/post_mortem_debug.py
2. Remove old useless file cyborg/tests/unit/fake_accelerator.py[4,5].
[4]. https://github.com/openstack/cyborg/search?q=fake_accelerator_obj&unscoped_q=fake_accelerator_obj
[5]. https://review.opendev.org/#/c/625630/
Change-Id: I147bfe9a8dfb6bdbe7e59fa3dc0c146c70e4d3a7
This commit is contained in:
parent
ac4c4ea15c
commit
306b82e214
@ -20,12 +20,16 @@ from oslo_config import fixture as config_fixture
|
||||
from oslo_context import context
|
||||
from oslo_db import options
|
||||
from oslo_log import log
|
||||
from oslo_utils import excutils
|
||||
from oslotest import base
|
||||
import pecan
|
||||
|
||||
import contextlib
|
||||
import eventlet
|
||||
import mock
|
||||
import testtools
|
||||
|
||||
from cyborg.common import config as cyborg_config
|
||||
from cyborg.tests import post_mortem_debug
|
||||
from cyborg.tests.unit import policy_fixture
|
||||
|
||||
|
||||
|
112
cyborg/tests/post_mortem_debug.py
Normal file
112
cyborg/tests/post_mortem_debug.py
Normal file
@ -0,0 +1,112 @@
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import functools
|
||||
import traceback
|
||||
|
||||
|
||||
def get_exception_handler(debugger_name):
    """Build a testtools exception handler bound to the named debugger.

    :param debugger_name: importable module name of a debugger exposing
        ``post_mortem`` (e.g. ``pdb``).
    :returns: a callable suitable for ``TestCase.addOnException``.
    :raises ValueError: if the module cannot be imported or lacks
        ``post_mortem``.
    """
    return functools.partial(_exception_handler, _get_debugger(debugger_name))
|
||||
|
||||
|
||||
def _get_debugger(debugger_name):
|
||||
try:
|
||||
debugger = __import__(debugger_name)
|
||||
except ImportError:
|
||||
raise ValueError("can't import %s module as a post mortem debugger" %
|
||||
debugger_name)
|
||||
if 'post_mortem' in dir(debugger):
|
||||
return debugger
|
||||
else:
|
||||
raise ValueError("%s is not a supported post mortem debugger" %
|
||||
debugger_name)
|
||||
|
||||
|
||||
def _exception_handler(debugger, exc_info):
    """Exception handler enabling post-mortem debugging.

    A class extending testtools.TestCase can add this handler in setUp():

        self.addOnException(post_mortem_debug.exception_handler)

    When an exception occurs, the user will be dropped into a debugger
    session in the execution environment of the failure.

    Frames associated with the testing framework are excluded so that
    the post-mortem session for an assertion failure will start at the
    assertion call (e.g. self.assertTrue) rather than the framework code
    that raises the failure exception (e.g. the assertTrue method).
    """
    tb = exc_info[2]
    # Hide the trailing unittest-internal frames, if any, so the
    # debugger starts at the test's own assertion call.
    framework_tb = get_ignored_traceback(tb)
    if framework_tb:
        tb = FilteredTraceback(tb, framework_tb)
    traceback.print_exception(exc_info[0], exc_info[1], tb)
    debugger.post_mortem(tb)
|
||||
|
||||
|
||||
def get_ignored_traceback(tb):
    """Retrieve the first traceback of an ignored trailing chain.

    Given an initial traceback, find the first traceback of a trailing
    chain of tracebacks that should be ignored.  The criteria for
    whether a traceback should be ignored is whether its frame's
    globals include the __unittest marker variable. This criteria is
    culled from:

        unittest.TestResult._is_relevant_tb_level

    For example:

        tb.tb_next => tb0.tb_next => tb1.tb_next

        - If no tracebacks were to be ignored, None would be returned.
        - If only tb1 was to be ignored, tb1 would be returned.
        - If tb0 and tb1 were to be ignored, tb0 would be returned.
        - If either of only tb or only tb0 was to be ignored, None would
          be returned because neither tb or tb0 would be part of a
          trailing chain of ignored tracebacks.
    """
    # Flatten the linked traceback chain into a list for easy reversal.
    chain = []
    while tb is not None:
        chain.append(tb)
        tb = tb.tb_next

    # Walk backwards from the tail; every consecutive frame carrying the
    # __unittest marker extends the ignored trailing chain.  The last
    # one recorded is the earliest member of that chain.
    first_ignored = None
    for entry in reversed(chain):
        if '__unittest' not in entry.tb_frame.f_globals:
            break
        first_ignored = entry
    return first_ignored
|
||||
|
||||
|
||||
class FilteredTraceback(object):
    """Wraps a traceback to filter unwanted frames."""

    def __init__(self, tb, filtered_traceback):
        """Constructor.

        :param tb: The start of the traceback chain to filter.
        :param filtered_traceback: The first traceback of a trailing
            chain that is to be filtered.
        """
        self._tb = tb
        self._filtered_traceback = filtered_traceback
        # Mirror the read-only attributes of a real traceback object so
        # debuggers and traceback-printing code can consume this wrapper.
        self.tb_lasti = tb.tb_lasti
        self.tb_lineno = tb.tb_lineno
        self.tb_frame = tb.tb_frame

    @property
    def tb_next(self):
        """Next frame in the chain, or None once the filtered tail starts."""
        successor = self._tb.tb_next
        if not successor or successor == self._filtered_traceback:
            return None
        return FilteredTraceback(successor, self._filtered_traceback)
|
@ -13,8 +13,8 @@
|
||||
import json
|
||||
import mock
|
||||
|
||||
from cyborg.tests import base
|
||||
from cyborg.accelerator.drivers.aichip.huawei.ascend import AscendDriver
|
||||
from cyborg.tests import base
|
||||
|
||||
d100_pci_res = [
|
||||
"0000:00:0c.0 Processing accelerators [1200]:"
|
||||
|
@ -231,7 +231,7 @@ def gen_fpga_vf_soft_link(path, bdf):
|
||||
|
||||
|
||||
def create_devices_path_and_files(tree, device_path, class_fpga_path,
|
||||
vf=False, pfinfo={}):
|
||||
vf=False, pfinfo=None):
|
||||
for k, v in tree.items():
|
||||
bdf = v["bdf"]
|
||||
pci_path = "pci" + bdf.rsplit(":", 1)[0]
|
||||
|
@ -18,8 +18,8 @@ import subprocess
|
||||
|
||||
import fixtures
|
||||
|
||||
from cyborg.accelerator.drivers.fpga.intel import sysinfo
|
||||
from cyborg.accelerator.drivers.fpga.intel.driver import IntelFPGADriver
|
||||
from cyborg.accelerator.drivers.fpga.intel import sysinfo
|
||||
from cyborg.tests import base
|
||||
from cyborg.tests.unit.accelerator.drivers.fpga.intel import prepare_test_data
|
||||
|
||||
|
@ -20,7 +20,8 @@ from cyborg.tests import base
|
||||
|
||||
class TestGPUDriver(base.TestCase):
|
||||
def test_create(self):
|
||||
GPUDriver.create("nvidia")
|
||||
# NVIDIAGPUDriver.VENDOR == 'nvidia'
|
||||
GPUDriver.create(NVIDIAGPUDriver.VENDOR)
|
||||
self.assertRaises(LookupError, GPUDriver.create, "matrox")
|
||||
|
||||
def test_discover(self):
|
||||
|
@ -12,14 +12,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
import mock
|
||||
import copy
|
||||
import subprocess
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from cyborg.accelerator.drivers.gpu import utils
|
||||
from cyborg import objects
|
||||
from cyborg.tests import base
|
||||
|
||||
NVIDIA_GPU_INFO = "0000:00:06.0 3D controller [0302]: NVIDIA Corporation GP100GL " \
|
||||
|
@ -13,11 +13,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from cyborg.tests import base
|
||||
import mock
|
||||
|
||||
from cyborg.accelerator.drivers.spdk.nvmf.nvmf import NVMFDRIVER
|
||||
from cyborg.accelerator.drivers.spdk.util import common_fun
|
||||
from cyborg.accelerator.drivers.spdk.util.pyspdk.nvmf_client import NvmfTgt
|
||||
from cyborg.tests import base
|
||||
|
||||
|
||||
class TestNVMFDRIVER(base.TestCase):
|
||||
|
@ -13,11 +13,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from cyborg.tests import base
|
||||
import mock
|
||||
from cyborg.accelerator.drivers.spdk.vhost.vhost import VHOSTDRIVER
|
||||
|
||||
from cyborg.accelerator.drivers.spdk.util import common_fun
|
||||
from cyborg.accelerator.drivers.spdk.util.pyspdk.vhost_client import VhostTgt
|
||||
from cyborg.accelerator.drivers.spdk.vhost.vhost import VHOSTDRIVER
|
||||
from cyborg.tests import base
|
||||
|
||||
|
||||
class TestVHOSTDRIVER(base.TestCase):
|
||||
|
@ -1,7 +1,20 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import six
|
||||
|
||||
from cyborg.tests import base
|
||||
from cyborg.accelerator.drivers.driver import GenericDriver
|
||||
from cyborg.tests import base
|
||||
|
||||
|
||||
class WellDoneDriver(GenericDriver):
|
||||
@ -23,7 +36,6 @@ class NotCompleteDriver(GenericDriver):
|
||||
class TestGenericDriver(base.TestCase):
|
||||
|
||||
def test_generic_driver(self):
|
||||
driver = WellDoneDriver()
|
||||
# Can't instantiate abstract class NotCompleteDriver with
|
||||
# abstract methods get_stats, update
|
||||
result = self.assertRaises(TypeError, NotCompleteDriver)
|
||||
|
@ -16,10 +16,8 @@
|
||||
import mock
|
||||
from six.moves import http_client
|
||||
|
||||
from cyborg.api.controllers.v1.deployables import Deployable
|
||||
from cyborg.tests.unit.api.controllers.v1 import base as v1_test
|
||||
from cyborg.tests.unit import fake_deployable
|
||||
from cyborg.agent.rpcapi import AgentAPI
|
||||
|
||||
|
||||
class TestFPGAProgramController(v1_test.APITestV1):
|
||||
|
@ -27,4 +27,5 @@ class TestAPI(v2_test.APITestV2):
|
||||
self.assertEqual(data['status'], "CURRENT")
|
||||
self.assertEqual(data['max_version'], "2.0")
|
||||
self.assertEqual(data['id'], "v2.0")
|
||||
self.assertTrue(isinstance(data['links'], list))
|
||||
result = isinstance(data['links'], list)
|
||||
self.assertTrue(result)
|
||||
|
@ -18,7 +18,6 @@ from six.moves import http_client
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from cyborg.common import exception
|
||||
from cyborg.tests.unit.api.controllers.v2 import base as v2_test
|
||||
from cyborg.tests.unit import fake_device_profile
|
||||
from cyborg.tests.unit import fake_extarq
|
||||
@ -69,7 +68,8 @@ class TestARQsController(v2_test.APITestV2):
|
||||
data = self.get_json(self.ARQ_URL, headers=self.headers)
|
||||
out_arqs = data['arqs']
|
||||
|
||||
self.assertTrue(isinstance(out_arqs, list))
|
||||
result = isinstance(out_arqs, list)
|
||||
self.assertTrue(result)
|
||||
self.assertTrue(len(out_arqs), len(self.fake_extarqs))
|
||||
for in_extarq, out_arq in zip(self.fake_extarqs, out_arqs):
|
||||
self._validate_arq(in_extarq.arq, out_arq)
|
||||
|
@ -18,8 +18,6 @@ from six.moves import http_client
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from cyborg.api.controllers.v2.device_profiles import DeviceProfilesController
|
||||
from cyborg.common import exception
|
||||
from cyborg.tests.unit.api.controllers.v2 import base as v2_test
|
||||
from cyborg.tests.unit import fake_device_profile
|
||||
|
||||
@ -68,7 +66,8 @@ class TestDeviceProfileController(v2_test.APITestV2):
|
||||
data = self.get_json(self.DP_URL, headers=self.headers)
|
||||
out_dps = data['device_profiles']
|
||||
|
||||
self.assertTrue(isinstance(out_dps, list))
|
||||
result = isinstance(out_dps, list)
|
||||
self.assertTrue(result)
|
||||
self.assertTrue(len(out_dps), len(self.fake_dp_objs))
|
||||
for in_dp, out_dp in zip(self.fake_dp_objs, out_dps):
|
||||
self._validate_dp(in_dp, out_dp)
|
||||
|
@ -15,9 +15,12 @@
|
||||
"""Unit tests for the DB api."""
|
||||
|
||||
import datetime
|
||||
from cyborg.tests.unit.db import base
|
||||
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from cyborg.db import api as dbapi
|
||||
from cyborg.db.sqlalchemy import api as sqlalchemyapi
|
||||
from cyborg.tests.unit.db import base
|
||||
|
||||
|
||||
def _quota_reserve(context, project_id):
|
||||
@ -36,7 +39,7 @@ def _quota_reserve(context, project_id):
|
||||
deltas[resource] = i + 1
|
||||
return sqlalchemy_api.quota_reserve(
|
||||
context, resources, deltas,
|
||||
datetime.datetime.utcnow(), datetime.datetime.utcnow(),
|
||||
timeutils.utcnow(), timeutils.utcnow(),
|
||||
datetime.timedelta(days=1), project_id
|
||||
)
|
||||
|
||||
@ -89,7 +92,7 @@ class DBAPIReservationTestCase(base.DbTestCase):
|
||||
'project_id': 'project1',
|
||||
'resource': 'resource',
|
||||
'delta': 42,
|
||||
'expire': (datetime.datetime.utcnow() +
|
||||
'expire': (timeutils.utcnow() +
|
||||
datetime.timedelta(days=1)),
|
||||
'usage': {'id': 1}
|
||||
}
|
||||
|
@ -1,66 +0,0 @@
|
||||
# Copyright 2018 Huawei Technologies Co.,LTD.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
from cyborg.objects import fields
|
||||
|
||||
|
||||
def fake_db_accelerator(**updates):
|
||||
db_accelerator = {
|
||||
'id': 1,
|
||||
'deleted': False,
|
||||
'uuid': uuidutils.generate_uuid(),
|
||||
'name': 'fake-name',
|
||||
'description': 'fake-desc',
|
||||
'project_id': 'fake-pid',
|
||||
'user_id': 'fake-uid',
|
||||
'device_type': 'fake-dtype',
|
||||
'acc_type': 'fake-acc_type',
|
||||
'acc_capability': 'fake-cap',
|
||||
'vendor_id': 'fake-vid',
|
||||
'product_id': 'fake-pid',
|
||||
'remotable': 0
|
||||
}
|
||||
|
||||
for name, field in objects.Accelerator.fields.items():
|
||||
if name in db_accelerator:
|
||||
continue
|
||||
if field.nullable:
|
||||
db_accelerator[name] = None
|
||||
elif field.default != fields.UnspecifiedDefault:
|
||||
db_accelerator[name] = field.default
|
||||
else:
|
||||
raise Exception('fake_db_accelerator needs help with %s' % name)
|
||||
|
||||
if updates:
|
||||
db_accelerator.update(updates)
|
||||
|
||||
return db_accelerator
|
||||
|
||||
|
||||
def fake_accelerator_obj(context, obj_accelerator_class=None, **updates):
|
||||
if obj_accelerator_class is None:
|
||||
obj_accelerator_class = objects.Accelerator
|
||||
expected_attrs = updates.pop('expected_attrs', None)
|
||||
acc = obj_instance_class._from_db_object(context,
|
||||
obj_instance_class(),
|
||||
fake_db_instance(**updates),
|
||||
expected_attrs=expected_attrs)
|
||||
acc.obj_reset_changes()
|
||||
return acc
|
@ -12,13 +12,11 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
from cyborg.objects import fields
|
||||
from cyborg.tests.unit import fake_db_deployable
|
||||
|
||||
|
||||
def fake_db_attribute(**updates):
|
||||
|
@ -12,9 +12,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
|
@ -12,9 +12,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
|
@ -13,6 +13,7 @@
|
||||
# under the License.
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from cyborg.objects import device_profile
|
||||
|
||||
"""
|
||||
|
@ -46,10 +46,10 @@ def _get_arqs_as_dict():
|
||||
},
|
||||
]
|
||||
new_arqs = []
|
||||
for idx, arq in enumerate(arqs):
|
||||
arq.update(common)
|
||||
arq.update(id=idx)
|
||||
new_arqs.append(arq)
|
||||
for idx, new_arq in enumerate(arqs):
|
||||
new_arq.update(common)
|
||||
new_arq.update(id=idx)
|
||||
new_arqs.append(new_arq)
|
||||
return new_arqs
|
||||
|
||||
|
||||
|
@ -12,9 +12,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
@ -70,4 +67,4 @@ def fake_physical_function_obj(context, obj_pf_class=None, **updates):
|
||||
fake_db_physical_function(**updates),
|
||||
expected_attrs=expected_attrs)
|
||||
pf.obj_reset_changes()
|
||||
return vf
|
||||
return pf
|
||||
|
@ -12,9 +12,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg import objects
|
||||
|
@ -15,16 +15,15 @@
|
||||
|
||||
import mock
|
||||
|
||||
from testtools.matchers import HasLength
|
||||
from cyborg import objects
|
||||
from cyborg.tests.unit.db import base
|
||||
from cyborg.tests.unit.db import utils
|
||||
from cyborg.tests.unit import fake_device
|
||||
from cyborg.tests.unit import fake_deployable
|
||||
from cyborg.tests.unit import fake_attribute
|
||||
from cyborg.tests.unit.objects import test_objects
|
||||
from cyborg.tests.unit.db.base import DbTestCase
|
||||
from oslo_db import exception as db_exc
|
||||
|
||||
from cyborg.common import exception
|
||||
from cyborg import objects
|
||||
from cyborg.tests.unit.db.base import DbTestCase
|
||||
from cyborg.tests.unit import fake_attribute
|
||||
from cyborg.tests.unit import fake_deployable
|
||||
from cyborg.tests.unit import fake_device
|
||||
from cyborg.tests.unit.objects import test_objects
|
||||
|
||||
|
||||
class _TestDeployableObject(DbTestCase):
|
||||
@ -197,20 +196,12 @@ class _TestDeployableObject(DbTestCase):
|
||||
|
||||
dpl.device_id = device_get.id
|
||||
dpl.create(self.context)
|
||||
dpl_get = objects.Deployable.get(self.context, dpl.uuid)
|
||||
|
||||
db_dpl2 = self.fake_deployable2
|
||||
dpl2 = objects.Deployable(context=self.context,
|
||||
**db_dpl2)
|
||||
dpl2.device_id = device_get.id
|
||||
dpl2.create(self.context)
|
||||
dpl2_get = objects.Deployable.get(self.context, dpl2.uuid)
|
||||
|
||||
db_attr = self.fake_attribute
|
||||
|
||||
db_attr2 = self.fake_attribute2
|
||||
|
||||
db_attr3 = self.fake_attribute3
|
||||
|
||||
dpl.add_attribute(self.context, 'attr_key', 'attr_val')
|
||||
dpl.save(self.context)
|
||||
|
@ -16,9 +16,9 @@
|
||||
import mock
|
||||
|
||||
from testtools.matchers import HasLength
|
||||
|
||||
from cyborg import objects
|
||||
from cyborg.tests.unit.db import base
|
||||
from cyborg.tests.unit.db import utils
|
||||
from cyborg.tests.unit import fake_extarq
|
||||
|
||||
|
||||
@ -82,7 +82,7 @@ class TestExtARQObject(base.DbTestCase):
|
||||
'instance_uuid': instance_uuid}
|
||||
}
|
||||
patch_list = {
|
||||
str(uuid) : [
|
||||
str(uuid): [
|
||||
{"path": "/hostname", "op": "add",
|
||||
"value": obj_extarq.arq.hostname},
|
||||
{"path": "/device_rp_uuid", "op": "add",
|
||||
|
@ -12,25 +12,12 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import contextlib
|
||||
import copy
|
||||
import datetime
|
||||
import inspect
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
from oslo_log import log
|
||||
from oslo_utils import timeutils
|
||||
from oslo_versionedobjects import base as ovo_base
|
||||
from oslo_versionedobjects import exception as ovo_exc
|
||||
from oslo_versionedobjects import fixture
|
||||
import six
|
||||
|
||||
from oslo_context import context
|
||||
|
||||
from cyborg.common import exception
|
||||
from cyborg import objects
|
||||
from cyborg.objects import base
|
||||
from cyborg.objects import fields
|
||||
from cyborg import tests as test
|
||||
|
@ -12,13 +12,15 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from cyborg.tests import base
|
||||
import mock
|
||||
from cyborg.services import report as placement_client
|
||||
from oslo_utils import uuidutils
|
||||
from cyborg.common import exception as c_exc
|
||||
|
||||
from keystoneauth1 import exceptions as ks_exc
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from cyborg.common import exception as c_exc
|
||||
from cyborg.services import report as placement_client
|
||||
from cyborg.tests import base
|
||||
|
||||
|
||||
class PlacementAPIClientTestCase(base.DietTestCase):
|
||||
|
2
tox.ini
2
tox.ini
@ -107,7 +107,7 @@ show-source = True
|
||||
ignore = E123,E125,H405
|
||||
builtins = _
|
||||
enable-extensions = H106,H203,H904
|
||||
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes,cyborg/hacking/,cyborg/tests/
|
||||
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes
|
||||
|
||||
[hacking]
|
||||
local-check-factory = cyborg.hacking.checks.factory
|
||||
|
Loading…
Reference in New Issue
Block a user