Bump to hacking 1.1.0
This brings in a couple of new checks which must be addressed, many of which involve a rather large amount of changes, so these are ignored for now. A series of follow-up changes will resolve these. 'pycodestyle' is added as a dependency rather than it being pulled in transitively. This is necessary since we're using it in tests. Change-Id: I35c654bd39f343417e0a1124263ff31dcd0b05c9 Signed-off-by: Stephen Finucane <sfinucan@redhat.com>
This commit is contained in:
parent
e18b79ef87
commit
3e65f778bd
@ -101,7 +101,6 @@ paramiko==2.0.0
|
||||
Paste==2.0.2
|
||||
PasteDeploy==1.5.0
|
||||
pbr==2.0.0
|
||||
pep8==1.5.7
|
||||
pluggy==0.6.0
|
||||
ply==3.11
|
||||
prettytable==0.7.1
|
||||
@ -113,6 +112,7 @@ pyasn1-modules==0.2.1
|
||||
pycadf==2.7.0
|
||||
pycparser==2.18
|
||||
pyflakes==0.8.1
|
||||
pycodestyle==2.0.0
|
||||
pyinotify==0.9.6
|
||||
pyroute2==0.5.4
|
||||
PyJWT==1.7.0
|
||||
|
@ -88,9 +88,11 @@ class AvailabilityZoneController(wsgi.Controller):
|
||||
hosts[host] = {}
|
||||
for service in host_services[zone + host]:
|
||||
alive = self.servicegroup_api.service_is_up(service)
|
||||
hosts[host][service['binary']] = {'available': alive,
|
||||
'active': True != service['disabled'],
|
||||
'updated_at': service['updated_at']}
|
||||
hosts[host][service['binary']] = {
|
||||
'available': alive,
|
||||
'active': service['disabled'] is not True,
|
||||
'updated_at': service['updated_at']
|
||||
}
|
||||
result.append({'zoneName': zone,
|
||||
'zoneState': {'available': True},
|
||||
"hosts": hosts})
|
||||
|
@ -190,4 +190,6 @@ def _register_network_quota():
|
||||
QUOTAS.register_resource(quota.CountableResource('networks',
|
||||
_network_count,
|
||||
'quota_networks'))
|
||||
|
||||
|
||||
_register_network_quota()
|
||||
|
@ -67,7 +67,7 @@ class NotFound(_BaseException):
|
||||
|
||||
|
||||
class Exists(_BaseException):
|
||||
msg_fmt = _("Resource already exists.")
|
||||
msg_fmt = _("Resource already exists.")
|
||||
|
||||
|
||||
class InvalidInventory(_BaseException):
|
||||
@ -188,7 +188,7 @@ class ProjectNotFound(NotFound):
|
||||
|
||||
|
||||
class ProjectExists(Exists):
|
||||
msg_fmt = _("The project %(external_id)s already exists.")
|
||||
msg_fmt = _("The project %(external_id)s already exists.")
|
||||
|
||||
|
||||
class UserNotFound(NotFound):
|
||||
@ -196,7 +196,7 @@ class UserNotFound(NotFound):
|
||||
|
||||
|
||||
class UserExists(Exists):
|
||||
msg_fmt = _("The user %(external_id)s already exists.")
|
||||
msg_fmt = _("The user %(external_id)s already exists.")
|
||||
|
||||
|
||||
class ConsumerNotFound(NotFound):
|
||||
|
@ -19,13 +19,13 @@ from oslo_log import log as logging
|
||||
import paste.urlmap
|
||||
import six
|
||||
|
||||
from nova.api.openstack import wsgi
|
||||
|
||||
if six.PY2:
|
||||
import urllib2
|
||||
else:
|
||||
from urllib import request as urllib2
|
||||
|
||||
from nova.api.openstack import wsgi
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -164,6 +164,7 @@ def _build_regex_range(ws=True, invert=False, exclude=None):
|
||||
regex += "-" + re.escape(c)
|
||||
return regex
|
||||
|
||||
|
||||
valid_name_regex_base = '^(?![%s])[%s]*(?<![%s])$'
|
||||
|
||||
|
||||
|
@ -1677,9 +1677,9 @@ class CellV2Commands(object):
|
||||
|
||||
if (self._non_unique_transport_url_database_connection_checker(ctxt,
|
||||
cell_mapping, transport_url, db_connection)):
|
||||
# We use the return code 3 before 2 to avoid changing the
|
||||
# semantic meanings of return codes.
|
||||
return 3
|
||||
# We use the return code 3 before 2 to avoid changing the
|
||||
# semantic meanings of return codes.
|
||||
return 3
|
||||
|
||||
if transport_url:
|
||||
cell_mapping.transport_url = transport_url
|
||||
|
@ -413,7 +413,7 @@ class CrossCellLister(object):
|
||||
|
||||
if context.is_cell_failure_sentinel(item._db_record):
|
||||
if (not CONF.api.list_records_by_skipping_down_cells and
|
||||
not cell_down_support):
|
||||
not cell_down_support):
|
||||
# Value the config
|
||||
# ``CONF.api.list_records_by_skipping_down_cells`` only if
|
||||
# cell_down_support is False and generate the exception
|
||||
@ -422,10 +422,10 @@ class CrossCellLister(object):
|
||||
# be skipped now to either construct minimal constructs
|
||||
# later if cell_down_support is True or to simply return
|
||||
# the skipped results if cell_down_support is False.
|
||||
raise exception.NovaException(
|
||||
_('Cell %s is not responding but configuration '
|
||||
'indicates that we should fail.')
|
||||
% item.cell_uuid)
|
||||
raise exception.NovaException(
|
||||
_('Cell %s is not responding but configuration '
|
||||
'indicates that we should fail.')
|
||||
% item.cell_uuid)
|
||||
LOG.warning('Cell %s is not responding and hence is '
|
||||
'being omitted from the results',
|
||||
item.cell_uuid)
|
||||
|
@ -105,6 +105,7 @@ def enrich_help_text(alt_db_opts):
|
||||
# texts here if needed.
|
||||
alt_db_opt.help = db_opt.help + alt_db_opt.help
|
||||
|
||||
|
||||
# NOTE(cdent): See the note above on api_db_group. The same issues
|
||||
# apply here.
|
||||
|
||||
|
@ -232,7 +232,8 @@ class RequestContext(context.RequestContext):
|
||||
return context
|
||||
|
||||
def can(self, action, target=None, fatal=True):
|
||||
"""Verifies that the given action is valid on the target in this context.
|
||||
"""Verifies that the given action is valid on the target in this
|
||||
context.
|
||||
|
||||
:param action: string representing the action to be checked.
|
||||
:param target: dictionary representing the object of the action
|
||||
|
@ -17,7 +17,6 @@ import ast
|
||||
import os
|
||||
import re
|
||||
|
||||
import pep8
|
||||
import six
|
||||
|
||||
"""
|
||||
@ -122,7 +121,7 @@ class BaseASTChecker(ast.NodeVisitor):
|
||||
"""
|
||||
|
||||
def __init__(self, tree, filename):
|
||||
"""This object is created automatically by pep8.
|
||||
"""This object is created automatically by pycodestyle.
|
||||
|
||||
:param tree: an AST tree
|
||||
:param filename: name of the file being analyzed
|
||||
@ -132,12 +131,12 @@ class BaseASTChecker(ast.NodeVisitor):
|
||||
self._errors = []
|
||||
|
||||
def run(self):
|
||||
"""Called automatically by pep8."""
|
||||
"""Called automatically by pycodestyle."""
|
||||
self.visit(self._tree)
|
||||
return self._errors
|
||||
|
||||
def add_error(self, node, message=None):
|
||||
"""Add an error caused by a node to the list of errors for pep8."""
|
||||
"""Add an error caused by a node to the list of errors."""
|
||||
message = message or self.CHECK_DESC
|
||||
error = (node.lineno, node.col_offset, message, self.__class__)
|
||||
self._errors.append(error)
|
||||
@ -558,10 +557,10 @@ def assert_equal_in(logical_line):
|
||||
"contents.")
|
||||
|
||||
|
||||
def check_http_not_implemented(logical_line, physical_line, filename):
|
||||
def check_http_not_implemented(logical_line, physical_line, filename, noqa):
|
||||
msg = ("N339: HTTPNotImplemented response must be implemented with"
|
||||
" common raise_feature_not_supported().")
|
||||
if pep8.noqa(physical_line):
|
||||
if noqa:
|
||||
return
|
||||
if ("nova/api/openstack/compute" not in filename):
|
||||
return
|
||||
@ -722,7 +721,7 @@ def no_log_warn(logical_line):
|
||||
yield (0, msg)
|
||||
|
||||
|
||||
def check_context_log(logical_line, physical_line, filename):
|
||||
def check_context_log(logical_line, physical_line, filename, noqa):
|
||||
"""check whether context is being passed to the logs
|
||||
|
||||
Not correct: LOG.info(_LI("Rebooting instance"), context=context)
|
||||
@ -731,10 +730,10 @@ def check_context_log(logical_line, physical_line, filename):
|
||||
|
||||
N353
|
||||
"""
|
||||
if "nova/tests" in filename:
|
||||
if noqa:
|
||||
return
|
||||
|
||||
if pep8.noqa(physical_line):
|
||||
if "nova/tests" in filename:
|
||||
return
|
||||
|
||||
if log_remove_context.match(logical_line):
|
||||
|
@ -88,6 +88,7 @@ def _monkey_patch():
|
||||
"importing and not executing nova code.",
|
||||
', '.join(problems))
|
||||
|
||||
|
||||
# NOTE(mdbooth): This workaround is required to avoid breaking sphinx. See
|
||||
# separate comment in doc/source/conf.py. It may also be useful for other
|
||||
# non-nova utilities. Ideally the requirement for this workaround will be
|
||||
|
@ -57,6 +57,7 @@ def get_binary_name():
|
||||
"""Grab the name of the binary we're running in."""
|
||||
return os.path.basename(inspect.stack()[-1][1])[:16]
|
||||
|
||||
|
||||
binary_name = get_binary_name()
|
||||
|
||||
|
||||
|
26
nova/rpc.py
26
nova/rpc.py
@ -12,19 +12,6 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
__all__ = [
|
||||
'init',
|
||||
'cleanup',
|
||||
'set_defaults',
|
||||
'add_extra_exmods',
|
||||
'clear_extra_exmods',
|
||||
'get_allowed_exmods',
|
||||
'RequestContextSerializer',
|
||||
'get_client',
|
||||
'get_server',
|
||||
'get_notifier',
|
||||
]
|
||||
|
||||
import functools
|
||||
|
||||
from oslo_log import log as logging
|
||||
@ -40,6 +27,19 @@ import nova.context
|
||||
import nova.exception
|
||||
from nova.i18n import _
|
||||
|
||||
__all__ = [
|
||||
'init',
|
||||
'cleanup',
|
||||
'set_defaults',
|
||||
'add_extra_exmods',
|
||||
'clear_extra_exmods',
|
||||
'get_allowed_exmods',
|
||||
'RequestContextSerializer',
|
||||
'get_client',
|
||||
'get_server',
|
||||
'get_notifier',
|
||||
]
|
||||
|
||||
profiler = importutils.try_import("osprofiler.profiler")
|
||||
|
||||
|
||||
|
@ -469,7 +469,7 @@ class TestCompareResult(test.NoDBTestCase):
|
||||
response_data = 'bar'
|
||||
|
||||
with testtools.ExpectedException(KeyError):
|
||||
self.ast._compare_result(
|
||||
expected=template_data,
|
||||
result=response_data,
|
||||
result_str="Test")
|
||||
self.ast._compare_result(
|
||||
expected=template_data,
|
||||
result=response_data,
|
||||
result_str="Test")
|
||||
|
@ -522,7 +522,7 @@ class ApiSampleTestBase(integrated_helpers._IntegratedTestBase):
|
||||
body = self._read_template(name) % self.subs
|
||||
sample = self._get_sample(name, self.microversion)
|
||||
if self.generate_samples and not os.path.exists(sample):
|
||||
self._write_sample(name, body)
|
||||
self._write_sample(name, body)
|
||||
return self._get_response(url, method, body, headers=headers)
|
||||
|
||||
def _do_put(self, url, name=None, subs=None, headers=None):
|
||||
|
@ -254,9 +254,9 @@ class InstanceHelperMixin(object):
|
||||
break
|
||||
retry_count += 1
|
||||
if retry_count == max_retries:
|
||||
self.fail('Wait for state change failed, '
|
||||
'expected_params=%s, server=%s'
|
||||
% (expected_params, server))
|
||||
self.fail('Wait for state change failed, '
|
||||
'expected_params=%s, server=%s' % (
|
||||
expected_params, server))
|
||||
time.sleep(0.5)
|
||||
|
||||
return server
|
||||
|
@ -84,7 +84,7 @@ class BaseCellsTest(test.NoDBTestCase):
|
||||
cells[1]['transport_url'] = insecure_transport_url(
|
||||
cells[1]['transport_url'])
|
||||
for i, cell in enumerate(cells):
|
||||
cell['capabilities'] = self.fake_capabilities[i]
|
||||
cell['capabilities'] = self.fake_capabilities[i]
|
||||
return cells
|
||||
|
||||
|
||||
|
@ -75,9 +75,9 @@ def fake_get_flavor_by_flavor_id(context, flavorid):
|
||||
|
||||
def _has_flavor_access(flavorid, projectid):
|
||||
for access in ACCESS_LIST:
|
||||
if access['flavor_id'] == flavorid and \
|
||||
access['project_id'] == projectid:
|
||||
return True
|
||||
if (access['flavor_id'] == flavorid and
|
||||
access['project_id'] == projectid):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
|
@ -88,12 +88,13 @@ def network_api_disassociate(self, context, instance, floating_address):
|
||||
|
||||
|
||||
def fake_instance_get(context, instance_id):
|
||||
return objects.Instance(**{
|
||||
return objects.Instance(**{
|
||||
"id": 1,
|
||||
"uuid": uuids.fake,
|
||||
"name": 'fake',
|
||||
"user_id": 'fakeuser',
|
||||
"project_id": '123'})
|
||||
"project_id": '123'
|
||||
})
|
||||
|
||||
|
||||
def stub_nw_info(test):
|
||||
@ -759,8 +760,8 @@ class FloatingIpTestV21(test.TestCase):
|
||||
fixed_address=None):
|
||||
floating_ips = ["10.10.10.10", "10.10.10.11"]
|
||||
if floating_address not in floating_ips:
|
||||
raise exception.FloatingIpNotFoundForAddress(
|
||||
address=floating_address)
|
||||
raise exception.FloatingIpNotFoundForAddress(
|
||||
address=floating_address)
|
||||
|
||||
self.stubs.Set(network.api.API, "associate_floating_ip",
|
||||
fake_network_api_associate)
|
||||
@ -775,8 +776,8 @@ class FloatingIpTestV21(test.TestCase):
|
||||
floating_address):
|
||||
floating_ips = ["10.10.10.10", "10.10.10.11"]
|
||||
if floating_address not in floating_ips:
|
||||
raise exception.FloatingIpNotFoundForAddress(
|
||||
address=floating_address)
|
||||
raise exception.FloatingIpNotFoundForAddress(
|
||||
address=floating_address)
|
||||
|
||||
self.stubs.Set(network.api.API, "get_floating_ip_by_address",
|
||||
network_api_get_floating_ip_by_address)
|
||||
|
@ -151,8 +151,9 @@ class ServerMigrationsTestsV21(test.NoDBTestCase):
|
||||
webob.exc.HTTPNotFound)
|
||||
|
||||
def test_force_complete_unexpected_error(self):
|
||||
self._test_force_complete_failed_with_exception(
|
||||
exception.NovaException(), webob.exc.HTTPInternalServerError)
|
||||
self._test_force_complete_failed_with_exception(
|
||||
exception.NovaException(),
|
||||
webob.exc.HTTPInternalServerError)
|
||||
|
||||
|
||||
class ServerMigrationsTestsV223(ServerMigrationsTestsV21):
|
||||
|
@ -327,7 +327,7 @@ class ServerTagsTest(test.TestCase):
|
||||
NON_EXISTING_UUID, TAG1)
|
||||
|
||||
def test_delete_all_non_existing_instance(self):
|
||||
req = self._get_request(
|
||||
'/v2/fake/servers/%s/tags' % NON_EXISTING_UUID, 'DELETE')
|
||||
self.assertRaises(exc.HTTPNotFound, self.controller.delete_all,
|
||||
req, NON_EXISTING_UUID)
|
||||
req = self._get_request(
|
||||
'/v2/fake/servers/%s/tags' % NON_EXISTING_UUID, 'DELETE')
|
||||
self.assertRaises(exc.HTTPNotFound, self.controller.delete_all,
|
||||
req, NON_EXISTING_UUID)
|
||||
|
@ -252,7 +252,7 @@ class ResourceTest(MicroversionedTest):
|
||||
def get_req_id_header_name(self, request):
|
||||
header_name = 'x-openstack-request-id'
|
||||
if utils.get_api_version(request) < 3:
|
||||
header_name = 'x-compute-request-id'
|
||||
header_name = 'x-compute-request-id'
|
||||
|
||||
return header_name
|
||||
|
||||
|
@ -2519,10 +2519,10 @@ class ComputeTestCase(BaseTestCase,
|
||||
|
||||
with testtools.ExpectedException(
|
||||
exception.InstanceNotRescuable, expected_message):
|
||||
self.compute.rescue_instance(
|
||||
self.context, instance=inst_obj,
|
||||
rescue_password='password', rescue_image_ref=None,
|
||||
clean_shutdown=True)
|
||||
self.compute.rescue_instance(
|
||||
self.context, instance=inst_obj,
|
||||
rescue_password='password', rescue_image_ref=None,
|
||||
clean_shutdown=True)
|
||||
|
||||
self.assertEqual(vm_states.ERROR, inst_obj.vm_state)
|
||||
mock_get.assert_called_once_with(mock.ANY, inst_obj, mock.ANY)
|
||||
|
@ -4192,7 +4192,7 @@ class _ComputeAPIUnitTestMixIn(object):
|
||||
with context.target_cell(self.context, mapping.cell_mapping) as cc:
|
||||
cell_context = cc
|
||||
for instance in instances:
|
||||
instance._context = cell_context
|
||||
instance._context = cell_context
|
||||
|
||||
volume_id = uuidutils.generate_uuid()
|
||||
events = [
|
||||
@ -4260,7 +4260,7 @@ class _ComputeAPIUnitTestMixIn(object):
|
||||
with context.target_cell(self.context, mapping.cell_mapping) as cc:
|
||||
cell_context = cc
|
||||
for instance in instances:
|
||||
instance._context = cell_context
|
||||
instance._context = cell_context
|
||||
|
||||
events = [
|
||||
objects.InstanceExternalEvent(
|
||||
@ -6817,12 +6817,12 @@ class ComputeAPIUnitTestCase(_ComputeAPIUnitTestMixIn, test.NoDBTestCase):
|
||||
connector = conn_info['connector']
|
||||
|
||||
with mock.patch.object(self.compute_api.network_api,
|
||||
'deallocate_for_instance') as mock_deallocate, \
|
||||
mock.patch.object(self.compute_api.volume_api,
|
||||
'terminate_connection') as mock_terminate_conn, \
|
||||
mock.patch.object(self.compute_api.volume_api,
|
||||
'detach') as mock_detach:
|
||||
self.compute_api.delete(self.context, inst)
|
||||
'deallocate_for_instance') as mock_deallocate, \
|
||||
mock.patch.object(self.compute_api.volume_api,
|
||||
'terminate_connection') as mock_terminate_conn, \
|
||||
mock.patch.object(self.compute_api.volume_api,
|
||||
'detach') as mock_detach:
|
||||
self.compute_api.delete(self.context, inst)
|
||||
|
||||
mock_deallocate.assert_called_once_with(self.context, inst)
|
||||
mock_detach.assert_called_once_with(self.context, volume_id,
|
||||
|
@ -252,11 +252,11 @@ class ComputeManagerUnitTestCase(test.NoDBTestCase,
|
||||
bdms=mock_bdms)])
|
||||
|
||||
def _make_compute_node(self, hyp_hostname, cn_id):
|
||||
cn = mock.Mock(spec_set=['hypervisor_hostname', 'id',
|
||||
'destroy'])
|
||||
cn.id = cn_id
|
||||
cn.hypervisor_hostname = hyp_hostname
|
||||
return cn
|
||||
cn = mock.Mock(spec_set=['hypervisor_hostname', 'id',
|
||||
'destroy'])
|
||||
cn.id = cn_id
|
||||
cn.hypervisor_hostname = hyp_hostname
|
||||
return cn
|
||||
|
||||
def test_update_available_resource_for_node(self):
|
||||
rt = self._mock_rt(spec_set=['update_available_resource'])
|
||||
|
@ -2501,12 +2501,12 @@ class TestResize(BaseTestCase):
|
||||
ctx = mock.MagicMock()
|
||||
|
||||
with test.nested(
|
||||
mock.patch.object(self.rt, '_update'),
|
||||
mock.patch.object(self.rt.pci_tracker, 'free_device')
|
||||
) as (update_mock, mock_pci_free_device):
|
||||
self.rt.drop_move_claim(ctx, instance, _NODENAME)
|
||||
mock_pci_free_device.assert_called_once_with(
|
||||
pci_dev, mock.ANY)
|
||||
mock.patch.object(self.rt, '_update'),
|
||||
mock.patch.object(self.rt.pci_tracker, 'free_device')
|
||||
) as (update_mock, mock_pci_free_device):
|
||||
self.rt.drop_move_claim(ctx, instance, _NODENAME)
|
||||
mock_pci_free_device.assert_called_once_with(
|
||||
pci_dev, mock.ANY)
|
||||
|
||||
@mock.patch('nova.compute.utils.is_volume_backed_instance',
|
||||
return_value=False)
|
||||
|
@ -292,6 +292,7 @@ class _FakeImageService(object):
|
||||
return 'fake_location'
|
||||
return None
|
||||
|
||||
|
||||
_fakeImageService = _FakeImageService()
|
||||
|
||||
|
||||
|
@ -48,6 +48,7 @@ class tzinfo(datetime.tzinfo):
|
||||
def utcoffset(*args, **kwargs):
|
||||
return datetime.timedelta()
|
||||
|
||||
|
||||
NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22, tzinfo=tzinfo())
|
||||
|
||||
|
||||
@ -67,6 +68,7 @@ class FakeSchema(object):
|
||||
def raw(self):
|
||||
return copy.deepcopy(self.raw_schema)
|
||||
|
||||
|
||||
image_fixtures = {
|
||||
'active_image_v1': {
|
||||
'checksum': 'eb9139e4942121f22bbc2afc0400b2a4',
|
||||
|
@ -3629,22 +3629,21 @@ class TestNeutronv2WithMock(TestNeutronv2Base):
|
||||
return return_value
|
||||
|
||||
with test.nested(
|
||||
mock.patch.object(self.api, '_get_available_networks',
|
||||
return_value=nets),
|
||||
mock.patch.object(client.Client, 'list_ports',
|
||||
side_effect=_fake_list_ports),
|
||||
mock.patch.object(client.Client, 'show_quota',
|
||||
return_value={'quota': {'port': 1}})):
|
||||
|
||||
exc = self.assertRaises(exception.PortLimitExceeded,
|
||||
self.api.validate_networks,
|
||||
self.context, requested_networks, 1)
|
||||
expected_exception_msg = ('The number of defined ports: '
|
||||
'%(ports)d is over the limit: '
|
||||
'%(quota)d' %
|
||||
{'ports': 5,
|
||||
'quota': 1})
|
||||
self.assertEqual(expected_exception_msg, str(exc))
|
||||
mock.patch.object(self.api, '_get_available_networks',
|
||||
return_value=nets),
|
||||
mock.patch.object(client.Client, 'list_ports',
|
||||
side_effect=_fake_list_ports),
|
||||
mock.patch.object(client.Client, 'show_quota',
|
||||
return_value={'quota': {'port': 1}})):
|
||||
exc = self.assertRaises(exception.PortLimitExceeded,
|
||||
self.api.validate_networks,
|
||||
self.context, requested_networks, 1)
|
||||
expected_exception_msg = ('The number of defined ports: '
|
||||
'%(ports)d is over the limit: '
|
||||
'%(quota)d' %
|
||||
{'ports': 5,
|
||||
'quota': 1})
|
||||
self.assertEqual(expected_exception_msg, str(exc))
|
||||
|
||||
def test_validate_networks_fixed_ip_no_dup1(self):
|
||||
# Test validation for a request for a network with a
|
||||
|
@ -364,6 +364,7 @@ class TestNotificationBase(test.NoDBTestCase):
|
||||
self.assertFalse(payload.populated)
|
||||
self.assertFalse(mock_emit.called)
|
||||
|
||||
|
||||
notification_object_data = {
|
||||
'AggregateNotification': '1.0-a73147b93b520ff0061865849d3dfa56',
|
||||
'AggregatePayload': '1.1-1eb9adcc4440d8627de6ec37c6398746',
|
||||
|
@ -56,6 +56,7 @@ def _fake_service(**kwargs):
|
||||
fake_service.update(kwargs)
|
||||
return fake_service
|
||||
|
||||
|
||||
fake_service = _fake_service()
|
||||
|
||||
OPTIONAL = ['availability_zone', 'compute_node']
|
||||
|
@ -16,7 +16,7 @@ import sys
|
||||
import textwrap
|
||||
|
||||
import mock
|
||||
import pep8
|
||||
import pycodestyle
|
||||
import testtools
|
||||
|
||||
from nova.hacking import checks
|
||||
@ -25,10 +25,10 @@ from nova import test
|
||||
|
||||
class HackingTestCase(test.NoDBTestCase):
|
||||
"""This class tests the hacking checks in nova.hacking.checks by passing
|
||||
strings to the check methods like the pep8/flake8 parser would. The parser
|
||||
loops over each line in the file and then passes the parameters to the
|
||||
check method. The parameter names in the check method dictate what type of
|
||||
object is passed to the check method. The parameter types are::
|
||||
strings to the check methods like the pycodestyle/flake8 parser would. The
|
||||
parser loops over each line in the file and then passes the parameters to
|
||||
the check method. The parameter names in the check method dictate what type
|
||||
of object is passed to the check method. The parameter types are::
|
||||
|
||||
logical_line: A processed line with the following modifications:
|
||||
- Multi-line statements converted to a single line.
|
||||
@ -45,7 +45,7 @@ class HackingTestCase(test.NoDBTestCase):
|
||||
indent_level: indentation (with tabs expanded to multiples of 8)
|
||||
previous_indent_level: indentation on previous line
|
||||
previous_logical: previous logical line
|
||||
filename: Path of the file being run through pep8
|
||||
filename: Path of the file being run through pycodestyle
|
||||
|
||||
When running a test on a check method the return will be False/None if
|
||||
there is no violation in the sample input. If there is an error a tuple is
|
||||
@ -270,20 +270,20 @@ class HackingTestCase(test.NoDBTestCase):
|
||||
len(list(checks.use_jsonutils("json.dumb",
|
||||
"./nova/virt/xenapi/driver.py"))))
|
||||
|
||||
# We are patching pep8 so that only the check under test is actually
|
||||
# We are patching pycodestyle so that only the check under test is actually
|
||||
# installed.
|
||||
@mock.patch('pep8._checks',
|
||||
@mock.patch('pycodestyle._checks',
|
||||
{'physical_line': {}, 'logical_line': {}, 'tree': {}})
|
||||
def _run_check(self, code, checker, filename=None):
|
||||
pep8.register_check(checker)
|
||||
pycodestyle.register_check(checker)
|
||||
|
||||
lines = textwrap.dedent(code).strip().splitlines(True)
|
||||
|
||||
checker = pep8.Checker(filename=filename, lines=lines)
|
||||
checker = pycodestyle.Checker(filename=filename, lines=lines)
|
||||
# NOTE(sdague): the standard reporter has printing to stdout
|
||||
# as a normal part of check_all, which bleeds through to the
|
||||
# test output stream in an unhelpful way. This blocks that printing.
|
||||
with mock.patch('pep8.StandardReport.get_file_results'):
|
||||
with mock.patch('pycodestyle.StandardReport.get_file_results'):
|
||||
checker.check_all()
|
||||
checker.report._deferred_print.sort()
|
||||
return checker.report._deferred_print
|
||||
@ -587,7 +587,7 @@ class HackingTestCase(test.NoDBTestCase):
|
||||
def test_check_doubled_words(self):
|
||||
errors = [(1, 0, "N343")]
|
||||
|
||||
# Artificial break to stop pep8 detecting the test !
|
||||
# Artificial break to stop flake8 detecting the test !
|
||||
code = "This is the" + " the best comment"
|
||||
self._assert_has_errors(code, checks.check_doubled_words,
|
||||
expected_errors=errors)
|
||||
|
@ -103,6 +103,7 @@ def get_test_instance(context=None, flavor=None, obj=False):
|
||||
instance = db.instance_create(context, test_instance)
|
||||
return instance
|
||||
|
||||
|
||||
FAKE_NETWORK_VLAN = 100
|
||||
FAKE_NETWORK_BRIDGE = 'br0'
|
||||
FAKE_NETWORK_INTERFACE = 'eth0'
|
||||
|
@ -1034,7 +1034,7 @@ class Domain(object):
|
||||
<address uuid='%(address_uuid)s'/>
|
||||
</source>
|
||||
</hostdev>
|
||||
''' % hostdev
|
||||
''' % hostdev # noqa
|
||||
|
||||
return '''<domain type='kvm'>
|
||||
<name>%(name)s</name>
|
||||
|
@ -7002,9 +7002,9 @@ class LibvirtConnTestCase(test.NoDBTestCase,
|
||||
# Call can be unpackaged as a tuple of args and kwargs
|
||||
# so we want to check the first arg in the args list
|
||||
if (len(call) == 2 and len(call[0]) == 2 and
|
||||
call[0][1] in perf_events and
|
||||
'Monitoring Intel CMT' in call[0][0]):
|
||||
warning_count += 1
|
||||
call[0][1] in perf_events and
|
||||
'Monitoring Intel CMT' in call[0][0]):
|
||||
warning_count += 1
|
||||
self.assertEqual(3, warning_count)
|
||||
|
||||
def test_xml_and_uri_no_ramdisk_no_kernel(self):
|
||||
|
@ -1101,7 +1101,7 @@ def fake_fetch_image(context, instance, host, port, dc_name, ds_name,
|
||||
def _get_vm_mdo(vm_ref):
|
||||
"""Gets the Virtual Machine with the ref from the db."""
|
||||
if _db_content.get("VirtualMachine", None) is None:
|
||||
raise exception.NotFound("There is no VM registered")
|
||||
raise exception.NotFound("There is no VM registered")
|
||||
if vm_ref not in _db_content.get("VirtualMachine"):
|
||||
raise exception.NotFound("Virtual Machine with ref %s is not "
|
||||
"there" % vm_ref)
|
||||
|
@ -153,17 +153,17 @@ class VMwareVolumeOpsTestCase(test.NoDBTestCase):
|
||||
@mock.patch.object(vm_util, 'reconfigure_vm')
|
||||
def test_update_volume_details(self, reconfigure_vm,
|
||||
get_vm_extra_config_spec):
|
||||
volume_uuid = '26f5948e-52a3-4ee6-8d48-0a379afd0828'
|
||||
device_uuid = '0d86246a-2adb-470d-a9f7-bce09930c5d'
|
||||
self._volumeops._update_volume_details(
|
||||
mock.sentinel.vm_ref, volume_uuid, device_uuid)
|
||||
volume_uuid = '26f5948e-52a3-4ee6-8d48-0a379afd0828'
|
||||
device_uuid = '0d86246a-2adb-470d-a9f7-bce09930c5d'
|
||||
self._volumeops._update_volume_details(
|
||||
mock.sentinel.vm_ref, volume_uuid, device_uuid)
|
||||
|
||||
get_vm_extra_config_spec.assert_called_once_with(
|
||||
self._volumeops._session.vim.client.factory,
|
||||
{'volume-%s' % volume_uuid: device_uuid})
|
||||
reconfigure_vm.assert_called_once_with(self._volumeops._session,
|
||||
mock.sentinel.vm_ref,
|
||||
mock.sentinel.extra_config)
|
||||
get_vm_extra_config_spec.assert_called_once_with(
|
||||
self._volumeops._session.vim.client.factory,
|
||||
{'volume-%s' % volume_uuid: device_uuid})
|
||||
reconfigure_vm.assert_called_once_with(self._volumeops._session,
|
||||
mock.sentinel.vm_ref,
|
||||
mock.sentinel.extra_config)
|
||||
|
||||
def _fake_connection_info(self):
|
||||
return {'driver_volume_type': 'vmdk',
|
||||
|
@ -27,9 +27,6 @@ import os
|
||||
import random
|
||||
import tempfile
|
||||
|
||||
if os.name != 'nt':
|
||||
import crypt
|
||||
|
||||
from oslo_concurrency import processutils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
@ -44,6 +41,9 @@ from nova.virt.disk.vfs import api as vfs
|
||||
from nova.virt.image import model as imgmodel
|
||||
from nova.virt import images
|
||||
|
||||
if os.name != 'nt':
|
||||
import crypt
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -164,6 +164,7 @@ class InjectionInfo(collections.namedtuple(
|
||||
return ('InjectionInfo(network_info=%r, files=%r, '
|
||||
'admin_pass=<SANITIZED>)') % (self.network_info, self.files)
|
||||
|
||||
|
||||
libvirt_volume_drivers = [
|
||||
'iscsi=nova.virt.libvirt.volume.iscsi.LibvirtISCSIVolumeDriver',
|
||||
'iser=nova.virt.libvirt.volume.iser.LibvirtISERVolumeDriver',
|
||||
@ -582,14 +583,14 @@ class LibvirtDriver(driver.ComputeDriver):
|
||||
# new enough) and "native TLS" options at the same time is
|
||||
# nonsensical.
|
||||
if (CONF.libvirt.live_migration_tunnelled and
|
||||
CONF.libvirt.live_migration_with_native_tls):
|
||||
msg = _("Setting both 'live_migration_tunnelled' and "
|
||||
"'live_migration_with_native_tls' at the same "
|
||||
"time is invalid. If you have the relevant "
|
||||
"libvirt and QEMU versions, and TLS configured "
|
||||
"in your environment, pick "
|
||||
"'live_migration_with_native_tls'.")
|
||||
raise exception.Invalid(msg)
|
||||
CONF.libvirt.live_migration_with_native_tls):
|
||||
msg = _("Setting both 'live_migration_tunnelled' and "
|
||||
"'live_migration_with_native_tls' at the same "
|
||||
"time is invalid. If you have the relevant "
|
||||
"libvirt and QEMU versions, and TLS configured "
|
||||
"in your environment, pick "
|
||||
"'live_migration_with_native_tls'.")
|
||||
raise exception.Invalid(msg)
|
||||
|
||||
# TODO(sbauza): Remove this code once mediated devices are persisted
|
||||
# across reboots.
|
||||
|
@ -145,6 +145,7 @@ def vm_ref_cache_from_name(func):
|
||||
return _vm_ref_cache(id, func, session, name)
|
||||
return wrapper
|
||||
|
||||
|
||||
# the config key which stores the VNC port
|
||||
VNC_CONFIG_KEY = 'config.extraConfig["RemoteDisplay.vnc.port"]'
|
||||
|
||||
|
@ -424,7 +424,8 @@ def translate_volume_exception(method):
|
||||
|
||||
|
||||
def translate_attachment_exception(method):
|
||||
"""Transforms the exception for the attachment but keeps its traceback intact.
|
||||
"""Transforms the exception for the attachment but keeps its traceback
|
||||
intact.
|
||||
"""
|
||||
def wrapper(self, ctx, attachment_id, *args, **kwargs):
|
||||
try:
|
||||
|
@ -2,7 +2,7 @@
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
|
||||
hacking>=1.1.0,<1.2.0 # Apache-2.0
|
||||
coverage!=4.4,>=4.0 # Apache-2.0
|
||||
ddt>=1.0.1 # MIT
|
||||
fixtures>=3.0.0 # Apache-2.0/BSD
|
||||
@ -10,6 +10,7 @@ mock>=2.0.0 # BSD
|
||||
mox3>=0.20.0 # Apache-2.0
|
||||
psycopg2>=2.7 # LGPL/ZPL
|
||||
PyMySQL>=0.7.6 # MIT License
|
||||
pycodestyle>=2.0.0 # MIT License
|
||||
python-barbicanclient>=4.5.2 # Apache-2.0
|
||||
python-ironicclient>=2.7.0 # Apache-2.0
|
||||
requests-mock>=1.2.0 # Apache-2.0
|
||||
|
11
tox.ini
11
tox.ini
@ -250,14 +250,19 @@ commands = bandit -r nova -x tests -n 5 -ll
|
||||
# line. Rejecting code for this reason is wrong.
|
||||
#
|
||||
# E251 Skipped due to https://github.com/jcrocholl/pep8/issues/301
|
||||
#
|
||||
# W504 skipped since you must choose either W503 or W504 (they conflict)
|
||||
#
|
||||
# W503, W605, E731, and E741 temporarily skipped because of the number of
|
||||
# these that have to be fixed
|
||||
enable-extensions = H106,H203,H904
|
||||
ignore = E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E251,H405
|
||||
ignore = E121,E122,E123,E124,E125,E126,E127,E128,E129,E131,E251,H405,W504,W605,W503,E731,E741
|
||||
exclude = .venv,.git,.tox,dist,*lib/python*,*egg,build,tools/xenserver*,releasenotes
|
||||
# To get a list of functions that are more complex than 25, set max-complexity
|
||||
# to 25 and run 'tox -epep8'.
|
||||
# 34 is currently the most complex thing we have
|
||||
# 39 is currently the most complex thing we have
|
||||
# TODO(jogo): get this number down to 25 or so
|
||||
max-complexity=35
|
||||
max-complexity=40
|
||||
|
||||
[hacking]
|
||||
local-check-factory = nova.hacking.checks.factory
|
||||
|
Loading…
x
Reference in New Issue
Block a user