Add autopep8 to tox.ini
autopep8 is an automated code formatting tool for Python. It does not know everything, and it is not especially fast, but it is faster than reformatting the code manually. tools/format.sh will invoke the formatting, and tox will check that it has been run. This also adds a `tox -e autopep8` target as a convenient way to use it. Where autopep8 formats lines differently from the style tempest previously used, the code was changed to match autopep8's output. Change-Id: I6d51b14a5a5b87761071d0927fca23ba1651aa41
This commit is contained in:
parent
c975f08e9b
commit
40fcb9ba28
|
@ -143,7 +143,7 @@ class NoVNCConsoleTestJSON(base.BaseV2ComputeTest):
|
||||||
data_length = len(data) if data is not None else 0
|
data_length = len(data) if data is not None else 0
|
||||||
self.assertFalse(data_length <= 24 or
|
self.assertFalse(data_length <= 24 or
|
||||||
data_length != (struct.unpack(">L",
|
data_length != (struct.unpack(">L",
|
||||||
data[20:24])[0] + 24),
|
data[20:24])[0] + 24),
|
||||||
'Server initialization was not the right format.')
|
'Server initialization was not the right format.')
|
||||||
# Since the rest of the data on the screen is arbitrary, we will
|
# Since the rest of the data on the screen is arbitrary, we will
|
||||||
# close the socket and end our validation of the data at this point
|
# close the socket and end our validation of the data at this point
|
||||||
|
@ -151,7 +151,7 @@ class NoVNCConsoleTestJSON(base.BaseV2ComputeTest):
|
||||||
# initialization was the right format
|
# initialization was the right format
|
||||||
self.assertFalse(data_length <= 24 or
|
self.assertFalse(data_length <= 24 or
|
||||||
data_length != (struct.unpack(">L",
|
data_length != (struct.unpack(">L",
|
||||||
data[20:24])[0] + 24))
|
data[20:24])[0] + 24))
|
||||||
|
|
||||||
def _validate_websocket_upgrade(self):
|
def _validate_websocket_upgrade(self):
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
|
|
|
@ -206,7 +206,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
|
||||||
for k in port['fixed_ips']])
|
for k in port['fixed_ips']])
|
||||||
real_dhcp_ip, real_eui_ip = [real_ips[sub['id']]
|
real_dhcp_ip, real_eui_ip = [real_ips[sub['id']]
|
||||||
for sub in [subnet_dhcp,
|
for sub in [subnet_dhcp,
|
||||||
subnet_slaac]]
|
subnet_slaac]]
|
||||||
self.ports_client.delete_port(port['id'])
|
self.ports_client.delete_port(port['id'])
|
||||||
self.ports.pop()
|
self.ports.pop()
|
||||||
body = self.ports_client.list_ports()
|
body = self.ports_client.list_ports()
|
||||||
|
@ -257,7 +257,7 @@ class NetworksTestDHCPv6(base.BaseNetworkTest):
|
||||||
for k in port['fixed_ips']])
|
for k in port['fixed_ips']])
|
||||||
real_dhcp_ip, real_eui_ip = [real_ips[sub['id']]
|
real_dhcp_ip, real_eui_ip = [real_ips[sub['id']]
|
||||||
for sub in [subnet_dhcp,
|
for sub in [subnet_dhcp,
|
||||||
subnet_slaac]]
|
subnet_slaac]]
|
||||||
self._clean_network()
|
self._clean_network()
|
||||||
self.assertEqual(real_eui_ip,
|
self.assertEqual(real_eui_ip,
|
||||||
eui_ip,
|
eui_ip,
|
||||||
|
|
|
@ -317,7 +317,7 @@ class NetworksTest(BaseNetworkTestResources):
|
||||||
|
|
||||||
subnet = self.create_subnet(
|
subnet = self.create_subnet(
|
||||||
network, **self.subnet_dict(['gateway', 'host_routes',
|
network, **self.subnet_dict(['gateway', 'host_routes',
|
||||||
'dns_nameservers',
|
'dns_nameservers',
|
||||||
'allocation_pools']))
|
'allocation_pools']))
|
||||||
subnet_id = subnet['id']
|
subnet_id = subnet['id']
|
||||||
new_gateway = str(netaddr.IPAddress(
|
new_gateway = str(netaddr.IPAddress(
|
||||||
|
|
|
@ -107,7 +107,7 @@ class PortsTestJSON(sec_base.BaseSecGroupTest):
|
||||||
address = self.cidr
|
address = self.cidr
|
||||||
address.prefixlen = self.mask_bits
|
address.prefixlen = self.mask_bits
|
||||||
if ((address.version == 4 and address.prefixlen >= 30) or
|
if ((address.version == 4 and address.prefixlen >= 30) or
|
||||||
(address.version == 6 and address.prefixlen >= 126)):
|
(address.version == 6 and address.prefixlen >= 126)):
|
||||||
msg = ("Subnet %s isn't large enough for the test" % address.cidr)
|
msg = ("Subnet %s isn't large enough for the test" % address.cidr)
|
||||||
raise exceptions.InvalidConfiguration(msg)
|
raise exceptions.InvalidConfiguration(msg)
|
||||||
allocation_pools = {'allocation_pools': [{'start': str(address[2]),
|
allocation_pools = {'allocation_pools': [{'start': str(address[2]),
|
||||||
|
|
|
@ -310,8 +310,8 @@ class TempestCleanup(command.Command):
|
||||||
svc.run()
|
svc.run()
|
||||||
|
|
||||||
with open(SAVED_STATE_JSON, 'w+') as f:
|
with open(SAVED_STATE_JSON, 'w+') as f:
|
||||||
f.write(json.dumps(data,
|
f.write(json.dumps(data, sort_keys=True,
|
||||||
sort_keys=True, indent=2, separators=(',', ': ')))
|
indent=2, separators=(',', ': ')))
|
||||||
|
|
||||||
def _load_json(self, saved_state_json=SAVED_STATE_JSON):
|
def _load_json(self, saved_state_json=SAVED_STATE_JSON):
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -202,8 +202,8 @@ class TempestRun(command.Command):
|
||||||
svc.run()
|
svc.run()
|
||||||
|
|
||||||
with open(SAVED_STATE_JSON, 'w+') as f:
|
with open(SAVED_STATE_JSON, 'w+') as f:
|
||||||
f.write(json.dumps(data,
|
f.write(json.dumps(data, sort_keys=True,
|
||||||
sort_keys=True, indent=2, separators=(',', ': ')))
|
indent=2, separators=(',', ': ')))
|
||||||
|
|
||||||
def get_parser(self, prog_name):
|
def get_parser(self, prog_name):
|
||||||
parser = super(TempestRun, self).get_parser(prog_name)
|
parser = super(TempestRun, self).get_parser(prog_name)
|
||||||
|
|
|
@ -26,7 +26,7 @@ def get_project_by_name(client, project_name):
|
||||||
if project['name'] == project_name:
|
if project['name'] == project_name:
|
||||||
return project
|
return project
|
||||||
raise lib_exc.NotFound('No such project(%s) in %s' % (project_name,
|
raise lib_exc.NotFound('No such project(%s) in %s' % (project_name,
|
||||||
projects))
|
projects))
|
||||||
|
|
||||||
|
|
||||||
def get_tenant_by_name(client, tenant_name):
|
def get_tenant_by_name(client, tenant_name):
|
||||||
|
|
|
@ -19,7 +19,6 @@ from tempest.lib import exceptions as lib_exc
|
||||||
|
|
||||||
def get_unused_ip_addresses(ports_client, subnets_client,
|
def get_unused_ip_addresses(ports_client, subnets_client,
|
||||||
network_id, subnet_id, count):
|
network_id, subnet_id, count):
|
||||||
|
|
||||||
"""Return a list with the specified number of unused IP addresses
|
"""Return a list with the specified number of unused IP addresses
|
||||||
|
|
||||||
This method uses the given ports_client to find the specified number of
|
This method uses the given ports_client to find the specified number of
|
||||||
|
|
|
@ -1249,7 +1249,7 @@ class TempestConfigPrivate(object):
|
||||||
|
|
||||||
logging_cfg_path = "%s/logging.conf" % os.path.dirname(path)
|
logging_cfg_path = "%s/logging.conf" % os.path.dirname(path)
|
||||||
if ((not hasattr(_CONF, 'log_config_append') or
|
if ((not hasattr(_CONF, 'log_config_append') or
|
||||||
_CONF.log_config_append is None) and
|
_CONF.log_config_append is None) and
|
||||||
os.path.isfile(logging_cfg_path)):
|
os.path.isfile(logging_cfg_path)):
|
||||||
# if logging conf is in place we need to set log_config_append
|
# if logging conf is in place we need to set log_config_append
|
||||||
_CONF.log_config_append = logging_cfg_path
|
_CONF.log_config_append = logging_cfg_path
|
||||||
|
|
|
@ -110,7 +110,7 @@ class TestChecker(object):
|
||||||
for item in files:
|
for item in files:
|
||||||
if item.endswith('.py'):
|
if item.endswith('.py'):
|
||||||
module_name = '.'.join((root_package,
|
module_name = '.'.join((root_package,
|
||||||
os.path.splitext(item)[0]))
|
os.path.splitext(item)[0]))
|
||||||
if not module_name.startswith(UNIT_TESTS_EXCLUDE):
|
if not module_name.startswith(UNIT_TESTS_EXCLUDE):
|
||||||
modules.append(module_name)
|
modules.append(module_name)
|
||||||
return modules
|
return modules
|
||||||
|
|
|
@ -54,7 +54,7 @@ def check_skip_with_microversion(test_min_version, test_max_version,
|
||||||
config_min_version = api_version_request.APIVersionRequest(cfg_min_version)
|
config_min_version = api_version_request.APIVersionRequest(cfg_min_version)
|
||||||
config_max_version = api_version_request.APIVersionRequest(cfg_max_version)
|
config_max_version = api_version_request.APIVersionRequest(cfg_max_version)
|
||||||
if ((min_version > max_version) or
|
if ((min_version > max_version) or
|
||||||
(config_min_version > config_max_version)):
|
(config_min_version > config_max_version)):
|
||||||
msg = ("Test Class versions [%s - %s]. "
|
msg = ("Test Class versions [%s - %s]. "
|
||||||
"Configuration versions [%s - %s]."
|
"Configuration versions [%s - %s]."
|
||||||
% (min_version.get_string(),
|
% (min_version.get_string(),
|
||||||
|
|
|
@ -273,7 +273,7 @@ class PreProvisionedCredentialProvider(cred_provider.CredentialProvider):
|
||||||
# NOTE(andreaf) Not all fields may be available on all credentials
|
# NOTE(andreaf) Not all fields may be available on all credentials
|
||||||
# so defaulting to None for that case.
|
# so defaulting to None for that case.
|
||||||
if all([getattr(creds, k, None) == hash_attributes.get(k, None) for
|
if all([getattr(creds, k, None) == hash_attributes.get(k, None) for
|
||||||
k in init_attributes]):
|
k in init_attributes]):
|
||||||
return _hash
|
return _hash
|
||||||
raise AttributeError('Invalid credentials %s' % creds)
|
raise AttributeError('Invalid credentials %s' % creds)
|
||||||
|
|
||||||
|
|
|
@ -170,7 +170,7 @@ def random_bytes(size=1024):
|
||||||
:rtype: string
|
:rtype: string
|
||||||
"""
|
"""
|
||||||
return b''.join([six.int2byte(random.randint(0, 255))
|
return b''.join([six.int2byte(random.randint(0, 255))
|
||||||
for i in range(size)])
|
for i in range(size)])
|
||||||
|
|
||||||
|
|
||||||
# Courtesy of http://stackoverflow.com/a/312464
|
# Courtesy of http://stackoverflow.com/a/312464
|
||||||
|
|
|
@ -172,7 +172,7 @@ class FlavorsClient(base_compute_client.BaseComputeClient):
|
||||||
https://developer.openstack.org/api-ref/compute/#show-an-extra-spec-for-a-flavor
|
https://developer.openstack.org/api-ref/compute/#show-an-extra-spec-for-a-flavor
|
||||||
"""
|
"""
|
||||||
resp, body = self.get('flavors/%s/os-extra_specs/%s' % (flavor_id,
|
resp, body = self.get('flavors/%s/os-extra_specs/%s' % (flavor_id,
|
||||||
key))
|
key))
|
||||||
body = json.loads(body)
|
body = json.loads(body)
|
||||||
self.validate_response(
|
self.validate_response(
|
||||||
schema_extra_specs.set_get_flavor_extra_specs_key,
|
schema_extra_specs.set_get_flavor_extra_specs_key,
|
||||||
|
|
|
@ -636,7 +636,7 @@ class ServersClient(base_compute_client.BaseComputeClient):
|
||||||
def list_virtual_interfaces(self, server_id):
|
def list_virtual_interfaces(self, server_id):
|
||||||
"""List the virtual interfaces used in an instance."""
|
"""List the virtual interfaces used in an instance."""
|
||||||
resp, body = self.get('/'.join(['servers', server_id,
|
resp, body = self.get('/'.join(['servers', server_id,
|
||||||
'os-virtual-interfaces']))
|
'os-virtual-interfaces']))
|
||||||
body = json.loads(body)
|
body = json.loads(body)
|
||||||
self.validate_response(schema.list_virtual_interfaces, resp, body)
|
self.validate_response(schema.list_virtual_interfaces, resp, body)
|
||||||
return rest_client.ResponseBody(resp, body)
|
return rest_client.ResponseBody(resp, body)
|
||||||
|
|
|
@ -166,7 +166,7 @@ class ScenarioTest(tempest.test.BaseTestCase):
|
||||||
clients.security_groups_client.list_security_groups(
|
clients.security_groups_client.list_security_groups(
|
||||||
).get('security_groups')
|
).get('security_groups')
|
||||||
sec_dict = dict([(s['name'], s['id'])
|
sec_dict = dict([(s['name'], s['id'])
|
||||||
for s in security_groups])
|
for s in security_groups])
|
||||||
|
|
||||||
sec_groups_names = [s['name'] for s in kwargs.pop(
|
sec_groups_names = [s['name'] for s in kwargs.pop(
|
||||||
'security_groups')]
|
'security_groups')]
|
||||||
|
|
|
@ -48,6 +48,7 @@ class TestMinimumBasicScenario(manager.ScenarioTest):
|
||||||
10. Check SSH connection to instance after reboot
|
10. Check SSH connection to instance after reboot
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def nova_show(self, server):
|
def nova_show(self, server):
|
||||||
got_server = (self.servers_client.show_server(server['id'])
|
got_server = (self.servers_client.show_server(server['id'])
|
||||||
['server'])
|
['server'])
|
||||||
|
|
|
@ -86,7 +86,6 @@ class TestVolumeBootPattern(manager.EncryptionScenarioTest):
|
||||||
'Cinder volume snapshots are disabled')
|
'Cinder volume snapshots are disabled')
|
||||||
@utils.services('compute', 'volume', 'image')
|
@utils.services('compute', 'volume', 'image')
|
||||||
def test_volume_boot_pattern(self):
|
def test_volume_boot_pattern(self):
|
||||||
|
|
||||||
"""This test case attempts to reproduce the following steps:
|
"""This test case attempts to reproduce the following steps:
|
||||||
|
|
||||||
* Create in Cinder some bootable volume importing a Glance image
|
* Create in Cinder some bootable volume importing a Glance image
|
||||||
|
|
|
@ -179,6 +179,7 @@ class TempestTestPluginManager(object):
|
||||||
This class is used to manage the lifecycle of external tempest test
|
This class is used to manage the lifecycle of external tempest test
|
||||||
plugins. It provides functions for getting set
|
plugins. It provides functions for getting set
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.ext_plugins = stevedore.ExtensionManager(
|
self.ext_plugins = stevedore.ExtensionManager(
|
||||||
'tempest.test_plugins', invoke_on_load=True,
|
'tempest.test_plugins', invoke_on_load=True,
|
||||||
|
|
|
@ -37,7 +37,7 @@ def load_tests(loader, tests, pattern):
|
||||||
top_level_dir=base_path))
|
top_level_dir=base_path))
|
||||||
else:
|
else:
|
||||||
suite.addTests(loader.discover(full_test_dir, pattern=pattern,
|
suite.addTests(loader.discover(full_test_dir, pattern=pattern,
|
||||||
top_level_dir=base_path))
|
top_level_dir=base_path))
|
||||||
|
|
||||||
plugin_load_tests = ext_plugins.get_plugin_load_tests_tuple()
|
plugin_load_tests = ext_plugins.get_plugin_load_tests_tuple()
|
||||||
if not plugin_load_tests:
|
if not plugin_load_tests:
|
||||||
|
|
|
@ -48,7 +48,7 @@ class TestTempestWorkspace(TestTempestWorkspaceBase):
|
||||||
stdout, stderr = process.communicate()
|
stdout, stderr = process.communicate()
|
||||||
return_code = process.returncode
|
return_code = process.returncode
|
||||||
msg = ("%s failed with:\nstdout: %s\nstderr: %s" % (' '.join(cmd),
|
msg = ("%s failed with:\nstdout: %s\nstderr: %s" % (' '.join(cmd),
|
||||||
stdout, stderr))
|
stdout, stderr))
|
||||||
self.assertEqual(return_code, expected, msg)
|
self.assertEqual(return_code, expected, msg)
|
||||||
|
|
||||||
def test_run_workspace_list(self):
|
def test_run_workspace_list(self):
|
||||||
|
|
|
@ -109,8 +109,8 @@ class TestDynamicCredentialProvider(base.TestCase):
|
||||||
return_value=(rest_client.ResponseBody
|
return_value=(rest_client.ResponseBody
|
||||||
(200,
|
(200,
|
||||||
{'roles': [{'id': id, 'name': name},
|
{'roles': [{'id': id, 'name': name},
|
||||||
{'id': '1', 'name': 'FakeRole'},
|
{'id': '1', 'name': 'FakeRole'},
|
||||||
{'id': '2', 'name': 'Member'}]}))))
|
{'id': '2', 'name': 'Member'}]}))))
|
||||||
return roles_fix
|
return roles_fix
|
||||||
|
|
||||||
def _mock_list_2_roles(self):
|
def _mock_list_2_roles(self):
|
||||||
|
@ -120,8 +120,8 @@ class TestDynamicCredentialProvider(base.TestCase):
|
||||||
return_value=(rest_client.ResponseBody
|
return_value=(rest_client.ResponseBody
|
||||||
(200,
|
(200,
|
||||||
{'roles': [{'id': '1234', 'name': 'role1'},
|
{'roles': [{'id': '1234', 'name': 'role1'},
|
||||||
{'id': '1', 'name': 'FakeRole'},
|
{'id': '1', 'name': 'FakeRole'},
|
||||||
{'id': '12345', 'name': 'role2'}]}))))
|
{'id': '12345', 'name': 'role2'}]}))))
|
||||||
return roles_fix
|
return roles_fix
|
||||||
|
|
||||||
def _mock_assign_user_role(self):
|
def _mock_assign_user_role(self):
|
||||||
|
|
|
@ -186,15 +186,19 @@ class TestImagesClient(base.BaseServiceTest):
|
||||||
def _test_resource_deleted(self, bytes_body=False):
|
def _test_resource_deleted(self, bytes_body=False):
|
||||||
params = {"id": self.FAKE_IMAGE_ID}
|
params = {"id": self.FAKE_IMAGE_ID}
|
||||||
expected_op = self.FAKE_IMAGE_DATA['show']
|
expected_op = self.FAKE_IMAGE_DATA['show']
|
||||||
self.useFixture(fixtures.MockPatch('tempest.lib.services.compute'
|
self.useFixture(
|
||||||
'.images_client.ImagesClient.show_image',
|
fixtures.MockPatch(
|
||||||
side_effect=lib_exc.NotFound))
|
'tempest.lib.services.compute'
|
||||||
|
'.images_client.ImagesClient.show_image',
|
||||||
|
side_effect=lib_exc.NotFound))
|
||||||
self.assertEqual(True, self.client.is_resource_deleted(**params))
|
self.assertEqual(True, self.client.is_resource_deleted(**params))
|
||||||
tempdata = copy.deepcopy(self.FAKE_IMAGE_DATA['show'])
|
tempdata = copy.deepcopy(self.FAKE_IMAGE_DATA['show'])
|
||||||
tempdata['image']['id'] = None
|
tempdata['image']['id'] = None
|
||||||
self.useFixture(fixtures.MockPatch('tempest.lib.services.compute'
|
self.useFixture(
|
||||||
'.images_client.ImagesClient.show_image',
|
fixtures.MockPatch(
|
||||||
return_value=expected_op))
|
'tempest.lib.services.compute'
|
||||||
|
'.images_client.ImagesClient.show_image',
|
||||||
|
return_value=expected_op))
|
||||||
self.assertEqual(False, self.client.is_resource_deleted(**params))
|
self.assertEqual(False, self.client.is_resource_deleted(**params))
|
||||||
|
|
||||||
def test_list_images_with_str_body(self):
|
def test_list_images_with_str_body(self):
|
||||||
|
|
|
@ -62,7 +62,7 @@ class TestSchedulerStatsClient(base.BaseServiceTest):
|
||||||
resp_body = self.FAKE_POOLS_LIST
|
resp_body = self.FAKE_POOLS_LIST
|
||||||
else:
|
else:
|
||||||
resp_body = {'pools': [{'name': pool['name']}
|
resp_body = {'pools': [{'name': pool['name']}
|
||||||
for pool in self.FAKE_POOLS_LIST['pools']]}
|
for pool in self.FAKE_POOLS_LIST['pools']]}
|
||||||
self.check_service_client_function(
|
self.check_service_client_function(
|
||||||
self.client.list_pools,
|
self.client.list_pools,
|
||||||
'tempest.lib.common.rest_client.RestClient.get',
|
'tempest.lib.common.rest_client.RestClient.get',
|
||||||
|
|
|
@ -48,6 +48,7 @@ class HackingTestCase(base.TestCase):
|
||||||
just assertTrue if the check is expected to fail and assertFalse if it
|
just assertTrue if the check is expected to fail and assertFalse if it
|
||||||
should pass.
|
should pass.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_no_setup_teardown_class_for_tests(self):
|
def test_no_setup_teardown_class_for_tests(self):
|
||||||
self.assertTrue(checks.no_setup_teardown_class_for_tests(
|
self.assertTrue(checks.no_setup_teardown_class_for_tests(
|
||||||
" def setUpClass(cls):", './tempest/tests/fake_test.py'))
|
" def setUpClass(cls):", './tempest/tests/fake_test.py'))
|
||||||
|
|
|
@ -0,0 +1,5 @@
|
||||||
|
#!/bin/bash
|
||||||
|
cd $(dirname "$(readlink -f "$0")")
|
||||||
|
|
||||||
|
autopep8 --exit-code --max-line-length=79 --experimental --in-place -r ../tempest ../setup.py && echo Formatting was not needed. >&2
|
||||||
|
|
10
tox.ini
10
tox.ini
|
@ -197,11 +197,21 @@ commands =
|
||||||
whitelist_externals = rm
|
whitelist_externals = rm
|
||||||
|
|
||||||
[testenv:pep8]
|
[testenv:pep8]
|
||||||
|
deps =
|
||||||
|
-r test-requirements.txt
|
||||||
|
autopep8
|
||||||
basepython = python3
|
basepython = python3
|
||||||
commands =
|
commands =
|
||||||
|
autopep8 --exit-code --max-line-length=79 --experimental --diff -r tempest setup.py
|
||||||
flake8 {posargs}
|
flake8 {posargs}
|
||||||
check-uuid
|
check-uuid
|
||||||
|
|
||||||
|
[testenv:autopep8]
|
||||||
|
deps = autopep8
|
||||||
|
basepython = python3
|
||||||
|
commands =
|
||||||
|
autopep8 --max-line-length=79 --experimental --in-place -r tempest setup.py
|
||||||
|
|
||||||
[testenv:uuidgen]
|
[testenv:uuidgen]
|
||||||
commands =
|
commands =
|
||||||
check-uuid --fix
|
check-uuid --fix
|
||||||
|
|
Loading…
Reference in New Issue