Use sequence directly instead of using len()
This commit changes the code to use sequences directly instead of using len(SEQUENCE). The original code works correctly, and it's really straightforward. However, PEP 8 recommends the style below[1], and it also makes the code simpler. ``` For sequences, (strings, lists, tuples), use the fact that empty sequences are false. Yes: if not seq: if seq: No: if len(seq): if not len(seq): ``` [1] https://www.python.org/dev/peps/pep-0008/#programming-recommendations Change-Id: I8d41e16d82b1b3860a98e5217cb7a541fc83b907
This commit is contained in:
parent
549dfc93fb
commit
0c0f0143e1
|
@ -104,7 +104,7 @@ class HypervisorAdminTestJSON(base.BaseV2ComputeAdminTest):
|
|||
try:
|
||||
uptime = (self.client.show_hypervisor_uptime(hyper['id'])
|
||||
['hypervisor'])
|
||||
if len(uptime) > 0:
|
||||
if uptime:
|
||||
has_valid_uptime = True
|
||||
break
|
||||
except Exception:
|
||||
|
|
|
@ -82,7 +82,7 @@ class NoVNCConsoleTestJSON(base.BaseV2ComputeTest):
|
|||
"""Verify we can connect to novnc and do the websocket connection."""
|
||||
# Turn the Socket into a WebSocket to do the communication
|
||||
data = self._websocket.receive_frame()
|
||||
self.assertFalse(data is None or len(data) == 0,
|
||||
self.assertFalse(data is None or not data,
|
||||
'Token must be invalid because the connection '
|
||||
'closed.')
|
||||
# Parse the RFB version from the data to make sure it is valid
|
||||
|
@ -181,6 +181,6 @@ class NoVNCConsoleTestJSON(base.BaseV2ComputeTest):
|
|||
self._websocket = compute.create_websocket(url)
|
||||
# Make sure the novncproxy rejected the connection and closed it
|
||||
data = self._websocket.receive_frame()
|
||||
self.assertTrue(data is None or len(data) == 0,
|
||||
self.assertTrue(data is None or not data,
|
||||
"The novnc proxy actually sent us some data, but we "
|
||||
"expected it to close the connection.")
|
||||
|
|
|
@ -31,7 +31,7 @@ class ExtensionsTestJSON(base.BaseV2ComputeTest):
|
|||
@decorators.idempotent_id('3bb27738-b759-4e0d-a5fa-37d7a6df07d1')
|
||||
def test_list_extensions(self):
|
||||
# List of all extensions
|
||||
if len(CONF.compute_feature_enabled.api_extensions) == 0:
|
||||
if not CONF.compute_feature_enabled.api_extensions:
|
||||
raise self.skipException('There are not any extensions configured')
|
||||
extensions = self.extensions_client.list_extensions()['extensions']
|
||||
ext = CONF.compute_feature_enabled.api_extensions[0]
|
||||
|
|
|
@ -47,7 +47,7 @@ class BaseIdentityTest(tempest.test.BaseTestCase):
|
|||
else:
|
||||
users = cls.users_client.list_users()['users']
|
||||
user = [u for u in users if u['name'] == name]
|
||||
if len(user) > 0:
|
||||
if user:
|
||||
return user[0]
|
||||
|
||||
@classmethod
|
||||
|
@ -57,14 +57,14 @@ class BaseIdentityTest(tempest.test.BaseTestCase):
|
|||
except AttributeError:
|
||||
tenants = cls.projects_client.list_projects()['projects']
|
||||
tenant = [t for t in tenants if t['name'] == name]
|
||||
if len(tenant) > 0:
|
||||
if tenant:
|
||||
return tenant[0]
|
||||
|
||||
@classmethod
|
||||
def get_role_by_name(cls, name):
|
||||
roles = cls.roles_client.list_roles()['roles']
|
||||
role = [r for r in roles if r['name'] == name]
|
||||
if len(role) > 0:
|
||||
if role:
|
||||
return role[0]
|
||||
|
||||
def create_test_user(self, **kwargs):
|
||||
|
|
|
@ -106,7 +106,7 @@ class BaseNetworkTest(tempest.test.BaseTestCase):
|
|||
|
||||
# Clean up metering label rules
|
||||
# Not all classes in the hierarchy have the client class variable
|
||||
if len(cls.metering_label_rules) > 0:
|
||||
if cls.metering_label_rules:
|
||||
label_rules_client = cls.admin_metering_label_rules_client
|
||||
for metering_label_rule in cls.metering_label_rules:
|
||||
test_utils.call_and_ignore_notfound_exc(
|
||||
|
|
|
@ -32,7 +32,7 @@ class ExtensionsTestJSON(base.BaseVolumeTest):
|
|||
# List of all extensions
|
||||
extensions = (self.volumes_extension_client.list_extensions()
|
||||
['extensions'])
|
||||
if len(CONF.volume_feature_enabled.api_extensions) == 0:
|
||||
if not CONF.volume_feature_enabled.api_extensions:
|
||||
raise self.skipException('There are not any extensions configured')
|
||||
extension_list = [extension.get('alias') for extension in extensions]
|
||||
LOG.debug("Cinder extensions: %s", ','.join(extension_list))
|
||||
|
|
|
@ -47,7 +47,7 @@ class VolumesListTestJSON(base.BaseVolumeTest):
|
|||
fetched_list = [fieldsgetter(item) for item in fetched_list]
|
||||
|
||||
missing_vols = [v for v in expected_list if v not in fetched_list]
|
||||
if len(missing_vols) == 0:
|
||||
if not missing_vols:
|
||||
return
|
||||
|
||||
def str_vol(vol):
|
||||
|
|
|
@ -105,7 +105,7 @@ class BaseService(object):
|
|||
|
||||
def _filter_by_tenant_id(self, item_list):
|
||||
if (item_list is None
|
||||
or len(item_list) == 0
|
||||
or not item_list
|
||||
or not hasattr(self, 'tenant_id')
|
||||
or self.tenant_id is None
|
||||
or 'tenant_id' not in item_list[0]):
|
||||
|
|
|
@ -258,7 +258,7 @@ class _WebSocket(object):
|
|||
while True:
|
||||
header = self._socket.recv(2)
|
||||
# If we didn't receive any data, just return None
|
||||
if len(header) == 0:
|
||||
if not header:
|
||||
return None
|
||||
# We will make the assumption that we are only dealing with
|
||||
# frames less than 125 bytes here (for the negotiation) and
|
||||
|
@ -313,6 +313,6 @@ class _WebSocket(object):
|
|||
self._socket.sendall(reqdata.encode('utf8'))
|
||||
self.response = data = self._socket.recv(4096)
|
||||
# Loop through & concatenate all of the data in the response body
|
||||
while len(data) > 0 and self.response.find(b'\r\n\r\n') < 0:
|
||||
while data and self.response.find(b'\r\n\r\n') < 0:
|
||||
data = self._socket.recv(4096)
|
||||
self.response += data
|
||||
|
|
|
@ -241,7 +241,7 @@ class PreProvisionedCredentialProvider(cred_provider.CredentialProvider):
|
|||
|
||||
def _get_creds(self, roles=None):
|
||||
useable_hashes = self._get_match_hash_list(roles)
|
||||
if len(useable_hashes) == 0:
|
||||
if not useable_hashes:
|
||||
msg = 'No users configured for type/roles %s' % roles
|
||||
raise lib_exc.InvalidCredentials(msg)
|
||||
free_hash = self._get_free_hash(useable_hashes)
|
||||
|
|
|
@ -539,18 +539,18 @@ class KeystoneV3AuthProvider(KeystoneAuthProvider):
|
|||
|
||||
# Select entries with matching service type
|
||||
service_catalog = [ep for ep in catalog if ep['type'] == service]
|
||||
if len(service_catalog) > 0:
|
||||
if service_catalog:
|
||||
if name is not None:
|
||||
service_catalog = (
|
||||
[ep for ep in service_catalog if ep['name'] == name])
|
||||
if len(service_catalog) > 0:
|
||||
if service_catalog:
|
||||
service_catalog = service_catalog[0]['endpoints']
|
||||
else:
|
||||
raise exceptions.EndpointNotFound(name)
|
||||
else:
|
||||
service_catalog = service_catalog[0]['endpoints']
|
||||
else:
|
||||
if len(catalog) == 0 and service == 'identity':
|
||||
if not catalog and service == 'identity':
|
||||
# NOTE(andreaf) If there's no catalog at all and the service
|
||||
# is identity, it's a valid use case. Having a non-empty
|
||||
# catalog with no identity in it is not valid instead.
|
||||
|
@ -571,13 +571,13 @@ class KeystoneV3AuthProvider(KeystoneAuthProvider):
|
|||
# Filter by endpoint type (interface)
|
||||
filtered_catalog = [ep for ep in service_catalog if
|
||||
ep['interface'] == endpoint_type]
|
||||
if len(filtered_catalog) == 0:
|
||||
if not filtered_catalog:
|
||||
# No matching type, keep all and try matching by region at least
|
||||
filtered_catalog = service_catalog
|
||||
# Filter by region
|
||||
filtered_catalog = [ep for ep in filtered_catalog if
|
||||
ep['region'] == region]
|
||||
if len(filtered_catalog) == 0:
|
||||
if not filtered_catalog:
|
||||
# No matching region (or name), take the first endpoint
|
||||
filtered_catalog = [service_catalog[0]]
|
||||
# There should be only one match. If not take the first.
|
||||
|
|
|
@ -114,7 +114,7 @@ def tables(output_lines):
|
|||
label = line
|
||||
else:
|
||||
LOG.warning('Invalid line between tables: %s', line)
|
||||
if len(table_) > 0:
|
||||
if table_:
|
||||
LOG.warning('Missing end of table')
|
||||
|
||||
return tables_
|
||||
|
|
|
@ -32,7 +32,7 @@ class TempestException(Exception):
|
|||
except Exception:
|
||||
# at least get the core message out if something happened
|
||||
self._error_string = self.message
|
||||
if len(args) > 0:
|
||||
if args:
|
||||
# If there is a non-kwarg parameter, assume it's the error
|
||||
# message or reason description and tack it on to the end
|
||||
# of the exception message
|
||||
|
|
|
@ -118,7 +118,7 @@ class ImagesClient(rest_client.RestClient):
|
|||
if 'changes_since' in kwargs:
|
||||
kwargs['changes-since'] = kwargs.pop('changes_since')
|
||||
|
||||
if len(kwargs) > 0:
|
||||
if kwargs:
|
||||
url += '?%s' % urllib.urlencode(kwargs)
|
||||
|
||||
resp, body = self.get(url)
|
||||
|
|
|
@ -139,7 +139,7 @@ def is_extension_enabled(extension_name, service):
|
|||
'object': CONF.object_storage_feature_enabled.discoverable_apis,
|
||||
'identity': CONF.identity_feature_enabled.api_extensions
|
||||
}
|
||||
if len(config_dict[service]) == 0:
|
||||
if not config_dict[service]:
|
||||
return False
|
||||
if config_dict[service][0] == 'all':
|
||||
return True
|
||||
|
@ -160,7 +160,7 @@ def is_scheduler_filter_enabled(filter_name):
|
|||
"""
|
||||
|
||||
filters = CONF.compute_feature_enabled.scheduler_available_filters
|
||||
if len(filters) == 0:
|
||||
if not filters:
|
||||
return False
|
||||
if 'all' in filters:
|
||||
return True
|
||||
|
@ -634,7 +634,7 @@ class BaseTestCase(testtools.testcase.WithAttributes,
|
|||
"""
|
||||
if msg is None:
|
||||
msg = "sequence or collection is not empty: %s" % items
|
||||
self.assertEqual(0, len(items), msg)
|
||||
self.assertFalse(items, msg)
|
||||
|
||||
def assertNotEmpty(self, items, msg=None):
|
||||
"""Asserts whether a sequence or collection is not empty
|
||||
|
@ -645,4 +645,4 @@ class BaseTestCase(testtools.testcase.WithAttributes,
|
|||
"""
|
||||
if msg is None:
|
||||
msg = "sequence or collection is empty."
|
||||
self.assertGreater(len(items), 0, msg)
|
||||
self.assertTrue(items, msg)
|
||||
|
|
Loading…
Reference in New Issue