Clean up redundant .keys() usage

Remove the redundant .keys() call from "dict_object.keys()" wherever it is
only used with the "in" operator: a membership test on a dict already
checks its keys, so "key in dict_object" is equivalent and avoids building
an intermediate key view.

Change-Id: I61b146858d8740cbeb3011a6ed109ecae8da5e4b
Authored by dharmendra on 2019-12-05 12:12:21 +00:00, committed by Yasufumi Ogawa
parent 4a2fd6c292
commit 0be03680e5
20 changed files with 47 additions and 51 deletions
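
To make the pattern concrete, here is a small illustrative snippet (hypothetical code, not taken from any of the files changed below): a membership test against a dict is equivalent to one against the view returned by .keys(), and it skips building that view; iterating a dict likewise yields its keys.

# Hypothetical before/after example of the idiom applied by this commit.
flavour = {'name': 'simple', 'instantiation_levels': {}}

# Before: .keys() creates a key view only to run a membership test on it.
if 'instantiation_levels' in flavour.keys():
    print('has levels')

# After: the "in" operator on a dict already checks its keys.
if 'instantiation_levels' in flavour:
    print('has levels')

# Iteration is analogous: "for key in d" walks the keys directly.
for key in flavour:
    print(key)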

View File

@@ -232,7 +232,7 @@ class PaginationEmulatedHelper(PaginationHelper):
 class PaginationNativeHelper(PaginationEmulatedHelper):
     def update_args(self, args):
-        if self.primary_key not in dict(args.get('sorts', [])).keys():
+        if self.primary_key not in dict(args.get('sorts', [])):
             args.setdefault('sorts', []).append((self.primary_key, True))
         args.update({'limit': self.limit, 'marker': self.marker,
                      'page_reverse': self.page_reverse})
@@ -269,7 +269,7 @@ class SortingEmulatedHelper(SortingHelper):
     def update_fields(self, original_fields, fields_to_add):
         if not original_fields:
             return
-        for key in dict(self.sort_dict).keys():
+        for key in dict(self.sort_dict):
             if key not in original_fields:
                 original_fields.append(key)
                 fields_to_add.append(key)

View File

@@ -326,7 +326,7 @@ class ExtensionMiddleware(wsgi.Middleware):
         """Return a dict of ActionExtensionController-s by collection."""
         action_controllers = {}
         for action in ext_mgr.get_actions():
-            if action.collection not in action_controllers.keys():
+            if action.collection not in action_controllers:
                 controller = ActionExtensionController(application)
                 mapper.connect("/%s/:(id)/action.:(format)" %
                                action.collection,
@@ -345,7 +345,7 @@ class ExtensionMiddleware(wsgi.Middleware):
         """Returns a dict of RequestExtensionController-s by collection."""
         request_ext_controllers = {}
         for req_ext in ext_mgr.get_request_extensions():
-            if req_ext.key not in request_ext_controllers.keys():
+            if req_ext.key not in request_ext_controllers:
                 controller = RequestExtensionController(application)
                 mapper.connect(req_ext.url_route + '.:(format)',
                                action='process',

View File

@@ -120,7 +120,7 @@ class Controller(object):
         to see them.
         """
         attributes_to_exclude = []
-        for attr_name in data.keys():
+        for attr_name in data:
             attr_data = self._attr_info.get(attr_name)
             if attr_data and attr_data['is_visible']:
                 if policy.check(

View File

@@ -30,7 +30,7 @@ class VnfpkgmAPIRouter(wsgi.Router):
     def _setup_route(self, mapper, url, methods, controller, default_resource):
         all_methods = ['HEAD', 'GET', 'POST', 'PUT', 'PATCH', 'DELETE']
-        missing_methods = [m for m in all_methods if m not in methods.keys()]
+        missing_methods = [m for m in all_methods if m not in methods]
         allowed_methods_str = ",".join(methods.keys())
         for method, action in methods.items():
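
Note that the hunk above drops .keys() only from the membership test; the ",".join(methods.keys()) call on the next line is left alone, because there the keys are consumed as an iterable rather than tested with "in". A short hedged sketch of that distinction, using made-up data rather than the router's real arguments:

# Hypothetical sketch: where .keys() is redundant and where it is harmless.
methods = {'GET': 'index', 'POST': 'create'}

# Membership test: .keys() adds nothing, "in" already checks the keys.
has_put = 'PUT' in methods

# Consuming the keys as an iterable: join() accepts either form, since
# iterating a dict yields its keys, so keeping .keys() here is stylistic.
allowed_methods_str = ",".join(methods.keys())
allowed_methods_str = ",".join(methods)

print(has_put, allowed_methods_str)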

View File

@@ -36,13 +36,13 @@ def _generate_associated_tables_map(inspector):
         for fk in fk_list:
             k = str(fk['referred_table'])
             v = str(fk['constrained_columns'][0])
-            if k not in assoc_map.keys():
+            if k not in assoc_map:
                 assoc_map[k] = {str(t): v}
             else:
                 assoc_map[k][str(t)] = v
     assoc_keys = assoc_map.keys()
     for k, v in assoc_map.items():
-        for k1 in v.keys():
+        for k1 in v:
             if k1 in assoc_keys:
                 del assoc_map[k][k1]
     return assoc_map
@@ -52,7 +52,7 @@ def _purge_resource_tables(t, meta, engine, time_line, assoc_map):
     table_load = sqlalchemy.Table(t, meta, autoload=True)
     table_del_query = table_load.delete().where(
         table_load.c.deleted_at <= time_line)
-    if t in assoc_map.keys():
+    if t in assoc_map:
         select_id_query = sqlalchemy.select([table_load.c.id]).where(
             table_load.c.deleted_at <= time_line)
         resource_ids = [i[0] for i in list(engine.execute(select_id_query))]
@@ -93,7 +93,7 @@ def purge_deleted(tacker_config, table_name, age, granularity='days'):
         msg = _("'%s' - age should be a positive integer") % age
         raise exceptions.InvalidInput(error_message=msg)
-    if granularity not in GRANULARITY.keys():
+    if granularity not in GRANULARITY:
         msg = _("'%s' granularity should be days, hours, minutes, "
                 "or seconds") % granularity
         raise exceptions.InvalidInput(error_message=msg)

View File

@@ -334,7 +334,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
             else:
                 raise nfvo.VnffgParamValueFormatError(
                     param_value=param_vattrs_dict)
-            for param_key in param_vattrs_dict.keys():
+            for param_key in param_vattrs_dict:
                 if param_matched.get(param_key) is None:
                     LOG.warning("Param input %s not used.", param_key)
@@ -480,7 +480,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                                                 nfp_name, 'path')
         # Build physical port chain
         for element in logical_chain:
-            if element['forwarder'] not in vnf_mapping.keys():
+            if element['forwarder'] not in vnf_mapping:
                 raise nfvo.NfpForwarderNotFoundException(vnfd=element[
                     'forwarder'],
                     mapping=vnf_mapping)
@@ -607,7 +607,7 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
             else:
                 # if no VNF mapping, we need to abstractly look for instances
                 # that match VNFD
-                if vnf_mapping is None or vnfd not in vnf_mapping.keys():
+                if vnf_mapping is None or vnfd not in vnf_mapping:
                     # find suitable VNFs from vnfd_id
                     LOG.debug('Searching VNFS with id %s', vnfd_id)
                     vnf_list = vnfm_plugin.get_vnfs(context,

View File

@@ -271,7 +271,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
         :param resource_name: name of resource to locate
         :return: ID of resource
         """
-        if resource_type in _VALID_RESOURCE_TYPES.keys():
+        if resource_type in _VALID_RESOURCE_TYPES:
             res_cmd_map = _VALID_RESOURCE_TYPES[resource_type]
             client_type = res_cmd_map['client']
             cmd = res_cmd_map['cmd']

View File

@@ -143,7 +143,7 @@ class WorkflowGenerator(workflow_generator.WorkflowGeneratorBase):
             delayed_tasks.append(wait_task)
         previous_task = None
-        for vnffg_name in vnffg_ids.keys():
+        for vnffg_name in vnffg_ids:
             task = 'delete_vnffg_%s' % vnffg_name
             if previous_task:
                 wait_tasks = delayed_tasks + [previous_task]
@@ -257,7 +257,7 @@ class WorkflowGenerator(workflow_generator.WorkflowGeneratorBase):
         ns_dict = {'vnfd_details': {}}
         vnf_ids = ast.literal_eval(ns['vnf_ids'])
         self.definition[self.wf_identifier]['input'] = []
-        for vnf in vnf_ids.keys():
+        for vnf in vnf_ids:
             vnf_key = 'vnf_id_' + vnf
             self.definition[self.wf_identifier]['input'].append(vnf_key)
             self.input_dict[vnf_key] = vnf_ids[vnf]
@@ -266,7 +266,7 @@ class WorkflowGenerator(workflow_generator.WorkflowGeneratorBase):
         vnffg_ids = ast.literal_eval(ns.get('vnffg_ids'))
         if len(vnffg_ids):
-            for vnffg_name in vnffg_ids.keys():
+            for vnffg_name in vnffg_ids:
                 self.definition[self.wf_identifier]['input'].append(vnffg_name)
                 self.input_dict[vnffg_name] = vnffg_ids[vnffg_name]
             ns_dict['vnffg_details'] = vnffg_ids

View File

@@ -764,7 +764,7 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
         vnfd_dict = dict()
         for node_name, node_val in \
                 (nsd_dict['topology_template']['node_templates']).items():
-            if node_val.get('type') not in vnfds.keys():
+            if node_val.get('type') not in vnfds:
                 continue
             vnfd_name = vnfds[node_val.get('type')]
             if not vnfd_dict.get(vnfd_name):

View File

@@ -149,11 +149,11 @@ class VnfDeploymentFlavour(base.TackerObject, base.TackerPersistentObject):
             updates['id'] = uuidutils.generate_uuid()
             self.id = updates['id']
-        if 'software_images' in updates.keys():
+        if 'software_images' in updates:
             updates.pop('software_images')
         special_key = 'instantiation_levels'
-        if special_key in updates.keys():
+        if special_key in updates:
             updates[special_key] = jsonutils.dumps(updates.get(special_key))
         db_flavour = _vnf_deployment_flavour_create(self._context, updates)

View File

@@ -460,7 +460,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
             self.id = updates['id']
         for key in ['vnf_deployment_flavours']:
-            if key in updates.keys():
+            if key in updates:
                 updates.pop(key)
         user_data = updates.pop('user_data', None)
@@ -487,7 +487,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
     def save(self):
         updates = self.tacker_obj_get_changes()
         for key in ['vnf_deployment_flavours']:
-            if key in updates.keys():
+            if key in updates:
                 updates.pop(key)
         db_vnf_package = _vnf_package_update(self._context,

View File

@@ -75,7 +75,7 @@ class VnfTestToscaCreate(base.BaseTackerTest):
         # Verify anti spoofing settings
         stack_id = vnf_show_out['instance_id']
         template_dict = tosca_dict['topology_template']['node_templates']
-        for field in template_dict.keys():
+        for field in template_dict:
             prop_dict = template_dict[field]['properties']
             if prop_dict.get('anti_spoofing_protection'):
                 self.verify_antispoofing_in_stack(stack_id=stack_id,

View File

@@ -72,7 +72,7 @@ class VnfBlockStorageTestToscaCreate(base.BaseTackerTest):
         # Verify anti spoofing settings
         stack_id = vnf_show_out['instance_id']
         template_dict = tosca_dict['topology_template']['node_templates']
-        for field in template_dict.keys():
+        for field in template_dict:
             prop_dict = template_dict[field]['properties']
             if prop_dict.get('anti_spoofing_protection'):
                 self.verify_antispoofing_in_stack(stack_id=stack_id,

View File

@@ -530,7 +530,7 @@ class TackerPolicyTestCase(base.BaseTestCase):
                              expected_rules):
         policy._set_rules(json.dumps(input_rules))
         # verify deprecated policy has been removed
-        for pol in input_rules.keys():
+        for pol in input_rules:
             self.assertNotIn(pol, common_policy._rules)
         # verify deprecated policy was correctly translated. Iterate
         # over items for compatibility with unittest2 in python 2.6

View File

@@ -91,22 +91,22 @@ class TestToscaUtils(testtools.TestCase):
                     nt.type_definition.is_derived_from(toscautils.PLACEMENT)):
                 invalidNodes += 1
-            if nt.type in toscautils.delpropmap.keys():
+            if nt.type in toscautils.delpropmap:
                 for prop in toscautils.delpropmap[nt.type]:
                     for p in nt.get_properties_objects():
                         if prop == p.name:
                             deletedProperties += 1
             if nt.type in toscautils.convert_prop_values:
-                for prop in toscautils.convert_prop_values[nt.type].keys():
+                for prop in toscautils.convert_prop_values[nt.type]:
                     convertmap = toscautils.convert_prop_values[nt.type][prop]
                     for p in nt.get_properties_objects():
                         if (prop == p.name and
-                                p.value in convertmap.keys()):
+                                p.value in convertmap):
                             convertedValues += 1
             if nt.type in toscautils.convert_prop:
-                for prop in toscautils.convert_prop[nt.type].keys():
+                for prop in toscautils.convert_prop[nt.type]:
                     for p in nt.get_properties_objects():
                         if prop == p.name:
                             convertedProperties += 1

View File

@@ -182,7 +182,7 @@ def get_vdu_applicationmonitoring(template):
     for node in node_list:
         nt = template[tpl_temp][n_temp][node]
         if nt['type'] == TACKERVDU:
-            if poly in nt['properties'].keys():
+            if poly in nt['properties']:
                 mon_policy = nt['properties'][poly]
                 if mon_policy != 'noop':
                     policy_dict['vdus'][node] = {}
@@ -951,7 +951,7 @@ def post_process_template(template):
             template.nodetemplates.remove(nt)
             continue
-        if nt.type in delpropmap.keys():
+        if nt.type in delpropmap:
             for prop in delpropmap[nt.type]:
                 for p in nt.get_properties_objects():
                     if prop == p.name:
@@ -959,16 +959,16 @@ def post_process_template(template):
         # change the property value first before the property key
         if nt.type in convert_prop_values:
-            for prop in convert_prop_values[nt.type].keys():
+            for prop in convert_prop_values[nt.type]:
                 for p in nt.get_properties_objects():
                     if (prop == p.name and
                             p.value in
-                            convert_prop_values[nt.type][prop].keys()):
+                            convert_prop_values[nt.type][prop]):
                         v = convert_prop_values[nt.type][prop][p.value]
                         p.value = v
         if nt.type in convert_prop:
-            for prop in convert_prop[nt.type].keys():
+            for prop in convert_prop[nt.type]:
                 for p in nt.get_properties_objects():
                     if prop == p.name:
                         schema_dict = {'type': p.type}
@@ -1128,9 +1128,9 @@ def get_image_dict(template):
             continue
         artifacts = vdu.entity_tpl["artifacts"]
         for name, artifact in (artifacts).items():
-            if ('type' in artifact.keys() and
+            if ('type' in artifact and
                     artifact["type"] == IMAGE):
-                if 'file' not in artifact.keys():
+                if 'file' not in artifact:
                     raise vnfm.FilePathMissing()
                 image_dict[vdu.name] = {
                     "location": artifact["file"],

View File

@@ -171,7 +171,7 @@ class Parser(object):
         memory_real_value = 0
         # Translate memory's byte size based on SCALAR_UNIT_DICT
-        if memory_unit in SCALAR_UNIT_DICT.keys():
+        if memory_unit in SCALAR_UNIT_DICT:
             memory_real_value = \
                 int(memory_value) * SCALAR_UNIT_DICT[memory_unit]
         return memory_real_value

View File

@@ -163,13 +163,13 @@ class VNFMonitor(object):
         vnf_delay = hosting_vnf['monitoring_policy'].get(
             'monitoring_delay', self.boot_wait)
-        for vdu in vdupolicies.keys():
+        for vdu in vdupolicies:
             if hosting_vnf.get('dead') or (
                     hosting_vnf['vnf']['status']) == constants.PENDING_HEAL:
                 return
             policy = vdupolicies[vdu]
-            for driver in policy.keys():
+            for driver in policy:
                 params = policy[driver].get('monitoring_params', {})
                 vdu_delay = params.get('monitoring_delay', vnf_delay)

View File

@@ -54,7 +54,6 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
         return plugin_descript
     def monitor_get_config(self, plugin, context, vnf):
         """Return dict of monitor configuration data.
         :param plugin:
@@ -198,7 +197,7 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
             temp_vdu_name = self.hostinfo[vdu]['appinfo']['app_name']
             temp_vdu_port = self.hostinfo[vdu]['appinfo']['app_port']
             for para in VNFMonitorZabbix.params:
-                for item in self.hostinfo[vdu]['parameters'][para].keys():
+                for item in self.hostinfo[vdu]['parameters'][para]:
                     action_list = copy.deepcopy(zapi.dACTION_LIST)
                     temp_item = self.hostinfo[vdu]['parameters'][para][item]
@@ -208,7 +207,7 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
                             and ('ssh_password' != item):
                         if 'condition' \
-                                in temp_item.keys():
+                                in temp_item:
                             temp_con = temp_item['condition']
                             if len(temp_con) == 2:
@@ -239,9 +238,7 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
                                     '*', str(temp_vdu_port))) \
                                     + str(
                                     zapi.COMP_VALUE[temp_comparrision])
-                                if 'actionname' in \
-                                        temp_item.keys():
+                                if 'actionname' in temp_item:
                                     trig_act_pa.append(temp_trigger_list[item][0])
                                     response = self.create_trigger(trig_act_pa, vdu)
                                     del trig_act_pa[:]
@@ -280,12 +277,11 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
             for para in VNFMonitorZabbix.params:
                 if 'application' == para:
-                    for app_info in \
-                            temp_app.keys():
+                    for app_info in temp_app:
                         self.hostinfo[vdu]['appinfo'][app_info] = \
                             temp_app[app_info]
-                for item in (self.hostinfo[vdu]['parameters'][para].keys()):
+                for item in self.hostinfo[vdu]['parameters'][para]:
                     if ('app_name' != item) and ('app_port' != item) \
                             and ('ssh_username' != item) \
                             and ('ssh_password' != item):
@@ -392,9 +388,9 @@ class VNFMonitorZabbix(abstract_driver.VNFMonitorAbstractDriver):
         temp_vduname = self.kwargs['vdus'].keys()
         for node in temp_vduname:
             if 'application' in \
-                    self.kwargs['vdus'][node]['parameters'].keys() \
+                    self.kwargs['vdus'][node]['parameters'] \
                     and 'OS'\
-                    in self.kwargs['vdus'][node]['parameters'].keys():
+                    in self.kwargs['vdus'][node]['parameters']:
                 self.vduname.append(node)
                 self.hostinfo[node] = copy.deepcopy(zapi.dVDU_INFO)
                 self.set_zbx_info(node)

View File

@@ -702,7 +702,7 @@ class VNFMPlugin(vnfm_db.VNFMPluginDb, VNFMMgmtMixin):
         def _validate_scaling_policy():
             type = policy['type']
-            if type not in constants.POLICY_ACTIONS.keys():
+            if type not in constants.POLICY_ACTIONS:
                 raise exceptions.VnfPolicyTypeInvalid(
                     type=type,
                     valid_types=constants.POLICY_ACTIONS.keys(),