Merge "Refactor removal of duplicate projects/domains"
This commit is contained in:
commit
78eae3d497
|
@ -120,6 +120,23 @@ def attr_as_boolean(val_attr):
|
|||
return strutils.bool_from_string(val_attr, default=True)
|
||||
|
||||
|
||||
def remove_duplicate_dicts_by_id(item_list):
    """Collapse a list of dicts so each ``id`` appears only once.

    Deduplication is keyed on the item ``id`` rather than on the full
    dictionary contents, since values such as a list of tags are not
    guaranteed to be in a stable order.

    When several items share an id, the last one in ``item_list`` wins;
    results keep the order in which each id was first seen.

    :param item_list: a list of dictionaries
    :returns: a list of unique dictionaries
    """
    deduped = {entry['id']: entry for entry in item_list}
    return list(deduped.values())
|
||||
|
||||
|
||||
def get_blob_from_credential(credential):
|
||||
try:
|
||||
blob = jsonutils.loads(credential.blob)
|
||||
|
|
|
@ -453,7 +453,7 @@ class DomainV3(controller.V3Controller):
|
|||
domains = domains + self.assignment_api.list_domains_for_user(
|
||||
request.auth_context['user_id'])
|
||||
# remove duplicates
|
||||
domains = [dict(t) for t in set([tuple(d.items()) for d in domains])]
|
||||
domains = k_utils.remove_duplicate_dicts_by_id(domains)
|
||||
return DomainV3.wrap_collection(request.context_dict, domains)
|
||||
|
||||
|
||||
|
@ -479,7 +479,7 @@ class ProjectAssignmentV3(controller.V3Controller):
|
|||
projects = projects + self.assignment_api.list_projects_for_user(
|
||||
request.auth_context['user_id'])
|
||||
# remove duplicates
|
||||
projects = [dict(t) for t in set([tuple(d.items()) for d in projects])]
|
||||
projects = k_utils.remove_duplicate_dicts_by_id(projects)
|
||||
return ProjectAssignmentV3.wrap_collection(request.context_dict,
|
||||
projects)
|
||||
|
||||
|
|
|
@ -71,6 +71,34 @@ class UtilsTestCase(unit.BaseTestCase):
|
|||
self.assertRaises(ValueError, common_utils.resource_uuid,
|
||||
value)
|
||||
|
||||
def test_remove_duplicate_dicts_from_list(self):
    """Dedup keeps the 10 unique items and one copy of the duplicate."""
    num_of_duplicate = 10
    dup_value = {
        'id': uuid.uuid4().hex,
        'name': uuid.uuid4().hex,
        'tags': ['foo', 'bar']
    }
    dict_list = []
    # Interleave 10 unique items with 10 copies of the same item.
    for idx in range(num_of_duplicate):
        dict_list.append({
            'id': idx,
            'name': uuid.uuid4().hex,
            'tags': ['foo', idx]
        })
        dict_list.append(dup_value)
    self.assertEqual(len(dict_list), 20)
    result = common_utils.remove_duplicate_dicts_by_id(dict_list)
    # Exactly the 9 redundant copies should have been dropped.
    self.assertEqual(len(result), len(dict_list) - 9)
    # The duplicated item survives exactly once.
    count = sum(1 for r in result if r['id'] == dup_value['id'])
    self.assertEqual(count, 1)
|
||||
|
||||
def test_hash(self):
|
||||
password = 'right'
|
||||
wrong = 'wrongwrong' # Two wrongs don't make a right
|
||||
|
|
Loading…
Reference in New Issue