Avoid duplicate keys in default_data

Change-Id: Id31fc589266d3a9a22f0e3f6a48cad8dd3130c87
Ilya Shakhat
2015-03-27 13:44:51 +03:00
parent 0c4e735e58
commit 6030b8c7fc
2 changed files with 25 additions and 3 deletions


@@ -6639,9 +6639,7 @@
         {
             "company_name": "Mirantis",
             "end_date": "2014-Dec-31"
-        }
-    ],
-    "companies": [
+        },
         {
             "company_name": "*independent",
             "end_date": null


@@ -21,6 +21,17 @@ import six
 import testtools
 
 
+def dict_raise_on_duplicates(ordered_pairs):
+    """Reject duplicate keys."""
+    d = {}
+    for k, v in ordered_pairs:
+        if k in d:
+            raise ValueError("duplicate key: %s (value: %s)" % (k, v))
+        else:
+            d[k] = v
+    return d
+
+
 class TestConfigFiles(testtools.TestCase):
     def setUp(self):
         super(TestConfigFiles, self).setUp()
@@ -55,6 +66,19 @@ class TestConfigFiles(testtools.TestCase):
         schema = self._read_file('etc/corrections.schema.json')
         jsonschema.validate(corrections, schema)
 
+    def _verify_default_data_duplicate_keys(self, file_name):
+        try:
+            json.loads(self._read_raw_file(file_name),
+                       object_pairs_hook=dict_raise_on_duplicates)
+        except ValueError as ve:
+            self.fail(ve)
+
+    def test_default_data_duplicate_keys(self):
+        self._verify_default_data_duplicate_keys('etc/default_data.json')
+
+    def test_test_default_data_duplicate_keys(self):
+        self._verify_default_data_duplicate_keys('etc/test_default_data.json')
+
     def _verify_default_data_by_schema(self, file_name):
         default_data = self._read_file(file_name)
         schema = self._read_file('etc/default_data.schema.json')
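
For reference, a minimal standalone sketch (not part of the commit) of how json.loads with object_pairs_hook rejects duplicate keys, adapted from the hook added in the test above; the sample JSON string is illustrative only:

import json


def dict_raise_on_duplicates(ordered_pairs):
    """Reject duplicate keys instead of silently keeping the last value."""
    d = {}
    for k, v in ordered_pairs:
        if k in d:
            raise ValueError("duplicate key: %s (value: %s)" % (k, v))
        d[k] = v
    return d


# json.loads passes each parsed object's (key, value) pairs to the hook,
# so a repeated key (like the duplicated "companies" fixed above) is
# reported during parsing rather than overwriting the earlier value.
try:
    json.loads('{"companies": [1], "companies": [2]}',
               object_pairs_hook=dict_raise_on_duplicates)
except ValueError as ve:
    print(ve)  # duplicate key: companies (value: [2])

In the test this ValueError is converted into a test failure via self.fail(ve), so a default_data file with a repeated key fails CI instead of silently losing data.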