Use xml.dom.minidom and xmlutils in unit tests

This is an example of using XML parsing in existing unit tests,
instead of assertIn and string comparisons, to verify workflow
generation. It's much less fragile.

If folks like this, we can change other tests similarly.

Change-Id: Ib1c266933a4a3104af34586527148c2612019000
Trevor McKay 2014-12-12 11:13:29 -05:00
parent 3e1fbe3f48
commit a12701cf32
3 changed files with 62 additions and 47 deletions
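
The idea, in brief: instead of asserting that a raw substring of the
generated workflow XML appears in the output, the tests parse the document
and compare extracted values, so whitespace and element ordering no longer
matter. A minimal standalone sketch of the pattern (the sample XML here is
illustrative, not an actual test fixture):

    import xml.dom.minidom as xml

    res = "<hive><script>script.q</script></hive>"

    # Fragile: sensitive to whitespace, indentation and element order.
    assert "<script>script.q</script>" in res

    # Robust: compare the parsed value regardless of formatting.
    doc = xml.parseString(res)
    script = doc.getElementsByTagName('script')[0]
    assert script.firstChild.nodeValue == 'script.q'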

View File

@@ -30,8 +30,8 @@ def load_properties_file(path):
 def load_xml_file(path):
-    kv_mapper = lambda i: (x._get_text_from_node(i, 'name'),
-                           x._adjust_field(x._get_text_from_node(i, 'value')))
+    kv_mapper = lambda i: (x.get_text_from_node(i, 'name'),
+                           x._adjust_field(x.get_text_from_node(i, 'value')))
     strip_mapper = lambda i: (i[0].strip(), i[1].strip())
     props = x.load_xml_document(path).getElementsByTagName('property')
     return dict(map(strip_mapper, map(kv_mapper, props)))
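
Aside: load_xml_file flattens Hadoop-style property XML into a dict. A
self-contained sketch of the same mapping using plain minidom, with an
illustrative sample document:

    import xml.dom.minidom as xml

    sample = ("<configuration>"
              "<property><name> a.key </name><value> a.value </value></property>"
              "</configuration>")

    doc = xml.parseString(sample)
    result = {}
    for prop in doc.getElementsByTagName('property'):
        name = prop.getElementsByTagName('name')[0].firstChild.nodeValue
        value = prop.getElementsByTagName('value')[0].firstChild.nodeValue
        result[name.strip()] = value.strip()
    print(result)  # {'a.key': 'a.value'}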

View File

@@ -14,6 +14,7 @@
 # limitations under the License.

 import copy
+import xml.dom.minidom as xml

 import mock
 import testtools
@@ -29,7 +30,7 @@ from sahara.tests.unit import base
 from sahara.tests.unit.service.edp import edp_test_utils as u
 from sahara.utils import edp
 from sahara.utils import patches as p
+from sahara.utils import xmlutils

 conductor = cond.API
@@ -399,21 +400,23 @@ class TestJobManager(base.SaharaWithDbTestCase):
             job, u.create_cluster(), job_exec, input_data, output_data,
             'hadoop')
-        self.assertIn("""
-      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
-      <configuration>
-        <property>
-          <name>fs.swift.service.sahara.password</name>
-          <value>admin1</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.username</name>
-          <value>admin</value>
-        </property>
-      </configuration>
-      <script>script.q</script>
-      <param>INPUT=swift://ex.sahara/i</param>
-      <param>OUTPUT=swift://ex.sahara/o</param>""", res)
+        doc = xml.parseString(res)
+        hive = doc.getElementsByTagName('hive')[0]
+        self.assertEqual(xmlutils.get_text_from_node(hive, 'job-xml'),
+                         '/user/hadoop/conf/hive-site.xml')
+        configuration = hive.getElementsByTagName('configuration')
+        properties = xmlutils.get_property_dict(configuration[0])
+        self.assertEqual({'fs.swift.service.sahara.password': 'admin1',
+                          'fs.swift.service.sahara.username': 'admin'},
+                         properties)
+        self.assertEqual(xmlutils.get_text_from_node(hive, 'script'),
+                         'script.q')
+        params = xmlutils.get_param_dict(hive)
+        self.assertEqual({'INPUT': 'swift://ex.sahara/i',
+                          'OUTPUT': 'swift://ex.sahara/o'}, params)

         # testing workflow creation with a proxy domain
         self.override_config('use_domain_for_proxy_users', True)
@@ -425,29 +428,22 @@ class TestJobManager(base.SaharaWithDbTestCase):
             job, u.create_cluster(), job_exec, input_data, output_data,
             'hadoop')
-        self.assertIn("""
-      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
-      <configuration>
-        <property>
-          <name>fs.swift.service.sahara.domain.name</name>
-          <value>sahara_proxy_domain</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.password</name>
-          <value>55555555-6666-7777-8888-999999999999</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.trust.id</name>
-          <value>0123456789abcdef0123456789abcdef</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.username</name>
-          <value>job_00000000-1111-2222-3333-4444444444444444</value>
-        </property>
-      </configuration>
-      <script>script.q</script>
-      <param>INPUT=swift://ex.sahara/i</param>
-      <param>OUTPUT=swift://ex.sahara/o</param>""", res)
+        doc = xml.parseString(res)
+        hive = doc.getElementsByTagName('hive')[0]
+        configuration = hive.getElementsByTagName('configuration')
+        properties = xmlutils.get_property_dict(configuration[0])
+        self.assertEqual({
+            'fs.swift.service.sahara.domain.name':
+                'sahara_proxy_domain',
+            'fs.swift.service.sahara.trust.id':
+                '0123456789abcdef0123456789abcdef',
+            'fs.swift.service.sahara.password':
+                '55555555-6666-7777-8888-999999999999',
+            'fs.swift.service.sahara.username':
+                'job_00000000-1111-2222-3333-4444444444444444'}, properties)
     def test_update_job_dict(self):
         w = workflow_factory.BaseFactory()
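
The param assertions rely on Oozie's <param>NAME=value</param> convention;
a quick sketch of the extraction these tests now lean on (the sample
document is illustrative):

    import xml.dom.minidom as xml

    doc = xml.parseString("<hive>"
                          "<param>INPUT=swift://ex.sahara/i</param>"
                          "<param>OUTPUT=swift://ex.sahara/o</param>"
                          "</hive>")
    params = {}
    for param in doc.getElementsByTagName('param'):
        # split('=', 1) tolerates '=' inside the value; the helper added in
        # this patch uses a bare split('='), which assumes values have no '='.
        k, v = param.firstChild.nodeValue.split('=', 1)
        params[k] = v
    print(params)
    # {'INPUT': 'swift://ex.sahara/i', 'OUTPUT': 'swift://ex.sahara/o'}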

View File

@@ -29,10 +29,10 @@ def load_hadoop_xml_defaults(file_name):
     prop = doc.getElementsByTagName('property')
     for elements in prop:
         configs.append({
-            "name": _get_text_from_node(elements, 'name'),
-            "value": _adjust_field(_get_text_from_node(elements, 'value')),
+            "name": get_text_from_node(elements, 'name'),
+            "value": _adjust_field(get_text_from_node(elements, 'value')),
             "description": _adjust_field(
-                _get_text_from_node(elements, 'description'))
+                get_text_from_node(elements, 'description'))
         })
     return configs
@@ -43,8 +43,8 @@ def parse_hadoop_xml_with_name_and_value(data):
     prop = doc.getElementsByTagName('property')
     for elements in prop:
         configs.append({
-            'name': _get_text_from_node(elements, 'name'),
-            'value': _get_text_from_node(elements, 'value')
+            'name': get_text_from_node(elements, 'name'),
+            'value': get_text_from_node(elements, 'value')
         })
     return configs
@@ -92,7 +92,7 @@ def load_xml_document(file_name, strip=False):
     return xml.parse(fname)


-def _get_text_from_node(element, name):
+def get_text_from_node(element, name):
     element = element.getElementsByTagName(name) if element else None
     return element[0].firstChild.nodeValue if (
         element and element[0].hasChildNodes()) else ''
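
For reference, the behaviour being made public here: get_text_from_node
returns the text of the first matching child element, or '' when that
element is missing or empty. A quick self-contained check (the helper is
copied inline purely for illustration):

    import xml.dom.minidom as xml

    # Inline copy of the helper above, for a runnable example.
    def get_text_from_node(element, name):
        element = element.getElementsByTagName(name) if element else None
        return element[0].firstChild.nodeValue if (
            element and element[0].hasChildNodes()) else ''

    prop = xml.parseString(
        "<property><name>k</name><value/></property>").documentElement
    assert get_text_from_node(prop, 'name') == 'k'
    assert get_text_from_node(prop, 'value') == ''    # empty element
    assert get_text_from_node(prop, 'missing') == ''  # absent element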
@@ -154,3 +154,22 @@ def add_equal_separated_dict(doc, parent_tag, each_elem_tag, value):
 def add_tagged_list(doc, parent_tag, each_elem_tag, values):
     for v in values:
         add_text_element_to_tag(doc, parent_tag, each_elem_tag, v)
+
+
+def get_property_dict(elem):
+    res = {}
+    properties = elem.getElementsByTagName('property')
+    for prop in properties:
+        k = get_text_from_node(prop, 'name')
+        v = get_text_from_node(prop, 'value')
+        res[k] = v
+    return res
+
+
+def get_param_dict(elem):
+    res = {}
+    params = elem.getElementsByTagName('param')
+    for param in params:
+        k, v = param.firstChild.nodeValue.split('=')
+        res[k] = v
+    return res
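
Taken together, the new helpers make structural assertions one-liners. A
hypothetical end-to-end use (the sample workflow XML is illustrative):

    import xml.dom.minidom as xml

    res = ("<hive>"
           "<configuration>"
           "<property><name>a.key</name><value>a.value</value></property>"
           "</configuration>"
           "<param>INPUT=swift://ex.sahara/i</param>"
           "</hive>")
    hive = xml.parseString(res).getElementsByTagName('hive')[0]
    # With the helpers defined above:
    #   get_property_dict(hive.getElementsByTagName('configuration')[0])
    #       -> {'a.key': 'a.value'}
    #   get_param_dict(hive)
    #       -> {'INPUT': 'swift://ex.sahara/i'}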