diff --git a/sahara/plugins/mapr/util/config_file_utils.py b/sahara/plugins/mapr/util/config_file_utils.py
index 53d86c19..a88c4112 100644
--- a/sahara/plugins/mapr/util/config_file_utils.py
+++ b/sahara/plugins/mapr/util/config_file_utils.py
@@ -30,8 +30,8 @@ def load_properties_file(path):
 
 
 def load_xml_file(path):
-    kv_mapper = lambda i: (x._get_text_from_node(i, 'name'),
-                           x._adjust_field(x._get_text_from_node(i, 'value')))
+    kv_mapper = lambda i: (x.get_text_from_node(i, 'name'),
+                           x._adjust_field(x.get_text_from_node(i, 'value')))
     strip_mapper = lambda i: (i[0].strip(), i[1].strip())
     props = x.load_xml_document(path).getElementsByTagName('property')
     return dict(map(strip_mapper, map(kv_mapper, props)))
diff --git a/sahara/tests/unit/service/edp/test_job_manager.py b/sahara/tests/unit/service/edp/test_job_manager.py
index 375f9905..5f21a849 100644
--- a/sahara/tests/unit/service/edp/test_job_manager.py
+++ b/sahara/tests/unit/service/edp/test_job_manager.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 import copy
+import xml.dom.minidom as xml
 
 import mock
 import testtools
@@ -29,7 +30,7 @@ from sahara.tests.unit import base
 from sahara.tests.unit.service.edp import edp_test_utils as u
 from sahara.utils import edp
 from sahara.utils import patches as p
-
+from sahara.utils import xmlutils
 
 conductor = cond.API
 
@@ -399,21 +400,23 @@ class TestJobManager(base.SaharaWithDbTestCase):
             job, u.create_cluster(), job_exec, input_data, output_data,
             'hadoop')
 
-        self.assertIn("""
-      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
-      <configuration>
-        <property>
-          <name>fs.swift.service.sahara.password</name>
-          <value>admin1</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.username</name>
-          <value>admin</value>
-        </property>
-      </configuration>
-      <script>script.q</script>
-      <param>INPUT=swift://ex.sahara/i</param>
-      <param>OUTPUT=swift://ex.sahara/o</param>""", res)
+        doc = xml.parseString(res)
+        hive = doc.getElementsByTagName('hive')[0]
+        self.assertEqual(xmlutils.get_text_from_node(hive, 'job-xml'),
+                         '/user/hadoop/conf/hive-site.xml')
+
+        configuration = hive.getElementsByTagName('configuration')
+        properties = xmlutils.get_property_dict(configuration[0])
+        self.assertEqual({'fs.swift.service.sahara.password': 'admin1',
+                          'fs.swift.service.sahara.username': 'admin'},
+                         properties)
+
+        self.assertEqual(xmlutils.get_text_from_node(hive, 'script'),
+                         'script.q')
+
+        params = xmlutils.get_param_dict(hive)
+        self.assertEqual({'INPUT': 'swift://ex.sahara/i',
+                          'OUTPUT': 'swift://ex.sahara/o'}, params)
 
         # testing workflow creation with a proxy domain
         self.override_config('use_domain_for_proxy_users', True)
@@ -425,29 +428,22 @@ class TestJobManager(base.SaharaWithDbTestCase):
             job, u.create_cluster(), job_exec, input_data, output_data,
             'hadoop')
 
-        self.assertIn("""
-      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
-      <configuration>
-        <property>
-          <name>fs.swift.service.sahara.domain.name</name>
-          <value>sahara_proxy_domain</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.password</name>
-          <value>55555555-6666-7777-8888-999999999999</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.trust.id</name>
-          <value>0123456789abcdef0123456789abcdef</value>
-        </property>
-        <property>
-          <name>fs.swift.service.sahara.username</name>
-          <value>job_00000000-1111-2222-3333-4444444444444444</value>
-        </property>
-      </configuration>
-      <script>script.q</script>
-      <param>INPUT=swift://ex.sahara/i</param>
-      <param>OUTPUT=swift://ex.sahara/o</param>""", res)
+        doc = xml.parseString(res)
+        hive = doc.getElementsByTagName('hive')[0]
+        configuration = hive.getElementsByTagName('configuration')
+        properties = xmlutils.get_property_dict(configuration[0])
+        self.assertEqual({
+            'fs.swift.service.sahara.domain.name':
+            'sahara_proxy_domain',
+
+            'fs.swift.service.sahara.trust.id':
+            '0123456789abcdef0123456789abcdef',
+
+            'fs.swift.service.sahara.password':
+            '55555555-6666-7777-8888-999999999999',
+
+            'fs.swift.service.sahara.username':
+            'job_00000000-1111-2222-3333-4444444444444444'}, properties)
 
     def test_update_job_dict(self):
         w = workflow_factory.BaseFactory()
diff --git a/sahara/utils/xmlutils.py b/sahara/utils/xmlutils.py
index d4522e97..4e8a415b 100644
--- a/sahara/utils/xmlutils.py
+++ b/sahara/utils/xmlutils.py
@@ -29,10 +29,10 @@ def load_hadoop_xml_defaults(file_name):
     prop = doc.getElementsByTagName('property')
     for elements in prop:
         configs.append({
-            "name": _get_text_from_node(elements, 'name'),
-            "value": _adjust_field(_get_text_from_node(elements, 'value')),
+            "name": get_text_from_node(elements, 'name'),
+            "value": _adjust_field(get_text_from_node(elements, 'value')),
             "description": _adjust_field(
-                _get_text_from_node(elements, 'description'))
+                get_text_from_node(elements, 'description'))
         })
     return configs
 
@@ -43,8 +43,8 @@ def parse_hadoop_xml_with_name_and_value(data):
     prop = doc.getElementsByTagName('property')
     for elements in prop:
         configs.append({
-            'name': _get_text_from_node(elements, 'name'),
-            'value': _get_text_from_node(elements, 'value')
+            'name': get_text_from_node(elements, 'name'),
+            'value': get_text_from_node(elements, 'value')
         })
     return configs
 
@@ -92,7 +92,7 @@ def load_xml_document(file_name, strip=False):
     return xml.parse(fname)
 
 
-def _get_text_from_node(element, name):
+def get_text_from_node(element, name):
     element = element.getElementsByTagName(name) if element else None
     return element[0].firstChild.nodeValue if (
         element and element[0].hasChildNodes()) else ''
@@ -154,3 +154,22 @@ def add_equal_separated_dict(doc, parent_tag, each_elem_tag, value):
 def add_tagged_list(doc, parent_tag, each_elem_tag, values):
     for v in values:
         add_text_element_to_tag(doc, parent_tag, each_elem_tag, v)
+
+
+def get_property_dict(elem):
+    res = {}
+    properties = elem.getElementsByTagName('property')
+    for prop in properties:
+        k = get_text_from_node(prop, 'name')
+        v = get_text_from_node(prop, 'value')
+        res[k] = v
+    return res
+
+
+def get_param_dict(elem):
+    res = {}
+    params = elem.getElementsByTagName('param')
+    for param in params:
+        k, v = param.firstChild.nodeValue.split('=')
+        res[k] = v
+    return res
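
Usage note (a minimal standalone sketch, not part of the patch): with this change
applied, the new xmlutils helpers can be exercised directly. The <hive> snippet
below is illustrative only and is not taken from the patch or from real Oozie
output.

    import xml.dom.minidom as xml

    from sahara.utils import xmlutils

    SAMPLE = """<hive>
      <job-xml>/user/hadoop/conf/hive-site.xml</job-xml>
      <configuration>
        <property>
          <name>fs.swift.service.sahara.username</name>
          <value>admin</value>
        </property>
      </configuration>
      <script>script.q</script>
      <param>INPUT=swift://ex.sahara/i</param>
    </hive>"""

    hive = xml.parseString(SAMPLE).getElementsByTagName('hive')[0]

    # get_text_from_node reads the text child of the first matching tag.
    assert xmlutils.get_text_from_node(hive, 'script') == 'script.q'

    # get_property_dict flattens <property><name>/<value> pairs into a dict,
    # so the tests above no longer depend on property order in the rendered XML.
    configuration = hive.getElementsByTagName('configuration')[0]
    assert xmlutils.get_property_dict(configuration) == {
        'fs.swift.service.sahara.username': 'admin'}

    # get_param_dict splits each <param>KEY=VALUE</param> element on '='.
    assert xmlutils.get_param_dict(hive) == {'INPUT': 'swift://ex.sahara/i'}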