Merge "Parallel testing EDP jobs"
commit 4d3d9cfa9f
@@ -19,7 +19,6 @@ import time
 import uuid
 
 import fixtures
-from oslo.utils import excutils
 import six
 
 from sahara.swift import swift_helper as sw
@@ -103,41 +102,54 @@ class EDPTest(base.ITestCase):
         self.edp_info = EDPJobInfo()
 
     def _create_data_source(self, name, data_type, url, description=''):
-        return self.sahara.data_sources.create(
+        source_id = self.sahara.data_sources.create(
             name, description, data_type, url, self.common_config.OS_USERNAME,
             self.common_config.OS_PASSWORD).id
+        self.addCleanup(self.sahara.data_sources.delete, source_id)
+        return source_id
 
     def _create_job_binary_internals(self, name, data):
-        return self.sahara.job_binary_internals.create(name, data).id
+        job_binary_id = self.sahara.job_binary_internals.create(name, data).id
+        self.addCleanup(self.sahara.job_binary_internals.delete, job_binary_id)
+        return job_binary_id
 
     def _create_job_binary(self, name, url, extra=None, description=None):
-        return self.sahara.job_binaries.create(
+        job_binary_id = self.sahara.job_binaries.create(
             name, url, description or '', extra or {}).id
+        self.addCleanup(self.sahara.job_binaries.delete, job_binary_id)
+        return job_binary_id
 
     def _create_job(self, name, job_type, mains, libs):
-        return self.sahara.jobs.create(name, job_type, mains, libs,
+        job_id = self.sahara.jobs.create(name, job_type, mains, libs,
                                        description='').id
+        self.addCleanup(self.sahara.jobs.delete, job_id)
+        return job_id
 
-    def _await_job_execution(self, job):
-        timeout = self.common_config.JOB_LAUNCH_TIMEOUT * 60
-        status = self.sahara.job_executions.get(job.id).info['status']
+    def _get_job_status(self, job_id):
+        return self.sahara.job_executions.get(job_id).info['status']
+
+    def poll_jobs_status(self, job_ids):
+        timeout = self.common_config.JOB_LAUNCH_TIMEOUT * 60 * len(job_ids)
         try:
             with fixtures.Timeout(timeout, gentle=True):
-                while status != edp.JOB_STATUS_SUCCEEDED:
-                    if status == edp.JOB_STATUS_KILLED:
-                        self.fail("Job status == '{0}'.".format(
-                            edp.JOB_STATUS_KILLED))
-
-                    time.sleep(10)
-                    status = self.sahara.job_executions.get(
-                        job.id).info['status']
+                success = False
+                while not success:
+                    success = True
+                    for job_id in job_ids:
+                        status = self._get_job_status(job_id)
+                        if status in [edp.JOB_STATUS_FAILED,
+                                      edp.JOB_STATUS_KILLED,
+                                      edp.JOB_STATUS_DONEWITHERROR]:
+                            self.fail(
+                                'Job status "%s" \'%s\'.' % (job_id, status))
+                        if status != edp.JOB_STATUS_SUCCEEDED:
+                            success = False
+
+                    time.sleep(5)
         except fixtures.TimeoutException:
             self.fail(
-                "Job did not return to '{0}' status within {1:d} minute(s)."
-                .format(edp.JOB_STATUS_SUCCEEDED,
-                        self.common_config.JOB_LAUNCH_TIMEOUT)
-            )
+                "Jobs did not return to '{0}' status within {1:d} minute(s)."
+                .format(edp.JOB_STATUS_SUCCEEDED, timeout / 60))
 
     def _create_job_binaries(self, job_data_list, job_binary_internal_list,
                              job_binary_list, swift_connection=None,
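
The core of the change is poll_jobs_status: instead of blocking on one execution at a time, callers launch everything first and then watch the whole batch, with the timeout budget scaled by the batch size and any terminal state (failed, killed, done-with-error) aborting the run immediately. Below is a minimal standalone sketch of the same pattern; the stubbed status lookup, poll_batch, and the bare status strings are illustrative assumptions, not sahara's API.

    import time

    import fixtures  # the same library the tests use for the deadline

    SUCCEEDED = 'SUCCEEDED'  # stand-ins for edp.JOB_STATUS_*
    FAILED_STATES = ('FAILED', 'KILLED', 'DONEWITHERROR')

    def poll_batch(get_status, job_ids, minutes_per_job=1):
        # One deadline for the whole batch, scaled as in the commit.
        timeout = minutes_per_job * 60 * len(job_ids)
        try:
            with fixtures.Timeout(timeout, gentle=True):
                success = False
                while not success:
                    success = True
                    for job_id in job_ids:
                        status = get_status(job_id)
                        if status in FAILED_STATES:
                            raise AssertionError(
                                'job %s ended as %s' % (job_id, status))
                        if status != SUCCEEDED:
                            success = False  # at least one job still running
                    time.sleep(5)
        except fixtures.TimeoutException:
            raise AssertionError('batch still running after %ds' % timeout)

    # e.g. poll_batch(lambda _: SUCCEEDED, ['pig-1', 'java-2'])
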
@@ -172,23 +184,6 @@ class EDPTest(base.ITestCase):
             )
         )
 
-    def _delete_job(self, execution_job, job_id, job_binary_list,
-                    job_binary_internal_list, input_id, output_id):
-        if execution_job:
-            self.sahara.job_executions.delete(execution_job.id)
-        if job_id:
-            self.sahara.jobs.delete(job_id)
-        if job_binary_list:
-            for job_binary_id in job_binary_list:
-                self.sahara.job_binaries.delete(job_binary_id)
-        if job_binary_internal_list:
-            for internal_id in job_binary_internal_list:
-                self.sahara.job_binary_internals.delete(internal_id)
-        if input_id:
-            self.sahara.data_sources.delete(input_id)
-        if output_id:
-            self.sahara.data_sources.delete(output_id)
-
     def _add_swift_configs(self, configs):
         if "configs" not in configs:
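
The hand-written _delete_job teardown can disappear because every _create_* helper now registers its own deletion with addCleanup at creation time. testtools runs cleanups in LIFO order after the test finishes, pass or fail, so resources created before a mid-test failure are still removed and the explicit bookkeeping lists become unnecessary. A small self-contained illustration of that idiom; FakeClient is a stand-in for demonstration, not the sahara client:

    import testtools

    class FakeClient(object):
        """Stand-in resource store used only for this illustration."""
        def __init__(self):
            self.live = set()

        def create(self, name):
            self.live.add(name)
            return name

        def delete(self, name):
            self.live.discard(name)

    class CleanupDemo(testtools.TestCase):
        def test_cleanup_runs_even_on_failure(self):
            client = FakeClient()
            source_id = client.create('data-source-1')
            # Registered immediately after creation, so the resource is
            # deleted after the test whether later steps pass or fail.
            self.addCleanup(client.delete, source_id)
            self.assertIn(source_id, client.live)
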
@@ -201,8 +196,7 @@ class EDPTest(base.ITestCase):
         configs["configs"][
             sw.HADOOP_SWIFT_PASSWORD] = self.common_config.OS_PASSWORD
 
-    @base.skip_test('SKIP_EDP_TEST',
-                    'Test for EDP was skipped.')
+    @base.skip_test('SKIP_EDP_TEST', 'Test for EDP was skipped.')
     def edp_testing(self, job_type, job_data_list, lib_data_list=None,
                     configs=None, pass_input_output_args=False,
                     swift_binaries=False, hdfs_local_output=False):
@@ -210,10 +204,10 @@ class EDPTest(base.ITestCase):
         lib_data_list = lib_data_list or []
         configs = configs or {}
 
-        try:
-            swift = self.connect_to_swift()
-            container_name = 'Edp-test-%s' % str(uuid.uuid4())[:8]
-            swift.put_container(container_name)
-            swift.put_object(
-                container_name, 'input', ''.join(
-                    random.choice(':' + ' ' + '\n' + string.ascii_lowercase)
+        swift = self.connect_to_swift()
+        container_name = 'Edp-test-%s' % str(uuid.uuid4())[:8]
+        swift.put_container(container_name)
+        self.addCleanup(self.delete_swift_container, swift, container_name)
+        swift.put_object(
+            container_name, 'input', ''.join(
+                random.choice(':' + ' ' + '\n' + string.ascii_lowercase)
@@ -221,15 +215,10 @@ class EDPTest(base.ITestCase):
                 )
             )
 
-        except Exception as e:
-            with excutils.save_and_reraise_exception():
-                self.delete_swift_container(swift, container_name)
-                print(str(e))
         input_id = None
         output_id = None
         job_id = None
         job_execution = None
-        try:
-            job_binary_list = []
-            lib_binary_list = []
-            job_binary_internal_list = []
+        job_binary_list = []
+        lib_binary_list = []
+        job_binary_internal_list = []
@@ -296,26 +285,13 @@ class EDPTest(base.ITestCase):
                 job_type, edp.JOB_TYPE_JAVA) and pass_input_output_args:
             self._add_swift_configs(configs)
             if "args" in configs:
-                configs["args"].extend([swift_input_url,
-                                        output_url])
+                configs["args"].extend([swift_input_url, output_url])
             else:
-                configs["args"] = [swift_input_url,
-                                   output_url]
+                configs["args"] = [swift_input_url, output_url]
 
         job_execution = self.sahara.job_executions.create(
             job_id, self.cluster_id, input_id, output_id,
             configs=configs)
+        self.addCleanup(self.sahara.job_executions.delete, job_execution.id)
 
-        if job_execution:
-            self._await_job_execution(job_execution)
-
-        except Exception as e:
-            with excutils.save_and_reraise_exception():
-                print(str(e))
-
-        finally:
-            self.delete_swift_container(swift, container_name)
-            self._delete_job(
-                job_execution, job_id, job_binary_list + lib_binary_list,
-                job_binary_internal_list, input_id, output_id
-            )
+        return job_execution.id
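
With this hunk edp_testing stops being synchronous: it uploads the binaries, creates the execution, registers all cleanups, and returns job_execution.id without waiting on the result. A hypothetical caller inside an EDPTest subclass (the variable names here are placeholders, not from the commit) can therefore launch several jobs back to back and wait once:

    # Hypothetical caller: launch all jobs first, then one shared wait.
    job_ids = [
        self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
                         job_data_list=[{'pig': pig_job}],
                         lib_data_list=[{'jar': pig_lib}]),
        self.edp_testing(job_type=utils_edp.JOB_TYPE_JAVA,
                         job_data_list=[],
                         lib_data_list=[{'jar': java_jar}]),
    ]
    self.poll_jobs_status(job_ids)  # one combined wait, not two serial ones
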
@@ -199,13 +199,14 @@ class CDHGatingTest(cluster_configs.ClusterConfigTest,
 
     @b.errormsg("Failure while EDP testing: ")
     def _check_edp(self):
-        self._edp_test()
+        self.poll_jobs_status(list(self._run_edp_test()))
 
-    def _edp_test(self):
+    def _run_edp_test(self):
         # check pig
         pig_job = self.edp_info.read_pig_example_script()
         pig_lib = self.edp_info.read_pig_example_jar()
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_PIG,
             job_data_list=[{'pig': pig_job}],
             lib_data_list=[{'jar': pig_lib}],
             swift_binaries=False,
@@ -214,7 +215,8 @@ class CDHGatingTest(cluster_configs.ClusterConfigTest,
         # check mapreduce
         mapreduce_jar = self.edp_info.read_mapreduce_example_jar()
         mapreduce_configs = self.edp_info.mapreduce_example_configs()
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE,
             job_data_list=[],
             lib_data_list=[{'jar': mapreduce_jar}],
             configs=mapreduce_configs,
@@ -222,7 +224,8 @@ class CDHGatingTest(cluster_configs.ClusterConfigTest,
             hdfs_local_output=True)
 
         # check mapreduce streaming
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
             job_data_list=[],
             lib_data_list=[],
             configs=self.edp_info.mapreduce_streaming_configs(),
@@ -278,7 +281,7 @@ class CDHGatingTest(cluster_configs.ClusterConfigTest,
 
     @b.errormsg("Failure while EDP testing after cluster scaling: ")
     def _check_edp_after_scaling(self):
-        self._edp_test()
+        self._check_edp()
 
     @testcase.skipIf(
         cfg.ITConfig().cdh_config.SKIP_ALL_TESTS_FOR_PLUGIN,
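
The gating tests adopt a small generator idiom here: every `yield self.edp_testing(...)` submits one job and hands back its id immediately, and the `list(...)` call in _check_edp exhausts the generator, so all jobs are already running by the time polling starts. A stripped-down sketch of that control flow, with trivial stubs standing in for the real launch and poll methods:

    def launch(job_type):
        # Stub: pretend to submit a job and return its id at once.
        print('launching %s' % job_type)
        return '%s-id' % job_type

    def poll_jobs_status(job_ids):
        # Stub for the real batch poller.
        print('now waiting on %s' % job_ids)

    def run_edp_tests():
        # Each yield fires off one job; nothing blocks between launches.
        yield launch('pig')
        yield launch('mapreduce')
        yield launch('java')

    # list() drives the generator to completion, launching every job,
    # before the combined poll begins.
    poll_jobs_status(list(run_edp_tests()))
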
@@ -124,14 +124,15 @@ class HDP2GatingTest(swift.SwiftTest, scaling.ScalingTest,
 
     @b.errormsg("Failure while EDP testing: ")
     def _check_edp(self):
-        self._edp_test()
+        self.poll_jobs_status(list(self._run_edp_test()))
 
-    def _edp_test(self):
+    def _run_edp_test(self):
         # check pig
         pig_job = self.edp_info.read_pig_example_script()
         pig_lib = self.edp_info.read_pig_example_jar()
 
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_PIG,
             job_data_list=[{'pig': pig_job}],
             lib_data_list=[{'jar': pig_lib}],
             swift_binaries=True,
@@ -140,7 +141,8 @@ class HDP2GatingTest(swift.SwiftTest, scaling.ScalingTest,
         # check mapreduce
         mapreduce_jar = self.edp_info.read_mapreduce_example_jar()
         mapreduce_configs = self.edp_info.mapreduce_example_configs()
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE,
             job_data_list=[],
             lib_data_list=[{'jar': mapreduce_jar}],
             configs=mapreduce_configs,
@@ -148,7 +150,8 @@ class HDP2GatingTest(swift.SwiftTest, scaling.ScalingTest,
             hdfs_local_output=True)
 
         # check mapreduce streaming
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
+        yield self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
             job_data_list=[],
             lib_data_list=[],
             configs=self.edp_info.mapreduce_streaming_configs())
@@ -156,7 +159,8 @@ class HDP2GatingTest(swift.SwiftTest, scaling.ScalingTest,
         # check java
         java_jar = self.edp_info.read_java_example_lib(2)
         java_configs = self.edp_info.java_example_configs(2)
-        self.edp_testing(utils_edp.JOB_TYPE_JAVA,
+        yield self.edp_testing(
+            utils_edp.JOB_TYPE_JAVA,
             job_data_list=[],
             lib_data_list=[{'jar': java_jar}],
             configs=java_configs)
@@ -192,7 +196,7 @@ class HDP2GatingTest(swift.SwiftTest, scaling.ScalingTest,
 
     @b.errormsg("Failure while EDP testing after cluster scaling: ")
     def _check_edp_after_scaling(self):
-        self._edp_test()
+        self._check_edp()
 
     @testcase.attr('hdp2')
     @testcase.skipIf(config.SKIP_ALL_TESTS_FOR_PLUGIN,
@@ -166,27 +166,39 @@ class HDPGatingTest(cinder.CinderVolumeTest, edp.EDPTest,
         java_lib_data = self.edp_info.read_java_example_lib()
 
         try:
-            self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
+            job_ids = []
+            job_id = self.edp_testing(
+                job_type=utils_edp.JOB_TYPE_PIG,
                 job_data_list=[{'pig': pig_job_data}],
                 lib_data_list=[{'jar': pig_lib_data}],
                 swift_binaries=True,
                 hdfs_local_output=True)
-            self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE,
+            job_ids.append(job_id)
+
+            job_id = self.edp_testing(
+                job_type=utils_edp.JOB_TYPE_MAPREDUCE,
                 job_data_list=[],
                 lib_data_list=[{'jar': mapreduce_jar_data}],
                 configs=self.edp_info.mapreduce_example_configs(),
                 swift_binaries=True,
                 hdfs_local_output=True)
-            self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
+            job_ids.append(job_id)
+
+            job_id = self.edp_testing(
+                job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
                 job_data_list=[],
                 lib_data_list=[],
-                configs=(
-                    self.edp_info.mapreduce_streaming_configs()))
-            self.edp_testing(job_type=utils_edp.JOB_TYPE_JAVA,
+                configs=self.edp_info.mapreduce_streaming_configs())
+            job_ids.append(job_id)
+
+            job_id = self.edp_testing(
+                job_type=utils_edp.JOB_TYPE_JAVA,
                 job_data_list=[],
                 lib_data_list=[{'jar': java_lib_data}],
                 configs=self.edp_info.java_example_configs(),
                 pass_input_output_args=True)
+            job_ids.append(job_id)
+            self.poll_jobs_status(job_ids)
 
         except Exception as e:
             with excutils.save_and_reraise_exception():
@@ -121,10 +121,12 @@ class SparkGatingTest(swift.SwiftTest, scaling.ScalingTest,
         # check spark
         spark_jar = self.edp_info.read_spark_example_jar()
         spark_configs = self.edp_info.spark_example_configs()
-        self.edp_testing(utils_edp.JOB_TYPE_SPARK,
+        job_id = self.edp_testing(
+            utils_edp.JOB_TYPE_SPARK,
             job_data_list=[{'jar': spark_jar}],
             lib_data_list=[],
             configs=spark_configs)
+        self.poll_jobs_status([job_id])
 
     @b.errormsg("Failure while cluster scaling: ")
     def _check_scaling(self):
@@ -101,9 +101,10 @@ class TransientGatingTest(edp.EDPTest):
     def _check_transient(self):
         pig_job_data = self.edp_info.read_pig_example_script()
         pig_lib_data = self.edp_info.read_pig_example_jar()
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
+        job_id = self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
                          job_data_list=[{'pig': pig_job_data}],
                          lib_data_list=[{'jar': pig_lib_data}])
+        self.poll_jobs_status([job_id])
 
         # set timeout in seconds
         timeout = self.common_config.TRANSIENT_CLUSTER_TIMEOUT * 60
@@ -255,40 +255,40 @@ class VanillaGatingTest(cinder.CinderVolumeTest,
             # This is a modified version of WordCount that takes swift configs
             java_lib_data = self.edp_info.read_java_example_lib()
 
             try:
-                self.edp_testing(
+                job_ids = []
+                job_id = self.edp_testing(
                     job_type=utils_edp.JOB_TYPE_PIG,
                     job_data_list=[{'pig': pig_job_data}],
                     lib_data_list=[{'jar': pig_lib_data}],
                     swift_binaries=True,
                     hdfs_local_output=True)
-                self.edp_testing(
+                job_ids.append(job_id)
+
+                job_id = self.edp_testing(
                     job_type=utils_edp.JOB_TYPE_MAPREDUCE,
                     job_data_list=[],
                     lib_data_list=[{'jar': mapreduce_jar_data}],
                     configs=self.edp_info.mapreduce_example_configs(),
                     swift_binaries=True,
                     hdfs_local_output=True)
-                self.edp_testing(
+                job_ids.append(job_id)
+
+                job_id = self.edp_testing(
                     job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
                     job_data_list=[],
                     lib_data_list=[],
                     configs=self.edp_info.mapreduce_streaming_configs())
-                self.edp_testing(
+                job_ids.append(job_id)
+
+                job_id = self.edp_testing(
                     job_type=utils_edp.JOB_TYPE_JAVA,
                     job_data_list=[],
                     lib_data_list=[{'jar': java_lib_data}],
                     configs=self.edp_info.java_example_configs(),
                     pass_input_output_args=True)
+                job_ids.append(job_id)
 
             except Exception as e:
                 with excutils.save_and_reraise_exception():
                     self.delete_objects(
                         cluster_info['cluster_id'], cluster_template_id,
                         node_group_template_id_list
                     )
                     message = 'Failure while EDP testing: '
                     self.print_error_log(message, e)
+            self.poll_jobs_status(job_ids)
 
         edp_test()
@@ -63,6 +63,17 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
         self.volumes_per_node = 2
         self.volume_size = 2
 
+        self.ng_params = {
+            'MapReduce': {
+                'yarn.app.mapreduce.am.resource.mb': 256,
+                'yarn.app.mapreduce.am.command-opts': '-Xmx256m'
+            },
+            'YARN': {
+                'yarn.scheduler.minimum-allocation-mb': 256,
+                'yarn.scheduler.maximum-allocation-mb': 1024
+            }
+        }
+
     @b.errormsg("Failure while 'nm-dn' node group template creation: ")
     def _create_nm_dn_ng_template(self):
         template = {
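
These ng_params are presumably what let several EDP jobs run side by side on the small test flavors: capping each MapReduce application master at 256 MB and letting YARN hand out 256 MB slots means one modest node can host the AM containers of multiple jobs at once. A rough capacity check under an assumed NodeManager size (the 2048 MB figure below is an illustrative assumption, not from the commit):

    # Rough YARN capacity arithmetic for the settings above (illustrative).
    nm_memory_mb = 2048   # assumed yarn.nodemanager.resource.memory-mb
    am_request_mb = 256   # yarn.app.mapreduce.am.resource.mb
    min_alloc_mb = 256    # yarn.scheduler.minimum-allocation-mb

    def round_up(mb, step):
        # YARN rounds each request up to a multiple of the minimum allocation.
        return ((mb + step - 1) // step) * step

    concurrent_ams = nm_memory_mb // round_up(am_request_mb, min_alloc_mb)
    print(concurrent_ams)  # -> 8 application masters fit on one such node
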
@@ -71,7 +82,7 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
             'description': 'test node group template for Vanilla plugin',
             'node_processes': ['nodemanager', 'datanode'],
             'floating_ip_pool': self.floating_ip_pool,
-            'node_configs': {}
+            'node_configs': self.ng_params
         }
         self.ng_tmpl_nm_dn_id = self.create_node_group_template(**template)
         self.ng_template_ids.append(self.ng_tmpl_nm_dn_id)
@@ -86,7 +97,7 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
             'volume_size': self.volume_size,
             'node_processes': ['nodemanager'],
             'floating_ip_pool': self.floating_ip_pool,
-            'node_configs': {}
+            'node_configs': self.ng_params
         }
         self.ng_tmpl_nm_id = self.create_node_group_template(**template)
         self.ng_template_ids.append(self.ng_tmpl_nm_id)
@@ -101,7 +112,7 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
             'volume_size': self.volume_size,
             'node_processes': ['datanode'],
             'floating_ip_pool': self.floating_ip_pool,
-            'node_configs': {}
+            'node_configs': self.ng_params
         }
         self.ng_tmpl_dn_id = self.create_node_group_template(**template)
         self.ng_template_ids.append(self.ng_tmpl_dn_id)
@@ -123,7 +134,8 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
                 'flavor_id': self.flavor_id,
                 'node_processes': ['namenode', 'resourcemanager'],
                 'floating_ip_pool': self.floating_ip_pool,
-                'count': 1
+                'count': 1,
+                'node_configs': self.ng_params
             },
             {
                 'name': 'master-node-oo-hs',
@@ -131,7 +143,8 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
                 'node_processes': ['oozie', 'historyserver',
                                    'secondarynamenode'],
                 'floating_ip_pool': self.floating_ip_pool,
-                'count': 1
+                'count': 1,
+                'node_configs': self.ng_params
             },
             {
                 'name': 'worker-node-nm-dn',
@@ -183,23 +196,26 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
 
     @b.errormsg("Failure while EDP testing: ")
     def _check_edp(self):
+        self.poll_jobs_status(list(self._run_edp_tests()))
+
+    def _run_edp_tests(self):
         skipped_edp_job_types = self.vanilla_two_config.SKIP_EDP_JOB_TYPES
 
         if utils_edp.JOB_TYPE_PIG not in skipped_edp_job_types:
-            self._edp_pig_test()
+            yield self._edp_pig_test()
         if utils_edp.JOB_TYPE_MAPREDUCE not in skipped_edp_job_types:
-            self._edp_mapreduce_test()
+            yield self._edp_mapreduce_test()
         if utils_edp.JOB_TYPE_MAPREDUCE_STREAMING not in skipped_edp_job_types:
-            self._edp_mapreduce_streaming_test()
+            yield self._edp_mapreduce_streaming_test()
         if utils_edp.JOB_TYPE_JAVA not in skipped_edp_job_types:
-            self._edp_java_test()
+            yield self._edp_java_test()
 
     def _edp_pig_test(self):
         pig_job = self.edp_info.read_pig_example_script()
         pig_lib = self.edp_info.read_pig_example_jar()
 
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_PIG,
+        return self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_PIG,
             job_data_list=[{'pig': pig_job}],
             lib_data_list=[{'jar': pig_lib}],
             swift_binaries=True,
@@ -208,7 +224,8 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
     def _edp_mapreduce_test(self):
         mapreduce_jar = self.edp_info.read_mapreduce_example_jar()
         mapreduce_configs = self.edp_info.mapreduce_example_configs()
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE,
+        return self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE,
             job_data_list=[],
             lib_data_list=[{'jar': mapreduce_jar}],
             configs=mapreduce_configs,
@@ -216,7 +233,8 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
             hdfs_local_output=True)
 
     def _edp_mapreduce_streaming_test(self):
-        self.edp_testing(job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
+        return self.edp_testing(
+            job_type=utils_edp.JOB_TYPE_MAPREDUCE_STREAMING,
             job_data_list=[],
             lib_data_list=[],
             configs=self.edp_info.mapreduce_streaming_configs())
@@ -224,7 +242,8 @@ class VanillaTwoGatingTest(cluster_configs.ClusterConfigTest,
     def _edp_java_test(self):
        java_jar = self.edp_info.read_java_example_lib(2)
        java_configs = self.edp_info.java_example_configs(2)
-        self.edp_testing(utils_edp.JOB_TYPE_JAVA,
+        return self.edp_testing(
+            utils_edp.JOB_TYPE_JAVA,
             job_data_list=[],
             lib_data_list=[{'jar': java_jar}],
             configs=java_configs)