Merge "Adding option for Spark jobs to use Swift paths"

Authored by Jenkins on 2015-09-18 15:50:30 +00:00; committed by Gerrit Code Review
commit 8ea37ebb1c
3 changed files with 20 additions and 1 deletion

Changed file 1 of 3: job launch form template (JavaScript that shows/hides job-type-specific fields)

@ -171,6 +171,9 @@
$("[name=hbase_common_lib]").closest(".form-group").hide();
$("[name=adapt_oozie]").closest(".form-group").hide();
}
if (job_type != "Spark") {
$("[name=adapt_swift_spark]").closest(".form-group").hide();
}
}
function override_field_labels() {

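Note on the template hunk above: the new checkbox only applies to Spark jobs, so the launch form's JavaScript hides its form group for every other job type, mirroring how the Oozie and HBase checkboxes are hidden when they do not apply. Purely to restate that rule (this helper is not part of the patch):

def swift_paths_checkbox_visible(job_type):
    # Illustrative restatement of the template rule: the "Enable Swift
    # Paths" checkbox is shown only when the selected job type is Spark.
    return job_type == "Spark"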
Changed file 2 of 3: data processing job tests

@ -186,6 +186,7 @@ class DataProcessingJobTests(test.TestCase):
'config': {},
'argument_ids': '{}',
'adapt_oozie': 'on',
'adapt_swift_spark': 'on',
'hbase_common_lib': 'on',
'java_opts': '',
'job_args_array': [[], []],

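Note on the test fixture: an HTML checkbox posts the literal string 'on' when checked, and Django's BooleanField cleans that to a Python boolean, which is what the workflow code later reads via data.get(...). A minimal standalone sketch of that behaviour (not the actual Horizon test case; the form class name here is made up for illustration):

import django
from django.conf import settings

settings.configure()   # bare-bones settings so the form machinery can run standalone
django.setup()

from django import forms

class SwiftAdaptForm(forms.Form):
    # Mirrors the new field: an unchecked box simply omits the key from the
    # POST data, so required=False lets it clean to False instead of failing.
    adapt_spark_swift = forms.BooleanField(required=False, initial=True)

form = SwiftAdaptForm(data={"adapt_spark_swift": "on"})
assert form.is_valid()
assert form.cleaned_data["adapt_spark_swift"] is True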
Changed file 3 of 3: job launch workflow (JobConfigAction / JobConfig)

@ -162,6 +162,7 @@ class JobConfigAction(workflows.Action):
EDP_PREFIX = "edp."
EDP_HBASE_COMMON_LIB = "edp.hbase_common_lib"
EDP_ADAPT_FOR_OOZIE = "edp.java.adapt_for_oozie"
EDP_ADAPT_SPARK_SWIFT = "edp.spark.adapt_for_swift"
property_name = forms.ChoiceField(
required=False,
@ -205,6 +206,12 @@ class JobConfigAction(workflows.Action):
" Oozie will handle exit codes correctly."),
required=False, initial=True)
adapt_spark_swift = forms.BooleanField(
label=_("Enable Swift Paths"),
help_text=_("Modify the configuration so that swift URLs can "
"be dereferenced through HDFS at runtime."),
required=False, initial=True)
def __init__(self, request, *args, **kwargs):
super(JobConfigAction, self).__init__(request, *args, **kwargs)
job_ex_id = request.REQUEST.get("job_execution_id")
@ -248,6 +255,9 @@ class JobConfigAction(workflows.Action):
if self.EDP_ADAPT_FOR_OOZIE in edp_configs:
self.fields['adapt_oozie'].initial = (
edp_configs[self.EDP_ADAPT_FOR_OOZIE])
if self.EDP_ADAPT_SPARK_SWIFT in edp_configs:
self.fields['adapt_spark_swift'].initial = (
edp_configs[self.EDP_ADAPT_SPARK_SWIFT])
def clean(self):
cleaned_data = super(workflows.Action, self).clean()
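Note on the hunk at -248/+255: when a job is relaunched from an existing execution (__init__ reads job_execution_id from the request), the stored edp.* configs seed the form's initial values, and the new Swift flag is now restored the same way as the Oozie flag. A reduced sketch of that seeding step (a stand-in object replaces the real form field; the helper name is illustrative, not code from this patch):

import types

EDP_ADAPT_SPARK_SWIFT = "edp.spark.adapt_for_swift"

def seed_initial_from_execution(fields, edp_configs):
    # If the execution being relaunched recorded a value for the Swift-adapt
    # flag, pre-set the relaunch form's checkbox to match it.
    if EDP_ADAPT_SPARK_SWIFT in edp_configs:
        fields["adapt_spark_swift"].initial = edp_configs[EDP_ADAPT_SPARK_SWIFT]

# e.g. with a stand-in field object:
fields = {"adapt_spark_swift": types.SimpleNamespace(initial=True)}
seed_initial_from_execution(fields, {EDP_ADAPT_SPARK_SWIFT: False})
assert fields["adapt_spark_swift"].initial is False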
@ -283,7 +293,8 @@ class JobConfigAction(workflows.Action):
self.EDP_REDUCER,
self.MAIN_CLASS,
self.JAVA_OPTS,
self.EDP_ADAPT_FOR_OOZIE, ]:
self.EDP_ADAPT_FOR_OOZIE,
self.EDP_ADAPT_SPARK_SWIFT]:
del configs[rmkey]
return (configs, edp_configs)
@ -348,6 +359,10 @@ class JobConfig(workflows.Step):
context["job_config"]["configs"][
JobConfigAction.EDP_ADAPT_FOR_OOZIE] = (
data.get("adapt_oozie", True))
if job_type == "Spark":
context["job_config"]["configs"][
JobConfigAction.EDP_ADAPT_SPARK_SWIFT] = (
data.get("adapt_spark_swift", True))
elif job_type == "MapReduce.Streaming":
context["job_config"]["configs"][JobConfigAction.EDP_MAPPER] = (
data.get("streaming_mapper", ""))
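Taken together, the JobConfig step changes carry the checkbox value from the launch form into the Spark job's edp.* configuration map. A rough sketch of that step, with the surrounding Horizon plumbing reduced to plain dictionaries (the function name and signature are illustrative; only the edp.* key and the form field name come from the patch):

EDP_ADAPT_SPARK_SWIFT = "edp.spark.adapt_for_swift"

def contribute_spark_configs(job_type, data, configs):
    # Illustrative reduction of the hunk above: for Spark jobs, copy the
    # cleaned checkbox value into the job's edp.* config map; True is the
    # default when the form did not supply a value.
    if job_type == "Spark":
        configs[EDP_ADAPT_SPARK_SWIFT] = data.get("adapt_spark_swift", True)
    return configs

# e.g. contribute_spark_configs("Spark", {"adapt_spark_swift": False}, {})
# -> {"edp.spark.adapt_for_swift": False}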