Add Storm Job to UI

This patch adds Storm as a plugin, allowing Storm jobs to be submitted
through the UI.

Change-Id: I6dddb09aa7d810d2ad4013af1f45f4e3249288a3
Partially-implements: bp storm-integration
commit 4157c00993
parent 5c8d52c731
Author: Telles Nobrega
Date: 2015-07-17 14:11:47 -03:00
6 changed files with 8 additions and 4 deletions

View File

@@ -9,6 +9,7 @@
 <li>{% blocktrans %}Pig{% endblocktrans %}</li>
 <li>{% blocktrans %}Hive{% endblocktrans %}</li>
 <li>{% blocktrans %}Spark{% endblocktrans %}</li>
+<li>{% blocktrans %}Storm{% endblocktrans %}</li>
 <li>{% blocktrans %}MapReduce{% endblocktrans %}</li>
 <li>{% blocktrans %}Java Action{% endblocktrans %}</li>
 <li>{% blocktrans %}Shell Action{% endblocktrans %}</li>

View File

@@ -137,7 +137,7 @@
     if ($.inArray(job_type, ["Hive", "Pig", "Shell"]) != -1) {
         res.push("params");
     }
-    if ($.inArray(job_type, ["Java", "Pig", "Spark", "Shell"]) != -1) {
+    if ($.inArray(job_type, ["Java", "Pig", "Spark", "Shell", "Storm"]) != -1) {
         res.push("args_array");
     }
     return res;
@@ -155,7 +155,7 @@
 }
 function hide_unnecessary_fields() {
-    if ($.inArray(job_type, ["Java", "Spark", "Shell"]) != -1) {
+    if ($.inArray(job_type, ["Java", "Spark", "Shell", "Storm"]) != -1) {
         $("[name=job_input]").closest(".form-group").hide();
         $("[name=job_output]").closest(".form-group").hide();
     }
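
For reference, here is a hedged Python restatement of the client-side logic above, showing which optional form sections each job type now triggers. The dict and names below are illustrative only, not code from this patch.

# Illustrative summary of which optional job sections the dashboard shows
# per job type after this change; "params" and "args_array" match the
# section names pushed by the JavaScript above.
SECTIONS_BY_JOB_TYPE = {
    "Hive":  ["params"],
    "Pig":   ["params", "args_array"],
    "Shell": ["params", "args_array"],
    "Java":  ["args_array"],
    "Spark": ["args_array"],
    "Storm": ["args_array"],  # added by this patch
}

# Job types whose input/output data source fields are hidden.
HIDE_INPUT_OUTPUT_FOR = ["Java", "Spark", "Shell", "Storm"]

print(SECTIONS_BY_JOB_TYPE["Storm"])  # ['args_array']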

View File

@@ -91,6 +91,7 @@ class GeneralConfigAction(workflows.Action):
                 'data-jobtype-hive': _("Choose a main binary"),
                 'data-jobtype-shell': _("Choose a shell script"),
                 'data-jobtype-spark': _("Choose a main binary"),
+                'data-jobtype-storm': _("Choose a main binary"),
                 'data-jobtype-mapreduce.streaming': _("Choose a main binary")
             }))
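
The entries above are data-* hints attached to the job binary selector, and the client-side script shows the hint matching the currently selected job type. A minimal, hedged Django sketch of that pattern follows; the form and field names are assumptions, not the actual Sahara dashboard code.

# Minimal sketch: per-job-type help text attached as data-* attributes on
# a select widget; JavaScript then displays the hint for the chosen type.
from django import forms
from django.utils.translation import ugettext_lazy as _


class GeneralConfigForm(forms.Form):
    main_binary = forms.ChoiceField(
        label=_("Main binary"),
        required=False,
        widget=forms.Select(attrs={
            'data-jobtype-spark': _("Choose a main binary"),
            'data-jobtype-storm': _("Choose a main binary"),
            'data-jobtype-shell': _("Choose a shell script"),
        }))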

View File

@@ -296,7 +296,7 @@ class JobConfig(workflows.Step):
         context["job_config"] = {"configs": job_config}
         context["job_config"]["args"] = job_args_array
-        if job_type in ["Java", "Spark"]:
+        if job_type in ["Java", "Spark", "Storm"]:
             context["job_config"]["configs"][JobConfigAction.MAIN_CLASS] = (
                 data.get("main_class", ""))
             context["job_config"]["configs"][JobConfigAction.JAVA_OPTS] = (
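
The branch above fills in the main class and Java options for Storm jobs the same way it already did for Java and Spark. A hedged illustration of the resulting payload shape is below; the literal config keys mirror the JobConfigAction.MAIN_CLASS / JAVA_OPTS constants but are assumptions here, as is the example topology class.

# Illustrative job_config assembled for a Storm job (values and key names
# are assumptions, shown only to make the shape concrete).
job_config = {
    "configs": {
        "edp.java.main_class": "org.apache.storm.starter.WordCountTopology",
        "edp.java.java_opts": "",
    },
    "args": ["wordcount-topology"],
}

print(job_config["configs"]["edp.java.main_class"])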

View File

@@ -126,6 +126,7 @@ class Helpers(object):
     JOB_TYPE_MAP = {"pig": [_("Pig"), "Pig"],
                     "hive": [_("Hive"), "Hive"],
                     "spark": [_("Spark"), "Spark"],
+                    "storm": [_("Storm"), "Storm"],
                     "mapreduce": [_("MapReduce"), "MapReduce"],
                     "mapreduce.streaming": [_("Streaming MapReduce"),
                                             "MapReduce.Streaming"],

View File

@@ -64,7 +64,8 @@ class JobExecutionGuideView(horizon_views.APIView):
     def show_data_sources(self):
         try:
-            if self.request.session["guide_job_type"] in ["Spark", "Java"]:
+            if self.request.session["guide_job_type"] in ["Spark", "Storm",
+                                                          "Java"]:
                 return False
             return True
         except Exception:
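
Finally, the job guide skips the data source step for job types that do not read from or write to registered data sources, and Storm now joins Spark and Java in that list. A self-contained sketch of the check follows; the session dict stands in for self.request.session, and the fallback on error is an assumption since the original except branch lies outside this hunk.

def show_data_sources(session):
    # Storm, Spark and Java jobs take no data sources, so the guide hides
    # that step for them.
    try:
        if session["guide_job_type"] in ["Spark", "Storm", "Java"]:
            return False
        return True
    except KeyError:
        # Assumption: show the step when the job type is not set yet.
        return True


print(show_data_sources({"guide_job_type": "Storm"}))  # False
print(show_data_sources({"guide_job_type": "Pig"}))    # True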