Browse Source

Merge "Adding option for Java jobs to be adapted to Oozie"

Jenkins 3 years ago
parent
commit
51647a585a

+ 1
- 0
openstack_dashboard/contrib/sahara/content/data_processing/jobs/templates/data_processing.jobs/config_template.html View File

@@ -169,6 +169,7 @@
169 169
       }
170 170
       if (job_type != "Java") {
171 171
         $("[name=hbase_common_lib]").closest(".form-group").hide();
172
+        $("[name=adapt_oozie]").closest(".form-group").hide();
172 173
       }
173 174
     }
174 175
 

+ 51
- 0
openstack_dashboard/contrib/sahara/content/data_processing/jobs/tests.py View File

@@ -83,3 +83,54 @@ class DataProcessingJobTests(test.TestCase):
83 83
         self.assertNoFormErrors(res)
84 84
         self.assertRedirectsNoFollow(res, INDEX_URL)
85 85
         self.assertMessageCount(success=1)
86
+
87
    @test.create_stubs({api.sahara: ('job_execution_create',
                                     'job_get',
                                     'job_get_configs',
                                     'job_list',
                                     'cluster_list',
                                     'data_source_list')})
    def test_launch(self):
        """POST the launch-job form and verify it submits cleanly.

        Records the sahara API calls the launch workflow makes, then
        posts form data for a Pig job that also carries the Java-only
        checkbox fields ('adapt_oozie', 'hbase_common_lib') and
        'java_opts'.  Only the absence of form errors is asserted.

        NOTE(review): mox verifies recorded calls in order by default,
        so the recording sequence below must match the workflow's call
        order -- do not reorder these expectations.
        """
        job = self.jobs.first()
        job_execution = self.job_executions.first()
        cluster = self.clusters.first()
        input_ds = self.data_sources.first()
        output_ds = self.data_sources.first()
        # NOTE(review): IsA(unicode) is Python 2 only -- confirm this
        # tree still targets py2 before reusing the pattern elsewhere.
        api.sahara.job_get(IsA(http.HttpRequest), IsA(unicode)) \
            .AndReturn(job)
        api.sahara.job_get_configs(IsA(http.HttpRequest), job.type) \
            .AndReturn(job)
        api.sahara.cluster_list(IsA(http.HttpRequest)) \
            .AndReturn(self.clusters.list())
        # data_source_list is expected MultipleTimes(): presumably the
        # workflow renders both the input and output choice fields from
        # it -- verify against the launch workflow if this changes.
        api.sahara.data_source_list(IsA(http.HttpRequest)) \
            .MultipleTimes().AndReturn(self.data_sources.list())
        api.sahara.job_list(IsA(http.HttpRequest)) \
            .AndReturn(self.jobs.list())
        # Final expectation: the actual job execution creation triggered
        # by the form POST below.
        api.sahara.job_execution_create(IsA(http.HttpRequest),
                                        IsA(unicode),
                                        IsA(unicode),
                                        IsA(unicode),
                                        IsA(unicode),
                                        IsA(dict)).AndReturn(job_execution)
        self.mox.ReplayAll()

        url = reverse('horizon:project:data_processing.jobs:launch-job')
        form_data = {
            'job': self.jobs.first().id,
            'cluster': cluster.id,
            'job_input': input_ds.id,
            'job_output': output_ds.id,
            'config': {},
            # Checkbox fields post 'on' when checked; these two are the
            # Java-specific options this commit's UI change hides for
            # non-Java job types.
            'adapt_oozie': 'on',
            'hbase_common_lib': 'on',
            'java_opts': '',
            'job_args_array': [[], []],
            'job_configs': [{}, {}],
            'job_params': [{}, {}],
            'job_type': 'Pig',
            'streaming_mapper': '',
            'streaming_reducer': ''
        }

        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)

+ 17
- 1
openstack_dashboard/contrib/sahara/content/data_processing/jobs/workflows/launch.py View File

@@ -134,6 +134,7 @@ class JobConfigAction(workflows.Action):
134 134
     EDP_REDUCER = "edp.streaming.reducer"
135 135
     EDP_PREFIX = "edp."
136 136
     EDP_HBASE_COMMON_LIB = "edp.hbase_common_lib"
137
+    EDP_ADAPT_FOR_OOZIE = "edp.java.adapt_for_oozie"
137 138
 
138 139
     property_name = forms.ChoiceField(
139 140
         required=False,
@@ -170,6 +171,13 @@ class JobConfigAction(workflows.Action):
170 171
         help_text=_("Run HBase EDP Jobs with common HBase library on HDFS"),
171 172
         required=False, initial=True)
172 173
 
174
+    adapt_oozie = forms.BooleanField(
175
+        label=_("Adapt For Oozie"),
176
+        help_text=_("Automatically modify the Hadoop configuration"
177
+                    " so that job config values are set and so that"
178
+                    " Oozie will handle exit codes correctly."),
179
+        required=False, initial=True)
180
+
173 181
     def __init__(self, request, *args, **kwargs):
174 182
         super(JobConfigAction, self).__init__(request, *args, **kwargs)
175 183
         job_ex_id = request.REQUEST.get("job_execution_id")
@@ -209,6 +217,9 @@ class JobConfigAction(workflows.Action):
209 217
             if self.EDP_HBASE_COMMON_LIB in edp_configs:
210 218
                 self.fields['hbase_common_lib'].initial = (
211 219
                     edp_configs[self.EDP_HBASE_COMMON_LIB])
220
+            if self.EDP_ADAPT_FOR_OOZIE in edp_configs:
221
+                self.fields['adapt_oozie'].initial = (
222
+                    edp_configs[self.EDP_ADAPT_FOR_OOZIE])
212 223
 
213 224
     def clean(self):
214 225
         cleaned_data = super(workflows.Action, self).clean()
@@ -243,7 +254,8 @@ class JobConfigAction(workflows.Action):
243 254
                          self.EDP_MAPPER,
244 255
                          self.EDP_REDUCER,
245 256
                          self.MAIN_CLASS,
246
-                         self.JAVA_OPTS]:
257
+                         self.JAVA_OPTS,
258
+                         self.EDP_ADAPT_FOR_OOZIE, ]:
247 259
                 del configs[rmkey]
248 260
         return (configs, edp_configs)
249 261
 
@@ -304,6 +316,10 @@ class JobConfig(workflows.Step):
304 316
             context["job_config"]["configs"][
305 317
                 JobConfigAction.EDP_HBASE_COMMON_LIB] = (
306 318
                     data.get("hbase_common_lib", True))
319
+            if job_type == "Java":
320
+                context["job_config"]["configs"][
321
+                    JobConfigAction.EDP_ADAPT_FOR_OOZIE] = (
322
+                        data.get("adapt_oozie", True))
307 323
         elif job_type == "MapReduce.Streaming":
308 324
             context["job_config"]["configs"][JobConfigAction.EDP_MAPPER] = (
309 325
                 data.get("streaming_mapper", ""))

+ 2
- 1
openstack_dashboard/test/test_data/sahara_data.py View File

@@ -455,7 +455,8 @@ def data(TEST):
455 455
         "name": "pigjob",
456 456
         "tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
457 457
         "type": "Pig",
458
-        "updated_at": None
458
+        "updated_at": None,
459
+        "job_config": {"configs": {}}
459 460
     }
460 461
 
461 462
     job1 = jobs.Job(jobs.JobsManager(None), job1_dict)

Loading…
Cancel
Save