Populating job type choices via API call

Rather than hard-coding the choices for the job type
dropdown, they are now generated from an API call
that factors in the currently loaded plugins. The
tests have also been updated to use the new call.

Change-Id: I69f7100335bd35a891f23efd286da784c3d34e9f
Closes-Bug: #1371285
Chad Roberts 2015-04-16 13:34:58 -04:00
parent 576736a2e3
commit e81e1e8307
4 changed files with 38 additions and 6 deletions
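
The idea, in brief: instead of a fixed list, the dropdown choices are now
computed per request from whatever job types the Sahara backend reports,
keeping only the ones Horizon knows how to label. A minimal sketch of that
filtering step, separate from the Horizon plumbing in the diffs below
(function and variable names here are illustrative, not part of the commit):

    # Illustrative sketch only.
    def choices_from_backend(reported_names, known_labels):
        """reported_names: job type names from the backend, e.g. ["Pig"].
        known_labels: mapping of lowercase name -> display label."""
        choices = []
        for name in reported_names:
            key = name.lower()
            if key in known_labels:
                choices.append((key, known_labels[key]))
        return choices

    # e.g. choices_from_backend(["Pig", "Shell"], {"pig": "Pig"})
    # returns [("pig", "Pig")]; unknown types are simply skipped.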

@@ -424,3 +424,7 @@ def job_execution_get(request, jex_id):

 def job_execution_delete(request, jex_id):
     client(request).job_executions.delete(obj_id=jex_id)
+
+
+def job_types_list(request):
+    return client(request).job_types.list()

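The new wrapper above simply delegates to python-saharaclient's job_types
manager. A hedged sketch of how a caller might use the result, assuming the
returned objects expose the same fields as the job_type1_dict fixture added
at the end of this commit (attribute-style access to "plugins" is an
assumption):

    # Given a Django request, using the job_types_list() wrapper added above.
    def describe_job_types(request):
        for job_type in job_types_list(request):
            label = job_type.name                            # e.g. "Pig"
            plugins = [p["name"] for p in job_type.plugins]  # e.g. ["vanilla"]
            print(label, plugins)
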
@@ -46,12 +46,16 @@ class DataProcessingJobTests(test.TestCase):
                                 'project/data_processing.jobs/details.html')
         self.assertContains(res, 'pigjob')

-    @test.create_stubs({api.sahara: ('job_binary_list', 'job_create',)})
+    @test.create_stubs({api.sahara: ('job_binary_list',
+                                     'job_create',
+                                     'job_types_list')})
     def test_create(self):
         api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
         api.sahara.job_binary_list(IsA(http.HttpRequest)).AndReturn([])
         api.sahara.job_create(IsA(http.HttpRequest),
                               'test', 'Pig', [], [], 'test create')
+        api.sahara.job_types_list(IsA(http.HttpRequest)) \
+            .AndReturn(self.job_types.list())
         self.mox.ReplayAll()
         form_data = {'job_name': 'test',
                      'job_type': 'pig',

@@ -96,11 +96,13 @@ class GeneralConfigAction(workflows.Action):
                 resolver_match.kwargs["guide_job_type"].lower())

     def populate_job_type_choices(self, request, context):
-        choices = [("pig", _("Pig")), ("hive", _("Hive")),
-                   ("spark", _("Spark")),
-                   ("mapreduce", _("MapReduce")),
-                   ("mapreduce.streaming", _("Streaming MapReduce")),
-                   ("java", _("Java Action"))]
+        choices = []
+        choices_list = saharaclient.job_types_list(request)
+
+        for choice in choices_list:
+            job_type = choice.name.lower()
+            if job_type in helpers.JOB_TYPE_MAP:
+                choices.append((job_type, helpers.JOB_TYPE_MAP[job_type][0]))
         return choices

     def populate_main_binary_choices(self, request, context):

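The contents of helpers.JOB_TYPE_MAP are not part of this commit; the loop
above only relies on the lowercase job type name being a key and the first
element of the mapped value being the translated display label. A
hypothetical shape consistent with that usage, reusing the labels from the
removed hard-coded list:

    # Hypothetical structure only; JOB_TYPE_MAP[job_type][0] must be the
    # dropdown label, but the real module may store additional data.
    from django.utils.translation import ugettext_lazy as _

    JOB_TYPE_MAP = {
        "pig": (_("Pig"),),
        "hive": (_("Hive"),),
        "spark": (_("Spark"),),
        "mapreduce": (_("MapReduce"),),
        "mapreduce.streaming": (_("Streaming MapReduce"),),
        "java": (_("Java Action"),),
    }
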
@@ -19,6 +19,7 @@ from saharaclient.api import clusters
 from saharaclient.api import data_sources
 from saharaclient.api import job_binaries
 from saharaclient.api import job_executions
+from saharaclient.api import job_types
 from saharaclient.api import jobs
 from saharaclient.api import node_group_templates
 from saharaclient.api import plugins
@@ -35,6 +36,7 @@ def data(TEST):
     TEST.jobs = utils.TestDataContainer()
     TEST.job_executions = utils.TestDataContainer()
     TEST.registered_images = copy.copy(TEST.images)
+    TEST.job_types = utils.TestDataContainer()

     plugin1_dict = {
         "description": "vanilla plugin",
@@ -497,3 +499,23 @@ def data(TEST):
     augmented_image.tags = {}
     augmented_image.username = 'myusername'
     augmented_image.description = 'mydescription'
+
+    job_type1_dict = {
+        "name": "Pig",
+        "plugins": [
+            {
+                "description": "Fake description",
+                "versions": {
+                    "2.6.0": {
+                    },
+                    "1.2.1": {
+                    }
+                },
+                "name": "vanilla",
+                "title": "Vanilla Apache Hadoop"
+            },
+        ]
+    }
+
+    job_types1 = job_types.JobType(
+        job_types.JobTypesManager(None), job_type1_dict)
+    TEST.job_types.add(job_types1)
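
With this fixture in place, the stubbed job_types_list call in the updated
test returns a single JobType named "Pig", so the workflow's
populate_job_type_choices should yield exactly one ("pig", ...) entry,
matching the 'job_type': 'pig' value in the test's form data, provided "pig"
is a key in JOB_TYPE_MAP. A rough, illustrative sanity check of that
expectation (not part of the test suite):

    # Illustrative only: TEST is the fixture container populated above,
    # inside data(TEST); TestDataContainer.list() is already used by the
    # stubbed test to return this data.
    names = [jt.name.lower() for jt in TEST.job_types.list()]
    assert names == ["pig"]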