Adding filtering to data_processing tables
Adding filtering capability to some of the data processing tables:

* Job Executions: Job, Cluster, Id, Status
* Jobs: Name, Description
* Clusters: Name, Status
* Cluster Templates: Name, Plugin, Version, Description
* Node Group Templates: Name, Plugin, Version

Updating tests to use extra param.

Change-Id: Ib53c6da2ae3df2996c5713a5d8cedfdd8ade91ce
Implements: bp data-processing-table-filtering
commit 62c5b2159a
parent 926fa00073
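Every table touched by this change follows the same three-part server-side filtering pattern from Horizon. As a rough sketch of how the parts fit together (not code from this commit; MyFilterAction, MyTable, MyView and my_list are illustrative placeholders):

    from django.utils.translation import ugettext_lazy as _

    from horizon import tables


    def my_list(request, search_opts=None):
        # Placeholder for an API wrapper like those updated below.
        return []


    class MyFilterAction(tables.FilterAction):
        # "server" makes Horizon re-query the backend with the filter
        # instead of filtering already-fetched rows in the browser.
        filter_type = "server"
        filter_choices = (('name', _("Name"), True),)


    class MyTable(tables.DataTable):
        name = tables.Column("name", verbose_name=_("Name"))

        class Meta:
            name = "my_table"
            table_actions = (MyFilterAction,)


    class MyView(tables.DataTableView):
        table_class = MyTable
        template_name = 'my_table.html'

        def get_data(self):
            # Translate the active filter into backend search_opts.
            search_opts = {}
            filter = self.get_server_filter_info(self.request)
            if filter['value'] and filter['field']:
                search_opts = {filter['field']: filter['value']}
            return my_list(self.request, search_opts)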
@@ -49,8 +49,8 @@ def client(request):
                              input_auth_token=request.user.token.id)


-def image_list(request):
-    return client(request).images.list()
+def image_list(request, search_opts=None):
+    return client(request).images.list(search_opts)


 def image_get(request, image_id):
@@ -69,8 +69,8 @@ def image_tags_update(request, image_id, image_tags):
     client(request).images.update_tags(image_id, image_tags)


-def plugin_list(request):
-    return client(request).plugins.list()
+def plugin_list(request, search_opts=None):
+    return client(request).plugins.list(search_opts)


 def plugin_get(request, plugin_name):
@@ -110,8 +110,8 @@ def nodegroup_template_create(request, name, plugin_name, hadoop_version,
                                                  availability_zone)


-def nodegroup_template_list(request):
-    return client(request).node_group_templates.list()
+def nodegroup_template_list(request, search_opts=None):
+    return client(request).node_group_templates.list(search_opts)


 def nodegroup_template_get(request, ngt_id):
@@ -161,8 +161,8 @@ def cluster_template_create(request, name, plugin_name, hadoop_version,
                                              net_id)


-def cluster_template_list(request):
-    return client(request).cluster_templates.list()
+def cluster_template_list(request, search_opts=None):
+    return client(request).cluster_templates.list(search_opts)


 def cluster_template_get(request, ct_id):
@@ -205,8 +205,8 @@ def cluster_scale(request, cluster_id, scale_object):
     return client(request).clusters.scale(cluster_id, scale_object)


-def cluster_list(request):
-    return client(request).clusters.list()
+def cluster_list(request, search_opts=None):
+    return client(request).clusters.list(search_opts)


 def cluster_get(request, cluster_id):
@@ -224,8 +224,8 @@ def data_source_create(request, name, description, ds_type, url,
                                                credential_pass)


-def data_source_list(request):
-    return client(request).data_sources.list()
+def data_source_list(request, search_opts=None):
+    return client(request).data_sources.list(search_opts)


 def data_source_get(request, ds_id):
@@ -240,8 +240,8 @@ def job_binary_create(request, name, url, description, extra):
     return client(request).job_binaries.create(name, url, description, extra)


-def job_binary_list(request):
-    return client(request).job_binaries.list()
+def job_binary_list(request, search_opts=None):
+    return client(request).job_binaries.list(search_opts)


 def job_binary_get(request, jb_id):
@@ -260,8 +260,8 @@ def job_binary_internal_create(request, name, data):
     return client(request).job_binary_internals.create(name, data)


-def job_binary_internal_list(request):
-    return client(request).job_binary_internals.list()
+def job_binary_internal_list(request, search_opts=None):
+    return client(request).job_binary_internals.list(search_opts)


 def job_binary_internal_get(request, jbi_id):
@@ -276,8 +276,8 @@ def job_create(request, name, j_type, mains, libs, description):
     return client(request).jobs.create(name, j_type, mains, libs, description)


-def job_list(request):
-    return client(request).jobs.list()
+def job_list(request, search_opts=None):
+    return client(request).jobs.list(search_opts)


 def job_get(request, job_id):
@@ -299,8 +299,8 @@ def job_execution_create(request, job_id, cluster_id,
                                                configs)


-def job_execution_list(request):
-    jex_list = client(request).job_executions.list()
+def job_execution_list(request, search_opts=None):
+    jex_list = client(request).job_executions.list(search_opts)
     job_dict = dict((j.id, j) for j in job_list(request))
     cluster_dict = dict((c.id, c) for c in cluster_list(request))
     for jex in jex_list:
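Since search_opts defaults to None in every wrapper above, existing callers keep their old behavior, while the table views can now pass a filter dict straight through to python-saharaclient. A hedged usage sketch (the filter values are made up; request is the Django HttpRequest available in any Horizon view):

    all_clusters = cluster_list(request)                    # unchanged behavior
    active = cluster_list(request, {'status': 'Active'})    # server-side filter
    wordcount_jobs = job_list(request, {'name': 'wordcount'})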
@@ -26,6 +26,14 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class ClusterTemplatesFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('plugin', _("Plugin"), True),
+                      ('hadoop_version', _("Version"), True),
+                      ('description', _("Description")))
+
+
 class UploadFile(tables.LinkAction):
     name = 'upload_file'
     verbose_name = _("Upload Template")
@@ -125,7 +133,8 @@ class ClusterTemplatesTable(tables.DataTable):
         table_actions = (UploadFile,
                          CreateClusterTemplate,
                          ConfigureClusterTemplate,
-                         DeleteTemplate,)
+                         DeleteTemplate,
+                         ClusterTemplatesFilterAction,)

         row_actions = (CreateCluster,
                        CopyTemplate,
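Each filter_choices entry above is a (field_key, display_label, server_filterable) tuple; note that the 'description' choice omits the third element, so it falls back to whatever default horizon.tables.FilterAction applies. A small illustrative unpacking of the shape used throughout this commit:

    from django.utils.translation import ugettext_lazy as _

    filter_choices = (('name', _("Name"), True),
                      ('plugin', _("Plugin"), True),
                      ('hadoop_version', _("Version"), True),
                      ('description', _("Description")))

    for choice in filter_choices:
        field_key, display_label = choice[0], choice[1]
        # Third element is optional; assumed falsy when absent.
        server_filterable = choice[2] if len(choice) > 2 else False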
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingClusterTemplateTests(test.TestCase):
     @test.create_stubs({api.sahara: ('cluster_template_list',)})
     def test_index(self):
-        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.cluster_templates.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -85,7 +85,7 @@ class DataProcessingClusterTemplateTests(test.TestCase):
                                        'cluster_template_delete')})
     def test_delete(self):
         ct = self.cluster_templates.first()
-        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.cluster_templates.list())
         api.sahara.cluster_template_delete(IsA(http.HttpRequest), ct.id)
         self.mox.ReplayAll()
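The test changes here and in the sections below are mechanical but necessary: mox matches recorded expectations against actual calls argument-for-argument, so once a view passes {} as search_opts, the stub must record that second argument too. A minimal self-contained mox illustration (not Horizon code; SaharaApi is a made-up class):

    import mox


    class SaharaApi(object):
        def job_list(self, request, search_opts=None):
            raise NotImplementedError


    m = mox.Mox()
    api = m.CreateMock(SaharaApi)
    api.job_list('fake-request', {}).AndReturn(['job-1'])  # record both args
    m.ReplayAll()

    assert api.job_list('fake-request', {}) == ['job-1']   # exact match
    m.VerifyAll()
    # Calling api.job_list('fake-request') without the second argument
    # would raise mox.UnexpectedMethodCallError instead.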
@@ -44,8 +44,12 @@ class ClusterTemplatesView(tables.DataTableView):

     def get_data(self):
         try:
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
             cluster_templates = saharaclient.cluster_template_list(
-                self.request)
+                self.request, search_opts)
         except Exception:
             cluster_templates = []
             exceptions.handle(self.request,
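get_server_filter_info comes from Horizon's DataTableView machinery; as consumed here, it returns a dict carrying at least 'field' and 'value' keys describing the active filter (falsy when none is applied). The translation step each view repeats, extracted as a standalone function for clarity (the dict shape is inferred from the usage above):

    def to_search_opts(filter_info):
        # Build Sahara search_opts from Horizon's server filter info.
        if filter_info.get('value') and filter_info.get('field'):
            return {filter_info['field']: filter_info['value']}
        return {}

    assert to_search_opts({'field': 'status', 'value': 'Active'}) == \
        {'status': 'Active'}
    assert to_search_opts({'field': 'status', 'value': ''}) == {}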
@@ -28,6 +28,12 @@ from saharaclient.api import base as api_base
 LOG = logging.getLogger(__name__)


+class ClustersFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('status', _("Status"), True))
+
+
 class CreateCluster(tables.LinkAction):
     name = "create"
     verbose_name = _("Launch Cluster")
@@ -119,6 +125,7 @@ class ClustersTable(tables.DataTable):
         status_columns = ["status"]
         table_actions = (CreateCluster,
                          ConfigureCluster,
-                         DeleteCluster)
+                         DeleteCluster,
+                         ClustersFilterAction)
         row_actions = (ScaleCluster,
                        DeleteCluster,)
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingClusterTests(test.TestCase):
     @test.create_stubs({api.sahara: ('cluster_list',)})
     def test_index(self):
-        api.sahara.cluster_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.clusters.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -53,7 +53,7 @@ class DataProcessingClusterTests(test.TestCase):
                                        'cluster_delete')})
     def test_delete(self):
         cluster = self.clusters.first()
-        api.sahara.cluster_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.clusters.list())
         api.sahara.cluster_delete(IsA(http.HttpRequest), cluster.id)
         self.mox.ReplayAll()
@@ -40,7 +40,11 @@ class ClustersView(tables.DataTableView):

     def get_data(self):
         try:
-            clusters = saharaclient.cluster_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            clusters = saharaclient.cluster_list(self.request, search_opts)
         except Exception:
             clusters = []
             exceptions.handle(self.request,
@@ -31,6 +31,14 @@ from openstack_dashboard.dashboards.project.data_processing. \
 LOG = logging.getLogger(__name__)


+class JobExecutionsFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('id', _("ID"), True),
+                      ('job', _("Job"), True),
+                      ('cluster', _("Cluster"), True),
+                      ('status', _("Status"), True))
+
+
 class DeleteJobExecution(tables.DeleteAction):
     @staticmethod
     def action_present(count):
@@ -165,7 +173,8 @@ class JobExecutionsTable(tables.DataTable):
         row_class = UpdateRow
         status_columns = ["status"]
         verbose_name = _("Job Executions")
-        table_actions = [DeleteJobExecution]
+        table_actions = [DeleteJobExecution,
+                         JobExecutionsFilterAction]
         row_actions = [DeleteJobExecution,
                        ReLaunchJobExistingCluster,
                        ReLaunchJobNewCluster]
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingJobExecutionTests(test.TestCase):
     @test.create_stubs({api.sahara: ('job_execution_list',)})
     def test_index(self):
-        api.sahara.job_execution_list(IsA(http.HttpRequest)) \
+        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.job_executions.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -49,7 +49,7 @@ class DataProcessingJobExecutionTests(test.TestCase):
                                        'job_execution_delete')})
     def test_delete(self):
         job_exec = self.job_executions.first()
-        api.sahara.job_execution_list(IsA(http.HttpRequest)) \
+        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.job_executions.list())
         api.sahara.job_execution_delete(IsA(http.HttpRequest), job_exec.id)
         self.mox.ReplayAll()
@@ -30,13 +30,26 @@ LOG = logging.getLogger(__name__)


 class JobExecutionsView(tables.DataTableView):
+    SEARCH_MAPPING = {"cluster": "cluster.name",
+                      "job": "job.name"}
+
     table_class = je_tables.JobExecutionsTable
     template_name = (
         'project/data_processing.job_executions/job_executions.html')

     def get_data(self):
         try:
-            jobs = saharaclient.job_execution_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                if filter['field'] in self.SEARCH_MAPPING:
+                    # Handle special cases for cluster and job
+                    # since they are in different database tables.
+                    search_opts = {
+                        self.SEARCH_MAPPING[filter['field']]: filter['value']}
+                else:
+                    search_opts = {filter['field']: filter['value']}
+            jobs = saharaclient.job_execution_list(self.request, search_opts)
         except Exception:
             jobs = []
             exceptions.handle(self.request,
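Job executions are the one table where the UI field names do not line up with Sahara's queryable attributes: a job execution stores its cluster and job as related objects, so the view rewrites those two fields to dotted attribute paths before querying. The rewrite in isolation (names mirror the diff above):

    SEARCH_MAPPING = {"cluster": "cluster.name",
                      "job": "job.name"}


    def build_search_opts(field, value):
        # Cluster and job names live in other database tables, hence
        # the dotted paths; every other field filters directly.
        if not (field and value):
            return {}
        return {SEARCH_MAPPING.get(field, field): value}


    assert build_search_opts("cluster", "my-cluster") == \
        {"cluster.name": "my-cluster"}
    assert build_search_opts("status", "DONE") == {"status": "DONE"}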
@@ -25,6 +25,12 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class JobsFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('description', _("Description"), True))
+
+
 class CreateJob(tables.LinkAction):
     name = "create job"
     verbose_name = _("Create Job")
@@ -103,6 +109,5 @@ class JobsTable(tables.DataTable):
     class Meta:
         name = "jobs"
         verbose_name = _("Jobs")
-        table_actions = (CreateJob,
-                         DeleteJob)
+        table_actions = (CreateJob, DeleteJob, JobsFilterAction,)
         row_actions = (LaunchJobExistingCluster, ChoosePlugin, DeleteJob,)
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingJobTests(test.TestCase):
     @test.create_stubs({api.sahara: ('job_list',)})
     def test_index(self):
-        api.sahara.job_list(IsA(http.HttpRequest)) \
+        api.sahara.job_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.jobs.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -50,7 +50,7 @@ class DataProcessingJobTests(test.TestCase):
                                        'job_delete')})
     def test_delete(self):
         job = self.jobs.first()
-        api.sahara.job_list(IsA(http.HttpRequest)) \
+        api.sahara.job_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.jobs.list())
         api.sahara.job_delete(IsA(http.HttpRequest), job.id)
         self.mox.ReplayAll()
@@ -41,7 +41,11 @@ class JobsView(tables.DataTableView):

     def get_data(self):
         try:
-            jobs = saharaclient.job_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            jobs = saharaclient.job_list(self.request, search_opts)
         except Exception:
             jobs = []
             exceptions.handle(self.request,
@@ -23,6 +23,13 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class NodeGroupTemplatesFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('plugin', _("Plugin"), True),
+                      ('hadoop_version', _("Version"), True))
+
+
 class CreateNodegroupTemplate(tables.LinkAction):
     name = "create"
     verbose_name = _("Create Template")
@@ -89,6 +96,7 @@ class NodegroupTemplatesTable(tables.DataTable):
         verbose_name = _("Node Group Templates")
         table_actions = (CreateNodegroupTemplate,
                          ConfigureNodegroupTemplate,
-                         DeleteTemplate)
+                         DeleteTemplate,
+                         NodeGroupTemplatesFilterAction,)
         row_actions = (CopyTemplate,
                        DeleteTemplate,)
@@ -35,7 +35,7 @@ CREATE_URL = reverse(
 class DataProcessingNodeGroupTests(test.TestCase):
     @test.create_stubs({api.sahara: ('nodegroup_template_list',)})
     def test_index(self):
-        api.sahara.nodegroup_template_list(IsA(http.HttpRequest)) \
+        api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.nodegroup_templates.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -67,7 +67,7 @@ class DataProcessingNodeGroupTests(test.TestCase):
                                        'nodegroup_template_delete')})
     def test_delete(self):
         ngt = self.nodegroup_templates.first()
-        api.sahara.nodegroup_template_list(IsA(http.HttpRequest)) \
+        api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.nodegroup_templates.list())
         api.sahara.nodegroup_template_delete(IsA(http.HttpRequest), ngt.id)
         self.mox.ReplayAll()
@@ -41,7 +41,12 @@ class NodegroupTemplatesView(tables.DataTableView):

     def get_data(self):
         try:
-            data = saharaclient.nodegroup_template_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            data = saharaclient.nodegroup_template_list(self.request,
+                                                        search_opts)
         except Exception:
             data = []
             exceptions.handle(self.request,
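Worth noting: the API layer at the top also grew search_opts support for data sources, job binaries, images and plugins, but only the five tables named in the commit message get a FilterAction here. Extending the pattern to another table is the same three-step recipe; a hypothetical sketch for Data Sources (illustrative names, not part of this commit):

    from django.utils.translation import ugettext_lazy as _

    from horizon import tables


    class DataSourcesFilterAction(tables.FilterAction):
        filter_type = "server"
        filter_choices = (('name', _("Name"), True),
                          ('type', _("Type"), True))

    # 1. Add DataSourcesFilterAction to the table's Meta.table_actions.
    # 2. In the view's get_data, build search_opts from
    #    self.get_server_filter_info(self.request).
    # 3. Call saharaclient.data_source_list(self.request, search_opts),
    #    which already accepts the parameter after this change.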