Adding filtering to data_processing tables
Adding filtering capability to some of the data processing tables:

* Job Executions: Job, Cluster, Id, Status
* Jobs: Name, Description
* Clusters: Name, Status
* Cluster Templates: Name, Plugin, Version, Description
* Node Group Templates: Name, Plugin, Version

Updating tests to use the extra param.

Change-Id: Ib53c6da2ae3df2996c5713a5d8cedfdd8ade91ce
Implements: bp data-processing-table-filtering
parent 926fa00073
commit 62c5b2159a
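The pattern applied throughout the diff below is the same for every table: the table gains a server-type FilterAction, the view's get_data() turns Horizon's server-filter info into a search_opts dict, and the api.sahara wrapper forwards that dict to the client's list() call. A minimal, framework-free sketch of that flow follows; the filter-info shape and SEARCH_MAPPING values mirror the diff, while build_search_opts itself is a stand-in for illustration, not code from this commit.

# Sketch only: condenses the view-side logic that the diff below adds
# to every get_data(); build_search_opts is not part of the commit.
def build_search_opts(filter_info, mapping=None):
    # filter_info is shaped like Horizon's get_server_filter_info()
    # result: {'field': ..., 'value': ...}.
    search_opts = {}
    if filter_info.get('value') and filter_info.get('field'):
        field = filter_info['field']
        # Fields such as "cluster" and "job" live in other database
        # tables server-side, so they are remapped before querying.
        if mapping and field in mapping:
            field = mapping[field]
        search_opts = {field: filter_info['value']}
    return search_opts

# Example: the Job Executions view remaps two fields this way.
SEARCH_MAPPING = {"cluster": "cluster.name", "job": "job.name"}
print(build_search_opts({'field': 'cluster', 'value': 'demo'}, SEARCH_MAPPING))
# prints: {'cluster.name': 'demo'}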
@@ -49,8 +49,8 @@ def client(request):
                              input_auth_token=request.user.token.id)


-def image_list(request):
-    return client(request).images.list()
+def image_list(request, search_opts=None):
+    return client(request).images.list(search_opts)


 def image_get(request, image_id):
@@ -69,8 +69,8 @@ def image_tags_update(request, image_id, image_tags):
     client(request).images.update_tags(image_id, image_tags)


-def plugin_list(request):
-    return client(request).plugins.list()
+def plugin_list(request, search_opts=None):
+    return client(request).plugins.list(search_opts)


 def plugin_get(request, plugin_name):
@@ -110,8 +110,8 @@ def nodegroup_template_create(request, name, plugin_name, hadoop_version,
                              availability_zone)


-def nodegroup_template_list(request):
-    return client(request).node_group_templates.list()
+def nodegroup_template_list(request, search_opts=None):
+    return client(request).node_group_templates.list(search_opts)


 def nodegroup_template_get(request, ngt_id):
@@ -161,8 +161,8 @@ def cluster_template_create(request, name, plugin_name, hadoop_version,
                              net_id)


-def cluster_template_list(request):
-    return client(request).cluster_templates.list()
+def cluster_template_list(request, search_opts=None):
+    return client(request).cluster_templates.list(search_opts)


 def cluster_template_get(request, ct_id):
@@ -205,8 +205,8 @@ def cluster_scale(request, cluster_id, scale_object):
     return client(request).clusters.scale(cluster_id, scale_object)


-def cluster_list(request):
-    return client(request).clusters.list()
+def cluster_list(request, search_opts=None):
+    return client(request).clusters.list(search_opts)


 def cluster_get(request, cluster_id):
@@ -224,8 +224,8 @@ def data_source_create(request, name, description, ds_type, url,
                              credential_pass)


-def data_source_list(request):
-    return client(request).data_sources.list()
+def data_source_list(request, search_opts=None):
+    return client(request).data_sources.list(search_opts)


 def data_source_get(request, ds_id):
@@ -240,8 +240,8 @@ def job_binary_create(request, name, url, description, extra):
     return client(request).job_binaries.create(name, url, description, extra)


-def job_binary_list(request):
-    return client(request).job_binaries.list()
+def job_binary_list(request, search_opts=None):
+    return client(request).job_binaries.list(search_opts)


 def job_binary_get(request, jb_id):
@@ -260,8 +260,8 @@ def job_binary_internal_create(request, name, data):
     return client(request).job_binary_internals.create(name, data)


-def job_binary_internal_list(request):
-    return client(request).job_binary_internals.list()
+def job_binary_internal_list(request, search_opts=None):
+    return client(request).job_binary_internals.list(search_opts)


 def job_binary_internal_get(request, jbi_id):
@@ -276,8 +276,8 @@ def job_create(request, name, j_type, mains, libs, description):
     return client(request).jobs.create(name, j_type, mains, libs, description)


-def job_list(request):
-    return client(request).jobs.list()
+def job_list(request, search_opts=None):
+    return client(request).jobs.list(search_opts)


 def job_get(request, job_id):
@@ -299,8 +299,8 @@ def job_execution_create(request, job_id, cluster_id,
                              configs)


-def job_execution_list(request):
-    jex_list = client(request).job_executions.list()
+def job_execution_list(request, search_opts=None):
+    jex_list = client(request).job_executions.list(search_opts)
     job_dict = dict((j.id, j) for j in job_list(request))
     cluster_dict = dict((c.id, c) for c in cluster_list(request))
     for jex in jex_list:
@@ -26,6 +26,14 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class ClusterTemplatesFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('plugin', _("Plugin"), True),
+                      ('hadoop_version', _("Version"), True),
+                      ('description', _("Description")))
+
+
 class UploadFile(tables.LinkAction):
     name = 'upload_file'
     verbose_name = _("Upload Template")
@@ -125,7 +133,8 @@ class ClusterTemplatesTable(tables.DataTable):
         table_actions = (UploadFile,
                          CreateClusterTemplate,
                          ConfigureClusterTemplate,
-                         DeleteTemplate,)
+                         DeleteTemplate,
+                         ClusterTemplatesFilterAction,)

         row_actions = (CreateCluster,
                        CopyTemplate,
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingClusterTemplateTests(test.TestCase):
     @test.create_stubs({api.sahara: ('cluster_template_list',)})
     def test_index(self):
-        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.cluster_templates.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -85,7 +85,7 @@ class DataProcessingClusterTemplateTests(test.TestCase):
                         'cluster_template_delete')})
     def test_delete(self):
         ct = self.cluster_templates.first()
-        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.cluster_templates.list())
         api.sahara.cluster_template_delete(IsA(http.HttpRequest), ct.id)
         self.mox.ReplayAll()
@@ -44,8 +44,12 @@ class ClusterTemplatesView(tables.DataTableView):

     def get_data(self):
         try:
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
             cluster_templates = saharaclient.cluster_template_list(
-                self.request)
+                self.request, search_opts)
         except Exception:
             cluster_templates = []
             exceptions.handle(self.request,
@@ -28,6 +28,12 @@ from saharaclient.api import base as api_base
 LOG = logging.getLogger(__name__)


+class ClustersFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('status', _("Status"), True))
+
+
 class CreateCluster(tables.LinkAction):
     name = "create"
     verbose_name = _("Launch Cluster")
@@ -119,6 +125,7 @@ class ClustersTable(tables.DataTable):
         status_columns = ["status"]
         table_actions = (CreateCluster,
                          ConfigureCluster,
-                         DeleteCluster)
+                         DeleteCluster,
+                         ClustersFilterAction)
         row_actions = (ScaleCluster,
                        DeleteCluster,)
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingClusterTests(test.TestCase):
     @test.create_stubs({api.sahara: ('cluster_list',)})
     def test_index(self):
-        api.sahara.cluster_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.clusters.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -53,7 +53,7 @@ class DataProcessingClusterTests(test.TestCase):
                         'cluster_delete')})
     def test_delete(self):
         cluster = self.clusters.first()
-        api.sahara.cluster_list(IsA(http.HttpRequest)) \
+        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.clusters.list())
         api.sahara.cluster_delete(IsA(http.HttpRequest), cluster.id)
         self.mox.ReplayAll()
@@ -40,7 +40,11 @@ class ClustersView(tables.DataTableView):

     def get_data(self):
         try:
-            clusters = saharaclient.cluster_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            clusters = saharaclient.cluster_list(self.request, search_opts)
         except Exception:
             clusters = []
             exceptions.handle(self.request,
@@ -31,6 +31,14 @@ from openstack_dashboard.dashboards.project.data_processing. \
 LOG = logging.getLogger(__name__)


+class JobExecutionsFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('id', _("ID"), True),
+                      ('job', _("Job"), True),
+                      ('cluster', _("Cluster"), True),
+                      ('status', _("Status"), True))
+
+
 class DeleteJobExecution(tables.DeleteAction):
     @staticmethod
     def action_present(count):
@@ -165,7 +173,8 @@ class JobExecutionsTable(tables.DataTable):
         row_class = UpdateRow
         status_columns = ["status"]
         verbose_name = _("Job Executions")
-        table_actions = [DeleteJobExecution]
+        table_actions = [DeleteJobExecution,
+                         JobExecutionsFilterAction]
         row_actions = [DeleteJobExecution,
                        ReLaunchJobExistingCluster,
                        ReLaunchJobNewCluster]
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingJobExecutionTests(test.TestCase):
     @test.create_stubs({api.sahara: ('job_execution_list',)})
     def test_index(self):
-        api.sahara.job_execution_list(IsA(http.HttpRequest)) \
+        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.job_executions.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -49,7 +49,7 @@ class DataProcessingJobExecutionTests(test.TestCase):
                         'job_execution_delete')})
     def test_delete(self):
         job_exec = self.job_executions.first()
-        api.sahara.job_execution_list(IsA(http.HttpRequest)) \
+        api.sahara.job_execution_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.job_executions.list())
         api.sahara.job_execution_delete(IsA(http.HttpRequest), job_exec.id)
         self.mox.ReplayAll()
@@ -30,13 +30,26 @@ LOG = logging.getLogger(__name__)


 class JobExecutionsView(tables.DataTableView):
+    SEARCH_MAPPING = {"cluster": "cluster.name",
+                      "job": "job.name"}
+
     table_class = je_tables.JobExecutionsTable
     template_name = (
         'project/data_processing.job_executions/job_executions.html')

     def get_data(self):
         try:
-            jobs = saharaclient.job_execution_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                if filter['field'] in self.SEARCH_MAPPING:
+                    # Handle special cases for cluster and job
+                    # since they are in different database tables.
+                    search_opts = {
+                        self.SEARCH_MAPPING[filter['field']]: filter['value']}
+                else:
+                    search_opts = {filter['field']: filter['value']}
+            jobs = saharaclient.job_execution_list(self.request, search_opts)
         except Exception:
             jobs = []
             exceptions.handle(self.request,
@@ -25,6 +25,12 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class JobsFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('description', _("Description"), True))
+
+
 class CreateJob(tables.LinkAction):
     name = "create job"
     verbose_name = _("Create Job")
@@ -103,6 +109,5 @@ class JobsTable(tables.DataTable):
     class Meta:
         name = "jobs"
         verbose_name = _("Jobs")
-        table_actions = (CreateJob,
-                         DeleteJob)
+        table_actions = (CreateJob, DeleteJob, JobsFilterAction,)
         row_actions = (LaunchJobExistingCluster, ChoosePlugin, DeleteJob,)
@@ -27,7 +27,7 @@ DETAILS_URL = reverse(
 class DataProcessingJobTests(test.TestCase):
     @test.create_stubs({api.sahara: ('job_list',)})
     def test_index(self):
-        api.sahara.job_list(IsA(http.HttpRequest)) \
+        api.sahara.job_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.jobs.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -50,7 +50,7 @@ class DataProcessingJobTests(test.TestCase):
                         'job_delete')})
     def test_delete(self):
         job = self.jobs.first()
-        api.sahara.job_list(IsA(http.HttpRequest)) \
+        api.sahara.job_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.jobs.list())
         api.sahara.job_delete(IsA(http.HttpRequest), job.id)
         self.mox.ReplayAll()
@@ -41,7 +41,11 @@ class JobsView(tables.DataTableView):

     def get_data(self):
         try:
-            jobs = saharaclient.job_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            jobs = saharaclient.job_list(self.request, search_opts)
         except Exception:
             jobs = []
             exceptions.handle(self.request,
@@ -23,6 +23,13 @@ from openstack_dashboard.api import sahara as saharaclient
 LOG = logging.getLogger(__name__)


+class NodeGroupTemplatesFilterAction(tables.FilterAction):
+    filter_type = "server"
+    filter_choices = (('name', _("Name"), True),
+                      ('plugin', _("Plugin"), True),
+                      ('hadoop_version', _("Version"), True))
+
+
 class CreateNodegroupTemplate(tables.LinkAction):
     name = "create"
     verbose_name = _("Create Template")
@@ -89,6 +96,7 @@ class NodegroupTemplatesTable(tables.DataTable):
         verbose_name = _("Node Group Templates")
         table_actions = (CreateNodegroupTemplate,
                          ConfigureNodegroupTemplate,
-                         DeleteTemplate)
+                         DeleteTemplate,
+                         NodeGroupTemplatesFilterAction,)
         row_actions = (CopyTemplate,
                        DeleteTemplate,)
@@ -35,7 +35,7 @@ CREATE_URL = reverse(
 class DataProcessingNodeGroupTests(test.TestCase):
     @test.create_stubs({api.sahara: ('nodegroup_template_list',)})
     def test_index(self):
-        api.sahara.nodegroup_template_list(IsA(http.HttpRequest)) \
+        api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.nodegroup_templates.list())
         self.mox.ReplayAll()
         res = self.client.get(INDEX_URL)
@@ -67,7 +67,7 @@ class DataProcessingNodeGroupTests(test.TestCase):
                         'nodegroup_template_delete')})
     def test_delete(self):
         ngt = self.nodegroup_templates.first()
-        api.sahara.nodegroup_template_list(IsA(http.HttpRequest)) \
+        api.sahara.nodegroup_template_list(IsA(http.HttpRequest), {}) \
             .AndReturn(self.nodegroup_templates.list())
         api.sahara.nodegroup_template_delete(IsA(http.HttpRequest), ngt.id)
         self.mox.ReplayAll()
@@ -41,7 +41,12 @@ class NodegroupTemplatesView(tables.DataTableView):

     def get_data(self):
         try:
-            data = saharaclient.nodegroup_template_list(self.request)
+            search_opts = {}
+            filter = self.get_server_filter_info(self.request)
+            if filter['value'] and filter['field']:
+                search_opts = {filter['field']: filter['value']}
+            data = saharaclient.nodegroup_template_list(self.request,
+                                                        search_opts)
         except Exception:
             data = []
             exceptions.handle(self.request,
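For reference, a hypothetical caller of the updated wrappers; the request object and Sahara deployment are assumed. Omitting search_opts (it defaults to None) keeps the old unfiltered behaviour, which is why the stubbed tests above now simply expect an extra {} argument.

from openstack_dashboard.api import sahara as saharaclient

def clusters_named(request, name):
    # Server-side filtering: only clusters whose name matches.
    return saharaclient.cluster_list(request, {'name': name})

def all_clusters(request):
    # No search_opts -> unfiltered listing, same as before this change.
    return saharaclient.cluster_list(request)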