diff --git a/openstack_dashboard/contrib/sahara/api/sahara.py b/openstack_dashboard/contrib/sahara/api/sahara.py
index 9eef0b34f..6a441806b 100644
--- a/openstack_dashboard/contrib/sahara/api/sahara.py
+++ b/openstack_dashboard/contrib/sahara/api/sahara.py
@@ -275,8 +275,10 @@ def cluster_list(request, search_opts=None):
     return client(request).clusters.list(search_opts=search_opts)
 
 
-def cluster_get(request, cluster_id):
-    return client(request).clusters.get(cluster_id=cluster_id)
+def cluster_get(request, cluster_id, show_progress=False):
+    return client(request).clusters.get(
+        cluster_id=cluster_id,
+        show_progress=show_progress)
 
 
 def cluster_delete(request, cluster_id):
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tabs.py b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tabs.py
index eb967b267..21de040d7 100644
--- a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tabs.py
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tabs.py
@@ -176,7 +176,20 @@ class InstancesTab(tabs.TableTab):
         return instances
 
 
+class EventLogTab(tabs.Tab):
+    name = _("Cluster Events")
+    slug = "cluster_event_log"
+    template_name = "project/data_processing.clusters/_event_log.html"
+
+    def get_context_data(self, request, **kwargs):
+        cluster_id = self.tab_group.kwargs['cluster_id']
+        kwargs["cluster_id"] = cluster_id
+        kwargs['data_update_url'] = request.get_full_path()
+
+        return kwargs
+
+
 class ClusterDetailsTabs(tabs.TabGroup):
     slug = "cluster_details"
-    tabs = (GeneralTab, NodeGroupsTab, InstancesTab, )
+    tabs = (GeneralTab, NodeGroupsTab, InstancesTab, EventLogTab)
     sticky = True
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html
new file mode 100644
index 000000000..74291eb5e
--- /dev/null
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/templates/data_processing.clusters/_event_log.html
@@ -0,0 +1,62 @@
+{% load i18n %}
+
+<h4>{% trans "Cluster provision steps" %}</h4>
+
+<table class="table table-bordered">
+  <thead>
+    <tr>
+      <th>{% trans "Step Description" %}</th>
+      <th>{% trans "Started at" %}</th>
+      <th>{% trans "Duration" %}</th>
+      <th>{% trans "Progress" %}</th>
+      <th>{% trans "Status" %}</th>
+    </tr>
+  </thead>
+  {# Step rows are appended client-side by data_processing.event_log.js #}
+  <tbody id="steps_body">
+  </tbody>
+</table>
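Illustrative note (not part of the patch): the ClusterEventsView added further down serves this data at <cluster details URL>/events, and both data_processing.event_log.js and the tests that follow consume the same JSON shape. A minimal sketch of an external consumer, assuming a plain Python 2 environment and a hypothetical poll_cluster_events helper; only the "/events" suffix and the payload field names come from this patch.

# Sketch only: the helper name and the use of urllib2 are assumptions.
import json
import time
import urllib2


def poll_cluster_events(cluster_details_url, interval=3):
    while True:
        resp = urllib2.urlopen(cluster_details_url + "/events")
        payload = json.loads(resp.read())
        for step in payload["provision_steps"]:
            # Each step carries step_name, completed, total and result.
            print("%s: %s/%s (%s)" % (step["step_name"], step["completed"],
                                      step["total"], step["result"]))
        if not payload["need_update"]:
            break
        time.sleep(interval)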
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tests.py b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tests.py
index 945d42f79..dd7e1fc30 100644
--- a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tests.py
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/tests.py
@@ -10,6 +10,8 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import json
+
 from django.core.urlresolvers import reverse
 from django import http
 
@@ -49,6 +51,31 @@ class DataProcessingClusterTests(test.TestCase):
         self.assertContains(res, "No Images Available")
         self.assertContains(res, "No Templates Available")
 
+    @test.create_stubs({api.sahara: ('cluster_get',)})
+    def test_event_log_tab(self):
+        cluster = self.clusters.list()[-1]
+        api.sahara.cluster_get(IsA(http.HttpRequest),
+                               "cl2", show_progress=True).AndReturn(cluster)
+        self.mox.ReplayAll()
+
+        url = reverse(
+            'horizon:project:data_processing.clusters:events', args=["cl2"])
+        res = self.client.get(url)
+        data = json.loads(res.content)
+
+        self.assertIn("provision_steps", data)
+        self.assertEqual(data["need_update"], False)
+
+        step_0 = data["provision_steps"][0]
+        self.assertEqual(2, step_0["completed"])
+        self.assertEqual(2, len(step_0["events"]))
+        for evt in step_0["events"]:
+            self.assertEqual(True, evt["successful"])
+
+        step_1 = data["provision_steps"][1]
+        self.assertEqual(3, step_1["completed"])
+        self.assertEqual(0, len(step_1["events"]))
+
     @test.create_stubs({api.sahara: ('cluster_list',
                                      'cluster_delete')})
     def test_delete(self):
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/urls.py b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/urls.py
index 54441ca7d..f1e67d542 100644
--- a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/urls.py
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/urls.py
@@ -33,6 +33,9 @@ urlpatterns = patterns('',
     url(r'^(?P<cluster_id>[^/]+)$',
         views.ClusterDetailsView.as_view(),
         name='details'),
+    url(r'^(?P<cluster_id>[^/]+)/events$',
+        views.ClusterEventsView.as_view(),
+        name='events'),
     url(r'^(?P<cluster_id>[^/]+)/scale$',
         views.ScaleClusterView.as_view(),
         name='scale'))
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/views.py b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/views.py
index c2ee37c30..cf59c9f46 100644
--- a/openstack_dashboard/contrib/sahara/content/data_processing/clusters/views.py
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/clusters/views.py
@@ -11,9 +11,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from datetime import datetime
+import json
 import logging
 
-from django.utils.translation import ugettext_lazy as _
+from django.http import HttpResponse
+from django.utils.translation import ugettext as _
+from django.views.generic import base as django_base
+import six
 
 from horizon import exceptions
 from horizon import tables
@@ -32,6 +37,7 @@
 import openstack_dashboard.contrib.sahara.content.data_processing.clusters. \
     workflows.create as create_flow
 import openstack_dashboard.contrib.sahara.content.data_processing.clusters. \
     workflows.scale as scale_flow
+from saharaclient.api.base import APIException
 
 LOG = logging.getLogger(__name__)
@@ -77,6 +83,94 @@ class ClusterDetailsView(tabs.TabView):
         return context
 
 
+class ClusterEventsView(django_base.View):
+
+    _date_format = "%Y-%m-%dT%H:%M:%S"
+
+    @staticmethod
+    def _created_at_key(obj):
+        return datetime.strptime(obj["created_at"],
+                                 ClusterEventsView._date_format)
+
+    def get(self, request, *args, **kwargs):
+
+        cluster_id = kwargs.get("cluster_id")
+
+        try:
+            cluster = saharaclient.cluster_get(request, cluster_id,
+                                               show_progress=True)
+            node_group_mapping = {}
+            for node_group in cluster.node_groups:
+                node_group_mapping[node_group["id"]] = node_group["name"]
+
+            provision_steps = cluster.provision_progress
+
+            # Sort by create time
+            provision_steps = sorted(provision_steps,
+                                     key=ClusterEventsView._created_at_key,
+                                     reverse=True)
+
+            for step in provision_steps:
+                # Sort events of the steps also
+                step["events"] = sorted(step["events"],
+                                        key=ClusterEventsView._created_at_key,
+                                        reverse=True)
+
+                successful_events_count = 0
+
+                for event in step["events"]:
+                    if event["node_group_id"]:
+                        event["node_group_name"] = node_group_mapping[
+                            event["node_group_id"]]
+
+                    event_result = _("Unknown")
+                    if event["successful"] is True:
+                        successful_events_count += 1
+                        event_result = _("Completed Successfully")
+                    elif event["successful"] is False:
+                        event_result = _("Failed")
+
+                    event["result"] = event_result
+
+                    if not event["event_info"]:
+                        event["event_info"] = _("No info available")
+
+                start_time = datetime.strptime(step["created_at"],
+                                               self._date_format)
+                end_time = datetime.now()
+                # Clear out microseconds. There is no need for that precision.
+                end_time = end_time.replace(microsecond=0)
+                if step["successful"] is not None:
+                    updated_at = step["updated_at"]
+                    end_time = datetime.strptime(updated_at,
+                                                 self._date_format)
+                step["duration"] = six.text_type(end_time - start_time)
+
+                result = _("In progress")
+                step["completed"] = successful_events_count
+
+                if step["successful"] is True:
+                    step["completed"] = step["total"]
+                    result = _("Completed Successfully")
+                elif step["successful"] is False:
+                    result = _("Failed")
+
+                step["result"] = result
+
+            status = cluster.status.lower()
+            need_update = status not in ("active", "error")
+        except APIException:
+            # Cluster is not available. Returning empty event log.
+            need_update = False
+            provision_steps = []
+
+        context = {"provision_steps": provision_steps,
+                   "need_update": need_update}
+
+        return HttpResponse(json.dumps(context),
+                            content_type='application/json')
+
+
 class CreateClusterView(workflows.WorkflowView):
     workflow_class = create_flow.CreateCluster
     success_url = \
diff --git a/openstack_dashboard/contrib/sahara/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js b/openstack_dashboard/contrib/sahara/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js
new file mode 100644
index 000000000..fbb3acd35
--- /dev/null
+++ b/openstack_dashboard/contrib/sahara/content/data_processing/static/dashboard/project/data_processing/data_processing.event_log.js
@@ -0,0 +1,145 @@
+horizon.event_log = {
+  cluster_id: null,
+  data_update_url: null,
+  cached_data: null,
+  modal_step_id: null,
+
+  fetch_update_events: function() {
+    var url = this.data_update_url + "/events";
+    $.get(url).done(function (data) {
+      horizon.event_log.cached_data = data;
+      horizon.event_log.update_view(data);
+      horizon.event_log.schedule_next_update(data);
+    }).fail(function() {
+      // Event log is not available for some reason.
+      horizon.alert("error", gettext("Event log is not available."));
+    });
+  },
+
+  update_view: function (data) {
+    this.update_step_rows(data.provision_steps);
+    this.update_events_rows(data);
+  },
+
+  update_step_rows: function (steps) {
+    // Clear steps
+    $("#steps_body").find("tr").remove();
+
+    $(steps).each(function (i, step) {
+      horizon.event_log.create_step_row(step);
+    });
+  },
+
+  create_step_row: function (step) {
+    var step_row_template = "" +
+      "<tr id='%step_id%'>" +
+      "<td>%step_descr%</td>" +
+      "<td>%started_at%</td>" +
+      "<td>%duration%</td>" +
+      "<td>%progress%</td>" +
+      "<td>%result% " +
+      "<a id='%step_id%_show_events_btn' href='#'>" +
+      gettext('Show events') + "</a>" +
+      "</td>" +
+      "</tr>";
+
+    var started_at = new Date(step.created_at).toString();
+    var progress = "" + step.completed + " / " + step.total;
+    var description = step.step_type + "<br />" + step.step_name;
+
+    var row = step_row_template
+      .replace(/%step_id%/g, step.id)
+      .replace(/%step_descr%/g, description)
+      .replace(/%started_at%/g, started_at)
+      .replace(/%duration%/g, step.duration)
+      .replace(/%progress%/g, progress)
+      .replace(/%result%/g, step.result);
+
+    $("#steps_body").append(row);
+    if (step.successful === true) {
+      $("#" + step.id + "_show_events_btn").hide();
+    }
+  },
+
+  update_events_rows: function(data) {
+    if (!this.modal_step_id) {
+      return;
+    }
+    var current_step = null;
+    $(data.provision_steps).each(function (i, step) {
+      if (step.id === horizon.event_log.modal_step_id) {
+        current_step = step;
+      }
+    });
+
+    var header = current_step.step_type + "<br />" + current_step.step_name;
+    $("#events_modal_header").html(header);
+
+    // Clear events
+    this.clear_events();
+    this.clear_modal_status();
+
+    if (current_step.successful === true) {
+      this.mark_modal_as_successful();
+      return;
+    }
+    var events = current_step.events;
+    $(events).each(function (i, event) {
+      event.step_name = current_step.step_name;
+    });
+
+    $(events).each(function (i, event) {
+      horizon.event_log.create_event_row(event);
+    });
+  },
+
+  clear_events: function() {
+    $("#events_body").find("tr").remove();
+  },
+
+  clear_modal_status: function() {
+    $("#modal_status_marker").text("");
+  },
+
+  mark_modal_as_successful: function() {
+    $("#modal_status_marker").text(gettext(
+      "The step has completed successfully. No events to display."));
+  },
+
+  create_event_row: function(event) {
+    var step_row_template = "" +
+      "<tr id='%event_id%'>" +
+      "<td>%node_group_name%</td>" +
+      "<td>%instance%</td>" +
+      "<td>%time%</td>" +
+      "<td>%info%</td>" +
+      "<td>%result%</td>" +
+      "</tr>";
+
+    var event_time = new Date(event.created_at).toString();
+
+    var row = step_row_template
+      .replace(/%event_id%/g, event.id)
+      .replace(/%node_group_name%/g, event.node_group_name)
+      .replace(/%instance%/g, event.instance_name)
+      .replace(/%time%/g, event_time)
+      .replace(/%info%/g, event.event_info)
+      .replace(/%result%/g, event.result);
+
+    $("#events_body").append(row);
+  },
+
+  schedule_next_update: function(data) {
+    // 2-3 sec delay so that if there are multiple tabs polling the backend
+    // the requests are spread in time
+    var delay = 2000 + Math.floor((Math.random() * 1000) + 1);
+
+    if (data.need_update) {
+      setTimeout(function () {
+        horizon.event_log.fetch_update_events();
+      }, delay);
+    }
+  }
+};
diff --git a/openstack_dashboard/enabled/_1820_data_processing_clusters_panel.py b/openstack_dashboard/enabled/_1820_data_processing_clusters_panel.py
index c549960f4..e52209a17 100644
--- a/openstack_dashboard/enabled/_1820_data_processing_clusters_panel.py
+++ b/openstack_dashboard/enabled/_1820_data_processing_clusters_panel.py
@@ -22,3 +22,10 @@ PANEL_GROUP = 'data_processing'
 ADD_PANEL = \
     ('openstack_dashboard.contrib.sahara.'
      'content.data_processing.clusters.panel.ClustersPanel')
+
+ADD_INSTALLED_APPS = \
+    ["openstack_dashboard.contrib.sahara.content.data_processing", ]
+
+ADD_JS_FILES = [
+    'dashboard/project/data_processing/data_processing.event_log.js'
+]
diff --git a/openstack_dashboard/test/test_data/sahara_data.py b/openstack_dashboard/test/test_data/sahara_data.py
index 12c0fb0ae..a8c0a05c2 100644
--- a/openstack_dashboard/test/test_data/sahara_data.py
+++ b/openstack_dashboard/test/test_data/sahara_data.py
@@ -250,6 +250,7 @@
             "volumes_size": 0,
             "security_groups": [],
             "volumes_availability_zone": None,
+            "id": "ng1"
         },
         {
             "count": 2,
@@ -292,6 +293,7 @@
             "volumes_size": 0,
             "security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"],
             "volumes_availability_zone": None,
+            "id": "ng2"
         }
     ],
     "plugin_name": "vanilla",
@@ -307,6 +309,53 @@
         clusters.ClusterManager(None), cluster1_dict)
     TEST.clusters.add(cluster1)
 
+    cluster2_dict = copy.deepcopy(cluster1_dict)
+    cluster2_dict.update({
+        "id": "cl2",
+        "name": "cl2_name",
+        "provision_progress": [
+            {
+                "created_at": "2015-03-27T15:51:54",
+                "updated_at": "2015-03-27T15:59:34",
+                "step_name": "first_step",
+                "step_type": "some_type",
+                "successful": True,
+                "events": [],
+                "total": 3
+            },
+            {
+                "created_at": "2015-03-27T16:01:54",
+                "updated_at": "2015-03-27T16:10:22",
+                "step_name": "second_step",
+                "step_type": "some_other_type",
+                "successful": None,
+                "events": [
+                    {
+                        "id": "evt1",
+                        "created_at": "2015-03-27T16:01:22",
+                        "node_group_id": "ng1",
+                        "instance_name": "cercluster-master-001",
+                        "successful": True,
+                        "event_info": None
+                    },
+                    {
+                        "id": "evt2",
+                        "created_at": "2015-03-27T16:04:51",
+                        "node_group_id": "ng2",
+                        "instance_name": "cercluster-workers-001",
+                        "successful": True,
+                        "event_info": None
+                    }
+                ],
+                "total": 3
+            }
+        ]
+    })
+
+    cluster2 = clusters.Cluster(
+        clusters.ClusterManager(None), cluster2_dict)
+    TEST.clusters.add(cluster2)
+
     # Data Sources.
     data_source1_dict = {
         "created_at": "2014-06-04 14:01:10.371562",
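Illustrative note (not part of the patch): a minimal sketch of how the extended api.sahara.cluster_get wrapper is consumed outside ClusterEventsView, assuming a normal Django request object; the helper name is hypothetical, while the attribute and field names come from this patch and its test data.

# Sketch only: mirrors what ClusterEventsView does with provision_progress.
from openstack_dashboard.contrib.sahara import api


def summarize_provision_progress(request, cluster_id):
    cluster = api.sahara.cluster_get(request, cluster_id, show_progress=True)
    # Each step carries step_name, step_type, created_at, updated_at,
    # successful, total and an "events" list, as in the test data above.
    return [(step["step_name"], step["successful"])
            for step in cluster.provision_progress]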