[Sahara] Add support for event logs

The Sahara API provides a log of the events that occur while a cluster is
being set up or deleted. A new tab in the 'cluster details' view exposes
these events to the user.

Implements blueprint: sahara-event-log

Change-Id: Ie8b0d895b0b5af4b3cf2fffe7ac490eac633d97a
Author: Nikita Konovalov, 2015-02-24 17:47:55 +03:00 (committed by David Lyle)
Parent: d7016d6b51
Commit: e0cd466ecf
9 changed files with 406 additions and 4 deletions
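
For orientation before the diff: the new tab is fed by a JSON endpoint (added
below) that reports provision steps and whether further polling is needed.
The following is a hypothetical standalone sketch of consuming that endpoint;
the base URL, cluster id, and authentication handling are assumptions, while
the response fields ("provision_steps", "need_update", per-step
"completed"/"total"/"result") follow the ClusterEventsView added in this
change.

# Hypothetical standalone poller for the events endpoint added below.
# BASE_URL, CLUSTER_ID and authentication are placeholders/assumptions; the
# response fields mirror the JSON built by ClusterEventsView.
import time

import requests

BASE_URL = "http://horizon.example.com/project/data_processing/clusters"  # assumed
CLUSTER_ID = "cl2"  # placeholder cluster id


def poll_events(session):
    """Print step progress until the cluster reaches a stable state."""
    while True:
        resp = session.get("%s/%s/events" % (BASE_URL, CLUSTER_ID))
        resp.raise_for_status()
        data = resp.json()
        for step in data["provision_steps"]:
            print("%s: %s/%s (%s)" % (step["step_name"], step["completed"],
                                      step["total"], step["result"]))
        if not data["need_update"]:
            break
        time.sleep(3)  # the JS below uses a randomized ~2-3 s delay


# poll_events(requests.Session())  # a real run needs Horizon session cookies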


@@ -275,8 +275,10 @@ def cluster_list(request, search_opts=None):
    return client(request).clusters.list(search_opts=search_opts)


def cluster_get(request, cluster_id):
    return client(request).clusters.get(cluster_id=cluster_id)
def cluster_get(request, cluster_id, show_progress=False):
    return client(request).clusters.get(
        cluster_id=cluster_id,
        show_progress=show_progress)


def cluster_delete(request, cluster_id):


@@ -176,7 +176,20 @@ class InstancesTab(tabs.TableTab):
        return instances


class EventLogTab(tabs.Tab):
    name = _("Cluster Events")
    slug = "cluster_event_log"
    template_name = "project/data_processing.clusters/_event_log.html"

    def get_context_data(self, request, **kwargs):
        cluster_id = self.tab_group.kwargs['cluster_id']
        kwargs["cluster_id"] = cluster_id
        kwargs['data_update_url'] = request.get_full_path()
        return kwargs


class ClusterDetailsTabs(tabs.TabGroup):
    slug = "cluster_details"
    tabs = (GeneralTab, NodeGroupsTab, InstancesTab, )
    tabs = (GeneralTab, NodeGroupsTab, InstancesTab, EventLogTab)
    sticky = True


@@ -0,0 +1,62 @@
{% load i18n %}

<h4>{% trans "Cluster provision steps" %}</h4>

<table id="steps_table" class="table table-bordered datatable">
  <thead>
    <tr>
      <th>{% trans "Step Description" %}</th>
      <th>{% trans "Started at" %}</th>
      <th>{% trans "Duration" %}</th>
      <th>{% trans "Progress" %}</th>
      <th>{% trans "Status" %}</th>
    </tr>
  </thead>
  <tbody id="steps_body">
  </tbody>
</table>

<div id="events_modal" class="modal fade">
  <div class="modal-dialog" style="width: 85%">
    <div class="modal-content">
      <div class="modal-header">
        <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
        <h4 id="events_modal_header"></h4>
        <span id="modal_status_marker"></span>
      </div>
      <div class="modal-body">
        <table id="events_table" class="table table-bordered datatable">
          <thead>
            <tr>
              <th>{% trans "Node Group" %}</th>
              <th>{% trans "Instance" %}</th>
              <th>{% trans "Event time" %}</th>
              <th>{% trans "Info" %}</th>
              <th>{% trans "Status" %}</th>
            </tr>
          </thead>
          <tbody id="events_body">
          </tbody>
        </table>
      </div>
    </div>
  </div>
</div>

<script type="text/javascript">
  $(function () {
    // Initialize everything.
    horizon.event_log.cluster_id = "{{ cluster_id }}";
    horizon.event_log.data_update_url = "{{ data_update_url }}";
    horizon.event_log.fetch_update_events();
  });

  $(".show_events_btn").live("click", function () {
    // Bind "show events" buttons to modals.
    horizon.event_log.modal_step_id = $(this).data("step-id");
    horizon.event_log.clear_events();
    horizon.event_log.clear_modal_status();
    horizon.event_log.update_events_rows(horizon.event_log.cached_data);
  });
</script>


@@ -10,6 +10,8 @@
# License for the specific language governing permissions and limitations
# under the License.

import json

from django.core.urlresolvers import reverse
from django import http
@@ -49,6 +51,31 @@ class DataProcessingClusterTests(test.TestCase):
        self.assertContains(res, "No Images Available")
        self.assertContains(res, "No Templates Available")

    @test.create_stubs({api.sahara: ('cluster_get',)})
    def test_event_log_tab(self):
        cluster = self.clusters.list()[-1]
        api.sahara.cluster_get(IsA(http.HttpRequest),
                               "cl2", show_progress=True).AndReturn(cluster)
        self.mox.ReplayAll()

        url = reverse(
            'horizon:project:data_processing.clusters:events', args=["cl2"])
        res = self.client.get(url)
        data = json.loads(res.content)

        self.assertIn("provision_steps", data)
        self.assertEqual(data["need_update"], False)

        step_0 = data["provision_steps"][0]
        self.assertEqual(2, step_0["completed"])
        self.assertEqual(2, len(step_0["events"]))
        for evt in step_0["events"]:
            self.assertEqual(True, evt["successful"])

        step_1 = data["provision_steps"][1]
        self.assertEqual(3, step_1["completed"])
        self.assertEqual(0, len(step_1["events"]))

    @test.create_stubs({api.sahara: ('cluster_list',
                                     'cluster_delete')})
    def test_delete(self):


@@ -33,6 +33,9 @@ urlpatterns = patterns('',
    url(r'^(?P<cluster_id>[^/]+)$',
        views.ClusterDetailsView.as_view(),
        name='details'),
    url(r'^(?P<cluster_id>[^/]+)/events$',
        views.ClusterEventsView.as_view(),
        name='events'),
    url(r'^(?P<cluster_id>[^/]+)/scale$',
        views.ScaleClusterView.as_view(),
        name='scale'))


@@ -11,9 +11,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
import json
import logging

from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import base as django_base
import six

from horizon import exceptions
from horizon import tables
@@ -32,6 +37,7 @@ import openstack_dashboard.contrib.sahara.content.data_processing.clusters. \
    workflows.create as create_flow
import openstack_dashboard.contrib.sahara.content.data_processing.clusters. \
    workflows.scale as scale_flow

from saharaclient.api.base import APIException

LOG = logging.getLogger(__name__)

@@ -77,6 +83,94 @@ class ClusterDetailsView(tabs.TabView):
        return context


class ClusterEventsView(django_base.View):

    _date_format = "%Y-%m-%dT%H:%M:%S"

    @staticmethod
    def _created_at_key(obj):
        return datetime.strptime(obj["created_at"],
                                 ClusterEventsView._date_format)

    def get(self, request, *args, **kwargs):
        cluster_id = kwargs.get("cluster_id")

        try:
            cluster = saharaclient.cluster_get(request, cluster_id,
                                               show_progress=True)
            node_group_mapping = {}
            for node_group in cluster.node_groups:
                node_group_mapping[node_group["id"]] = node_group["name"]

            provision_steps = cluster.provision_progress

            # Sort by create time
            provision_steps = sorted(provision_steps,
                                     key=ClusterEventsView._created_at_key,
                                     reverse=True)

            for step in provision_steps:
                # Sort events of the steps also
                step["events"] = sorted(step["events"],
                                        key=ClusterEventsView._created_at_key,
                                        reverse=True)

                successful_events_count = 0

                for event in step["events"]:
                    if event["node_group_id"]:
                        event["node_group_name"] = node_group_mapping[
                            event["node_group_id"]]

                    event_result = _("Unknown")
                    if event["successful"] is True:
                        successful_events_count += 1
                        event_result = _("Completed Successfully")
                    elif event["successful"] is False:
                        event_result = _("Failed")

                    event["result"] = event_result

                    if not event["event_info"]:
                        event["event_info"] = _("No info available")

                start_time = datetime.strptime(step["created_at"],
                                               self._date_format)
                end_time = datetime.now()
                # Clear out microseconds. There is no need for that precision.
                end_time = end_time.replace(microsecond=0)
                if step["successful"] is not None:
                    updated_at = step["updated_at"]
                    end_time = datetime.strptime(updated_at,
                                                 self._date_format)
                step["duration"] = six.text_type(end_time - start_time)

                result = _("In progress")
                step["completed"] = successful_events_count

                if step["successful"] is True:
                    step["completed"] = step["total"]
                    result = _("Completed Successfully")
                elif step["successful"] is False:
                    result = _("Failed")

                step["result"] = result

            status = cluster.status.lower()
            need_update = status not in ("active", "error")
        except APIException:
            # Cluster is not available. Returning empty event log.
            need_update = False
            provision_steps = []

        context = {"provision_steps": provision_steps,
                   "need_update": need_update}

        return HttpResponse(json.dumps(context),
                            content_type='application/json')


class CreateClusterView(workflows.WorkflowView):
    workflow_class = create_flow.CreateCluster
    success_url = \


@@ -0,0 +1,145 @@
horizon.event_log = {
  cluster_id: null,
  data_update_url: null,
  cached_data: null,
  modal_step_id: null,

  fetch_update_events: function() {
    var url = this.data_update_url + "/events";
    $.get(url).done(function (data) {
      horizon.event_log.cached_data = data;
      horizon.event_log.update_view(data);
      horizon.event_log.schedule_next_update(data);
    }).fail(function() {
      // Event log is not available for some reason.
      horizon.alert("error", gettext("Event log is not available."));
    });
  },

  update_view: function (data) {
    this.update_step_rows(data.provision_steps);
    this.update_events_rows(data);
  },

  update_step_rows: function (steps) {
    // Clear steps
    $("#steps_body").find("tr").remove();

    $(steps).each(function (i, step) {
      horizon.event_log.create_step_row(step);
    });
  },
  create_step_row: function (step) {
    var step_row_template = "" +
      "<tr id='%step_id%'>" +
      "<td>%step_descr%</td>" +
      "<td>%started_at%</td>" +
      "<td>%duration%</td>" +
      "<td>%progress%</td>" +
      "<td>%result%&nbsp;" +
      "<a data-target='#events_modal' data-toggle='modal' data-step-id='%step_id%' class='show_events_btn' id='%step_id%_show_events_btn'>" +
      gettext('Show events') + "</a>" +
      "</td>" +
      "</tr>";

    var started_at = new Date(step.created_at).toString();
    var progress = "" + step.completed + " / " + step.total;
    var description = step.step_type + "<br />" + step.step_name;

    var row = step_row_template
      .replace(/%step_id%/g, step.id)
      .replace(/%step_descr%/g, description)
      .replace(/%started_at%/g, started_at)
      .replace(/%duration%/g, step.duration)
      .replace(/%progress%/g, progress)
      .replace(/%result%/g, step.result);

    $("#steps_body").append(row);

    if (step.successful === true) {
      $("#" + step.id + "_show_events_btn").hide();
    }
  },
  update_events_rows: function(data) {
    if (!this.modal_step_id) {
      return;
    }

    var current_step = null;
    $(data.provision_steps).each(function (i, step) {
      if (step.id === horizon.event_log.modal_step_id) {
        current_step = step;
      }
    });

    var header = current_step.step_type + "<br />" + current_step.step_name;
    $("#events_modal_header").html(header);

    // Clear events
    this.clear_events();
    this.clear_modal_status();

    if (current_step.successful === true) {
      this.mark_modal_as_successful();
      return;
    }

    var events = current_step.events;
    $(events).each(function (i, event) {
      event.step_name = current_step.step_name;
    });

    $(events).each(function (i, event) {
      horizon.event_log.create_event_row(event);
    });
  },

  clear_events: function() {
    $("#events_body").find("tr").remove();
  },

  clear_modal_status: function() {
    $("#modal_status_marker").text("");
  },

  mark_modal_as_successful: function() {
    $("#modal_status_marker").text(gettext(
      "The step has completed successfully. No events to display."));
  },

  create_event_row: function(event) {
    var step_row_template = "" +
      "<tr id='%event_id%'>" +
      "<td>%node_group_name%</td>" +
      "<td>%instance%</td>" +
      "<td>%time%</td>" +
      "<td>%info%</td>" +
      "<td>%result%</td>" +
      "</tr>";

    var event_time = new Date(event.created_at).toString();

    var row = step_row_template
      .replace(/%event_id%/g, event.id)
      .replace(/%node_group_name%/g, event.node_group_name)
      .replace(/%instance%/g, event.instance_name)
      .replace(/%time%/g, event_time)
      .replace(/%info%/g, event.event_info)
      .replace(/%result%/g, event.result);

    $("#events_body").append(row);
  },
  schedule_next_update: function(data) {
    // 2-3 sec delay so that if there are multiple tabs polling the backend,
    // the requests are spread out in time.
    var delay = 2000 + Math.floor((Math.random() * 1000) + 1);
    if (data.need_update) {
      setTimeout(function () {
        horizon.event_log.fetch_update_events();
      }, delay);
    }
  }
};


@@ -22,3 +22,10 @@ PANEL_GROUP = 'data_processing'
ADD_PANEL = \
    ('openstack_dashboard.contrib.sahara.'
     'content.data_processing.clusters.panel.ClustersPanel')

ADD_INSTALLED_APPS = \
    ["openstack_dashboard.contrib.sahara.content.data_processing", ]

ADD_JS_FILES = [
    'dashboard/project/data_processing/data_processing.event_log.js'
]


@@ -250,6 +250,7 @@ def data(TEST):
                "volumes_size": 0,
                "security_groups": [],
                "volumes_availability_zone": None,
                "id": "ng1"
            },
            {
                "count": 2,
@@ -292,6 +293,7 @@ def data(TEST):
                "volumes_size": 0,
                "security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"],
                "volumes_availability_zone": None,
                "id": "ng2"
            }
        ],
        "plugin_name": "vanilla",
@@ -307,6 +309,53 @@ def data(TEST):
        clusters.ClusterManager(None), cluster1_dict)
    TEST.clusters.add(cluster1)

    cluster2_dict = copy.deepcopy(cluster1_dict)
    cluster2_dict.update({
        "id": "cl2",
        "name": "cl2_name",
        "provision_progress": [
            {
                "created_at": "2015-03-27T15:51:54",
                "updated_at": "2015-03-27T15:59:34",
                "step_name": "first_step",
                "step_type": "some_type",
                "successful": True,
                "events": [],
                "total": 3
            },
            {
                "created_at": "2015-03-27T16:01:54",
                "updated_at": "2015-03-27T16:10:22",
                "step_name": "second_step",
                "step_type": "some_other_type",
                "successful": None,
                "events": [
                    {
                        "id": "evt1",
                        "created_at": "2015-03-27T16:01:22",
                        "node_group_id": "ng1",
                        "instance_name": "cercluster-master-001",
                        "successful": True,
                        "event_info": None
                    },
                    {
                        "id": "evt2",
                        "created_at": "2015-03-27T16:04:51",
                        "node_group_id": "ng2",
                        "instance_name": "cercluster-workers-001",
                        "successful": True,
                        "event_info": None
                    }
                ],
                "total": 3
            }
        ]
    })
    cluster2 = clusters.Cluster(
        clusters.ClusterManager(None), cluster2_dict)
    TEST.clusters.add(cluster2)

    # Data Sources.
    data_source1_dict = {
        "created_at": "2014-06-04 14:01:10.371562",