Add new configuration file format
This enables a new "artifact"-based configuration file format, intended to work natively with the deployer and to aid future efforts to visualize additional data sources.

Among other tweaks, dataset indices are no longer used as the primary differentiator between data files; instead, artifact names (such as `testrepository.subunit`) are used to group related artifacts of various types, such as 'subunit', 'subunit-stats', and 'subunit-details'.

Additionally, datasets and artifacts now have access to substantially more metadata about the job that generated the output data. In future patches, this metadata will be used to display and link to additional information about visualized data. The metadata is made available automatically by the deployer, and can optionally be gathered from environment variables when using `stackviz-export` via the new `--env` flag.

Change-Id: I3e16cc314624a1b7b4f6bf43fa4d5cdeedcdba0c
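For orientation only (not part of this change), a minimal sketch of the shape of `data/config.json` under the new artifact-based format, based on the mock configuration used in the DatasetService unit tests later in this diff; names, paths, and values are illustrative placeholders.

```js
// Illustrative only: the structure stackviz-export writes to data/config.json,
// shown as the object the Angular app receives. Artifacts sharing an
// artifact_name (e.g. 'testrepository.subunit') form one group.
var exampleConfig = {
  deployer: false,            // true when data is served by the deployer API
  datasets: [{
    // job metadata, populated by the deployer or via `stackviz-export --env`
    name: null, url: null, status: null, ci_username: null, pipeline: null,
    change_id: null, revision: null, change_project: null, change_subject: null,
    artifacts: [
      { artifact_name: 'testrepository.subunit', artifact_type: 'subunit',
        path: 'testrepository.subunit-0-raw.json',
        content_type: 'application/json', primary: true },
      { artifact_name: 'testrepository.subunit', artifact_type: 'subunit-stats',
        path: 'testrepository.subunit-0-stats.json',
        content_type: 'application/json', primary: false },
      { artifact_name: 'dstat-csv.txt', artifact_type: 'dstat',
        path: 'dstat.csv', content_type: 'text/csv', primary: false }
    ]
  }]
};
```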
This commit is contained in:
parent d92719c169
commit d27c01fb6a
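Also for illustration, a hedged sketch of how a consumer resolves grouped artifacts through the reworked DatasetService introduced below; the method names mirror the service in this change, and error handling is omitted for brevity.

```js
// Sketch of typical consumption: list artifact groups, then load one typed
// artifact from a group. Assumes datasetService has been injected by Angular.
datasetService.groups().then(function(groups) {
  // e.g. ['testrepository.subunit'] -- one entry per artifact_name that has
  // at least one primary artifact
  var name = groups[0];
  return datasetService.artifact(name, 'subunit-stats');
}).then(function(response) {
  // response is the $http response carrying the artifact's JSON data
  console.log('total tests:', response.data.count);
});
```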
@@ -9,10 +9,14 @@ function HomeCtrl($scope, $state, datasetService) {

// ViewModel
var vm = this;
vm.focus = $state.params.datasetId;
vm.focus = $state.params.artifactName;

datasetService.list().then(function(response) {
vm.tempest = response.data.tempest;
datasetService.groups().then(function(groups) {
vm.groups = groups;

if (!vm.focus) {
vm.focus = groups[0];
}
});

// update the page url as the focus id changes, but don't reload
@@ -20,7 +24,7 @@ function HomeCtrl($scope, $state, datasetService) {
return vm.focus;
}, function(value, old) {
if (value !== old) {
$state.go('home', { datasetId: value }, { notify: false });
$state.go('home', { artifactName: value }, { notify: false });
}
});
@@ -2,10 +2,6 @@

var controllersModule = require('./_index');

/**
* @ngInject
*/
var TestDetailsCtrl =
/**
* Responsible for making three calls to the dataset service. First, the
* dataset corresponding to the given int id is loaded, then the raw and details
@ -13,78 +9,84 @@ var TestDetailsCtrl =
|
||||
* of the details JSON is kept in `originalDetails` so that information is not
|
||||
* lost when parsing. Progress of the dataset service calls is recorded and
|
||||
* displayed in a progress bar on `test-details.html`.
|
||||
*/
|
||||
function($scope, $location, $stateParams, $log, datasetService, progressService) {
|
||||
* @ngInject
|
||||
*/
|
||||
function TestDetailsCtrl(
|
||||
$scope, $location, $stateParams, $log, $q,
|
||||
datasetService, progressService) {
|
||||
var vm = this;
|
||||
vm.datasetId = $stateParams.datasetId;
|
||||
var testName = $stateParams.test;
|
||||
vm.testName = testName;
|
||||
vm.artifactName = $stateParams.artifactName;
|
||||
vm.testName = $stateParams.test;
|
||||
|
||||
progressService.start({ parent: 'div[role="main"] .panel-body' });
|
||||
|
||||
// load dataset, raw json, and details json
|
||||
datasetService.get($stateParams.datasetId)
|
||||
.then(function(response) {
|
||||
vm.dataset = response;
|
||||
vm.stats = response.stats;
|
||||
return datasetService.raw(response);
|
||||
})
|
||||
.then(function(raw) {
|
||||
var item = null;
|
||||
for (var t in raw.data) {
|
||||
if (raw.data[t].name === testName) {
|
||||
item = raw.data[t];
|
||||
}
|
||||
}
|
||||
vm.item = item;
|
||||
progressService.inc();
|
||||
return datasetService.details(vm.dataset);
|
||||
})
|
||||
.then(function(deets) {
|
||||
vm.details = deets;
|
||||
vm.originalDetails = angular.copy(deets.data[testName]);
|
||||
vm.itemDetails = deets.data[testName];
|
||||
progressService.done();
|
||||
})
|
||||
.catch(function(error) {
|
||||
$log.error(error);
|
||||
progressService.done();
|
||||
});
|
||||
var statsArtifact = datasetService.artifact(vm.artifactName, 'subunit-stats');
|
||||
var subunitArtifact = datasetService.artifact(vm.artifactName, 'subunit');
|
||||
var detailsArtifact = datasetService.artifact(vm.artifactName, 'subunit-details');
|
||||
|
||||
var statsPromise = statsArtifact.then(function(response) {
|
||||
vm.stats = response.data;
|
||||
});
|
||||
|
||||
var subunitPromise = subunitArtifact.then(function(response) {
|
||||
var item = null;
|
||||
for (var t in response.data) {
|
||||
if (response.data[t].name === vm.testName) {
|
||||
item = response.data[t];
|
||||
}
|
||||
}
|
||||
vm.item = item;
|
||||
progressService.inc();
|
||||
});
|
||||
|
||||
var detailsPromise = detailsArtifact.then(function(details) {
|
||||
vm.details = details;
|
||||
vm.originalDetails = angular.copy(details.data[vm.testName]);
|
||||
vm.itemDetails = details.data[vm.testName];
|
||||
}).catch(function(ex) {
|
||||
// ignore errors, details won't exist for deployer
|
||||
});
|
||||
|
||||
$q.all([statsPromise, subunitPromise, detailsPromise]).catch(function(ex) {
|
||||
$log.error(ex);
|
||||
}).finally(function() {
|
||||
progressService.done();
|
||||
});
|
||||
|
||||
vm.parsePythonLogging =
|
||||
/**
|
||||
* This function changes the `itemDetails.pythonlogging` variable to only
|
||||
* show lines with the log levels specified by the four boolean parameters.
|
||||
* EX: If the `showINFO` parameter is set to true, `itemDetails.pythonlogging`
|
||||
* EX: If the `info` parameter is set to true, `itemDetails.pythonlogging`
|
||||
* will display lines that contain the text `INFO`.
|
||||
* @param {boolean} showINFO
|
||||
* @param {boolean} showDEBUG
|
||||
* @param {boolean} showWARNING
|
||||
* @param {boolean} showERROR
|
||||
* @param {boolean} info
|
||||
* @param {boolean} debug
|
||||
* @param {boolean} warning
|
||||
* @param {boolean} error
|
||||
*/
|
||||
function(showINFO, showDEBUG, showWARNING, showERROR) {
|
||||
vm.parsePythonLogging = function(info, debug, warning, error) {
|
||||
if (vm.originalDetails && vm.originalDetails.pythonlogging) {
|
||||
var log = vm.originalDetails.pythonlogging;
|
||||
var ret = [];
|
||||
var lines = log.split('\n');
|
||||
for (var i in lines) {
|
||||
var line = lines[i];
|
||||
if (showINFO && line.includes("INFO")) {
|
||||
if (info && line.includes("INFO")) {
|
||||
ret.push(line);
|
||||
}
|
||||
if (showDEBUG && line.includes("DEBUG")) {
|
||||
if (debug && line.includes("DEBUG")) {
|
||||
ret.push(line);
|
||||
}
|
||||
if (showWARNING && line.includes("WARNING")) {
|
||||
if (warning && line.includes("WARNING")) {
|
||||
ret.push(line);
|
||||
}
|
||||
if (showERROR && line.includes("ERROR")) {
|
||||
if (error && line.includes("ERROR")) {
|
||||
ret.push(line);
|
||||
}
|
||||
}
|
||||
vm.itemDetails.pythonlogging = ret.join('\n');
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
};
|
||||
controllersModule.controller('TestDetailsController', TestDetailsCtrl);
|
||||
|
@ -9,12 +9,7 @@ function TimelineCtrl($scope, $location, $stateParams, datasetService) {
|
||||
|
||||
// ViewModel
|
||||
var vm = this;
|
||||
|
||||
datasetService.get($stateParams.datasetId).then(function(dataset) {
|
||||
vm.dataset = dataset;
|
||||
}, function(reason) {
|
||||
vm.error = "Unable to load dataset: " + reason;
|
||||
});
|
||||
vm.artifactName = $stateParams.artifactName;
|
||||
|
||||
vm.hoveredItem = null;
|
||||
vm.selectedItem = null;
|
||||
|
@ -7,27 +7,27 @@ var directivesModule = require('./_index.js');
|
||||
*/
|
||||
function tempestSummary() {
|
||||
|
||||
/**
|
||||
* @ngInject
|
||||
*/
|
||||
var controller =
|
||||
/**
|
||||
* Responsible for getting the basic run summary stats via the dataset service.
|
||||
* Also calculates the duration of the run - `timeDiff` - by subtracting the
|
||||
* run's start and end timestamps.
|
||||
* @ngInject
|
||||
*/
|
||||
function($scope, $attrs, datasetService) {
|
||||
$scope.$watch('dataset', function(dataset) {
|
||||
var stats = dataset.stats;
|
||||
$scope.stats = stats;
|
||||
$scope.timeDiff = (new Date(stats.end) - new Date(stats.start)) / 1000;
|
||||
var controller = function($scope, $attrs, datasetService) {
|
||||
$scope.$watch('artifactName', function(artifactName) {
|
||||
datasetService.artifact(artifactName, 'subunit-stats').then(function(response) {
|
||||
var stats = response.data;
|
||||
$scope.stats = stats;
|
||||
$scope.timeDiff = (new Date(stats.end) - new Date(stats.start)) / 1000;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
restrict: 'EA',
|
||||
scope: {
|
||||
'dataset': '='
|
||||
'index': '=',
|
||||
'artifactName': '='
|
||||
},
|
||||
controller: controller,
|
||||
templateUrl: 'directives/tempest-summary.html'
|
||||
|
@ -16,7 +16,7 @@ function timelineDetails() {
|
||||
return {
|
||||
restrict: 'EA',
|
||||
scope: {
|
||||
'dataset': '=',
|
||||
'artifactName': '=',
|
||||
'item': '='
|
||||
},
|
||||
controller: controller,
|
||||
|
@ -7,7 +7,7 @@ var parseDstat = require('../util/dstat-parse');
|
||||
var d3 = require('d3');
|
||||
|
||||
var getDstatLanes = function(data, mins, maxes) {
|
||||
if (!data) {
|
||||
if (!data || !data.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
@ -245,6 +245,10 @@ function timelineDstat($document, $window) {
|
||||
var bottom = y(laneIndex) + laneHeight;
|
||||
|
||||
for (var pathIndex = 0; pathIndex < laneDef.length; pathIndex++) {
|
||||
if (!region.data.length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
var pathDef = laneDef[pathIndex];
|
||||
var line = pathDef.type === 'line';
|
||||
|
||||
|
@ -378,6 +378,9 @@ function timeline($window, $log, datasetService, progressService) {
|
||||
var accessor = function(d) { return d.system_time; };
|
||||
var minIndex = arrayUtil.binaryMinIndex(min, raw.entries, accessor);
|
||||
var maxIndex = arrayUtil.binaryMaxIndex(max, raw.entries, accessor);
|
||||
if (minIndex < 0) {
|
||||
minIndex = 0;
|
||||
}
|
||||
|
||||
self.dstat = {
|
||||
entries: raw.entries.slice(minIndex, maxIndex),
|
||||
@ -388,8 +391,8 @@ function timeline($window, $log, datasetService, progressService) {
|
||||
$scope.$broadcast('dstatLoaded', self.dstat);
|
||||
};
|
||||
|
||||
$scope.$watch('dataset', function(dataset) {
|
||||
if (!dataset) {
|
||||
$scope.$watch('artifactName', function(artifactName) {
|
||||
if (!artifactName) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -398,11 +401,11 @@ function timeline($window, $log, datasetService, progressService) {
|
||||
// load dataset details (raw log entries and dstat) sequentially
|
||||
// we need to determine the initial date from the subunit data to parse
|
||||
// dstat
|
||||
datasetService.raw(dataset).then(function(response) {
|
||||
datasetService.artifact(artifactName, 'subunit').then(function(response) {
|
||||
progressService.set(0.33);
|
||||
initData(response.data);
|
||||
|
||||
return datasetService.dstat(dataset);
|
||||
return datasetService.artifact('dstat');
|
||||
}).then(function(response) {
|
||||
progressService.set(0.66);
|
||||
var firstDate = new Date(self.dataRaw[0].timestamps[0]);
|
||||
@ -462,7 +465,7 @@ function timeline($window, $log, datasetService, progressService) {
|
||||
transclude: true,
|
||||
templateUrl: 'directives/timeline.html',
|
||||
scope: {
|
||||
'dataset': '=',
|
||||
'artifactName': '=',
|
||||
'hoveredItem': '=',
|
||||
'selectedItem': '=',
|
||||
'preselect': '='
|
||||
|
@@ -6,15 +6,15 @@
function OnConfig($stateProvider, $locationProvider, $urlRouterProvider) {

$stateProvider.state('home', {
url: '/{datasetId:int}',
params: { datasetId: 0 },
url: '/{artifactName}',
params: { artifactName: null },
controller: 'HomeController as home',
templateUrl: 'home.html',
title: 'Home'
});

$stateProvider.state('timeline', {
url: '/{datasetId:int}/timeline?test',
url: '/{artifactName}/timeline?test',
controller: 'TimelineController as timeline',
templateUrl: 'timeline.html',
reloadOnSearch: false,
@@ -22,7 +22,7 @@ function OnConfig($stateProvider, $locationProvider, $urlRouterProvider) {
});

$stateProvider.state('testDetails', {
url: '/{datasetId:int}/test-details/{test}',
url: '/{artifactName}/test-details/{test}',
controller: 'TestDetailsController',
controllerAs: 'testDetails',
templateUrl: 'test-details.html',
@ -5,73 +5,258 @@ var servicesModule = require('./_index.js');
|
||||
/**
|
||||
* @ngInject
|
||||
*/
|
||||
function DatasetService($q, $http) {
|
||||
function DatasetService($q, $http, $window) {
|
||||
|
||||
var service = {};
|
||||
|
||||
service.list = function() {
|
||||
var config = null;
|
||||
var datasets = null;
|
||||
var artifacts = new Map();
|
||||
var deployer = false;
|
||||
|
||||
/**
|
||||
* Return a promise to fetch the dataset associated with the current URL path.
|
||||
* This is only valid when in deployer mode.
|
||||
* @return {Promise} an $http promise for the current deployer dataset
|
||||
*/
|
||||
var fetchDeployerDataset = function() {
|
||||
// get uuid from first segment of url, but remove any defined config root
|
||||
var path = $window.location.pathname;
|
||||
if (config.root && path.startsWith(config.root)) {
|
||||
path = path.replace(config.root, '');
|
||||
}
|
||||
|
||||
// remove leading '/' (if any)
|
||||
if (path.startsWith('/')) {
|
||||
path = path.substr(1, path.length - 1);
|
||||
}
|
||||
|
||||
// trim to first segment if necessary
|
||||
if (path.includes('/')) {
|
||||
path = path.substring(0, path.indexOf('/'));
|
||||
}
|
||||
|
||||
return $http({
|
||||
cache: true,
|
||||
url: 'data/config.json',
|
||||
method: 'GET'
|
||||
url: config.apiRoot + '/task',
|
||||
method: 'POST',
|
||||
data: { q: path }
|
||||
});
|
||||
};
|
||||
|
||||
service.get = function(id) {
|
||||
return $q(function(resolve, reject) {
|
||||
service.list().then(function(response) {
|
||||
for (var i in response.data.tempest) {
|
||||
var entry = response.data.tempest[i];
|
||||
if (entry.id === id) {
|
||||
resolve(entry);
|
||||
return;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Adds the given list of artifacts to the global artifact map, based on their
|
||||
* `artifact_name` fields.
|
||||
* @param {object[]} artifacts a list of artifacts
|
||||
*/
|
||||
var initArtifacts = function(list) {
|
||||
list.forEach(function(artifact) {
|
||||
if (artifacts.has(artifact.artifact_name)) {
|
||||
artifacts.get(artifact.artifact_name).push(artifact);
|
||||
} else {
|
||||
artifacts.set(artifact.artifact_name, [artifact]);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
reject("Dataset not found with ID: " + id);
|
||||
service.config = function() {
|
||||
return $q(function(resolve, reject) {
|
||||
if (config) {
|
||||
resolve({ config: config, datasets: datasets, artifacts: artifacts });
|
||||
return;
|
||||
}
|
||||
|
||||
$http({
|
||||
cache: true,
|
||||
url: 'data/config.json',
|
||||
method: 'GET'
|
||||
}).then(function(response) {
|
||||
config = response.data;
|
||||
|
||||
if (config.deployer === true) {
|
||||
deployer = true;
|
||||
|
||||
fetchDeployerDataset().then(function(apiResponse) {
|
||||
datasets = [ apiResponse.data ];
|
||||
initArtifacts(apiResponse.data.artifacts);
|
||||
resolve({
|
||||
config: config,
|
||||
datasets: datasets,
|
||||
artifacts: artifacts
|
||||
});
|
||||
}, function(reason) {
|
||||
reject(reason);
|
||||
});
|
||||
} else {
|
||||
datasets = config.datasets;
|
||||
|
||||
// merge all datasets into a 1-level grouping for now
|
||||
config.datasets.forEach(function(dataset) {
|
||||
initArtifacts(dataset.artifacts);
|
||||
});
|
||||
|
||||
resolve({
|
||||
config: config,
|
||||
datasets: datasets,
|
||||
artifacts: artifacts
|
||||
});
|
||||
}
|
||||
}, function(reason) {
|
||||
reject(reason);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
service.raw = function(dataset) {
|
||||
return $http({
|
||||
cache: true,
|
||||
url: "data/" + dataset.raw,
|
||||
method: 'GET'
|
||||
});
|
||||
};
|
||||
|
||||
service.details = function(dataset) {
|
||||
return $http({
|
||||
cache: true,
|
||||
url: "data/" + dataset.details,
|
||||
method: 'GET'
|
||||
});
|
||||
};
|
||||
|
||||
service.tree = function(dataset) {
|
||||
return $http({
|
||||
cache: true,
|
||||
url: "data/" + dataset.tree,
|
||||
method: 'GET'
|
||||
});
|
||||
};
|
||||
|
||||
service.dstat = function(dataset) {
|
||||
/**
|
||||
* Lists all datasets.
|
||||
* @return {Promise} a Promise for the global list of datasets
|
||||
*/
|
||||
service.list = function() {
|
||||
return $q(function(resolve, reject) {
|
||||
if (!dataset.dstat) {
|
||||
reject({ status: -1, statusText: 'Dstat not available for dataset.' });
|
||||
return;
|
||||
/* eslint-disable angular/di */
|
||||
service.config().then(function(config) {
|
||||
resolve(config.datasets);
|
||||
}, reject);
|
||||
/* eslint-enable angular/di */
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Lists all artifact groups that contain at least one artifact. If `primary`
|
||||
* is true (default), only groups with at least one primary artifact are
|
||||
* returned.
|
||||
* @return {Promise} a Promise for the global list of datasets
|
||||
*/
|
||||
service.groups = function(primary) {
|
||||
if (typeof primary === 'undefined') {
|
||||
primary = true;
|
||||
}
|
||||
|
||||
return $q(function(resolve, reject) {
|
||||
/* eslint-disable angular/di */
|
||||
service.config().then(function(config) {
|
||||
var ret = [];
|
||||
config.artifacts.forEach(function(entries, name) {
|
||||
if (primary) {
|
||||
entries = entries.filter(function(artifact) {
|
||||
return artifact.primary;
|
||||
});
|
||||
}
|
||||
|
||||
if (entries.length > 0) {
|
||||
ret.push(name);
|
||||
}
|
||||
});
|
||||
|
||||
resolve(ret);
|
||||
}, reject);
|
||||
/* eslint-enable angular/di */
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the dataset with the given ID. Note that for deployer instances, there
|
||||
* will only ever be a single dataset (id #0). In most cases, dataset #0
|
||||
* should be treated as the 'primary' dataset (and should almost always be the
|
||||
* only one configured).
|
||||
* @param {number} id the index of the dataset to get
|
||||
* @return {Promise} a Promise to retreive the specified dataset
|
||||
*/
|
||||
service.get = function(id) {
|
||||
return $q(function(resolve, reject) {
|
||||
/* eslint-disable angular/di */
|
||||
service.config().then(function(config) {
|
||||
var dataset = config.datasets[id];
|
||||
if (dataset) {
|
||||
resolve(dataset);
|
||||
} else {
|
||||
reject("Dataset not found with ID: " + id);
|
||||
}
|
||||
}, function(reason) {
|
||||
reject(reason);
|
||||
});
|
||||
/* eslint-enable angular/di */
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetch all artifacts with the given `artifact_name` field. This should be
|
||||
* the primary method for differentiating between artifacts. If no artifact
|
||||
* name is given, this returns a flat list of all artifacts (via a Promise).
|
||||
* @param {string} [name] an `artifact_name` field value
|
||||
* @return {Promise} a promise for a list of matching artifacts
|
||||
*/
|
||||
service.artifacts = function(name) {
|
||||
return $q(function(resolve, reject) {
|
||||
/* eslint-disable angular/di */
|
||||
service.config().then(function(config) {
|
||||
if (typeof name === 'undefined') {
|
||||
var ret = [];
|
||||
config.datasets.forEach(function(dataset) {
|
||||
ret.push.apply(ret, dataset.artifacts);
|
||||
});
|
||||
resolve(ret);
|
||||
} else {
|
||||
var group = config.artifacts.get(name);
|
||||
if (group && group.length > 0) {
|
||||
resolve(group);
|
||||
} else {
|
||||
reject('No artifacts found with name: ' + name);
|
||||
}
|
||||
}
|
||||
}, reject);
|
||||
/* eslint-enable angular/di */
|
||||
});
|
||||
};
|
||||
|
||||
var _loadArtifact = function(artifact, resolve, reject, message) {
|
||||
if (artifact) {
|
||||
var url = null;
|
||||
if (deployer) {
|
||||
url = config.apiRoot + '/blob/' + artifact.id;
|
||||
} else {
|
||||
url = 'data/' + artifact.path;
|
||||
}
|
||||
|
||||
resolve($http({
|
||||
cache: true,
|
||||
url: "data/" + dataset.dstat,
|
||||
url: url,
|
||||
method: 'GET'
|
||||
}));
|
||||
});
|
||||
} else {
|
||||
reject('No artifact found matching ' + message);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetch the artifact with the given `artifact_name` and `artifact_type`
|
||||
* fields. If only one parameter is provided, only `artifact_type` is
|
||||
* considered.
|
||||
* @param {string} [name] an `artifact_name` field value
|
||||
* @param {string} type an `artifact_type` field value (e.g. 'subunit')
|
||||
* @return {Promise} a Promise for the actual data associated with the
|
||||
* artifact
|
||||
*/
|
||||
service.artifact = function(name, type) {
|
||||
if (arguments.length === 1) {
|
||||
type = arguments[0];
|
||||
|
||||
return $q(function(resolve, reject) {
|
||||
service.artifacts().then(function(all) {
|
||||
_loadArtifact(all.find(function(a) {
|
||||
return a.artifact_type === type;
|
||||
}), resolve, reject, 'type=' + type);
|
||||
});
|
||||
});
|
||||
} else {
|
||||
return $q(function(resolve, reject) {
|
||||
service.artifacts(name).then(function(group) {
|
||||
_loadArtifact(group.find(function(a) {
|
||||
return a.artifact_type === type;
|
||||
}), resolve, reject, 'name=' + name + ', type=' + type);
|
||||
}, reject);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return service;
|
||||
|
@ -1,7 +1,7 @@
|
||||
<div class="panel panel-default">
|
||||
<div class="panel-heading">
|
||||
<h3 class="panel-title">
|
||||
{{dataset.name}}
|
||||
{{artifactName}}
|
||||
<span class="text-muted" style="font-size: 65%">
|
||||
{{stats.start | date:'d MMM, yyyy'}}
|
||||
</span>
|
||||
@ -33,7 +33,7 @@
|
||||
<div class="btn-group pull-right">
|
||||
<a type="button"
|
||||
class="btn btn-default"
|
||||
ui-sref="timeline({datasetId: dataset.id})">Details</a>
|
||||
ui-sref="timeline({artifactName: artifactName})">Details</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@ -45,7 +45,7 @@
|
||||
<div class="list-group">
|
||||
<a class="list-group-item"
|
||||
ng-repeat="fail in stats.failures"
|
||||
ui-sref="testDetails({datasetId: dataset.id, test: fail.name})">
|
||||
ui-sref="testDetails({artifactName: artifactName, test: fail.name})">
|
||||
<h4 class="list-group-item-heading">
|
||||
{{fail.name | split:'.' | slice:-2 | join:'.'}}
|
||||
</h4>
|
||||
|
@ -57,7 +57,7 @@
|
||||
<div class="panel-footer clearfix">
|
||||
<div class="btn-group pull-right">
|
||||
<a type="button"
|
||||
ui-sref="testDetails({datasetId: dataset.id, test: item.name})"
|
||||
ui-sref="testDetails({artifactName: artifactName, test: item.name})"
|
||||
class="btn btn-default">Details</a>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -6,18 +6,20 @@
|
||||
</header>
|
||||
|
||||
<div class="container">
|
||||
<div class="row" ng-if="!home.tempest.length">
|
||||
<div class="row" ng-if="!home.groups.length">
|
||||
<div class="col-lg-12">
|
||||
<div class="alert alert-danger">
|
||||
No tempest datasets could be loaded!
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row" ng-if="!!home.tempest.length">
|
||||
<div class="row" ng-if="!!home.groups.length">
|
||||
<div class="col-lg-8">
|
||||
<tempest-summary ng-if="home.tempest.length >= 1" dataset="home.tempest[home.focus]"></tempest-summary>
|
||||
<tempest-summary ng-if="home.groups.length >= 1"
|
||||
artifact-name="home.focus"
|
||||
index="home.focus"></tempest-summary>
|
||||
</div>
|
||||
<div class="col-lg-4" ng-if="home.tempest.length > 1">
|
||||
<div class="col-lg-4" ng-if="home.datasets.length > 1">
|
||||
<div class="panel panel-default">
|
||||
<div class="panel-heading">
|
||||
<h3 class="panel-title">Additional Datasets</h3>
|
||||
@ -25,18 +27,18 @@
|
||||
|
||||
<ul class="list-group">
|
||||
<li class="list-group-item"
|
||||
ng-repeat="dataset in home.tempest"
|
||||
ng-if="$index != home.focus"
|
||||
ng-click="home.focus = $index"
|
||||
ng-repeat="group in home.groups"
|
||||
ng-if="group != home.focus"
|
||||
ng-click="home.focus = group"
|
||||
style="cursor: pointer">
|
||||
{{ dataset.name }}
|
||||
{{ group }}
|
||||
|
||||
<small class="text-muted" style="font-size: 75%">
|
||||
<!--<small class="text-muted" style="font-size: 75%">
|
||||
{{dataset.stats.start | date:'MM/d/yyyy'}}
|
||||
</small>
|
||||
</small>-->
|
||||
|
||||
<a class="btn btn-default btn-xs pull-right"
|
||||
ui-sref="timeline({datasetId: dataset.id})">
|
||||
ui-sref="timeline({artifactName: group})">
|
||||
Details
|
||||
</a>
|
||||
</li>
|
||||
|
@ -48,7 +48,7 @@
|
||||
|
||||
<div class="panel-footer clearfix">
|
||||
<a class="btn btn-default pull-right"
|
||||
ui-sref="timeline({datasetId: testDetails.dataset.id, test: testDetails.item.name})">
|
||||
ui-sref="timeline({artifactName: testDetails.artifactName, test: testDetails.item.name})">
|
||||
Timeline
|
||||
</a>
|
||||
</div>
|
||||
|
@ -1,8 +1,7 @@
|
||||
<header class="bs-header">
|
||||
<div class="container">
|
||||
<h1 class="page-header">
|
||||
Timeline: {{ timeline.dataset.name }}
|
||||
<small>#{{ timeline.dataset.id }}</small>
|
||||
Timeline: {{ timeline.artifactName }}
|
||||
</h1>
|
||||
</div>
|
||||
</header>
|
||||
@ -17,7 +16,7 @@
|
||||
</div>
|
||||
<div class="row" ng-if="!timeline.error">
|
||||
<div class="col-lg-12">
|
||||
<timeline dataset="timeline.dataset"
|
||||
<timeline artifact-name="timeline.artifactName"
|
||||
hovered-item="timeline.hoveredItem"
|
||||
selected-item="timeline.selectedItem"
|
||||
preselect="timeline.preselect"></timeline>
|
||||
@ -26,7 +25,8 @@
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<a name="details"></a>
|
||||
<timeline-details dataset="timeline.dataset" item="timeline.selectedItem"></timeline-details>
|
||||
<timeline-details artifact-name="timeline.artifactName"
|
||||
item="timeline.selectedItem"></timeline-details>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -21,12 +21,31 @@ import os
|
||||
import shutil
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from functools import partial
|
||||
|
||||
from stackviz.parser import tempest_subunit
|
||||
|
||||
_base = os.path.dirname(os.path.abspath(__file__))
|
||||
_tempest_count = 0
|
||||
|
||||
|
||||
def environment_params():
|
||||
r = {}
|
||||
|
||||
if 'ZUUL_PROJECT' in os.environ:
|
||||
r['change_project'] = os.environ['ZUUL_PROJECT']
|
||||
|
||||
if 'ZUUL_CHANGE' in os.environ:
|
||||
r['change_id'] = os.environ['ZUUL_CHANGE']
|
||||
|
||||
if 'ZUUL_PATCHSET' in os.environ:
|
||||
r['revision'] = os.environ['ZUUL_PATCHSET']
|
||||
|
||||
if 'ZUUL_PIPELINE' in os.environ:
|
||||
r['pipeline'] = os.environ['ZUUL_PIPELINE']
|
||||
|
||||
if 'JOB_NAME' in os.environ:
|
||||
r['name'] = os.environ['JOB_NAME']
|
||||
|
||||
return r
|
||||
|
||||
|
||||
def open_compressed(output_dir, file_name, compress):
|
||||
@ -46,40 +65,61 @@ def json_date_handler(object):
|
||||
return None
|
||||
|
||||
|
||||
def export_tempest_tree(stream, output_stream):
|
||||
converted = tempest_subunit.convert_stream(stream, strip_details=True)
|
||||
tree = tempest_subunit.reorganize(converted)
|
||||
json.dump(tree, output_stream, default=json_date_handler)
|
||||
output_stream.close()
|
||||
def build_artifact(path, artifact_name, artifact_type, content_type, primary,
|
||||
compress):
|
||||
ret = {
|
||||
'path': path,
|
||||
'artifact_name': artifact_name,
|
||||
'artifact_type': artifact_type,
|
||||
'content_type': content_type,
|
||||
'primary': primary
|
||||
}
|
||||
|
||||
if compress:
|
||||
ret['content_encoding'] = 'gzip'
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def export_tempest_raw(stream, output_stream):
|
||||
converted = tempest_subunit.convert_stream(stream, strip_details=True)
|
||||
json.dump(converted, output_stream, default=json_date_handler)
|
||||
output_stream.close()
|
||||
def export_tempest_raw(name, subunit, output_dir, prefix, compress):
|
||||
converted = tempest_subunit.convert_stream(subunit, strip_details=True)
|
||||
|
||||
stream, path = open_compressed(output_dir,
|
||||
prefix + '-raw.json',
|
||||
compress)
|
||||
json.dump(converted, stream, default=json_date_handler)
|
||||
stream.close()
|
||||
|
||||
return converted, build_artifact(path, name,
|
||||
'subunit', 'application/json',
|
||||
True, compress)
|
||||
|
||||
|
||||
def export_tempest_details(stream, output_stream):
|
||||
converted = tempest_subunit.convert_stream(stream)
|
||||
|
||||
def export_tempest_details(name, subunit, output_dir, prefix, compress):
|
||||
converted = tempest_subunit.convert_stream(subunit, strip_details=False)
|
||||
output = {}
|
||||
for entry in converted:
|
||||
output[entry['name']] = entry['details']
|
||||
|
||||
json.dump(output, output_stream, default=json_date_handler)
|
||||
output_stream.close()
|
||||
stream, path = open_compressed(output_dir,
|
||||
prefix + '-details.json',
|
||||
compress)
|
||||
json.dump(output, stream, default=json_date_handler)
|
||||
stream.close()
|
||||
|
||||
return build_artifact(path, name,
|
||||
'subunit-details', 'application/json',
|
||||
False, compress)
|
||||
|
||||
|
||||
def get_stats(stream):
|
||||
converted = tempest_subunit.convert_stream(stream, strip_details=False)
|
||||
|
||||
def export_stats(name, subunit_parsed, output_dir, prefix, compress):
|
||||
start = None
|
||||
end = None
|
||||
total_duration = 0
|
||||
failures = []
|
||||
skips = []
|
||||
|
||||
for entry in converted:
|
||||
for entry in subunit_parsed:
|
||||
# find min/max dates
|
||||
entry_start, entry_end = entry['timestamps']
|
||||
if start is None or entry_start < start:
|
||||
@ -112,56 +152,44 @@ def get_stats(stream):
|
||||
'details': entry['details'].get('reason')
|
||||
})
|
||||
|
||||
return {
|
||||
'count': len(converted),
|
||||
stream, path = open_compressed(
|
||||
output_dir, prefix + '-stats.json', compress)
|
||||
|
||||
json.dump({
|
||||
'count': len(subunit_parsed),
|
||||
'start': start,
|
||||
'end': end,
|
||||
'total_duration': total_duration,
|
||||
'failures': failures,
|
||||
'skips': skips
|
||||
}
|
||||
}, stream, default=json_date_handler)
|
||||
stream.close()
|
||||
|
||||
return build_artifact(path, name,
|
||||
'subunit-stats', 'application/json',
|
||||
False, compress)
|
||||
|
||||
|
||||
def export_tempest(provider, output_dir, dstat, compress):
|
||||
global _tempest_count
|
||||
|
||||
def export_tempest(provider, output_dir, compress):
|
||||
ret = []
|
||||
|
||||
for i in range(provider.count):
|
||||
path_base = 'tempest_%s_%d' % (provider.name, i)
|
||||
if provider.count > 1:
|
||||
name = '%s (%d)' % (provider.description, i)
|
||||
else:
|
||||
name = provider.description
|
||||
prefix = '%s-%d' % (provider.name, i)
|
||||
|
||||
open_ = partial(open_compressed,
|
||||
output_dir=output_dir,
|
||||
compress=compress)
|
||||
# convert and save raw (without details)
|
||||
raw, artifact = export_tempest_raw(provider.name,
|
||||
provider.get_stream(i),
|
||||
output_dir, prefix, compress)
|
||||
ret.append(artifact)
|
||||
|
||||
stream_raw, path_raw = open_(file_name=path_base + '_raw.json')
|
||||
export_tempest_raw(provider.get_stream(i), stream_raw)
|
||||
# convert and save details
|
||||
ret.append(export_tempest_details(provider.name,
|
||||
provider.get_stream(i),
|
||||
output_dir, prefix, compress))
|
||||
|
||||
stream_tree, path_tree = open_(file_name=path_base + '_tree.json')
|
||||
export_tempest_tree(provider.get_stream(i), stream_tree)
|
||||
|
||||
stream_details, path_details = open_(
|
||||
file_name=path_base + '_details.json')
|
||||
export_tempest_details(provider.get_stream(i), stream_details)
|
||||
|
||||
stats = get_stats(provider.get_stream(i))
|
||||
|
||||
entry = {
|
||||
'id': _tempest_count,
|
||||
'name': name,
|
||||
'raw': path_raw,
|
||||
'tree': path_tree,
|
||||
'details': path_details,
|
||||
'stats': stats
|
||||
}
|
||||
entry.update({'dstat': dstat} if dstat else {})
|
||||
|
||||
ret.append(entry)
|
||||
_tempest_count += 1
|
||||
# generate and save stats
|
||||
ret.append(export_stats(provider.name, raw, output_dir, prefix,
|
||||
compress))
|
||||
|
||||
return ret
|
||||
|
||||
@ -170,7 +198,7 @@ def export_dstat(path, output_dir, compress):
|
||||
f = open(path, 'rb')
|
||||
out_stream, out_file = open_compressed(
|
||||
output_dir,
|
||||
'dstat_log.csv',
|
||||
'dstat.csv',
|
||||
compress)
|
||||
|
||||
shutil.copyfileobj(f, out_stream)
|
||||
@ -178,7 +206,9 @@ def export_dstat(path, output_dir, compress):
|
||||
f.close()
|
||||
out_stream.close()
|
||||
|
||||
return out_file
|
||||
return build_artifact(out_file, os.path.basename(path),
|
||||
'dstat', 'text/csv',
|
||||
False, compress)
|
||||
|
||||
|
||||
def main():
|
||||
@ -190,12 +220,15 @@ def main():
|
||||
parser.add_argument("-z", "--gzip",
|
||||
help="Enable gzip compression for data files.",
|
||||
action="store_true")
|
||||
parser.add_argument("-e", "--env",
|
||||
help="Include Zuul metadata from environment "
|
||||
"variables.",
|
||||
action="store_true")
|
||||
parser.add_argument("-f", "--stream-file",
|
||||
action="append",
|
||||
help="Include the given direct subunit stream; can be "
|
||||
"used multiple times.")
|
||||
parser.add_argument("-r", "--repository",
|
||||
action="append",
|
||||
help="A directory containing a `.testrepository` to "
|
||||
"include; can be used multiple times.")
|
||||
parser.add_argument("-i", "--stdin",
|
||||
@ -211,28 +244,42 @@ def main():
|
||||
if not os.path.exists(args.path):
|
||||
os.mkdir(args.path)
|
||||
|
||||
dstat = None
|
||||
artifacts = []
|
||||
dataset = {
|
||||
'name': None,
|
||||
'url': None,
|
||||
'status': None,
|
||||
'ci_username': None,
|
||||
'pipeline': None,
|
||||
'change_id': None,
|
||||
'revision': None,
|
||||
'change_project': None,
|
||||
'change_subject': None,
|
||||
'artifacts': artifacts
|
||||
}
|
||||
|
||||
if args.env:
|
||||
dataset.update(environment_params())
|
||||
|
||||
if args.dstat:
|
||||
print("Exporting DStat log")
|
||||
dstat = export_dstat(args.dstat, args.path, args.gzip)
|
||||
artifacts.append(dstat)
|
||||
|
||||
providers = tempest_subunit.get_providers(
|
||||
args.repository,
|
||||
args.stream_file,
|
||||
args.stdin)
|
||||
|
||||
tempest_config_entries = []
|
||||
|
||||
for provider in providers.values():
|
||||
print("Exporting Tempest provider: %s (%d)" % (provider.description,
|
||||
provider.count))
|
||||
tempest_config_entries.extend(
|
||||
export_tempest(provider, args.path, dstat, args.gzip)
|
||||
)
|
||||
artifacts.extend(export_tempest(provider, args.path, args.gzip))
|
||||
|
||||
with open(os.path.join(args.path, 'config.json'), 'w') as f:
|
||||
json.dump({
|
||||
'tempest': tempest_config_entries
|
||||
'deployer': False,
|
||||
'datasets': [dataset]
|
||||
}, f, default=json_date_handler)
|
||||
|
||||
|
||||
|
@ -97,7 +97,7 @@ class RepositoryProvider(SubunitProvider):
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return "repo_%s" % os.path.basename(self.repository_path)
|
||||
return os.path.basename(self.repository_path)
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
@ -120,7 +120,7 @@ class FileProvider(SubunitProvider):
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return "file_%s" % os.path.basename(self.path)
|
||||
return os.path.basename(self.path)
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
|
@ -1,17 +1,39 @@
|
||||
/*global angular */
|
||||
|
||||
'use strict';
|
||||
|
||||
describe('Unit: DatasetService', function() {
|
||||
|
||||
var service, httpBackend;
|
||||
var exampleConfig = {"tempest": [
|
||||
{"raw": "tempest_file_freshlog_0_raw.json",
|
||||
"details": "tempest_file_freshlog_0_details.json",
|
||||
"tree": "tempest_file_freshlog_0_tree.json",
|
||||
"id": 0,
|
||||
"name": "Subunit File: freshlog"}
|
||||
]};
|
||||
|
||||
var mockConfig = {
|
||||
"deployer": false,
|
||||
"datasets": [{
|
||||
"status": null, "ci_username": null, "pipeline": null,
|
||||
"change_project": null, "name": null, "url": null,
|
||||
"change_id": null, "change_subject": null, "revision": null,
|
||||
"artifacts": [
|
||||
{
|
||||
"artifact_type": "dstat", "path": "dstat.csv", "primary": false,
|
||||
"content_type": "text/csv", "artifact_name": "dstat-csv.txt"
|
||||
}, {
|
||||
"artifact_type": "subunit", "primary": true,
|
||||
"path": "testrepository.subunit-0-raw.json",
|
||||
"content_type": "application/json",
|
||||
"artifact_name": "testrepository.subunit"
|
||||
}, {
|
||||
"artifact_type": "subunit-details", "primary": false,
|
||||
"path": "testrepository.subunit-0-details.json",
|
||||
"content_type": "application/json",
|
||||
"artifact_name": "testrepository.subunit"
|
||||
}, {
|
||||
"artifact_type": "subunit-stats", "primary": false,
|
||||
"path": "testrepository.subunit-0-stats.json",
|
||||
"content_type": "application/json",
|
||||
"artifact_name": "testrepository.subunit"
|
||||
}
|
||||
]
|
||||
}]
|
||||
};
|
||||
|
||||
beforeEach(function() {
|
||||
// instantiate the app module
|
||||
@ -21,6 +43,8 @@ describe('Unit: DatasetService', function() {
|
||||
angular.mock.inject(function(datasetService, $httpBackend) {
|
||||
service = datasetService;
|
||||
httpBackend = $httpBackend;
|
||||
|
||||
httpBackend.whenGET("data/config.json").respond(mockConfig);
|
||||
});
|
||||
});
|
||||
|
||||
@ -28,40 +52,45 @@ describe('Unit: DatasetService', function() {
|
||||
expect(service).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return config.json', function() {
|
||||
httpBackend.whenGET("data/config.json").respond(exampleConfig);
|
||||
service.list().then(function(config) {
|
||||
expect(config.data).toEqual(exampleConfig);
|
||||
it('should return the loaded configuration', function() {
|
||||
service.config().then(function(config) {
|
||||
expect(config.config).toEqual(mockConfig);
|
||||
});
|
||||
httpBackend.flush();
|
||||
});
|
||||
|
||||
it('should GET the raw file from a dataset', function() {
|
||||
httpBackend.whenGET(exampleConfig.raw).respond(exampleConfig.raw);
|
||||
service.raw(exampleConfig).then(function(raw) {
|
||||
expect(raw).toEqual(exampleConfig.raw);
|
||||
it('should only have valid primary artifacts', function() {
|
||||
service.groups(true).then(function(groups) {
|
||||
expect(groups.length).toEqual(1);
|
||||
expect(groups).toContain('testrepository.subunit');
|
||||
}, function() {
|
||||
fail('callback should return');
|
||||
});
|
||||
|
||||
httpBackend.flush();
|
||||
});
|
||||
|
||||
it('should GET the details file from a dataset', function() {
|
||||
httpBackend.whenGET(exampleConfig.details).respond(exampleConfig.details);
|
||||
service.details(exampleConfig).then(function(details) {
|
||||
expect(details).toEqual(exampleConfig.details);
|
||||
it('should find all artifacts matching a particular name', function() {
|
||||
service.artifacts('testrepository.subunit').then(function(artifacts) {
|
||||
expect(artifacts.length).toEqual(3);
|
||||
}, function() {
|
||||
fail('callback should return');
|
||||
});
|
||||
|
||||
httpBackend.flush();
|
||||
});
|
||||
|
||||
it('should GET the tree file from a dataset', function() {
|
||||
httpBackend.whenGET(exampleConfig.tree).respond(exampleConfig.tree);
|
||||
service.tree(exampleConfig).then(function(tree) {
|
||||
expect(tree).toEqual(exampleConfig.tree);
|
||||
it('should load an artifact', function() {
|
||||
httpBackend.whenGET('data/testrepository.subunit-0-raw.json').respond({
|
||||
mock: true
|
||||
});
|
||||
});
|
||||
|
||||
it('should GET the dstat file from a dataset', function() {
|
||||
httpBackend.whenGET(exampleConfig.dstat).respond(exampleConfig.dstat);
|
||||
service.dstat(exampleConfig).then(function(dstat) {
|
||||
expect(dstat).toEqual(exampleConfig.dstat);
|
||||
service.artifact('testrepository.subunit', 'subunit').then(function(resp) {
|
||||
expect(resp.data).toEqual({ mock: true });
|
||||
}, function(ex) {
|
||||
fail('promise should return successfully: ' + ex);
|
||||
});
|
||||
});
|
||||
|
||||
httpBackend.flush();
|
||||
});
|
||||
});