Add new configuration file format
This enables a new "artifact"-based configuration file format, intended to work natively with the deployer and to aid future efforts to visualize additional data sources. Among other tweaks, dataset indices are no longer used as the primary differentiator between data files; instead, artifact names (such as `testrepository.subunit`) are used to group related artifacts of various types, such as 'subunit', 'subunit-stats', and 'subunit-details'.

Additionally, datasets and artifacts now have access to substantially more metadata about the job that generated the output data. In future patches, this metadata will be used to display and link to additional information about visualized data. This metadata is made available automatically by the deployer, and can optionally be gathered from environment variables when using `stackviz-export` via a new `--env` flag.

Change-Id: I3e16cc314624a1b7b4f6bf43fa4d5cdeedcdba0c
commit d27c01fb6a
parent d92719c169
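For illustration, a minimal `config.json` in the new artifact-based format (modeled on the mock configuration used by the DatasetService unit tests below; the null fields and file names are placeholders, not required values) looks roughly like this:

    {
      "deployer": false,
      "datasets": [{
        "name": null, "url": null, "status": null, "ci_username": null,
        "pipeline": null, "change_id": null, "revision": null,
        "change_project": null, "change_subject": null,
        "artifacts": [
          {"artifact_name": "testrepository.subunit", "artifact_type": "subunit",
           "path": "testrepository.subunit-0-raw.json",
           "content_type": "application/json", "primary": true},
          {"artifact_name": "testrepository.subunit", "artifact_type": "subunit-stats",
           "path": "testrepository.subunit-0-stats.json",
           "content_type": "application/json", "primary": false}
        ]
      }]
    }

When `--env` is passed, `stackviz-export` fills the dataset's `change_project`, `change_id`, `revision`, `pipeline`, and `name` fields from the `ZUUL_PROJECT`, `ZUUL_CHANGE`, `ZUUL_PATCHSET`, `ZUUL_PIPELINE`, and `JOB_NAME` environment variables; the deployer supplies the same metadata automatically.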
@@ -9,10 +9,14 @@ function HomeCtrl($scope, $state, datasetService) {
   // ViewModel
   var vm = this;
-  vm.focus = $state.params.datasetId;
+  vm.focus = $state.params.artifactName;

-  datasetService.list().then(function(response) {
-    vm.tempest = response.data.tempest;
+  datasetService.groups().then(function(groups) {
+    vm.groups = groups;

+    if (!vm.focus) {
+      vm.focus = groups[0];
+    }
   });

   // update the page url as the focus id changes, but don't reload
@@ -20,7 +24,7 @@ function HomeCtrl($scope, $state, datasetService) {
     return vm.focus;
   }, function(value, old) {
     if (value !== old) {
-      $state.go('home', { datasetId: value }, { notify: false });
+      $state.go('home', { artifactName: value }, { notify: false });
     }
   });

@@ -2,10 +2,6 @@

 var controllersModule = require('./_index');

-/**
- * @ngInject
- */
-var TestDetailsCtrl =
 /**
  * Responsible for making three calls to the dataset service. First, the
  * dataset corresponding to the given int id is loaded, then the raw and details
@@ -13,78 +9,84 @@ var TestDetailsCtrl =
  * of the details JSON is kept in `originalDetails` so that information is not
  * lost when parsing. Progress of the dataset service calls is recorded and
  * displayed in a progress bar on `test-details.html`.
+ * @ngInject
  */
-function($scope, $location, $stateParams, $log, datasetService, progressService) {
+function TestDetailsCtrl(
+    $scope, $location, $stateParams, $log, $q,
+    datasetService, progressService) {
   var vm = this;
-  vm.datasetId = $stateParams.datasetId;
-  var testName = $stateParams.test;
-  vm.testName = testName;
+  vm.artifactName = $stateParams.artifactName;
+  vm.testName = $stateParams.test;

   progressService.start({ parent: 'div[role="main"] .panel-body' });

   // load dataset, raw json, and details json
-  datasetService.get($stateParams.datasetId)
-    .then(function(response) {
-      vm.dataset = response;
-      vm.stats = response.stats;
-      return datasetService.raw(response);
-    })
-    .then(function(raw) {
-      var item = null;
-      for (var t in raw.data) {
-        if (raw.data[t].name === testName) {
-          item = raw.data[t];
-        }
-      }
-      vm.item = item;
-      progressService.inc();
-      return datasetService.details(vm.dataset);
-    })
-    .then(function(deets) {
-      vm.details = deets;
-      vm.originalDetails = angular.copy(deets.data[testName]);
-      vm.itemDetails = deets.data[testName];
-      progressService.done();
-    })
-    .catch(function(error) {
-      $log.error(error);
-      progressService.done();
-    });
+  var statsArtifact = datasetService.artifact(vm.artifactName, 'subunit-stats');
+  var subunitArtifact = datasetService.artifact(vm.artifactName, 'subunit');
+  var detailsArtifact = datasetService.artifact(vm.artifactName, 'subunit-details');
+
+  var statsPromise = statsArtifact.then(function(response) {
+    vm.stats = response.data;
+  });
+
+  var subunitPromise = subunitArtifact.then(function(response) {
+    var item = null;
+    for (var t in response.data) {
+      if (response.data[t].name === vm.testName) {
+        item = response.data[t];
+      }
+    }
+    vm.item = item;
+    progressService.inc();
+  });
+
+  var detailsPromise = detailsArtifact.then(function(details) {
+    vm.details = details;
+    vm.originalDetails = angular.copy(details.data[vm.testName]);
+    vm.itemDetails = details.data[vm.testName];
+  }).catch(function(ex) {
+    // ignore errors, details won't exist for deployer
+  });
+
+  $q.all([statsPromise, subunitPromise, detailsPromise]).catch(function(ex) {
+    $log.error(ex);
+  }).finally(function() {
+    progressService.done();
+  });

-  vm.parsePythonLogging =
   /**
    * This function changes the `itemDetails.pythonlogging` variable to only
    * show lines with the log levels specified by the four boolean parameters.
-   * EX: If the `showINFO` parameter is set to true, `itemDetails.pythonlogging`
+   * EX: If the `info` parameter is set to true, `itemDetails.pythonlogging`
    * will display lines that contain the text `INFO`.
-   * @param {boolean} showINFO
-   * @param {boolean} showDEBUG
-   * @param {boolean} showWARNING
-   * @param {boolean} showERROR
+   * @param {boolean} info
+   * @param {boolean} debug
+   * @param {boolean} warning
+   * @param {boolean} error
    */
-  function(showINFO, showDEBUG, showWARNING, showERROR) {
+  vm.parsePythonLogging = function(info, debug, warning, error) {
     if (vm.originalDetails && vm.originalDetails.pythonlogging) {
       var log = vm.originalDetails.pythonlogging;
       var ret = [];
       var lines = log.split('\n');
       for (var i in lines) {
         var line = lines[i];
-        if (showINFO && line.includes("INFO")) {
+        if (info && line.includes("INFO")) {
           ret.push(line);
         }
-        if (showDEBUG && line.includes("DEBUG")) {
+        if (debug && line.includes("DEBUG")) {
           ret.push(line);
         }
-        if (showWARNING && line.includes("WARNING")) {
+        if (warning && line.includes("WARNING")) {
           ret.push(line);
         }
-        if (showERROR && line.includes("ERROR")) {
+        if (error && line.includes("ERROR")) {
           ret.push(line);
         }
       }
       vm.itemDetails.pythonlogging = ret.join('\n');
     }
   };
+}

-};
 controllersModule.controller('TestDetailsController', TestDetailsCtrl);
@@ -9,12 +9,7 @@ function TimelineCtrl($scope, $location, $stateParams, datasetService) {

   // ViewModel
   var vm = this;
+  vm.artifactName = $stateParams.artifactName;

-  datasetService.get($stateParams.datasetId).then(function(dataset) {
-    vm.dataset = dataset;
-  }, function(reason) {
-    vm.error = "Unable to load dataset: " + reason;
-  });
-
   vm.hoveredItem = null;
   vm.selectedItem = null;
@@ -7,27 +7,27 @@ var directivesModule = require('./_index.js');
  */
 function tempestSummary() {

-  /**
-   * @ngInject
-   */
-  var controller =
   /**
    * Responsible for getting the basic run summary stats via the dataset service.
    * Also calculates the duration of the run - `timeDiff` - by subtracting the
    * run's start and end timestamps.
+   * @ngInject
    */
-  function($scope, $attrs, datasetService) {
-    $scope.$watch('dataset', function(dataset) {
-      var stats = dataset.stats;
-      $scope.stats = stats;
-      $scope.timeDiff = (new Date(stats.end) - new Date(stats.start)) / 1000;
+  var controller = function($scope, $attrs, datasetService) {
+    $scope.$watch('artifactName', function(artifactName) {
+      datasetService.artifact(artifactName, 'subunit-stats').then(function(response) {
+        var stats = response.data;
+        $scope.stats = stats;
+        $scope.timeDiff = (new Date(stats.end) - new Date(stats.start)) / 1000;
+      });
     });
   };

   return {
     restrict: 'EA',
     scope: {
-      'dataset': '='
+      'index': '=',
+      'artifactName': '='
     },
     controller: controller,
     templateUrl: 'directives/tempest-summary.html'
@@ -16,7 +16,7 @@ function timelineDetails() {
   return {
     restrict: 'EA',
     scope: {
-      'dataset': '=',
+      'artifactName': '=',
       'item': '='
     },
     controller: controller,
@@ -7,7 +7,7 @@ var parseDstat = require('../util/dstat-parse');
 var d3 = require('d3');

 var getDstatLanes = function(data, mins, maxes) {
-  if (!data) {
+  if (!data || !data.length) {
     return [];
   }

@@ -245,6 +245,10 @@ function timelineDstat($document, $window) {
       var bottom = y(laneIndex) + laneHeight;

       for (var pathIndex = 0; pathIndex < laneDef.length; pathIndex++) {
+        if (!region.data.length) {
+          continue;
+        }
+
         var pathDef = laneDef[pathIndex];
         var line = pathDef.type === 'line';

@@ -378,6 +378,9 @@ function timeline($window, $log, datasetService, progressService) {
     var accessor = function(d) { return d.system_time; };
     var minIndex = arrayUtil.binaryMinIndex(min, raw.entries, accessor);
     var maxIndex = arrayUtil.binaryMaxIndex(max, raw.entries, accessor);
+    if (minIndex < 0) {
+      minIndex = 0;
+    }

     self.dstat = {
       entries: raw.entries.slice(minIndex, maxIndex),
@@ -388,8 +391,8 @@ function timeline($window, $log, datasetService, progressService) {
     $scope.$broadcast('dstatLoaded', self.dstat);
   };

-  $scope.$watch('dataset', function(dataset) {
-    if (!dataset) {
+  $scope.$watch('artifactName', function(artifactName) {
+    if (!artifactName) {
       return;
     }

@@ -398,11 +401,11 @@ function timeline($window, $log, datasetService, progressService) {
     // load dataset details (raw log entries and dstat) sequentially
     // we need to determine the initial date from the subunit data to parse
     // dstat
-    datasetService.raw(dataset).then(function(response) {
+    datasetService.artifact(artifactName, 'subunit').then(function(response) {
       progressService.set(0.33);
       initData(response.data);

-      return datasetService.dstat(dataset);
+      return datasetService.artifact('dstat');
     }).then(function(response) {
       progressService.set(0.66);
       var firstDate = new Date(self.dataRaw[0].timestamps[0]);
@@ -462,7 +465,7 @@ function timeline($window, $log, datasetService, progressService) {
     transclude: true,
     templateUrl: 'directives/timeline.html',
     scope: {
-      'dataset': '=',
+      'artifactName': '=',
       'hoveredItem': '=',
       'selectedItem': '=',
       'preselect': '='
@@ -6,15 +6,15 @@
 function OnConfig($stateProvider, $locationProvider, $urlRouterProvider) {

   $stateProvider.state('home', {
-    url: '/{datasetId:int}',
-    params: { datasetId: 0 },
+    url: '/{artifactName}',
+    params: { artifactName: null },
     controller: 'HomeController as home',
     templateUrl: 'home.html',
     title: 'Home'
   });

   $stateProvider.state('timeline', {
-    url: '/{datasetId:int}/timeline?test',
+    url: '/{artifactName}/timeline?test',
     controller: 'TimelineController as timeline',
     templateUrl: 'timeline.html',
     reloadOnSearch: false,
@@ -22,7 +22,7 @@ function OnConfig($stateProvider, $locationProvider, $urlRouterProvider) {
   });

   $stateProvider.state('testDetails', {
-    url: '/{datasetId:int}/test-details/{test}',
+    url: '/{artifactName}/test-details/{test}',
     controller: 'TestDetailsController',
     controllerAs: 'testDetails',
     templateUrl: 'test-details.html',
@@ -5,73 +5,258 @@ var servicesModule = require('./_index.js');
 /**
  * @ngInject
  */
-function DatasetService($q, $http) {
+function DatasetService($q, $http, $window) {

   var service = {};

-  service.list = function() {
+  var config = null;
+  var datasets = null;
+  var artifacts = new Map();
+  var deployer = false;
+
+  /**
+   * Return a promise to fetch the dataset associated with the current URL path.
+   * This is only valid when in deployer mode.
+   * @return {Promise} an $http promise for the current deployer dataset
+   */
+  var fetchDeployerDataset = function() {
+    // get uuid from first segment of url, but remove any defined config root
+    var path = $window.location.pathname;
+    if (config.root && path.startsWith(config.root)) {
+      path = path.replace(config.root, '');
+    }
+
+    // remove leading '/' (if any)
+    if (path.startsWith('/')) {
+      path = path.substr(1, path.length - 1);
+    }
+
+    // trim to first segment if necessary
+    if (path.includes('/')) {
+      path = path.substring(0, path.indexOf('/'));
+    }
+
     return $http({
       cache: true,
-      url: 'data/config.json',
-      method: 'GET'
+      url: config.apiRoot + '/task',
+      method: 'POST',
+      data: { q: path }
     });
   };

-  service.get = function(id) {
-    return $q(function(resolve, reject) {
-      service.list().then(function(response) {
-        for (var i in response.data.tempest) {
-          var entry = response.data.tempest[i];
-          if (entry.id === id) {
-            resolve(entry);
-            return;
-          }
-        }
-
-        reject("Dataset not found with ID: " + id);
+  /**
+   * Adds the given list of artifacts to the global artifact map, based on their
+   * `artifact_name` fields.
+   * @param {object[]} artifacts a list of artifacts
+   */
+  var initArtifacts = function(list) {
+    list.forEach(function(artifact) {
+      if (artifacts.has(artifact.artifact_name)) {
+        artifacts.get(artifact.artifact_name).push(artifact);
+      } else {
+        artifacts.set(artifact.artifact_name, [artifact]);
+      }
+    });
+  };
+
+  service.config = function() {
+    return $q(function(resolve, reject) {
+      if (config) {
+        resolve({ config: config, datasets: datasets, artifacts: artifacts });
+        return;
+      }
+
+      $http({
+        cache: true,
+        url: 'data/config.json',
+        method: 'GET'
+      }).then(function(response) {
+        config = response.data;
+
+        if (config.deployer === true) {
+          deployer = true;
+
+          fetchDeployerDataset().then(function(apiResponse) {
+            datasets = [ apiResponse.data ];
+            initArtifacts(apiResponse.data.artifacts);
+            resolve({
+              config: config,
+              datasets: datasets,
+              artifacts: artifacts
+            });
+          }, function(reason) {
+            reject(reason);
+          });
+        } else {
+          datasets = config.datasets;
+
+          // merge all datasets into a 1-level grouping for now
+          config.datasets.forEach(function(dataset) {
+            initArtifacts(dataset.artifacts);
+          });
+
+          resolve({
+            config: config,
+            datasets: datasets,
+            artifacts: artifacts
+          });
+        }
       }, function(reason) {
         reject(reason);
       });
     });
   };

-  service.raw = function(dataset) {
-    return $http({
-      cache: true,
-      url: "data/" + dataset.raw,
-      method: 'GET'
-    });
-  };
-
-  service.details = function(dataset) {
-    return $http({
-      cache: true,
-      url: "data/" + dataset.details,
-      method: 'GET'
-    });
-  };
-
-  service.tree = function(dataset) {
-    return $http({
-      cache: true,
-      url: "data/" + dataset.tree,
-      method: 'GET'
-    });
-  };
-
-  service.dstat = function(dataset) {
+  /**
+   * Lists all datasets.
+   * @return {Promise} a Promise for the global list of datasets
+   */
+  service.list = function() {
     return $q(function(resolve, reject) {
-      if (!dataset.dstat) {
-        reject({ status: -1, statusText: 'Dstat not available for dataset.' });
-        return;
+      /* eslint-disable angular/di */
+      service.config().then(function(config) {
+        resolve(config.datasets);
+      }, reject);
+      /* eslint-enable angular/di */
+    });
+  };
+
+  /**
+   * Lists all artifact groups that contain at least one artifact. If `primary`
+   * is true (default), only groups with at least one primary artifact are
+   * returned.
+   * @return {Promise} a Promise for the global list of datasets
+   */
+  service.groups = function(primary) {
+    if (typeof primary === 'undefined') {
+      primary = true;
+    }
+
+    return $q(function(resolve, reject) {
+      /* eslint-disable angular/di */
+      service.config().then(function(config) {
+        var ret = [];
+        config.artifacts.forEach(function(entries, name) {
+          if (primary) {
+            entries = entries.filter(function(artifact) {
+              return artifact.primary;
+            });
+          }
+
+          if (entries.length > 0) {
+            ret.push(name);
+          }
+        });
+
+        resolve(ret);
+      }, reject);
+      /* eslint-enable angular/di */
+    });
+  };
+
+  /**
+   * Gets the dataset with the given ID. Note that for deployer instances, there
+   * will only ever be a single dataset (id #0). In most cases, dataset #0
+   * should be treated as the 'primary' dataset (and should almost always be the
+   * only one configured).
+   * @param {number} id the index of the dataset to get
+   * @return {Promise} a Promise to retreive the specified dataset
+   */
+  service.get = function(id) {
+    return $q(function(resolve, reject) {
+      /* eslint-disable angular/di */
+      service.config().then(function(config) {
+        var dataset = config.datasets[id];
+        if (dataset) {
+          resolve(dataset);
+        } else {
+          reject("Dataset not found with ID: " + id);
+        }
+      }, function(reason) {
+        reject(reason);
+      });
+      /* eslint-enable angular/di */
+    });
+  };
+
+  /**
+   * Fetch all artifacts with the given `artifact_name` field. This should be
+   * the primary method for differentiating between artifacts. If no artifact
+   * name is given, this returns a flat list of all artifacts (via a Promise).
+   * @param {string} [name] an `artifact_name` field value
+   * @return {Promise} a promise for a list of matching artifacts
+   */
+  service.artifacts = function(name) {
+    return $q(function(resolve, reject) {
+      /* eslint-disable angular/di */
+      service.config().then(function(config) {
+        if (typeof name === 'undefined') {
+          var ret = [];
+          config.datasets.forEach(function(dataset) {
+            ret.push.apply(ret, dataset.artifacts);
+          });
+          resolve(ret);
+        } else {
+          var group = config.artifacts.get(name);
+          if (group && group.length > 0) {
+            resolve(group);
+          } else {
+            reject('No artifacts found with name: ' + name);
+          }
+        }
+      }, reject);
+      /* eslint-enable angular/di */
+    });
+  };
+
+  var _loadArtifact = function(artifact, resolve, reject, message) {
+    if (artifact) {
+      var url = null;
+      if (deployer) {
+        url = config.apiRoot + '/blob/' + artifact.id;
+      } else {
+        url = 'data/' + artifact.path;
       }

       resolve($http({
         cache: true,
-        url: "data/" + dataset.dstat,
+        url: url,
         method: 'GET'
       }));
-    });
+    } else {
+      reject('No artifact found matching ' + message);
+    }
+  };
+
+  /**
+   * Fetch the artifact with the given `artifact_name` and `artifact_type`
+   * fields. If only one parameter is provided, only `artifact_type` is
+   * considered.
+   * @param {string} [name] an `artifact_name` field value
+   * @param {string} type an `artifact_type` field value (e.g. 'subunit')
+   * @return {Promise} a Promise for the actual data associated with the
+   *                   artifact
+   */
+  service.artifact = function(name, type) {
+    if (arguments.length === 1) {
+      type = arguments[0];
+
+      return $q(function(resolve, reject) {
+        service.artifacts().then(function(all) {
+          _loadArtifact(all.find(function(a) {
+            return a.artifact_type === type;
+          }), resolve, reject, 'type=' + type);
+        });
+      });
+    } else {
+      return $q(function(resolve, reject) {
+        service.artifacts(name).then(function(group) {
+          _loadArtifact(group.find(function(a) {
+            return a.artifact_type === type;
+          }), resolve, reject, 'name=' + name + ', type=' + type);
+        }, reject);
+      });
+    }
   };

   return service;
@@ -1,7 +1,7 @@
 <div class="panel panel-default">
   <div class="panel-heading">
     <h3 class="panel-title">
-      {{dataset.name}}
+      {{artifactName}}
       <span class="text-muted" style="font-size: 65%">
         {{stats.start | date:'d MMM, yyyy'}}
       </span>
@@ -33,7 +33,7 @@
     <div class="btn-group pull-right">
       <a type="button"
          class="btn btn-default"
-         ui-sref="timeline({datasetId: dataset.id})">Details</a>
+         ui-sref="timeline({artifactName: artifactName})">Details</a>
     </div>
   </div>
 </div>
@@ -45,7 +45,7 @@
   <div class="list-group">
     <a class="list-group-item"
        ng-repeat="fail in stats.failures"
-       ui-sref="testDetails({datasetId: dataset.id, test: fail.name})">
+       ui-sref="testDetails({artifactName: artifactName, test: fail.name})">
       <h4 class="list-group-item-heading">
         {{fail.name | split:'.' | slice:-2 | join:'.'}}
       </h4>
@@ -57,7 +57,7 @@
   <div class="panel-footer clearfix">
     <div class="btn-group pull-right">
       <a type="button"
-         ui-sref="testDetails({datasetId: dataset.id, test: item.name})"
+         ui-sref="testDetails({artifactName: artifactName, test: item.name})"
          class="btn btn-default">Details</a>
     </div>
   </div>
@@ -6,18 +6,20 @@
 </header>

 <div class="container">
-  <div class="row" ng-if="!home.tempest.length">
+  <div class="row" ng-if="!home.groups.length">
     <div class="col-lg-12">
       <div class="alert alert-danger">
         No tempest datasets could be loaded!
       </div>
     </div>
   </div>
-  <div class="row" ng-if="!!home.tempest.length">
+  <div class="row" ng-if="!!home.groups.length">
     <div class="col-lg-8">
-      <tempest-summary ng-if="home.tempest.length >= 1" dataset="home.tempest[home.focus]"></tempest-summary>
+      <tempest-summary ng-if="home.groups.length >= 1"
+                       artifact-name="home.focus"
+                       index="home.focus"></tempest-summary>
     </div>
-    <div class="col-lg-4" ng-if="home.tempest.length > 1">
+    <div class="col-lg-4" ng-if="home.datasets.length > 1">
       <div class="panel panel-default">
         <div class="panel-heading">
           <h3 class="panel-title">Additional Datasets</h3>
@@ -25,18 +27,18 @@

         <ul class="list-group">
           <li class="list-group-item"
-              ng-repeat="dataset in home.tempest"
-              ng-if="$index != home.focus"
-              ng-click="home.focus = $index"
+              ng-repeat="group in home.groups"
+              ng-if="group != home.focus"
+              ng-click="home.focus = group"
               style="cursor: pointer">
-            {{ dataset.name }}
+            {{ group }}

-            <small class="text-muted" style="font-size: 75%">
+            <!--<small class="text-muted" style="font-size: 75%">
               {{dataset.stats.start | date:'MM/d/yyyy'}}
-            </small>
+            </small>-->

             <a class="btn btn-default btn-xs pull-right"
-               ui-sref="timeline({datasetId: dataset.id})">
+               ui-sref="timeline({artifactName: group})">
               Details
             </a>
           </li>
@@ -48,7 +48,7 @@

   <div class="panel-footer clearfix">
     <a class="btn btn-default pull-right"
-       ui-sref="timeline({datasetId: testDetails.dataset.id, test: testDetails.item.name})">
+       ui-sref="timeline({artifactName: testDetails.artifactName, test: testDetails.item.name})">
       Timeline
     </a>
   </div>
@@ -1,8 +1,7 @@
 <header class="bs-header">
   <div class="container">
     <h1 class="page-header">
-      Timeline: {{ timeline.dataset.name }}
-      <small>#{{ timeline.dataset.id }}</small>
+      Timeline: {{ timeline.artifactName }}
     </h1>
   </div>
 </header>
@@ -17,7 +16,7 @@
   </div>
   <div class="row" ng-if="!timeline.error">
     <div class="col-lg-12">
-      <timeline dataset="timeline.dataset"
+      <timeline artifact-name="timeline.artifactName"
                 hovered-item="timeline.hoveredItem"
                 selected-item="timeline.selectedItem"
                 preselect="timeline.preselect"></timeline>
@@ -26,7 +25,8 @@
   <div class="row">
     <div class="col-lg-12">
       <a name="details"></a>
-      <timeline-details dataset="timeline.dataset" item="timeline.selectedItem"></timeline-details>
+      <timeline-details artifact-name="timeline.artifactName"
+                        item="timeline.selectedItem"></timeline-details>
     </div>
   </div>
 </div>
|
@ -21,12 +21,31 @@ import os
|
|||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from functools import partial
|
|
||||||
|
|
||||||
from stackviz.parser import tempest_subunit
|
from stackviz.parser import tempest_subunit
|
||||||
|
|
||||||
_base = os.path.dirname(os.path.abspath(__file__))
|
_base = os.path.dirname(os.path.abspath(__file__))
|
||||||
_tempest_count = 0
|
|
||||||
|
|
||||||
|
def environment_params():
|
||||||
|
r = {}
|
||||||
|
|
||||||
|
if 'ZUUL_PROJECT' in os.environ:
|
||||||
|
r['change_project'] = os.environ['ZUUL_PROJECT']
|
||||||
|
|
||||||
|
if 'ZUUL_CHANGE' in os.environ:
|
||||||
|
r['change_id'] = os.environ['ZUUL_CHANGE']
|
||||||
|
|
||||||
|
if 'ZUUL_PATCHSET' in os.environ:
|
||||||
|
r['revision'] = os.environ['ZUUL_PATCHSET']
|
||||||
|
|
||||||
|
if 'ZUUL_PIPELINE' in os.environ:
|
||||||
|
r['pipeline'] = os.environ['ZUUL_PIPELINE']
|
||||||
|
|
||||||
|
if 'JOB_NAME' in os.environ:
|
||||||
|
r['name'] = os.environ['JOB_NAME']
|
||||||
|
|
||||||
|
return r
|
||||||
|
|
||||||
|
|
||||||
def open_compressed(output_dir, file_name, compress):
|
def open_compressed(output_dir, file_name, compress):
|
||||||
@ -46,40 +65,61 @@ def json_date_handler(object):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def export_tempest_tree(stream, output_stream):
|
def build_artifact(path, artifact_name, artifact_type, content_type, primary,
|
||||||
converted = tempest_subunit.convert_stream(stream, strip_details=True)
|
compress):
|
||||||
tree = tempest_subunit.reorganize(converted)
|
ret = {
|
||||||
json.dump(tree, output_stream, default=json_date_handler)
|
'path': path,
|
||||||
output_stream.close()
|
'artifact_name': artifact_name,
|
||||||
|
'artifact_type': artifact_type,
|
||||||
|
'content_type': content_type,
|
||||||
|
'primary': primary
|
||||||
|
}
|
||||||
|
|
||||||
|
if compress:
|
||||||
|
ret['content_encoding'] = 'gzip'
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def export_tempest_raw(stream, output_stream):
|
def export_tempest_raw(name, subunit, output_dir, prefix, compress):
|
||||||
converted = tempest_subunit.convert_stream(stream, strip_details=True)
|
converted = tempest_subunit.convert_stream(subunit, strip_details=True)
|
||||||
json.dump(converted, output_stream, default=json_date_handler)
|
|
||||||
output_stream.close()
|
stream, path = open_compressed(output_dir,
|
||||||
|
prefix + '-raw.json',
|
||||||
|
compress)
|
||||||
|
json.dump(converted, stream, default=json_date_handler)
|
||||||
|
stream.close()
|
||||||
|
|
||||||
|
return converted, build_artifact(path, name,
|
||||||
|
'subunit', 'application/json',
|
||||||
|
True, compress)
|
||||||
|
|
||||||
|
|
||||||
def export_tempest_details(stream, output_stream):
|
def export_tempest_details(name, subunit, output_dir, prefix, compress):
|
||||||
converted = tempest_subunit.convert_stream(stream)
|
converted = tempest_subunit.convert_stream(subunit, strip_details=False)
|
||||||
|
|
||||||
output = {}
|
output = {}
|
||||||
for entry in converted:
|
for entry in converted:
|
||||||
output[entry['name']] = entry['details']
|
output[entry['name']] = entry['details']
|
||||||
|
|
||||||
json.dump(output, output_stream, default=json_date_handler)
|
stream, path = open_compressed(output_dir,
|
||||||
output_stream.close()
|
prefix + '-details.json',
|
||||||
|
compress)
|
||||||
|
json.dump(output, stream, default=json_date_handler)
|
||||||
|
stream.close()
|
||||||
|
|
||||||
|
return build_artifact(path, name,
|
||||||
|
'subunit-details', 'application/json',
|
||||||
|
False, compress)
|
||||||
|
|
||||||
|
|
||||||
def get_stats(stream):
|
def export_stats(name, subunit_parsed, output_dir, prefix, compress):
|
||||||
converted = tempest_subunit.convert_stream(stream, strip_details=False)
|
|
||||||
|
|
||||||
start = None
|
start = None
|
||||||
end = None
|
end = None
|
||||||
total_duration = 0
|
total_duration = 0
|
||||||
failures = []
|
failures = []
|
||||||
skips = []
|
skips = []
|
||||||
|
|
||||||
for entry in converted:
|
for entry in subunit_parsed:
|
||||||
# find min/max dates
|
# find min/max dates
|
||||||
entry_start, entry_end = entry['timestamps']
|
entry_start, entry_end = entry['timestamps']
|
||||||
if start is None or entry_start < start:
|
if start is None or entry_start < start:
|
||||||
@@ -112,56 +152,44 @@ def get_stats(stream):
             'details': entry['details'].get('reason')
         })

-    return {
-        'count': len(converted),
+    stream, path = open_compressed(
+        output_dir, prefix + '-stats.json', compress)
+
+    json.dump({
+        'count': len(subunit_parsed),
         'start': start,
         'end': end,
         'total_duration': total_duration,
         'failures': failures,
         'skips': skips
-    }
+    }, stream, default=json_date_handler)
+    stream.close()
+
+    return build_artifact(path, name,
+                          'subunit-stats', 'application/json',
+                          False, compress)


-def export_tempest(provider, output_dir, dstat, compress):
-    global _tempest_count
+def export_tempest(provider, output_dir, compress):

     ret = []

     for i in range(provider.count):
-        path_base = 'tempest_%s_%d' % (provider.name, i)
-        if provider.count > 1:
-            name = '%s (%d)' % (provider.description, i)
-        else:
-            name = provider.description
+        prefix = '%s-%d' % (provider.name, i)

-        open_ = partial(open_compressed,
-                        output_dir=output_dir,
-                        compress=compress)
+        # convert and save raw (without details)
+        raw, artifact = export_tempest_raw(provider.name,
+                                           provider.get_stream(i),
+                                           output_dir, prefix, compress)
+        ret.append(artifact)

-        stream_raw, path_raw = open_(file_name=path_base + '_raw.json')
-        export_tempest_raw(provider.get_stream(i), stream_raw)
+        # convert and save details
+        ret.append(export_tempest_details(provider.name,
+                                          provider.get_stream(i),
+                                          output_dir, prefix, compress))

-        stream_tree, path_tree = open_(file_name=path_base + '_tree.json')
-        export_tempest_tree(provider.get_stream(i), stream_tree)
-
-        stream_details, path_details = open_(
-            file_name=path_base + '_details.json')
-        export_tempest_details(provider.get_stream(i), stream_details)
-
-        stats = get_stats(provider.get_stream(i))
-
-        entry = {
-            'id': _tempest_count,
-            'name': name,
-            'raw': path_raw,
-            'tree': path_tree,
-            'details': path_details,
-            'stats': stats
-        }
-        entry.update({'dstat': dstat} if dstat else {})
-
-        ret.append(entry)
-        _tempest_count += 1
+        # generate and save stats
+        ret.append(export_stats(provider.name, raw, output_dir, prefix,
+                                compress))

     return ret

@@ -170,7 +198,7 @@ def export_dstat(path, output_dir, compress):
     f = open(path, 'rb')
     out_stream, out_file = open_compressed(
         output_dir,
-        'dstat_log.csv',
+        'dstat.csv',
         compress)

     shutil.copyfileobj(f, out_stream)
@@ -178,7 +206,9 @@ def export_dstat(path, output_dir, compress):
     f.close()
     out_stream.close()

-    return out_file
+    return build_artifact(out_file, os.path.basename(path),
+                          'dstat', 'text/csv',
+                          False, compress)


 def main():
@@ -190,12 +220,15 @@ def main():
     parser.add_argument("-z", "--gzip",
                         help="Enable gzip compression for data files.",
                         action="store_true")
+    parser.add_argument("-e", "--env",
+                        help="Include Zuul metadata from environment "
+                             "variables.",
+                        action="store_true")
     parser.add_argument("-f", "--stream-file",
                         action="append",
                         help="Include the given direct subunit stream; can be "
                              "used multiple times.")
     parser.add_argument("-r", "--repository",
-                        action="append",
                         help="A directory containing a `.testrepository` to "
                              "include; can be used multiple times.")
     parser.add_argument("-i", "--stdin",
@@ -211,28 +244,42 @@ def main():
     if not os.path.exists(args.path):
         os.mkdir(args.path)

-    dstat = None
+    artifacts = []
+    dataset = {
+        'name': None,
+        'url': None,
+        'status': None,
+        'ci_username': None,
+        'pipeline': None,
+        'change_id': None,
+        'revision': None,
+        'change_project': None,
+        'change_subject': None,
+        'artifacts': artifacts
+    }
+
+    if args.env:
+        dataset.update(environment_params())
+
     if args.dstat:
         print("Exporting DStat log")
         dstat = export_dstat(args.dstat, args.path, args.gzip)
+        artifacts.append(dstat)

     providers = tempest_subunit.get_providers(
         args.repository,
         args.stream_file,
         args.stdin)

-    tempest_config_entries = []
-
     for provider in providers.values():
         print("Exporting Tempest provider: %s (%d)" % (provider.description,
                                                        provider.count))
-        tempest_config_entries.extend(
-            export_tempest(provider, args.path, dstat, args.gzip)
-        )
+        artifacts.extend(export_tempest(provider, args.path, args.gzip))

     with open(os.path.join(args.path, 'config.json'), 'w') as f:
         json.dump({
-            'tempest': tempest_config_entries
+            'deployer': False,
+            'datasets': [dataset]
         }, f, default=json_date_handler)

@@ -97,7 +97,7 @@ class RepositoryProvider(SubunitProvider):

     @property
     def name(self):
-        return "repo_%s" % os.path.basename(self.repository_path)
+        return os.path.basename(self.repository_path)

     @property
     def description(self):
@@ -120,7 +120,7 @@ class FileProvider(SubunitProvider):

     @property
     def name(self):
-        return "file_%s" % os.path.basename(self.path)
+        return os.path.basename(self.path)

     @property
     def description(self):
@@ -1,17 +1,39 @@
 /*global angular */

 'use strict';

 describe('Unit: DatasetService', function() {

   var service, httpBackend;
-  var exampleConfig = {"tempest": [
-    {"raw": "tempest_file_freshlog_0_raw.json",
-     "details": "tempest_file_freshlog_0_details.json",
-     "tree": "tempest_file_freshlog_0_tree.json",
-     "id": 0,
-     "name": "Subunit File: freshlog"}
-  ]};
+  var mockConfig = {
+    "deployer": false,
+    "datasets": [{
+      "status": null, "ci_username": null, "pipeline": null,
+      "change_project": null, "name": null, "url": null,
+      "change_id": null, "change_subject": null, "revision": null,
+      "artifacts": [
+        {
+          "artifact_type": "dstat", "path": "dstat.csv", "primary": false,
+          "content_type": "text/csv", "artifact_name": "dstat-csv.txt"
+        }, {
+          "artifact_type": "subunit", "primary": true,
+          "path": "testrepository.subunit-0-raw.json",
+          "content_type": "application/json",
+          "artifact_name": "testrepository.subunit"
+        }, {
+          "artifact_type": "subunit-details", "primary": false,
+          "path": "testrepository.subunit-0-details.json",
+          "content_type": "application/json",
+          "artifact_name": "testrepository.subunit"
+        }, {
+          "artifact_type": "subunit-stats", "primary": false,
+          "path": "testrepository.subunit-0-stats.json",
+          "content_type": "application/json",
+          "artifact_name": "testrepository.subunit"
+        }
+      ]
+    }]
+  };

   beforeEach(function() {
     // instantiate the app module
|
|||||||
angular.mock.inject(function(datasetService, $httpBackend) {
|
angular.mock.inject(function(datasetService, $httpBackend) {
|
||||||
service = datasetService;
|
service = datasetService;
|
||||||
httpBackend = $httpBackend;
|
httpBackend = $httpBackend;
|
||||||
|
|
||||||
|
httpBackend.whenGET("data/config.json").respond(mockConfig);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -28,40 +52,45 @@ describe('Unit: DatasetService', function() {
     expect(service).toBeDefined();
   });

-  it('should return config.json', function() {
-    httpBackend.whenGET("data/config.json").respond(exampleConfig);
-    service.list().then(function(config) {
-      expect(config.data).toEqual(exampleConfig);
+  it('should return the loaded configuration', function() {
+    service.config().then(function(config) {
+      expect(config.config).toEqual(mockConfig);
     });
     httpBackend.flush();
   });

-  it('should GET the raw file from a dataset', function() {
-    httpBackend.whenGET(exampleConfig.raw).respond(exampleConfig.raw);
-    service.raw(exampleConfig).then(function(raw) {
-      expect(raw).toEqual(exampleConfig.raw);
+  it('should only have valid primary artifacts', function() {
+    service.groups(true).then(function(groups) {
+      expect(groups.length).toEqual(1);
+      expect(groups).toContain('testrepository.subunit');
+    }, function() {
+      fail('callback should return');
     });
+
+    httpBackend.flush();
   });

-  it('should GET the details file from a dataset', function() {
-    httpBackend.whenGET(exampleConfig.details).respond(exampleConfig.details);
-    service.details(exampleConfig).then(function(details) {
-      expect(details).toEqual(exampleConfig.details);
+  it('should find all artifacts matching a particular name', function() {
+    service.artifacts('testrepository.subunit').then(function(artifacts) {
+      expect(artifacts.length).toEqual(3);
+    }, function() {
+      fail('callback should return');
     });
+
+    httpBackend.flush();
   });

-  it('should GET the tree file from a dataset', function() {
-    httpBackend.whenGET(exampleConfig.tree).respond(exampleConfig.tree);
-    service.tree(exampleConfig).then(function(tree) {
-      expect(tree).toEqual(exampleConfig.tree);
-    });
-  });
-
-  it('should GET the dstat file from a dataset', function() {
-    httpBackend.whenGET(exampleConfig.dstat).respond(exampleConfig.dstat);
-    service.dstat(exampleConfig).then(function(dstat) {
-      expect(dstat).toEqual(exampleConfig.dstat);
+  it('should load an artifact', function() {
+    httpBackend.whenGET('data/testrepository.subunit-0-raw.json').respond({
+      mock: true
+    });
+
+    service.artifact('testrepository.subunit', 'subunit').then(function(resp) {
+      expect(resp.data).toEqual({ mock: true });
+    }, function(ex) {
+      fail('promise should return successfully: ' + ex);
     });
+
+    httpBackend.flush();
   });
 });