Modified: trunk/PerformanceTests/resources/results-template.html (124801 => 124802)
--- trunk/PerformanceTests/resources/results-template.html 2012-08-06 21:00:29 UTC (rev 124801)
+++ trunk/PerformanceTests/resources/results-template.html 2012-08-06 21:59:06 UTC (rev 124802)
@@ -40,22 +40,29 @@
<div id="container"></div>
<script>
-function attachPlot(testName) {
- var section = $('<section><h1></h1><div class="plot" style="width: 200px; height: 300px;"></div>'
+function createPlot(testName) {
+ var section = $('<section><h1></h1><div class="plot"></div>'
+ '<span class="tooltip"></span><section>');
var unit = testUnits[testName];
+ section.children('.plot').css({'width': 100 * maxLength + 'px', 'height': '300px'});
section.children('h1').html(testName + (unit ? ' (' + unit + ')' : ''));
$('#container').append(section);
+
+ attachPlot(testName, section);
+}
+function attachPlot(testName, section, minIsZero) {
var averages = testResults[testName];
var color = 'rgb(230,50,50)';
var minMaxOptions = {lines: {show:true, lineWidth: 0},
color: color,
- points: {show: true, radius: 1}};
+ points: {show: true, radius: 1},
+ bars: {show: false}};
+
function makeLowPlot(id, data) { return $.extend(true, {}, minMaxOptions, {id: id, data: data}); }
function makeHighPlot(from, to, fill, data) { return $.extend(true, {}, minMaxOptions,
- {id: to, data: data, lines: {fill: fill}, fillBetween: from}); }
+ {id: to, data: data}); }
var plotData = [
makeLowPlot('min', testResultsMin[testName]),
@@ -67,21 +74,30 @@
var plotContainer = section.children('.plot');
$.plot(plotContainer, plotData, {
xaxis: {
- min: averages[0][0] - 0.25,
- max: averages[averages.length - 1][0] + 0.25,
+ min: averages[0][0] - 0.5,
+ max: averages[averages.length - 1][0] + 0.5,
tickSize: 1,
- tickDecimals: 0,
- tickFormatter: function (x) { return x >= 0 ? x : ''; }},
- yaxis: {},
+ ticks: averages.map(function (value, index) {
+ var label = 'r' + webkitRevisions[index];
+ if (descriptions[index])
+ label += ' - ' + descriptions[index]
+ return [index, label];
+ }),
+ },
+ yaxis: {
+ min: minIsZero ? 0 : Math.max.apply(Math, $.map(testResultsMin[testName], function (entry) { return entry[1]; })) * 0.98,
+ max: Math.max.apply(Math, $.map(testResultsMax[testName], function (entry) { return entry[1]; })) * (minIsZero ? 1.1 : 1.01),
+ },
crosshair: { mode: 'y' },
series: { shadowSize: 0 },
+ bars: {show: true, align: 'center', barWidth: 0.5},
lines: { show: false },
points: { show: true },
grid: {
borderWidth: 2,
backgroundColor: '#fff',
hoverable: true,
- autoHighlight: true,
+ autoHighlight: false,
}
});
@@ -95,13 +111,26 @@
} else
tooltip.hide();
});
+ plotContainer.mouseout(function () {
+ tooltip.hide();
+ });
+
+ plotContainer.click(function (event) {
+ event.preventDefault();
+ attachPlot(testName, section, !minIsZero);
+ });
}
var results = JSON.parse(document.getElementById('json').textContent);
var tests = [];
var testResults = {}, testResultsMin = {}, testResultsMax = {}, testResultsStdevLow = {}, testResultsStdevHigh = {};
var testUnits = {};
+var webkitRevisions = [];
+var descriptions = [];
+var maxLength = 0;
$.each(results, function (index, entry) {
+ webkitRevisions.push(entry['webkit-revision']);
+ descriptions.push(entry['description']);
$.each(entry.results, function (test, result) {
if (tests.indexOf(test) < 0)
tests.push(test);
@@ -125,11 +154,12 @@
testResultsStdevHigh[test].push([index, result['avg'] + result['stdev']]);
}
}
+ maxLength = Math.max(maxLength, testResults[test].length);
testUnits[test] = result.unit;
});
});
-$.each(tests.sort(), function (index, test) { attachPlot(test); });
+$.each(tests.sort(), function (index, test) { createPlot(test); });
</script>
</body>
-</html>
\ No newline at end of file
+</html>
Modified: trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py (124801 => 124802)
--- trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py 2012-08-06 21:00:29 UTC (rev 124801)
+++ trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py 2012-08-06 21:59:06 UTC (rev 124802)
@@ -102,6 +102,8 @@
help="Filename of the JSON file that summaries the results."),
optparse.make_option("--source-json-path",
help="Path to a JSON file to be merged into the JSON file when --output-json-path is present."),
+ optparse.make_option("--description",
+ help="Add a description to the output JSON file if one is generated"),
optparse.make_option("--test-results-server",
help="Upload the generated JSON file to the specified server when --output-json-path is present."),
optparse.make_option("--webkit-test-runner", "-2", action="store_true",
@@ -170,7 +172,7 @@
if not output_json_path:
output_json_path = self._host.filesystem.join(self._port.perf_results_directory(), self._DEFAULT_JSON_FILENAME)
- output = self._generate_results_dict(self._timestamp, options.platform, options.builder_name, options.build_number)
+ output = self._generate_results_dict(self._timestamp, options.description, options.platform, options.builder_name, options.build_number)
if options.source_json_path:
output = self._merge_source_json(options.source_json_path, output)
@@ -193,8 +195,10 @@
else:
self._port.show_results_html_file(results_page_path)
- def _generate_results_dict(self, timestamp, platform, builder_name, build_number):
+ def _generate_results_dict(self, timestamp, description, platform, builder_name, build_number):
contents = {'results': self._results}
+ if description:
+ contents['description'] = description
for (name, path) in self._port.repository_paths():
contents[name + '-revision'] = self._host.scm().svn_revision(path)
Modified: trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py (124801 => 124802)
--- trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py 2012-08-06 21:00:29 UTC (rev 124801)
+++ trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py 2012-08-06 21:59:06 UTC (rev 124802)
@@ -270,6 +270,16 @@
"inspector/pass.html:group_name:test_name": 42},
"webkit-revision": 5678, "branch": "webkit-trunk"})
+ def test_run_with_description(self):
+ runner, port = self.create_runner(args=['--output-json-path=/mock-checkout/output.json',
+ '--test-results-server=some.host', '--description', 'some description'])
+ self._test_run_with_json_output(runner, port.host.filesystem)
+ self.assertEqual(json.loads(port.host.filesystem.read_text_file('/mock-checkout/output.json')), {
+ "timestamp": 123456789, "description": "some description", "results":
+ {"Bindings/event-target-wrapper": {"max": 1510, "avg": 1489.05, "median": 1487, "min": 1471, "stdev": 14.46, "unit": "ms"},
+ "inspector/pass.html:group_name:test_name": 42},
+ "webkit-revision": 5678, "branch": "webkit-trunk"})
+
def create_runner_and_setup_results_template(self, args=[]):
runner, port = self.create_runner(args)
filesystem = port.host.filesystem