Diff
Modified: trunk/PerformanceTests/ChangeLog (234756 => 234757)
--- trunk/PerformanceTests/ChangeLog 2018-08-10 16:10:45 UTC (rev 234756)
+++ trunk/PerformanceTests/ChangeLog 2018-08-10 16:26:13 UTC (rev 234757)
@@ -1,3 +1,16 @@
+2018-08-10 Truitt Savell <[email protected]>
+
+ Unreviewed, rolling out r234750.
+
+ Caused 185 perf test failures.
+
+ Reverted changeset:
+
+ "Add ability to ignore process prewarming for launch time
+ benchmark"
+ https://bugs.webkit.org/show_bug.cgi?id=188462
+ https://trac.webkit.org/changeset/234750
+
2018-08-09 Ben Richards <[email protected]>
Add ability to ignore process prewarming for launch time benchmark
Modified: trunk/PerformanceTests/LaunchTime/launch_time.py (234756 => 234757)
--- trunk/PerformanceTests/LaunchTime/launch_time.py 2018-08-10 16:10:45 UTC (rev 234756)
+++ trunk/PerformanceTests/LaunchTime/launch_time.py 2018-08-10 16:26:13 UTC (rev 234757)
@@ -41,12 +41,21 @@
</html>
'''
- def on_receive_stop_signal(self, data):
+ def get_blank_page(self):
+ return '''<!DOCTYPE html>
+ <html>
+ <head>
+ <title>Launch Time Benchmark</title>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+ </head>
+ </html>'''
+
+ def on_receive_stop_time(self, time):
pass
def do_HEAD(self):
self.send_response(200)
- self.send_header('Content-type', 'text/html')
+ self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
@@ -53,8 +62,7 @@
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
- if not self.path.startswith('/blank'):
- self.wfile.write(self.get_test_page())
+ self.wfile.write(self.get_blank_page() if self.path == '/blank' else self.get_test_page())
self.wfile.close()
def do_POST(self):
@@ -65,8 +73,10 @@
self.wfile.close()
data_string = self.rfile.read(int(self.headers['Content-Length']))
- self.on_receive_stop_signal(data_string)
+ time = float(data_string)
+ self.on_receive_stop_time(time)
+
def log_message(self, format, *args):
pass
@@ -167,8 +177,7 @@
def _standard_deviation(self, results, mean=None):
if mean is None:
mean = sum(results) / float(len(results))
- divisor = float(len(results) - 1) if len(results) > 1 else float(len(results))
- variance = sum((x - mean) ** 2 for x in results) / divisor
+ variance = sum((x - mean) ** 2 for x in results) / float(len(results) - 1)
return sqrt(variance)
def _compute_results(self, results):
@@ -188,13 +197,9 @@
for i in range(self.iteration_groups):
yield self.wait_time_low + increment_per_group * i
- def open_tab(self, blank=False):
- if blank:
- call(['open', '-a', self._browser_bundle_path,
- 'http://localhost:{}/blank/{}'.format(self._port, self._open_count)])
- else:
- call(['open', '-a', self._browser_bundle_path,
- 'http://localhost:{}/{}'.format(self._port, self._open_count)])
+ def open_tab(self):
+ call(['open', '-a', self._browser_bundle_path,
+ 'http://localhost:{}/{}'.format(self._port, self._open_count)])
self._open_count += 1
def launch_browser(self):
@@ -268,8 +273,8 @@
results_by_iteration_number[i].append(result_in_ms)
except KeyboardInterrupt:
raise KeyboardInterrupt
- except Exception as error:
- self._exit_due_to_exception('(Test {} failed) {}: {}\n'.format(i + 1 if self._verbose else i, type(error).__name__, error))
+ except:
+ self._exit_due_to_exception('(Test {} failed)\n'.format(i + 1 if self._verbose else i))
if not self._verbose:
print ''
Modified: trunk/PerformanceTests/LaunchTime/new_tab.py (234756 => 234757)
--- trunk/PerformanceTests/LaunchTime/new_tab.py 2018-08-10 16:10:45 UTC (rev 234756)
+++ trunk/PerformanceTests/LaunchTime/new_tab.py 2018-08-10 16:26:13 UTC (rev 234757)
@@ -36,22 +36,16 @@
self.start_time = None
self.stop_time = None
self.stop_signal_was_received = Event()
- self.allow_prewarm = True
def run_iteration(self):
- tabs_to_open = 1 if self.allow_prewarm else 2
- self.stop_time = None
- for _ in range(tabs_to_open - 1):
- self.open_tab(blank=True)
self.start_time = time.time() * 1000
self.open_tab()
while self.stop_time is None:
self.stop_signal_was_received.wait()
result = self.stop_time - self.start_time
+ self.stop_time = None
self.stop_signal_was_received.clear()
- for _ in range(tabs_to_open):
- self.close_tab()
-
+ self.close_tab()
return result
def group_init(self):
@@ -62,8 +56,6 @@
help='number of groups of iterations to run (default: {})'.format(self.iteration_groups))
self.argument_parser.add_argument('-w', '--wait-time', type=self._parse_wait_time,
help='wait time to use between iterations or range to scan (format is "N" or "N:M" where N < M, default: {}:{})'.format(self.wait_time_low, self.wait_time_high))
- self.argument_parser.add_argument('--no-prewarm', action='store_true',
- help='attempt to ignore process prewarming (will most likely raise standard deviation)')
def did_parse_arguments(self, args):
if args.groups:
@@ -70,8 +62,6 @@
self.iteration_groups = args.groups
if args.wait_time:
self.wait_time_low, self.wait_time_high = args.wait_time
- if args.no_prewarm:
- self.allow_prewarm = False
@staticmethod
def ResponseHandler(new_tab_benchmark):
@@ -84,8 +74,8 @@
<meta http-equiv="Content-Type" content="text/html" />
<script>
function sendDone() {
- const time = performance.timing.navigationStart
- const request = new XMLHttpRequest();
+ var time = performance.timing.navigationStart
+ var request = new XMLHttpRequest();
request.open("POST", "done", false);
request.setRequestHeader('Content-Type', 'application/json');
request.send(JSON.stringify(time));
@@ -99,8 +89,8 @@
</html>
'''
- def on_receive_stop_signal(self, data):
- new_tab_benchmark.stop_time = float(data)
+ def on_receive_stop_time(self, stop_time):
+ new_tab_benchmark.stop_time = stop_time
new_tab_benchmark.stop_signal_was_received.set()
return Handler
Modified: trunk/PerformanceTests/LaunchTime/startup.py (234756 => 234757)
--- trunk/PerformanceTests/LaunchTime/startup.py 2018-08-10 16:10:45 UTC (rev 234756)
+++ trunk/PerformanceTests/LaunchTime/startup.py 2018-08-10 16:26:13 UTC (rev 234757)
@@ -50,8 +50,8 @@
</html>
'''
- def on_receive_stop_signal(self, data):
- startup_benchmark.stop_time = float(data)
+ def on_receive_stop_time(self, stop_time):
+ startup_benchmark.stop_time = stop_time
startup_benchmark.stop_signal_was_received.set()
return Handler