Diff
Modified: trunk/Tools/ChangeLog (159594 => 159595)
--- trunk/Tools/ChangeLog 2013-11-21 00:42:51 UTC (rev 159594)
+++ trunk/Tools/ChangeLog 2013-11-21 00:54:05 UTC (rev 159595)
@@ -1,3 +1,21 @@
+2013-11-20 Jozsef Berta <[email protected]>
+
+ Delete baseline optimizer
+ https://bugs.webkit.org/show_bug.cgi?id=122333
+
+ Reviewed by Ryosuke Niwa.
+
+ * Scripts/webkitpy/common/checkout/baselineoptimizer.py: Removed.
+ * Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py: Removed.
+ * Scripts/webkitpy/tool/commands/rebaseline.py:
+ (RebaselineTest.execute):
+ (AbstractParallelRebaselineCommand._files_to_add):
+ (AbstractParallelRebaselineCommand._rebaseline):
+ * Scripts/webkitpy/tool/commands/rebaseline_unittest.py:
+ (TestRebaselineJson.test_rebaseline_all):
+ (TestRebaselineJson.test_rebaseline_debug):
+ (TestRebaselineExpectations.disabled_test_overrides_are_included_correctly):
+
2013-11-20 Anders Carlsson <[email protected]>
Enable PageLoadTest assertions again
Deleted: trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer.py (159594 => 159595)
--- trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer.py 2013-11-21 00:42:51 UTC (rev 159594)
+++ trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer.py 2013-11-21 00:54:05 UTC (rev 159595)
@@ -1,274 +0,0 @@
-# Copyright (C) 2011, Google Inc. All rights reserved.
-# Copyright (C) 2013, Apple Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import copy
-import logging
-
-
-_log = logging.getLogger(__name__)
-
-
-# Yes, it's a hypergraph.
-# FIXME: Should this function live with the ports somewhere?
-# Perhaps this should move onto PortFactory?
-def _baseline_search_hypergraph(host, port_names):
- hypergraph = {}
-
- # These edges in the hypergraph aren't visible on build.webkit.org,
- # but they impose constraints on how we optimize baselines.
- hypergraph.update(_VIRTUAL_PORTS)
-
- # FIXME: Should we get this constant from somewhere?
- fallback_path = ['LayoutTests']
-
- port_factory = host.port_factory
- for port_name in port_names:
- port = port_factory.get(port_name)
- webkit_base = port.webkit_base()
- search_path = port.baseline_search_path()
- if search_path:
- hypergraph[port_name] = [host.filesystem.relpath(path, webkit_base) for path in search_path] + fallback_path
- return hypergraph
-
-
-_VIRTUAL_PORTS = {
- 'mac-future': ['LayoutTests/platform/mac-future', 'LayoutTests/platform/mac', 'LayoutTests'],
- 'win-future': ['LayoutTests/platform/win-future', 'LayoutTests/platform/win', 'LayoutTests'],
-}
-
-
-# FIXME: Should this function be somewhere more general?
-def _invert_dictionary(dictionary):
- inverted_dictionary = {}
- for key, value in dictionary.items():
- if inverted_dictionary.get(value):
- inverted_dictionary[value].append(key)
- else:
- inverted_dictionary[value] = [key]
- return inverted_dictionary
-
-
-class BaselineOptimizer(object):
- def __init__(self, host, port_names):
- self._host = host
- self._filesystem = self._host.filesystem
- self._scm = self._host.scm()
- self._hypergraph = _baseline_search_hypergraph(host, port_names)
- self._directories = reduce(set.union, map(set, self._hypergraph.values()))
-
- def read_results_by_directory(self, baseline_name):
- results_by_directory = {}
- for directory in self._directories:
- path = self._filesystem.join(self._scm.checkout_root, directory, baseline_name)
- if self._filesystem.exists(path):
- results_by_directory[directory] = self._filesystem.sha1(path)
- return results_by_directory
-
- def _results_by_port_name(self, results_by_directory):
- results_by_port_name = {}
- for port_name, search_path in self._hypergraph.items():
- for directory in search_path:
- if directory in results_by_directory:
- results_by_port_name[port_name] = results_by_directory[directory]
- break
- return results_by_port_name
-
- def _most_specific_common_directory(self, port_names):
- paths = [self._hypergraph[port_name] for port_name in port_names]
- common_directories = reduce(set.intersection, map(set, paths))
-
- def score(directory):
- return sum([path.index(directory) for path in paths])
-
- _, directory = sorted([(score(directory), directory) for directory in common_directories])[0]
- return directory
-
- def _filter_port_names_by_result(self, predicate, port_names_by_result):
- filtered_port_names_by_result = {}
- for result, port_names in port_names_by_result.items():
- filtered_port_names = filter(predicate, port_names)
- if filtered_port_names:
- filtered_port_names_by_result[result] = filtered_port_names
- return filtered_port_names_by_result
-
- def _place_results_in_most_specific_common_directory(self, port_names_by_result, results_by_directory):
- for result, port_names in port_names_by_result.items():
- directory = self._most_specific_common_directory(port_names)
- results_by_directory[directory] = result
-
- def _find_optimal_result_placement(self, baseline_name):
- results_by_directory = self.read_results_by_directory(baseline_name)
- results_by_port_name = self._results_by_port_name(results_by_directory)
- port_names_by_result = _invert_dictionary(results_by_port_name)
-
- new_results_by_directory = self._optimize_by_most_specific_common_directory(results_by_directory, results_by_port_name, port_names_by_result)
- if not new_results_by_directory:
- new_results_by_directory = self._optimize_by_pushing_results_up(results_by_directory, results_by_port_name, port_names_by_result)
-
- return results_by_directory, new_results_by_directory
-
- def _optimize_by_most_specific_common_directory(self, results_by_directory, results_by_port_name, port_names_by_result):
- new_results_by_directory = {}
- unsatisfied_port_names_by_result = port_names_by_result
- while unsatisfied_port_names_by_result:
- self._place_results_in_most_specific_common_directory(unsatisfied_port_names_by_result, new_results_by_directory)
- new_results_by_port_name = self._results_by_port_name(new_results_by_directory)
-
- def is_unsatisfied(port_name):
- return results_by_port_name[port_name] != new_results_by_port_name[port_name]
-
- new_unsatisfied_port_names_by_result = self._filter_port_names_by_result(is_unsatisfied, port_names_by_result)
-
- if len(new_unsatisfied_port_names_by_result.values()) >= len(unsatisfied_port_names_by_result.values()):
- return {} # Frowns. We do not appear to be converging.
- unsatisfied_port_names_by_result = new_unsatisfied_port_names_by_result
-
- return new_results_by_directory
-
- def _optimize_by_pushing_results_up(self, results_by_directory, results_by_port_name, port_names_by_result):
- try:
- results_by_directory = results_by_directory
- best_so_far = results_by_directory
- while True:
- new_results_by_directory = copy.copy(best_so_far)
- for port_name in self._hypergraph.keys():
- fallback_path = self._hypergraph[port_name]
- current_index, current_directory = self._find_in_fallbackpath(fallback_path, results_by_port_name[port_name], best_so_far)
- current_result = results_by_port_name[port_name]
- for index in range(current_index + 1, len(fallback_path)):
- new_directory = fallback_path[index]
- if not new_directory in new_results_by_directory:
- new_results_by_directory[new_directory] = current_result
- if current_directory in new_results_by_directory:
- del new_results_by_directory[current_directory]
- elif new_results_by_directory[new_directory] == current_result:
- if current_directory in new_results_by_directory:
- del new_results_by_directory[current_directory]
- else:
- # The new_directory contains a different result, so stop trying to push results up.
- break
-
- if len(new_results_by_directory) >= len(best_so_far):
- # We've failed to improve, so give up.
- break
- best_so_far = new_results_by_directory
-
- return best_so_far
- except KeyError as e:
- # FIXME: KeyErrors get raised if we're missing baselines. We should handle this better.
- return {}
-
- def _find_in_fallbackpath(self, fallback_path, current_result, results_by_directory):
- for index, directory in enumerate(fallback_path):
- if directory in results_by_directory and (results_by_directory[directory] == current_result):
- return index, directory
- assert False, "result %s not found in fallback_path %s, %s" % (current_result, fallback_path, results_by_directory)
-
- def _filtered_results_by_port_name(self, results_by_directory):
- results_by_port_name = self._results_by_port_name(results_by_directory)
- for port_name in _VIRTUAL_PORTS.keys():
- if port_name in results_by_port_name:
- del results_by_port_name[port_name]
- return results_by_port_name
-
- def _platform(self, filename):
- platform_dir = 'LayoutTests' + self._filesystem.sep + 'platform' + self._filesystem.sep
- if filename.startswith(platform_dir):
- return filename.replace(platform_dir, '').split(self._filesystem.sep)[0]
- platform_dir = self._filesystem.join(self._scm.checkout_root, platform_dir)
- if filename.startswith(platform_dir):
- return filename.replace(platform_dir, '').split(self._filesystem.sep)[0]
- return '(generic)'
-
- def _move_baselines(self, baseline_name, results_by_directory, new_results_by_directory):
- data_for_result = {}
- for directory, result in results_by_directory.items():
- if not result in data_for_result:
- source = self._filesystem.join(self._scm.checkout_root, directory, baseline_name)
- data_for_result[result] = self._filesystem.read_binary_file(source)
-
- file_names = []
- for directory, result in results_by_directory.items():
- if new_results_by_directory.get(directory) != result:
- file_names.append(self._filesystem.join(self._scm.checkout_root, directory, baseline_name))
- if file_names:
- _log.debug(" Deleting:")
- for platform_dir in sorted(self._platform(filename) for filename in file_names):
- _log.debug(" " + platform_dir)
- self._scm.delete_list(file_names)
- else:
- _log.debug(" (Nothing to delete)")
-
- file_names = []
- for directory, result in new_results_by_directory.items():
- if results_by_directory.get(directory) != result:
- destination = self._filesystem.join(self._scm.checkout_root, directory, baseline_name)
- self._filesystem.maybe_make_directory(self._filesystem.split(destination)[0])
- self._filesystem.write_binary_file(destination, data_for_result[result])
- file_names.append(destination)
- if file_names:
- _log.debug(" Adding:")
- for platform_dir in sorted(self._platform(filename) for filename in file_names):
- _log.debug(" " + platform_dir)
- self._scm.add_list(file_names)
- else:
- _log.debug(" (Nothing to add)")
-
- def directories_by_result(self, baseline_name):
- results_by_directory = self.read_results_by_directory(baseline_name)
- return _invert_dictionary(results_by_directory)
-
- def write_by_directory(self, results_by_directory, writer, indent):
- for path in sorted(results_by_directory):
- writer("%s%s: %s" % (indent, self._platform(path), results_by_directory[path][0:6]))
-
- def optimize(self, baseline_name):
- basename = self._filesystem.basename(baseline_name)
- results_by_directory, new_results_by_directory = self._find_optimal_result_placement(baseline_name)
- self.new_results_by_directory = new_results_by_directory
- if new_results_by_directory == results_by_directory:
- if new_results_by_directory:
- _log.debug(" %s: (already optimal)" % basename)
- self.write_by_directory(results_by_directory, _log.debug, " ")
- else:
- _log.debug(" %s: (no baselines found)" % basename)
- return True
- if self._filtered_results_by_port_name(results_by_directory) != self._filtered_results_by_port_name(new_results_by_directory):
- _log.warning(" %s: optimization failed" % basename)
- self.write_by_directory(results_by_directory, _log.warning, " ")
- return False
-
- _log.debug(" %s:" % basename)
- _log.debug(" Before: ")
- self.write_by_directory(results_by_directory, _log.debug, " ")
- _log.debug(" After: ")
- self.write_by_directory(new_results_by_directory, _log.debug, " ")
-
- self._move_baselines(baseline_name, results_by_directory, new_results_by_directory)
- return True
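
The file deleted above implemented its heuristic in two steps: invert the per-port results, then re-home each distinct result in the most specific directory shared by every port that produced it (the score() helper sums a directory's position across the ports' fallback paths, so the lowest total is the most specific common directory). Below is a minimal, self-contained sketch of that idea; the fallback paths are invented for illustration rather than taken from the real graph built by _baseline_search_hypergraph() and _VIRTUAL_PORTS.

# Illustrative sketch of the placement heuristic from the deleted
# BaselineOptimizer; the fallback paths here are made up, not the real
# port graph.
from functools import reduce

FALLBACK_PATHS = {
    'test-mac-lion': ['LayoutTests/platform/mac-lion', 'LayoutTests/platform/mac', 'LayoutTests'],
    'test-mac-lion-wk2': ['LayoutTests/platform/mac-lion-wk2', 'LayoutTests/platform/mac-lion',
                          'LayoutTests/platform/mac', 'LayoutTests'],
    'test-gtk': ['LayoutTests/platform/gtk', 'LayoutTests'],
}

def invert_dictionary(dictionary):
    # result -> list of ports that produced it, like _invert_dictionary() above.
    inverted = {}
    for key, value in dictionary.items():
        inverted.setdefault(value, []).append(key)
    return inverted

def most_specific_common_directory(port_names):
    # Same idea as the deleted score() helper: the directory with the lowest
    # summed index across the ports' fallback paths is the most specific one
    # that all of them still reach.
    paths = [FALLBACK_PATHS[name] for name in port_names]
    common = reduce(set.intersection, map(set, paths))
    return min(common, key=lambda directory: sum(path.index(directory) for path in paths))

results_by_port = {'test-mac-lion': 'aaa', 'test-mac-lion-wk2': 'aaa', 'test-gtk': 'bbb'}
for result, ports in invert_dictionary(results_by_port).items():
    print('%s -> %s' % (result, most_specific_common_directory(ports)))
# aaa -> LayoutTests/platform/mac-lion   (shared by both mac ports)
# bbb -> LayoutTests/platform/gtk

When this placement failed to converge, the deleted class fell back to _optimize_by_pushing_results_up, which walks each port's fallback path and hoists identical results toward more generic directories until the directory count stops shrinking.
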
Deleted: trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py (159594 => 159595)
--- trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py 2013-11-21 00:42:51 UTC (rev 159594)
+++ trunk/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py 2013-11-21 00:54:05 UTC (rev 159595)
@@ -1,151 +0,0 @@
-# Copyright (C) 2011 Google Inc. All rights reserved.
-# Copyright (C) 2013 Apple Inc. All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-import sys
-import unittest2 as unittest
-
-from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
-from webkitpy.common.system.filesystem_mock import MockFileSystem
-from webkitpy.common.host_mock import MockHost
-
-
-class TestBaselineOptimizer(BaselineOptimizer):
- def __init__(self, mock_results_by_directory):
- host = MockHost()
- BaselineOptimizer.__init__(self, host, host.port_factory.all_port_names())
- self._mock_results_by_directory = mock_results_by_directory
-
- # We override this method for testing so we don't have to construct an
- # elaborate mock file system.
- def read_results_by_directory(self, baseline_name):
- return self._mock_results_by_directory
-
- def _move_baselines(self, baseline_name, results_by_directory, new_results_by_directory):
- self.new_results_by_directory = new_results_by_directory
-
-
-class BaselineOptimizerTest(unittest.TestCase):
- def _assertOptimization(self, results_by_directory, expected_new_results_by_directory):
- baseline_optimizer = TestBaselineOptimizer(results_by_directory)
- self.assertTrue(baseline_optimizer.optimize('mock-baseline.png'))
- self.assertEqual(baseline_optimizer.new_results_by_directory, expected_new_results_by_directory)
-
- def _assertOptimizationFailed(self, results_by_directory):
- baseline_optimizer = TestBaselineOptimizer(results_by_directory)
- self.assertFalse(baseline_optimizer.optimize('mock-baseline.png'))
-
- def test_move_baselines(self):
- host = MockHost()
- host.filesystem.write_binary_file('/mock-checkout/LayoutTests/platform/mac-lion/another/test-expected.txt', 'result A')
- host.filesystem.write_binary_file('/mock-checkout/LayoutTests/platform/mac-lion-wk2/another/test-expected.txt', 'result A')
- host.filesystem.write_binary_file('/mock-checkout/LayoutTests/platform/mac/another/test-expected.txt', 'result B')
- baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names())
- baseline_optimizer._move_baselines('another/test-expected.txt', {
- 'LayoutTests/platform/mac-lion': 'aaa',
- 'LayoutTests/platform/mac-lion-wk2': 'aaa',
- 'LayoutTests/platform/mac': 'bbb',
- }, {
- 'LayoutTests/platform/mac': 'aaa',
- })
- self.assertEqual(host.filesystem.read_binary_file('/mock-checkout/LayoutTests/platform/mac/another/test-expected.txt'), 'result A')
-
- def test_efl(self):
- self._assertOptimization({
- 'LayoutTests/platform/efl': '462d03b9c025db1b0392d7453310dbee5f9a9e74',
- }, {
- 'LayoutTests/platform/efl': '462d03b9c025db1b0392d7453310dbee5f9a9e74',
- })
-
- def test_no_add_mac_future(self):
- self._assertOptimization({
- 'LayoutTests/platform/mac': '29a1715a6470d5dd9486a142f609708de84cdac8',
- 'LayoutTests/platform/win-xp': '453e67177a75b2e79905154ece0efba6e5bfb65d',
- 'LayoutTests/platform/mac-lion': 'c43eaeb358f49d5e835236ae23b7e49d7f2b089f',
- }, {
- 'LayoutTests/platform/mac': '29a1715a6470d5dd9486a142f609708de84cdac8',
- 'LayoutTests/platform/win-xp': '453e67177a75b2e79905154ece0efba6e5bfb65d',
- 'LayoutTests/platform/mac-lion': 'c43eaeb358f49d5e835236ae23b7e49d7f2b089f',
- })
-
- def test_mac_future(self):
- self._assertOptimization({
- 'LayoutTests/platform/mac-lion': '462d03b9c025db1b0392d7453310dbee5f9a9e74',
- }, {
- 'LayoutTests/platform/mac-lion': '462d03b9c025db1b0392d7453310dbee5f9a9e74',
- })
-
- def test_win_does_not_drop_to_win_7sp0(self):
- self._assertOptimization({
- 'LayoutTests/platform/win': '1',
- 'LayoutTests/platform/mac': '2',
- 'LayoutTests/platform/gtk': '3',
- }, {
- 'LayoutTests/platform/win': '1',
- 'LayoutTests/platform/mac': '2',
- 'LayoutTests/platform/gtk': '3',
- })
-
- def test_common_directory_includes_root(self):
- # This test case checks that we don't throw an exception when we fail
- # to optimize.
- self._assertOptimizationFailed({
- 'LayoutTests/platform/gtk': 'e8608763f6241ddacdd5c1ef1973ba27177d0846',
- 'LayoutTests/platform/mac': 'e8608763f6241ddacdd5c1ef1973ba27177d0846',
- })
-
- self._assertOptimization({
- 'LayoutTests': '9c876f8c3e4cc2aef9519a6c1174eb3432591127',
- }, {
- 'LayoutTests': '9c876f8c3e4cc2aef9519a6c1174eb3432591127',
- })
-
- def test_complex_shadowing(self):
- # This test relies on OS specific functionality, so it doesn't work on Windows.
- # FIXME: What functionality does this rely on? When can we remove this if?
- if sys.platform == 'win32':
- return
- self._assertOptimization({
- 'LayoutTests/platform/mac': '5daa78e55f05d9f0d1bb1f32b0cd1bc3a01e9364',
- 'LayoutTests/platform/mac-lion': '7ad045ece7c030e2283c5d21d9587be22bcba56e',
- 'LayoutTests/platform/win-xp': '5b1253ef4d5094530d5f1bc6cdb95c90b446bec7',
- }, {
- 'LayoutTests/platform/mac': '5daa78e55f05d9f0d1bb1f32b0cd1bc3a01e9364',
- 'LayoutTests/platform/mac-lion': '7ad045ece7c030e2283c5d21d9587be22bcba56e',
- 'LayoutTests/platform/win-xp': '5b1253ef4d5094530d5f1bc6cdb95c90b446bec7',
- })
-
- def test_virtual_ports_filtered(self):
- self._assertOptimization({
- 'LayoutTests/platform/gtk': '3',
- 'LayoutTests/platform/efl': '3',
- 'LayoutTests/platform/mac': '4',
- }, {
- 'LayoutTests': '3',
- 'LayoutTests/platform/mac': '4',
- })
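
Two details of the deleted tests are easy to miss: TestBaselineOptimizer overrides read_results_by_directory() so no elaborate mock file system is needed, and the short hex strings it returns stand in for the SHA-1 digests the real method computes from baseline files (see read_results_by_directory in the deleted baselineoptimizer.py), so equal strings mean byte-identical baselines. A small illustration of that identity, using the mock contents from test_move_baselines:

# The optimizer identifies a baseline's "result" by the SHA-1 of its bytes,
# so the hex strings in the deleted tests are stand-ins for digests like these.
import hashlib

def baseline_digest(data):
    return hashlib.sha1(data).hexdigest()

print(baseline_digest(b'result A'))  # identical wherever 'result A' is checked in
print(baseline_digest(b'result B'))  # a different digest, so it cannot be folded together
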
Modified: trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline.py (159594 => 159595)
--- trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline.py 2013-11-21 00:42:51 UTC (rev 159594)
+++ trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline.py 2013-11-21 00:54:05 UTC (rev 159595)
@@ -31,7 +31,6 @@
import optparse
import sys
-from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.system.executive import ScriptError
from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
from webkitpy.layout_tests.models import test_failures
@@ -197,74 +196,6 @@
print json.dumps(self._scm_changes)
-class OptimizeBaselines(AbstractRebaseliningCommand):
- name = "optimize-baselines"
- help_text = "Reshuffles the baselines for the given tests to use as litte space on disk as possible."
- argument_names = "TEST_NAMES"
-
- def __init__(self):
- super(OptimizeBaselines, self).__init__(options=[self.suffixes_option] + self.platform_options)
-
- def _optimize_baseline(self, optimizer, test_name):
- for suffix in self._baseline_suffix_list:
- baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
- if not optimizer.optimize(baseline_name):
- print "Heuristics failed to optimize %s" % baseline_name
-
- def execute(self, options, args, tool):
- self._baseline_suffix_list = options.suffixes.split(',')
- port_names = tool.port_factory.all_port_names(options.platform)
- if not port_names:
- print "No port names match '%s'" % options.platform
- return
-
- optimizer = BaselineOptimizer(tool, port_names)
- port = tool.port_factory.get(port_names[0])
- for test_name in port.tests(args):
- _log.info("Optimizing %s" % test_name)
- self._optimize_baseline(optimizer, test_name)
-
-
-class AnalyzeBaselines(AbstractRebaseliningCommand):
- name = "analyze-baselines"
- help_text = "Analyzes the baselines for the given tests and prints results that are identical."
- argument_names = "TEST_NAMES"
-
- def __init__(self):
- super(AnalyzeBaselines, self).__init__(options=[
- self.suffixes_option,
- optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'),
- ] + self.platform_options)
- self._optimizer_class = BaselineOptimizer # overridable for testing
- self._baseline_optimizer = None
- self._port = None
-
- def _write(self, msg):
- print msg
-
- def _analyze_baseline(self, options, test_name):
- for suffix in self._baseline_suffix_list:
- baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix)
- results_by_directory = self._baseline_optimizer.read_results_by_directory(baseline_name)
- if results_by_directory:
- self._write("%s:" % baseline_name)
- self._baseline_optimizer.write_by_directory(results_by_directory, self._write, " ")
- elif options.missing:
- self._write("%s: (no baselines found)" % baseline_name)
-
- def execute(self, options, args, tool):
- self._baseline_suffix_list = options.suffixes.split(',')
- port_names = tool.port_factory.all_port_names(options.platform)
- if not port_names:
- print "No port names match '%s'" % options.platform
- return
-
- self._baseline_optimizer = self._optimizer_class(tool, port_names)
- self._port = tool.port_factory.get(port_names[0])
- for test_name in self._port.tests(args):
- self._analyze_baseline(options, test_name)
-
-
class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand):
# not overriding execute() - pylint: disable=W0223
@@ -336,15 +267,6 @@
return list(files_to_add)
- def _optimize_baselines(self, test_list, verbose=False):
- # We don't run this in parallel because modifying the SCM in parallel is unreliable.
- for test in test_list:
- all_suffixes = set()
- for builder in self._builders_to_fetch_from(test_list[test]):
- all_suffixes.update(test_list[test][builder])
- # FIXME: We should propagate the platform options as well.
- self._run_webkit_patch(['optimize-baselines', '--suffixes', ','.join(all_suffixes), test], verbose)
-
def _rebaseline(self, options, test_list):
for test, builders_to_check in sorted(test_list.items()):
_log.info("Rebaselining %s" % test)
@@ -363,10 +285,7 @@
if files_to_add:
self._tool.scm().add_list(list(files_to_add))
- if options.optimize:
- self._optimize_baselines(test_list, options.verbose)
-
class RebaselineJson(AbstractParallelRebaselineCommand):
name = "rebaseline-json"
help_text = "Rebaseline based off JSON passed to stdin. Intended to only be called from other scripts."
Modified: trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline_unittest.py (159594 => 159595)
--- trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline_unittest.py 2013-11-21 00:42:51 UTC (rev 159594)
+++ trunk/Tools/Scripts/webkitpy/tool/commands/rebaseline_unittest.py 2013-11-21 00:54:05 UTC (rev 159595)
@@ -30,7 +30,6 @@
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
-from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.net.buildbot.buildbot_mock import MockBuilder
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.thirdparty.mock import Mock
@@ -248,29 +247,10 @@
options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=False, results_directory=None)
self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder": ["txt", "png"]}})
- # Note that we have one run_in_parallel() call followed by a run_command()
- self.assertEqual(self.tool.executive.calls,
- [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'user-scripts/another-test.html', '--verbose']],
- ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
-
def test_rebaseline_debug(self):
options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=False, results_directory=None)
self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})
- # Note that we have one run_in_parallel() call followed by a run_command()
- self.assertEqual(self.tool.executive.calls,
- [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder (Debug)', '--test', 'user-scripts/another-test.html', '--verbose']],
- ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
-
- def test_move_overwritten(self):
- options = MockOptions(optimize=True, verbose=True, move_overwritten_baselines=True, results_directory=None)
- self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder": ["txt", "png"]}})
-
- # Note that we have one run_in_parallel() call followed by a run_command()
- self.assertEqual(self.tool.executive.calls,
- [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png', '--builder', 'MOCK builder', '--test', 'user-scripts/another-test.html', '--move-overwritten-baselines-to', 'test-mac-leopard', '--verbose']],
- ['echo', '--verbose', 'optimize-baselines', '--suffixes', 'txt,png', 'user-scripts/another-test.html']])
-
def test_no_optimize(self):
options = MockOptions(optimize=False, verbose=True, move_overwritten_baselines=False, results_directory=None)
self.command._rebaseline(options, {"user-scripts/another-test.html": {"MOCK builder (Debug)": ["txt", "png"]}})
@@ -367,38 +347,3 @@
self.assertDictEqual(self.command._tests_to_rebaseline(self.lion_port), {'userscripts/another-test.html': set(['png', 'txt', 'wav'])})
self.assertEqual(self._read(self.lion_expectations_path), '')
-
-
-class _FakeOptimizer(BaselineOptimizer):
- def read_results_by_directory(self, baseline_name):
- if baseline_name.endswith('txt'):
- return {'LayoutTests/passes/text.html': '123456',
- 'LayoutTests/platform/test-mac-leopard/passes/text.html': 'abcdef'}
- return {}
-
-
-class TestAnalyzeBaselines(_BaseTestCase):
- command_constructor = AnalyzeBaselines
-
- def setUp(self):
- super(TestAnalyzeBaselines, self).setUp()
- self.port = self.tool.port_factory.get('test')
- self.tool.port_factory.get = (lambda port_name=None, options=None: self.port)
- self.lines = []
- self.command._optimizer_class = _FakeOptimizer
- self.command._write = (lambda msg: self.lines.append(msg)) # pylint bug warning about unnecessary lambda? pylint: disable=W0108
-
- def test_default(self):
- self.command.execute(MockOptions(suffixes='txt', missing=False, platform=None), ['passes/text.html'], self.tool)
- self.assertEqual(self.lines,
- ['passes/text-expected.txt:',
- ' (generic): 123456',
- ' test-mac-leopard: abcdef'])
-
- def test_missing_baselines(self):
- self.command.execute(MockOptions(suffixes='png,txt', missing=True, platform=None), ['passes/text.html'], self.tool)
- self.assertEqual(self.lines,
- ['passes/text-expected.png: (no baselines found)',
- 'passes/text-expected.txt:',
- ' (generic): 123456',
- ' test-mac-leopard: abcdef'])
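
In the assertions deleted above, tool.executive.calls records one entry per executive call: a run_in_parallel() batch appears as a nested list of command lists, while a follow-up run_command() appears as a single flat command list, which is why each expected value was a two-element list with different nesting (the "Note that we have one run_in_parallel() call followed by a run_command()" comments say the same thing). A toy sketch of that bookkeeping, not webkitpy's real MockExecutive:

# Toy bookkeeping mirroring the shape of tool.executive.calls in the removed
# assertions; this is an illustration, not the real MockExecutive class.
calls = []

def run_in_parallel(commands):
    calls.append(commands)   # a batch: a list of command lists -> one nested entry

def run_command(command):
    calls.append(command)    # a single command list -> one flat entry

run_in_parallel([['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png']])
run_command(['echo', 'optimize-baselines', '--suffixes', 'txt,png'])

print(calls)
# [[['echo', 'rebaseline-test-internal', '--suffixes', 'txt,png']],
#  ['echo', 'optimize-baselines', '--suffixes', 'txt,png']]
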