Revision: 18062
Author: [email protected]
Date: Mon Nov 25 17:34:52 2013 UTC
Log: Make test runner more flexible for running fast tests.
Transformed variant flags into named variants. Now, all combinations of
variants can be specified on the command line. The old command-line flags
are kept for backwards compatibility on the bots.
Added two new test groups: slow and pass|fail. Both are implemented similarly
to the flaky-test feature and allow either skipping or running tests marked as
slow or as pass|fail.
[email protected]
Review URL: https://codereview.chromium.org/85733003
http://code.google.com/p/v8/source/detail?r=18062
Modified:
/branches/bleeding_edge/test/mozilla/mozilla.status
/branches/bleeding_edge/tools/run-tests.py
/branches/bleeding_edge/tools/testrunner/local/statusfile.py
/branches/bleeding_edge/tools/testrunner/local/testsuite.py
=======================================
--- /branches/bleeding_edge/test/mozilla/mozilla.status Wed Nov 20 15:04:37 2013 UTC
+++ /branches/bleeding_edge/test/mozilla/mozilla.status Mon Nov 25 17:34:52 2013 UTC
@@ -81,23 +81,23 @@
# This takes a long time to run (~100 seconds). It should only be run
# by the really patient.
- 'js1_5/GC/regress-324278': [SLOW],
+ 'js1_5/GC/regress-324278': [SKIP],
# This takes a long time to run because our indexOf operation is
# pretty slow - it causes a lot of GCs; see issue
# #926379. We could consider marking this SKIP because it takes a
# while to run to completion.
- 'js1_5/GC/regress-338653': [SLOW],
+ 'js1_5/GC/regress-338653': [SKIP],
# This test is designed to run until it runs out of memory. This takes
# a very long time because it builds strings character by character
# and compiles a lot of regular expressions. We could consider marking
# this SKIP because it takes a while to run to completion.
- 'js1_5/GC/regress-346794': [SLOW],
+ 'js1_5/GC/regress-346794': [SKIP],
# Runs out of memory while trying to build huge string of 'x'
# characters. This takes a long time to run (~32 seconds).
- 'js1_5/GC/regress-348532': [SLOW],
+ 'js1_5/GC/regress-348532': [SKIP],
##################### FLAKY TESTS #####################
=======================================
--- /branches/bleeding_edge/tools/run-tests.py Thu Oct 31 10:14:02 2013 UTC
+++ /branches/bleeding_edge/tools/run-tests.py Mon Nov 25 17:34:52 2013 UTC
@@ -53,9 +53,13 @@
"release" : 1 }
# Use this to run several variants of the tests.
-VARIANT_FLAGS = [[],
- ["--stress-opt", "--always-opt"],
- ["--nocrankshaft"]]
+VARIANT_FLAGS = {
+ "default": [],
+ "stress": ["--stress-opt", "--always-opt"],
+ "nocrankshaft": ["--nocrankshaft"]}
+
+VARIANTS = ["default", "stress", "nocrankshaft"]
+
MODE_FLAGS = {
"debug" : ["--nobreak-on-abort", "--nodead-code-elimination",
"--nofold-constants", "--enable-slow-asserts",
@@ -97,6 +101,12 @@
result.add_option("--flaky-tests",
help="Regard tests marked as flaky (run|skip|dontcare)",
default="dontcare")
+ result.add_option("--slow-tests",
+ help="Regard slow tests (run|skip|dontcare)",
+ default="dontcare")
+ result.add_option("--pass-fail-tests",
+ help="Regard pass|fail tests (run|skip|dontcare)",
+ default="dontcare")
result.add_option("--command-prefix",
help="Prepended to each shell command used to run a test",
default="")
@@ -128,6 +138,8 @@
result.add_option("--no-variants", "--novariants",
help="Don't run any testing variants",
default=False, dest="no_variants", action="store_true")
+ result.add_option("--variants",
+ help="Comma-separated list of testing variants")
result.add_option("--outdir", help="Base directory with compile output",
default="out")
result.add_option("-p", "--progress",
@@ -167,6 +179,7 @@
def ProcessOptions(options):
global VARIANT_FLAGS
+ global VARIANTS
# Architecture and mode related stuff.
if options.arch_and_mode:
@@ -205,26 +218,41 @@
"""Returns true if zero or one of multiple arguments are true."""
return reduce(lambda x, y: x + y, args) <= 1
- if not excl(options.no_stress, options.stress_only, options.no_variants):
- print "Use only one of --no-stress, --stress-only or --no-variants."
+ if not excl(options.no_stress, options.stress_only, options.no_variants,
+ bool(options.variants)):
+ print("Use only one of --no-stress, --stress-only, --no-variants or "
+ "--variants.")
return False
if options.no_stress:
- VARIANT_FLAGS = [[], ["--nocrankshaft"]]
+ VARIANTS = ["default", "nocrankshaft"]
if options.no_variants:
- VARIANT_FLAGS = [[]]
+ VARIANTS = ["default"]
+ if options.stress_only:
+ VARIANTS = ["stress"]
+ if options.variants:
+ VARIANTS = options.variants.split(",")
+ if not set(VARIANTS).issubset(VARIANT_FLAGS.keys()):
+ print "All variants must be in %s" % str(VARIANT_FLAGS.keys())
+ return False
if not options.shell_dir:
if options.shell:
print "Warning: --shell is deprecated, use --shell-dir instead."
options.shell_dir = os.path.dirname(options.shell)
- if options.stress_only:
- VARIANT_FLAGS = [["--stress-opt", "--always-opt"]]
if options.valgrind:
run_valgrind = os.path.join("tools", "run-valgrind.py")
# This is OK for distributed running, so we don't need to set no_network.
options.command_prefix = (["python", "-u", run_valgrind] +
options.command_prefix)
- if not options.flaky_tests in ["run", "skip", "dontcare"]:
- print "Unknown flaky test mode %s" % options.flaky_tests
+ def CheckTestMode(name, option):
+ if not option in ["run", "skip", "dontcare"]:
+ print "Unknown %s mode %s" % (name, option)
+ return False
+ return True
+ if not CheckTestMode("flaky test", options.flaky_tests):
+ return False
+ if not CheckTestMode("slow test", options.slow_tests):
+ return False
+ if not CheckTestMode("pass|fail test", options.pass_fail_tests):
return False
if not options.no_i18n:
DEFAULT_TESTS.append("intl")
@@ -341,13 +369,15 @@
if len(args) > 0:
s.FilterTestCasesByArgs(args)
all_tests += s.tests
- s.FilterTestCasesByStatus(options.warn_unused, options.flaky_tests)
+ s.FilterTestCasesByStatus(options.warn_unused, options.flaky_tests,
+ options.slow_tests, options.pass_fail_tests)
if options.cat:
verbose.PrintTestSource(s.tests)
continue
+ variant_flags = [VARIANT_FLAGS[var] for var in VARIANTS]
s.tests = [ t.CopyAddingFlags(v)
for t in s.tests
- for v in s.VariantFlags(t, VARIANT_FLAGS) ]
+ for v in s.VariantFlags(t, variant_flags) ]
s.tests = ShardTests(s.tests, options.shard_count, options.shard_run)
num_tests += len(s.tests)
for t in s.tests:
=======================================
--- /branches/bleeding_edge/tools/testrunner/local/statusfile.py Thu Oct 31 11:48:31 2013 UTC
+++ /branches/bleeding_edge/tools/testrunner/local/statusfile.py Mon Nov 25 17:34:52 2013 UTC
@@ -59,7 +59,11 @@
def DoSkip(outcomes):
- return SKIP in outcomes or SLOW in outcomes
+ return SKIP in outcomes
+
+
+def IsSlow(outcomes):
+ return SLOW in outcomes
def OnlyStandardVariant(outcomes):
=======================================
--- /branches/bleeding_edge/tools/testrunner/local/testsuite.py Thu Oct 17 13:09:28 2013 UTC
+++ /branches/bleeding_edge/tools/testrunner/local/testsuite.py Mon Nov 25 17:34:52 2013 UTC
@@ -93,11 +93,24 @@
def _FilterFlaky(flaky, mode):
return (mode == "run" and not flaky) or (mode == "skip" and flaky)
- def FilterTestCasesByStatus(self, warn_unused_rules, flaky_tests="dontcare"):
+ @staticmethod
+ def _FilterSlow(slow, mode):
+ return (mode == "run" and not slow) or (mode == "skip" and slow)
+
+ @staticmethod
+ def _FilterPassFail(pass_fail, mode):
+ return (mode == "run" and not pass_fail) or (mode == "skip" and pass_fail)
+
+ def FilterTestCasesByStatus(self, warn_unused_rules,
+ flaky_tests="dontcare",
+ slow_tests="dontcare",
+ pass_fail_tests="dontcare"):
filtered = []
used_rules = set()
for t in self.tests:
flaky = False
+ slow = False
+ pass_fail = False
testname = self.CommonTestName(t)
if testname in self.rules:
used_rules.add(testname)
@@ -107,6 +120,8 @@
if statusfile.DoSkip(t.outcomes):
continue # Don't add skipped tests to |filtered|.
flaky = statusfile.IsFlaky(t.outcomes)
+ slow = statusfile.IsSlow(t.outcomes)
+ pass_fail = statusfile.IsPassOrFail(t.outcomes)
skip = False
for rule in self.wildcards:
assert rule[-1] == '*'
@@ -117,7 +132,11 @@
skip = True
break # "for rule in self.wildcards"
flaky = flaky or statusfile.IsFlaky(t.outcomes)
- if skip or self._FilterFlaky(flaky, flaky_tests):
+ slow = slow or statusfile.IsSlow(t.outcomes)
+ pass_fail = pass_fail or statusfile.IsPassOrFail(t.outcomes)
+ if (skip or self._FilterFlaky(flaky, flaky_tests)
+ or self._FilterSlow(slow, slow_tests)
+ or self._FilterPassFail(pass_fail, pass_fail_tests)):
continue # "for t in self.tests"
filtered.append(t)
self.tests = filtered
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/groups/opt_out.