This makes env a class attribute of Test. Test and all of its subclasses can then use env without it being threaded through every call, which removes a lot of argument passing and opens the door to further code cleanups and refactors.
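
For illustration only, here is a minimal, self-contained sketch of the pattern (the Environment stand-in and its fields are simplified assumptions, not the actual framework.core.Environment):

class Environment(object):
    # Stand-in for framework.core.Environment; fields are assumed/simplified.
    def __init__(self, execute=True, valgrind=False, verbose=False):
        self.execute = execute
        self.valgrind = valgrind
        self.verbose = verbose


class Test(object):
    # Shared by the class and every subclass; set once before tests run.
    ENV = Environment()

    def run(self):
        # No env parameter needed; the class attribute is always reachable.
        return 'valgrind' if self.ENV.valgrind else 'normal'


class IGTTest(Test):
    pass


# The profile rebinds the attribute once on the base class...
Test.ENV = Environment(valgrind=True)

# ...and every subclass instance observes the same object.
assert IGTTest().run() == 'valgrind'

The trade-off is that ENV becomes shared, mutable state: whatever sets Test.ENV has to do so before any test executes.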
Signed-off-by: Dylan Baker <[email protected]>
---
 framework/exectest.py              | 18 ++++++++++--------
 framework/profile.py               |  7 ++++---
 framework/tests/dmesg_tests.py     | 10 ++++------
 framework/tests/gleantest_tests.py |  3 +--
 tests/igt.py                       |  4 ++--
 5 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/framework/exectest.py b/framework/exectest.py
index 0d5b026..b76fcd9 100644
--- a/framework/exectest.py
+++ b/framework/exectest.py
@@ -28,7 +28,7 @@ import time
 import sys
 import traceback
 
-from .core import TestResult
+from .core import TestResult, Environment
 
 
 __all__ = ['Test',
@@ -49,6 +49,8 @@ else:
 
 
 class Test(object):
+    ENV = Environment()
+
     def __init__(self, command, run_concurrent=False):
         '''
             'run_concurrent' controls whether this test will
@@ -63,7 +65,7 @@ class Test(object):
         # self.run is called.
         self._test_hook_execute_run = lambda: None
 
-    def execute(self, env, path, log, json_writer, dmesg):
+    def execute(self, path, log, json_writer, dmesg):
         '''
         Run the test.
 
@@ -71,15 +73,15 @@ class Test(object):
             Fully qualified test name as a string.  For example,
             ``spec/glsl-1.30/preprocessor/compiler/keywords/void.frag``.
         '''
-        log_current = log.pre_log(path if env.verbose else None)
+        log_current = log.pre_log(path if self.ENV.verbose else None)
 
         # Run the test
-        if env.execute:
+        if self.ENV.execute:
             try:
                 time_start = time.time()
                 dmesg.update_dmesg()
                 self._test_hook_execute_run()
-                result = self.run(env)
+                result = self.run()
                 result = dmesg.update_result(result)
                 time_end = time.time()
                 if 'time' not in result:
@@ -127,7 +129,7 @@ class Test(object):
             raise NotImplementedError
         return out
 
-    def run(self, env):
+    def run(self):
         """
         Run a test.  The return value will be a dictionary with keys
         including 'result', 'info', 'returncode' and 'command'.
@@ -143,7 +145,7 @@ class Test(object):
 
         command = self.command
 
-        if env.valgrind:
+        if self.ENV.valgrind:
             command[:0] = ['valgrind', '--quiet', '--error-exitcode=1',
                            '--tool=memcheck']
 
@@ -195,7 +197,7 @@ class Test(object):
         elif returncode != 0:
             results['note'] = 'Returncode was {0}'.format(returncode)
 
-        if env.valgrind:
+        if self.ENV.valgrind:
             # If the underlying test failed, simply report
             # 'skip' for this valgrind test.
             if results['result'] != 'pass':
diff --git a/framework/profile.py b/framework/profile.py
index b76907c..2e160e3 100644
--- a/framework/profile.py
+++ b/framework/profile.py
@@ -35,8 +35,7 @@ import importlib
 from framework.dmesg import get_dmesg
 from framework.log import Log
-
-
+import framework.exectest
 
 
 class TestProfile(object):
     def __init__(self):
@@ -115,6 +114,8 @@ class TestProfile(object):
         See ``Test.schedule`` and ``Test.run``.
         '''
+        framework.exectest.Test.ENV = env
+
 
         self.prepare_test_list(env)
         log = Log(len(self.test_list), env.verbose)
 
@@ -125,7 +126,7 @@ class TestProfile(object):
             """
             name, test = pair
-            test.execute(env, name, log, json_writer, self.dmesg)
+            test.execute(name, log, json_writer, self.dmesg)
 
         # Multiprocessing.dummy is a wrapper around Threading that provides a
         # multiprocessing compatible API
 
diff --git a/framework/tests/dmesg_tests.py b/framework/tests/dmesg_tests.py
index f87bdd5..f713320 100644
--- a/framework/tests/dmesg_tests.py
+++ b/framework/tests/dmesg_tests.py
@@ -26,7 +26,7 @@ import subprocess
 import nose.tools as nt
 from nose.plugins.skip import SkipTest
 from framework.dmesg import DummyDmesg, LinuxDmesg, get_dmesg, DmesgError
-from framework.core import TestResult, PiglitJSONEncoder, Environment
+from framework.core import TestResult, PiglitJSONEncoder
 from framework.exectest import PiglitTest
 from framework.gleantest import GleanTest
 from framework.shader_test import ShaderTest
@@ -229,8 +229,6 @@ def test_json_serialize_updated_result():
 
 def test_testclasses_dmesg():
     """ Generator that creates tests for """
-    env = Environment()
-
     lists = [(PiglitTest, ['attribs', '-auto', '-fbo'], 'PiglitTest'),
              (GleanTest, 'basic', "GleanTest"),
              (ShaderTest, 'tests/shaders/loopfunc.shader_test',
@@ -242,10 +240,10 @@ def test_testclasses_dmesg():
 
     for tclass, tfile, desc in lists:
         yieldable.description = "Test dmesg in {}".format(desc)
-        yield yieldable, tclass, tfile, env
+        yield yieldable, tclass, tfile
 
 
-def check_classes_dmesg(test_class, test_args, env):
+def check_classes_dmesg(test_class, test_args):
     """ Do the actual check on the provided test class for dmesg """
     if not os.path.exists('bin'):
         raise SkipTest("This tests requires a working, built version of "
@@ -284,7 +282,7 @@ def check_classes_dmesg(test_class, test_args, env):
 
     json = DummyJsonWriter()
 
-    test.execute(env, None, DummyLog(), json, dmesg)
+    test.execute(None, DummyLog(), json, dmesg)
 
     nt.assert_in(json.result['result'], ['dmesg-warn', 'dmesg-fail'],
                  msg="{0} did not update status with dmesg".format(type(test)))
diff --git a/framework/tests/gleantest_tests.py b/framework/tests/gleantest_tests.py
index 784a7b1..848b835 100644
--- a/framework/tests/gleantest_tests.py
+++ b/framework/tests/gleantest_tests.py
@@ -23,7 +23,6 @@ from __future__ import print_function
 
 import os
 from nose.plugins.skip import SkipTest
-from framework.core import Environment
 from framework.gleantest import GleanTest
 
 
@@ -68,6 +67,6 @@ def test_bad_returncode():
 
     os.environ = {}
     test = GleanTest('basic')
-    result = test.run(Environment())
+    result = test.run()
     print("result: {result}\nreturncode: {returncode}".format(**result))
     assert result['result'] == 'fail', "Result should have been fail"
diff --git a/tests/igt.py b/tests/igt.py
index 1051ec8..1053791 100644
--- a/tests/igt.py
+++ b/tests/igt.py
@@ -88,7 +88,7 @@ class IGTTest(Test):
             results['result'] = 'fail'
         return out
 
-    def run(self, env):
+    def run(self):
         if not igtEnvironmentOk:
             results = TestResult()
             results['result'] = 'fail'
@@ -96,7 +96,7 @@ class IGTTest(Test):
             return results
 
-        return super(IGTTest, self).run(env)
+        return super(IGTTest, self).run()
 
 
 def listTests(listname):
     oldDir = os.getcwd()
-- 
1.9.2
