Allow additional parameters to be specified with test plans. Also wrap the control file for each test in the test plan, so that the Test Planner can prefix each intended test with a verify_test, along with any site-specific prefixes that may be required.
Allowing additional parameters gives the users the option to directly specify the parameters for the verify_test, along with what site-specific prefixes, if any, to attach. Signed-off-by: James Ren <[email protected]> --- /dev/null 2009-12-17 12:29:38.000000000 -0800 +++ autotest/frontend/migrations/060_add_planner_additional_parameters.py 2010-05-04 10:33:35.000000000 -0700 @@ -0,0 +1,47 @@ +UP_SQL = """ +CREATE TABLE planner_additional_parameters ( + id INT PRIMARY KEY AUTO_INCREMENT, + plan_id INT NOT NULL, + hostname_regex VARCHAR(255) NOT NULL, + param_type VARCHAR(32) NOT NULL, + application_order INT NOT NULL +) ENGINE = InnoDB; + +ALTER TABLE planner_additional_parameters +ADD CONSTRAINT planner_additional_parameters_plan_ibfk +FOREIGN KEY (plan_id) REFERENCES planner_plans (id); + +ALTER TABLE planner_additional_parameters +ADD CONSTRAINT planner_additional_parameters_unique +UNIQUE KEY (plan_id, hostname_regex, param_type); + + +CREATE TABLE planner_additional_parameter_values ( + id INT PRIMARY KEY AUTO_INCREMENT, + additional_parameter_id INT NOT NULL, + `key` VARCHAR(255) NOT NULL, + value VARCHAR(255) NOT NULL +) ENGINE = InnoDB; + +ALTER TABLE planner_additional_parameter_values +ADD CONSTRAINT planner_additional_parameter_values_additional_parameter_ibfk +FOREIGN KEY (additional_parameter_id) + REFERENCES planner_additional_parameters (id); + +ALTER TABLE planner_additional_parameter_values +ADD CONSTRAINT planner_additional_parameter_values_unique +UNIQUE KEY (additional_parameter_id, `key`); +""" + +DOWN_SQL = """ +ALTER TABLE planner_additional_parameter_values +DROP FOREIGN KEY planner_additional_parameter_values_additional_parameter_ibfk; + +DROP TABLE planner_additional_parameter_values; + + +ALTER TABLE planner_additional_parameters +DROP FOREIGN KEY planner_additional_parameters_plan_ibfk; + +DROP TABLE planner_additional_parameters; +""" --- /dev/null 2009-12-17 12:29:38.000000000 -0800 +++ autotest/frontend/planner/control_file.py 
2010-05-04 10:33:35.000000000 -0700 @@ -0,0 +1,98 @@ +import base64 +from autotest_lib.client.common_lib import utils + + +VERIFY_TEST_SEGMENT = """\ +###################################################### +### Run the verify test +###################################################### + +def run(machine): + host = hosts.create_host(machine, initialize=False) + host.log_kernel() + ret = job.run_test('verify_test', host=host, %(verify_args)s) + if not ret: + raise JobError("Verify test failed; aborting job") + +job.parallel_simple(run, machines) + +""" + +CLIENT_SEGMENT = """\ +###################################################### +### Run the client-side control file +###################################################### + +# The following is encoded in base64 in the variable control, below: +# +%(control_comment)s +# +import base64 +control = base64.decodestring(%(control_base64)r) + +def run(machine): + host = hosts.create_host(machine) + at = autotest.Autotest() + at.run(control, host=host) + +job.parallel_simple(run, machines) +""" + + +SERVER_SEGMENT = """\ +###################################################### +### Run the server side control file +###################################################### + +%(control_raw)s +""" + +def _generate_additional_segments_dummy(**kwargs): + return '' + + +def wrap_control_file(control_file, is_server, skip_verify, + verify_params=None, **kwargs): + """ + Wraps a control file for use with Test Planner + """ + wrapped = '' + + if not skip_verify: + prepared_args = prepare_args(verify_params) + wrapped += apply_string_arguments(VERIFY_TEST_SEGMENT, + verify_args=prepared_args) + + site_generate_additional_segments = utils.import_site_function( + __file__, 'autotest_lib.frontend.planner.site_control_file', + 'generate_additional_segments', _generate_additional_segments_dummy) + wrapped += site_generate_additional_segments(**kwargs) + + if is_server: + wrapped += apply_string_arguments(SERVER_SEGMENT, + 
control_raw=control_file) + else: + control_base64 = base64.encodestring(control_file) + control_comment = '\n'.join('# ' + l for l in control_file.split('\n')) + wrapped += apply_string_arguments(CLIENT_SEGMENT, + control_base64=control_base64, + control_comment=control_comment) + + return wrapped + + +def prepare_args(args_dict): + if not args_dict: + return '' + + args = [] + for k, v in args_dict.iteritems(): + args.append("%s=%s" % (k, v)) + return ', '.join(args) + + +def apply_string_arguments(source, **kwargs): + """ + Separate method to facilitate unit testing + """ + return source % kwargs --- /dev/null 2009-12-17 12:29:38.000000000 -0800 +++ autotest/frontend/planner/control_file_unittest.py 2010-05-04 10:33:35.000000000 -0700 @@ -0,0 +1,73 @@ +#!/usr/bin/python + +import unittest, base64 +import common +from autotest_lib.frontend.planner import control_file +from autotest_lib.client.common_lib.test_utils import mock + + +class ControlFileUnittest(unittest.TestCase): + def setUp(self): + self.god = mock.mock_god() + + + def tearDown(self): + self.god.unstub_all() + + + def _test_wrap_control_file_helper(self): + self.verify_params = object() + self.control = 'control' + self.verify_segment = '|verify_segment|' + prepared_verify_args = 'prepared_verify_args' + + self.god.stub_function(control_file, 'prepare_args') + self.god.stub_function(control_file, 'apply_string_arguments') + control_file.prepare_args.expect_call( + self.verify_params).and_return(prepared_verify_args) + control_file.apply_string_arguments.expect_call( + control_file.VERIFY_TEST_SEGMENT, + verify_args=prepared_verify_args).and_return( + self.verify_segment) + + + def test_wrap_control_file_client(self): + self._test_wrap_control_file_helper() + control_base64 = 'control_base64' + control_segment = '|control_segment|' + + self.god.stub_function(base64, 'encodestring') + base64.encodestring.expect_call(self.control).and_return(control_base64) + 
control_file.apply_string_arguments.expect_call( + control_file.CLIENT_SEGMENT, control_base64=control_base64, + control_comment=mock.is_string_comparator()).and_return( + control_segment) + + result = control_file.wrap_control_file(control_file=self.control, + is_server=False, + skip_verify=False, + verify_params=self.verify_params) + + self.assertEqual(result, self.verify_segment + control_segment) + self.god.check_playback() + + + def test_wrap_control_file_server(self): + self._test_wrap_control_file_helper() + control_segment = '|control_segment|' + + control_file.apply_string_arguments.expect_call( + control_file.SERVER_SEGMENT, + control_raw=self.control).and_return(control_segment) + + result = control_file.wrap_control_file(control_file=self.control, + is_server=True, + skip_verify=False, + verify_params=self.verify_params) + + self.assertEqual(result, self.verify_segment + control_segment) + self.god.check_playback() + + +if __name__ == '__main__': + unittest.main() --- autotest/frontend/planner/execution_engine.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/execution_engine.py 2010-05-04 10:33:35.000000000 -0700 @@ -202,15 +202,20 @@ def _run_job(self, hostname, test_config_id, cleanup_before_job, cleanup_after_job, run_verify): - test_config = self._planner_rpc.run('get_test_config', - id=test_config_id) + if run_verify is None: + run_verify = True + + test_config = self._planner_rpc.run('get_wrapped_test_config', + id=test_config_id, + hostname=hostname, + run_verify=run_verify) info = self._afe_rest.execution_info.get().execution_info - info['control_file'] = test_config['control_file']['contents'] - info['is_server'] = test_config['is_server'] + info['control_file'] = test_config['wrapped_control_file'] + info['is_server'] = True info['cleanup_before_job'] = cleanup_before_job info['cleanup_after_job'] = cleanup_after_job - info['run_verify'] = run_verify + info['run_verify'] = False atomic_group_class = 
self._afe_rest.labels.get( name=self._label_name).members[0].get().atomic_group_class.href --- autotest/frontend/planner/model_attributes.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/model_attributes.py 2010-05-04 10:33:35.000000000 -0700 @@ -1,5 +1,5 @@ import common -from autotest_lib.client.common_lib import enum +from autotest_lib.client.common_lib import enum, utils # common enums for Host attributes @@ -13,3 +13,15 @@ # common enums for SavedObject attributes SavedObjectType = enum.Enum('support', 'triage', 'autoprocess', 'custom_query', string_values=True) + + +# common enums for AdditionalParameter attributes +def _site_additional_parameter_types_dummy(): + return [] +_site_additional_parameter_types = utils.import_site_function( + __file__, 'autotest_lib.frontend.planner.site_model_attributes', + 'site_additional_parameter_types', + _site_additional_parameter_types_dummy) +AdditionalParameterType = enum.Enum( + string_values=True, + *(_site_additional_parameter_types() + ['Verify'])) --- autotest/frontend/planner/models.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/models.py 2010-05-04 10:33:35.000000000 -0700 @@ -1,3 +1,4 @@ +import re from django.db import models as dbmodels import common from autotest_lib.frontend.afe import models as afe_models @@ -131,7 +132,7 @@ Required: alias: The name to give this test within the plan. 
Unique with plan id - test_control_file: The control file to run + control_file: The control file to run is_server: True if this control file is a server-side test execution_order: An integer describing when this test should be run in the test plan @@ -394,3 +395,69 @@ def _get_details_unicode(self): return 'Autoprocessing condition: %s' % self.condition + + +class AdditionalParameter(ModelWithPlan): + """ + Allows parameters to be passed to the execution engine for test configs + + If this object matches a hostname by regex, it will apply the associated + parameters at their applicable locations. + + Required: + hostname_regex: A regular expression, for matching on the hostname + param_type: Currently only 'Verify' (and site-specific values) allowed + application_order: The order in which to apply this parameter. + Parameters are attempted in the order specified here, + and stop when the first match is found + """ + hostname_regex = dbmodels.CharField(max_length=255) + param_type = dbmodels.CharField( + max_length=32, + choices=model_attributes.AdditionalParameterType.choices()) + application_order = dbmodels.IntegerField(blank=True) + + class Meta: + db_table = 'planner_additional_parameters' + unique_together = ('plan', 'hostname_regex', 'param_type') + + + @classmethod + def find_applicable_additional_parameter(cls, plan, hostname, param_type): + """ + Finds the first AdditionalParameter that matches the given arguments + """ + params = cls.objects.filter( + plan=plan, param_type=param_type).order_by('application_order') + for param in params: + if re.match(param.hostname_regex, hostname): + return param + return None + + + def _get_details_unicode(self): + return 'Additional %s parameters, regex: %s' % (self.param_type, + self.hostname_regex) + + +class AdditionalParameterValue(dbmodels.Model): + """ + The actual values for the additional parameters + + Required: + additional_parameter: The associated AdditionalParameter + key: The name of the parameter + 
value: The value of the parameter + """ + additional_parameter = dbmodels.ForeignKey(AdditionalParameter) + key = dbmodels.CharField(max_length=255) + value = dbmodels.CharField(max_length=255) + + class Meta: + db_table = 'planner_additional_parameter_values' + unique_together = ('additional_parameter', 'key') + + + def __unicode__(self): + return u'Value for parameter %d: %s=%s' % (self.additional_parameter.id, + self.key, self.value) --- autotest/frontend/planner/models_test.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/models_test.py 2010-05-04 10:33:35.000000000 -0700 @@ -4,7 +4,7 @@ import common from autotest_lib.frontend import setup_django_environment from autotest_lib.frontend.afe import frontend_test_utils, rpc_utils -from autotest_lib.frontend.planner import models +from autotest_lib.frontend.planner import models, model_attributes class ModelWithHashTestBase(frontend_test_utils.FrontendTestMixin): @@ -61,5 +61,57 @@ 'value' : 'test_value'} +class AdditionalParameterTest(frontend_test_utils.FrontendTestMixin, + unittest.TestCase): + def setUp(self): + self._frontend_common_setup() + self.plan = models.Plan.objects.create(name='plan') + self.param_type = model_attributes.AdditionalParameterType.VERIFY + + def tearDown(self): + self._frontend_common_teardown() + + + def test_find_applicable_control_parameter_match(self): + parameter = models.AdditionalParameter.objects.create( + plan=self.plan, hostname_regex='host.*', + param_type=self.param_type, application_order=0) + found = models.AdditionalParameter.find_applicable_additional_parameter( + plan=self.plan, hostname='host1', param_type=self.param_type) + + self.assertEqual(parameter, found) + + + def test_find_applicable_additional_parameter_ordered(self): + additional1 = models.AdditionalParameter.objects.create( + plan=self.plan, hostname_regex='host.*', + param_type=self.param_type, application_order=0) + additional2 = models.AdditionalParameter.objects.create( + 
plan=self.plan, hostname_regex='.*', + param_type=self.param_type, application_order=1) + + found1 = ( + models.AdditionalParameter.find_applicable_additional_parameter( + plan=self.plan, hostname='host1', + param_type=self.param_type)) + found2 = ( + models.AdditionalParameter.find_applicable_additional_parameter( + plan=self.plan, hostname='other', + param_type=self.param_type)) + + self.assertEqual(additional1, found1) + self.assertEqual(additional2, found2) + + + def test_find_applicable_additional_parameter_no_match(self): + models.AdditionalParameter.objects.create( + plan=self.plan, hostname_regex='host.*', + param_type=self.param_type, application_order=0) + found = models.AdditionalParameter.find_applicable_additional_parameter( + plan=self.plan, hostname='other', param_type=self.param_type) + + self.assertEqual(None, found) + + if __name__ == '__main__': unittest.main() --- autotest/frontend/planner/rpc_interface.py 2010-04-30 15:40:42.000000000 -0700 +++ autotest/frontend/planner/rpc_interface.py 2010-05-04 10:33:35.000000000 -0700 @@ -5,12 +5,13 @@ __author__ = '[email protected] (James Ren)' -import os +import os, re import common from django.db import models as django_models from autotest_lib.frontend import thread_local from autotest_lib.frontend.afe import model_logic, models as afe_models from autotest_lib.frontend.afe import rpc_utils as afe_rpc_utils +from autotest_lib.frontend.afe import rpc_interface as afe_rpc_interface from autotest_lib.frontend.tko import models as tko_models from autotest_lib.frontend.planner import models, rpc_utils, model_attributes from autotest_lib.frontend.planner import failure_actions @@ -36,11 +37,6 @@ models.Host.objects.get(id=id).update_object(data) -def get_test_config(id): - return afe_rpc_utils.prepare_rows_as_nested_dicts( - models.TestConfig.objects.filter(id=id), ('control_file',))[0] - - def add_job(plan_id, test_config_id, afe_job_id): models.Job.objects.create( plan=models.Plan.objects.get(id=plan_id), 
@@ -50,8 +46,8 @@ # more advanced calls -def submit_plan(name, hosts, host_labels, tests, - support=None, label_override=None): +def submit_plan(name, hosts, host_labels, tests, support=None, + label_override=None, additional_parameters=None): """ Submits a plan to the Test Planner @@ -68,6 +64,32 @@ @param support: the global support script @param label_override: label to prepend to all AFE jobs for this test plan. Defaults to the plan name. + @param additional_parameters: A mapping of AdditionalParameters to apply to + this test plan, as an ordered list. Each item + of the list is a dictionary: + hostname_regex: A regular expression; the + additional parameter in the + value will be applied if the + hostname matches this regex + param_type: The type of additional parameter + param_values: A dictionary of key=value pairs + for this parameter + example: + [{'hostname_regex': 'host[0-9]', + 'param_type': 'Verify', + 'param_values': {'key1': 'value1', + 'key2': 'value2'}}, + {'hostname_regex': '.*', + 'param_type': 'Verify', + 'param_values': {'key': 'value'}}] + + Currently, the only (non-site-specific) + param_type available is 'Verify'. Setting + these parameters allows the user to specify + arguments to the + job.run_test('verify_test', ...) line at the + beginning of the wrapped control file for each + test """ host_objects = [] label_objects = [] @@ -108,6 +130,7 @@ {'name': 'Plan name %s already exists' % name}) try: + rpc_utils.set_additional_parameters(plan, additional_parameters) label = rpc_utils.create_plan_label(plan) try: for i, test in enumerate(tests): @@ -386,6 +409,85 @@ return result +def generate_test_config(alias, afe_test_name=None, + estimated_runtime=0, **kwargs): + """ + Creates and returns a test config suitable for passing into submit_plan() + + Also accepts optional parameters to pass directly in to the AFE RPC + interface's generate_control_file() method. 
+ + @param alias: The alias for the test + @param afe_test_name: The name of the test, as shown on AFE + @param estimated_runtime: Estimated number of hours this test is expected to + run. For reporting purposes. + """ + if afe_test_name is None: + afe_test_name = alias + alias = alias.replace(' ', '_') + + control = afe_rpc_interface.generate_control_file(tests=[afe_test_name], + **kwargs) + + return {'alias': alias, + 'control_file': control['control_file'], + 'is_server': control['is_server'], + 'estimated_runtime': estimated_runtime} + + +def get_wrapped_test_config(id, hostname, run_verify): + """ + Gets the TestConfig object identified by the ID + + Returns the object dict of the TestConfig, plus an additional + 'wrapped_control_file' value, which includes the pre-processing that the + ControlParameters specify. + + @param hostname: Hostname of the machine this test config will run on + @param run_verify: Set to True or False to override the default behavior + (which is to run the verify test unless the skip_verify + ControlParameter is set) + """ + test_config = models.TestConfig.objects.get(id=id) + object_dict = test_config.get_object_dict() + object_dict['control_file'] = test_config.control_file.get_object_dict() + object_dict['wrapped_control_file'] = rpc_utils.wrap_control_file( + plan=test_config.plan, hostname=hostname, + run_verify=run_verify, test_config=test_config) + + return object_dict + + +def generate_additional_parameters(hostname_regex, param_type, param_values): + """ + Generates an AdditionalParameter dictionary, for passing in to submit_plan() + + Returns a dictionary. 
To use in submit_plan(), put this dictionary into a + list (possibly with other additional_parameters dictionaries) + + @param hostname_regex: The hostname regular expression to match + @param param_type: One of get_static_data()['additional_parameter_types'] + @param param_values: Dictionary of key=value pairs for this parameter + """ + try: + re.compile(hostname_regex) + except Exception: + raise model_logic.ValidationError( + {'hostname_regex': '%s is not a valid regex' % hostname_regex}) + + if param_type not in model_attributes.AdditionalParameterType.values: + raise model_logic.ValidationError( + {'param_type': '%s is not a valid parameter type' % param_type}) + + if type(param_values) is not dict: + raise model_logic.ValidationError( + {'param_values': '%s is not a dictionary' % repr(param_values)}) + + return {'hostname_regex': hostname_regex, + 'param_type': param_type, + 'param_values': param_values} + + def get_motd(): return afe_rpc_utils.get_motd() @@ -393,5 +495,7 @@ def get_static_data(): result = {'motd': get_motd(), 'host_actions': sorted(failure_actions.HostAction.values), - 'test_actions': sorted(failure_actions.TestAction.values)} + 'test_actions': sorted(failure_actions.TestAction.values), + 'additional_parameter_types': + sorted(model_attributes.AdditionalParameterType.values)} return result --- autotest/frontend/planner/rpc_interface_unittest.py 2010-04-30 15:40:42.000000000 -0700 +++ autotest/frontend/planner/rpc_interface_unittest.py 2010-05-04 10:33:35.000000000 -0700 @@ -7,8 +7,8 @@ from autotest_lib.frontend.planner import planner_test_utils, model_attributes from autotest_lib.frontend.planner import rpc_interface, models, rpc_utils from autotest_lib.frontend.planner import failure_actions -from autotest_lib.frontend.afe import model_logic -from autotest_lib.frontend.afe import models as afe_models +from autotest_lib.frontend.afe import model_logic, models as afe_models +from autotest_lib.frontend.afe import rpc_interface as 
afe_rpc_interface from autotest_lib.frontend.tko import models as tko_models @@ -210,5 +210,27 @@ self.assertEqual(sorted(actual), sorted(expected)) + def test_generate_test_config(self): + control = {'control_file': object(), + 'is_server': object()} + test = 'test' + alias = 'test alias' + estimated_runtime = object() + + self.god.stub_function(afe_rpc_interface, 'generate_control_file') + afe_rpc_interface.generate_control_file.expect_call( + tests=[test]).and_return(control) + + result = rpc_interface.generate_test_config( + alias=alias, afe_test_name=test, + estimated_runtime=estimated_runtime) + + self.assertEqual(result['alias'], 'test_alias') + self.assertEqual(result['control_file'], control['control_file']) + self.assertEqual(result['is_server'], control['is_server']) + self.assertEqual(result['estimated_runtime'], estimated_runtime) + self.god.check_playback() + + if __name__ == '__main__': unittest.main() --- autotest/frontend/planner/rpc_utils.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/rpc_utils.py 2010-05-04 10:33:35.000000000 -0700 @@ -2,7 +2,7 @@ import os from autotest_lib.frontend.afe import models as afe_models, model_logic from autotest_lib.frontend.planner import models, model_attributes -from autotest_lib.frontend.planner import failure_actions +from autotest_lib.frontend.planner import failure_actions, control_file from autotest_lib.frontend.tko import models as tko_models from autotest_lib.client.common_lib import global_config, utils, global_config @@ -262,3 +262,58 @@ assert action == TestAction.RERUN planner_job.requires_rerun = True planner_job.save() + + +def set_additional_parameters(plan, additional_parameters): + if not additional_parameters: + return + + for index, additional_parameter in enumerate(additional_parameters): + hostname_regex = additional_parameter['hostname_regex'] + param_type = additional_parameter['param_type'] + param_values = additional_parameter['param_values'] + + additional_param = 
models.AdditionalParameter.objects.create( + plan=plan, hostname_regex=hostname_regex, + param_type=param_type, application_order=index) + + for key, value in param_values.iteritems(): + models.AdditionalParameterValue.objects.create( + additional_parameter=additional_param, + key=key, value=repr(value)) + + +def _additional_wrap_arguments_dummy(plan, hostname): + return {} + + +def get_wrap_arguments(plan, hostname, param_type): + additional_param = ( + models.AdditionalParameter.find_applicable_additional_parameter( + plan=plan, hostname=hostname, param_type=param_type)) + if not additional_param: + return {} + + param_values = additional_param.additionalparametervalue_set.values_list( + 'key', 'value') + return dict(param_values) + + +def wrap_control_file(plan, hostname, run_verify, test_config): + """ + Wraps a control file using the ControlParameters for the plan + """ + site_additional_wrap_arguments = utils.import_site_function( + __file__, 'autotest_lib.frontend.planner.site_rpc_utils', + 'additional_wrap_arguments', _additional_wrap_arguments_dummy) + additional_wrap_arguments = site_additional_wrap_arguments(plan, hostname) + + verify_params = get_wrap_arguments( + plan, hostname, model_attributes.AdditionalParameterType.VERIFY) + + return control_file.wrap_control_file( + control_file=test_config.control_file.contents, + is_server=test_config.is_server, + skip_verify=(not run_verify), + verify_params=verify_params, + **additional_wrap_arguments) --- autotest/frontend/planner/rpc_utils_unittest.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/rpc_utils_unittest.py 2010-05-04 10:33:35.000000000 -0700 @@ -269,5 +269,58 @@ self.assertTrue(planner_job.requires_rerun) + def test_set_additional_parameters(self): + hostname_regex = 'host[0-9]' + param_type = model_attributes.AdditionalParameterType.VERIFY + param_values = {'key1': 'value1', + 'key2': []} + + additional_parameters = {'hostname_regex': hostname_regex, + 'param_type': 
param_type, + 'param_values': param_values} + + rpc_utils.set_additional_parameters(self._plan, [additional_parameters]) + + additional_parameters_query = ( + models.AdditionalParameter.objects.filter(plan=self._plan)) + self.assertEqual(additional_parameters_query.count(), 1) + + additional_parameter = additional_parameters_query[0] + self.assertEqual(additional_parameter.hostname_regex, hostname_regex) + self.assertEqual(additional_parameter.param_type, param_type) + self.assertEqual(additional_parameter.application_order, 0) + + values_query = additional_parameter.additionalparametervalue_set.all() + self.assertEqual(values_query.count(), 2) + + value_query1 = values_query.filter(key='key1') + value_query2 = values_query.filter(key='key2') + self.assertEqual(value_query1.count(), 1) + self.assertEqual(value_query2.count(), 1) + + self.assertEqual(value_query1[0].value, repr('value1')) + self.assertEqual(value_query2[0].value, repr([])) + + + def test_get_wrap_arguments(self): + hostname_regex = '.*' + param_type = model_attributes.AdditionalParameterType.VERIFY + + additional_param = models.AdditionalParameter.objects.create( + plan=self._plan, hostname_regex=hostname_regex, + param_type=param_type, application_order=0) + models.AdditionalParameterValue.objects.create( + additional_parameter=additional_param, + key='key1', value=repr('value1')) + models.AdditionalParameterValue.objects.create( + additional_parameter=additional_param, + key='key2', value=repr([])) + + actual = rpc_utils.get_wrap_arguments(self._plan, 'host', param_type) + expected = {'key1': repr('value1'), + 'key2': repr([])} + + self.assertEqual(actual, expected) + if __name__ == '__main__': unittest.main() --- autotest/frontend/planner/support.py 2010-05-04 10:33:35.000000000 -0700 +++ autotest/frontend/planner/support.py 2010-05-04 10:33:35.000000000 -0700 @@ -19,7 +19,7 @@ self._reboot_before = afe_model_attributes.RebootBefore.IF_DIRTY self._reboot_after = 
afe_model_attributes.RebootAfter.ALWAYS - self._run_verify = True + self._run_verify = None def skip_test(self): @@ -77,7 +77,7 @@ def set_run_verify(self, run_verify): """ - Sets whether or not the job should run the "Verify" stage. + Sets whether or not the job should run the verify_test. Defaults to True. """ _______________________________________________ Autotest mailing list [email protected] http://test.kernel.org/cgi-bin/mailman/listinfo/autotest
