Begin adding support for parameterized jobs.

This will allow test developers to specify certain parameters that a test
control file may take, so that users can then easily set those parameters
on job create. Enabling this feature removes the ability to edit the
control file directly on job creation.

Feature is currently INCOMPLETE. Do not attempt to use. This feature will
be committed in small pieces for the sake of having smaller code reviews.

Signed-off-by: James Ren <[email protected]>

--- autotest/frontend/afe/doctests/001_rpc_test.txt     2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/doctests/001_rpc_test.txt     2010-07-14 12:49:58.000000000 -0700
@@ -544,7 +544,8 @@
 ...         'reboot_before': 'If dirty',
 ...         'reboot_after': 'Always',
 ...         'parse_failed_repair': True,
-...         'drone_set': drone_set}
+...         'drone_set': drone_set,
+...         'parameterized_job': None}
 True
 
 # get_host_queue_entries returns a lot of data, so let's only check a couple
--- autotest/frontend/afe/frontend_test_utils.py        2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/frontend_test_utils.py        2010-07-14 12:49:58.000000000 -0700
@@ -3,6 +3,7 @@
 from autotest_lib.frontend import setup_test_environment
 from autotest_lib.frontend import thread_local
 from autotest_lib.frontend.afe import models, model_attributes
+from autotest_lib.client.common_lib import global_config
 from autotest_lib.client.common_lib.test_utils import mock
 
 class FrontendTestMixin(object):
@@ -65,6 +66,8 @@
     def _frontend_common_setup(self, fill_data=True):
         self.god = mock.mock_god(ut=self)
         setup_test_environment.set_up()
+        global_config.global_config.override_config_value(
+                'AUTOTEST_WEB', 'parameterized_jobs', 'False')
         if fill_data:
             self._fill_in_test_data()
 
@@ -77,7 +80,8 @@
 
     def _create_job(self, hosts=[], metahosts=[], priority=0, active=False,
                     synchronous=False, atomic_group=None, hostless=False,
-                    drone_set=None):
+                    drone_set=None, control_file='control',
+                    parameterized_job=None):
         """
         Create a job row in the test database.
 
@@ -112,7 +116,8 @@
             name='test', owner='autotest_system', priority=priority,
             synch_count=synch_count, created_on=created_on,
             reboot_before=model_attributes.RebootBefore.NEVER,
-            drone_set=drone_set)
+            drone_set=drone_set, control_file=control_file,
+            parameterized_job=parameterized_job)
         for host_id in hosts:
             models.HostQueueEntry.objects.create(job=job, host_id=host_id,
                                                  status=status,
--- autotest/frontend/afe/model_attributes.py   2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/model_attributes.py   2010-07-14 12:49:58.000000000 -0700
@@ -9,3 +9,7 @@
 
 # common enums for test attributes
 TestTypes = enum.Enum('Client', 'Server', start_value=1)
+
+
+# common enums for profiler and job parameter types
+ParameterTypes = enum.Enum('int', 'float', 'string', string_values=True)
--- autotest/frontend/afe/models.py     2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/models.py     2010-07-14 12:49:58.000000000 -0700
@@ -549,6 +549,21 @@
         return unicode(self.name)
 
 
+class TestParameter(dbmodels.Model):
+    """
+    A declared parameter of a test
+    """
+    test = dbmodels.ForeignKey(Test)
+    name = dbmodels.CharField(max_length=255)
+
+    class Meta:
+        db_table = 'afe_test_parameters'
+        unique_together = ('test', 'name')
+
+    def __unicode__(self):
+        return u'%s (%s)' % (self.name, self.test.name)
+
+
 class Profiler(dbmodels.Model, model_logic.ModelExtensions):
     """\
     Required:
@@ -710,6 +725,135 @@
         return unicode(self.name)
 
 
+class Kernel(dbmodels.Model):
+    """
+    A kernel configuration for a parameterized job
+    """
+    version = dbmodels.CharField(max_length=255)
+    cmdline = dbmodels.CharField(max_length=255, blank=True)
+
+    @classmethod
+    def create_kernels(cls, kernel_list):
+        """
+        Creates all kernels in the kernel list
+
+        @param kernel_list A list of dictionaries that describe the kernels, in
+                           the same format as the 'kernel' argument to
+                           rpc_interface.generate_control_file
+        @returns a list of the created kernels
+        """
+        if not kernel_list:
+            return None
+        return [cls._create(kernel) for kernel in kernel_list]
+
+
+    @classmethod
+    def _create(cls, kernel_dict):
+        version = kernel_dict.pop('version')
+        cmdline = kernel_dict.pop('cmdline', '')
+
+        if kernel_dict:
+            raise Exception('Extraneous kernel arguments remain: %r'
+                            % kernel_dict)
+
+        kernel, _ = cls.objects.get_or_create(version=version,
+                                              cmdline=cmdline)
+        return kernel
+
+
+    class Meta:
+        db_table = 'afe_kernels'
+        unique_together = ('version', 'cmdline')
+
+    def __unicode__(self):
+        return u'%s %s' % (self.version, self.cmdline)
+
+
+class ParameterizedJob(dbmodels.Model):
+    """
+    Auxiliary configuration for a parameterized job
+    """
+    test = dbmodels.ForeignKey(Test)
+    label = dbmodels.ForeignKey(Label, null=True)
+    use_container = dbmodels.BooleanField(default=False)
+    profile_only = dbmodels.BooleanField(default=False)
+    upload_kernel_config = dbmodels.BooleanField(default=False)
+
+    kernels = dbmodels.ManyToManyField(
+            Kernel, db_table='afe_parameterized_job_kernels')
+    profilers = dbmodels.ManyToManyField(
+            Profiler, through='ParameterizedJobProfiler')
+
+
+    @classmethod
+    def smart_get(cls, id_or_name, *args, **kwargs):
+        """For compatibility with Job.add_object"""
+        return cls.objects.get(pk=id_or_name)
+
+
+    def job(self):
+        jobs = self.job_set.all()
+        assert jobs.count() <= 1
+        return jobs[0] if jobs else None
+
+
+    class Meta:
+        db_table = 'afe_parameterized_jobs'
+
+    def __unicode__(self):
+        return u'%s (parameterized) - %s' % (self.test.name, self.job())
+
+
+class ParameterizedJobProfiler(dbmodels.Model):
+    """
+    A profiler to run on a parameterized job
+    """
+    parameterized_job = dbmodels.ForeignKey(ParameterizedJob)
+    profiler = dbmodels.ForeignKey(Profiler)
+
+    class Meta:
+        db_table = 'afe_parameterized_jobs_profilers'
+        unique_together = ('parameterized_job', 'profiler')
+
+
+class ParameterizedJobProfilerParameter(dbmodels.Model):
+    """
+    A parameter for a profiler in a parameterized job
+    """
+    parameterized_job_profiler = dbmodels.ForeignKey(ParameterizedJobProfiler)
+    parameter_name = dbmodels.CharField(max_length=255)
+    parameter_value = dbmodels.TextField()
+    parameter_type = dbmodels.CharField(
+            max_length=8, choices=model_attributes.ParameterTypes.choices())
+
+    class Meta:
+        db_table = 'afe_parameterized_job_profiler_parameters'
+        unique_together = ('parameterized_job_profiler', 'parameter_name')
+
+    def __unicode__(self):
+        return u'%s - %s' % (self.parameterized_job_profiler.profiler.name,
+                             self.parameter_name)
+
+
+class ParameterizedJobParameter(dbmodels.Model):
+    """
+    Parameters for a parameterized job
+    """
+    parameterized_job = dbmodels.ForeignKey(ParameterizedJob)
+    test_parameter = dbmodels.ForeignKey(TestParameter)
+    parameter_value = dbmodels.TextField()
+    parameter_type = dbmodels.CharField(
+            max_length=8, choices=model_attributes.ParameterTypes.choices())
+
+    class Meta:
+        db_table = 'afe_parameterized_job_parameters'
+        unique_together = ('parameterized_job', 'test_parameter')
+
+    def __unicode__(self):
+        return u'%s - %s' % (self.parameterized_job.job().name,
+                             self.test_parameter.name)
+
+
 class JobManager(model_logic.ExtendedManager):
     'Custom manager to provide efficient status counts querying.'
     def get_status_counts(self, job_ids):
@@ -776,7 +920,7 @@
     priority = dbmodels.SmallIntegerField(choices=Priority.choices(),
                                           blank=True, # to allow 0
                                           default=Priority.MEDIUM)
-    control_file = dbmodels.TextField()
+    control_file = dbmodels.TextField(null=True, blank=True)
     control_type = dbmodels.SmallIntegerField(choices=ControlType.choices(),
                                               blank=True, # to allow 0
                                               default=ControlType.CLIENT)
@@ -799,6 +943,9 @@
     max_runtime_hrs = dbmodels.IntegerField(default=DEFAULT_MAX_RUNTIME_HRS)
     drone_set = dbmodels.ForeignKey(DroneSet, null=True, blank=True)
 
+    parameterized_job = dbmodels.ForeignKey(ParameterizedJob, null=True,
+                                            blank=True)
+
 
     # custom manager
     objects = JobManager()
@@ -809,6 +956,34 @@
 
 
     @classmethod
+    def parameterized_jobs_enabled(cls):
+        return global_config.global_config.get_config_value(
+                'AUTOTEST_WEB', 'parameterized_jobs', type=bool)
+
+
+    @classmethod
+    def check_parameterized_job(cls, control_file, parameterized_job):
+        """
+        Checks that the job is valid given the global config settings
+
+        First, either control_file must be set, or parameterized_job must be
+        set, but not both. Second, parameterized_job must be set if and only if
+        the parameterized_jobs option in the global config is set to True.
+        """
+        if not (bool(control_file) ^ bool(parameterized_job)):
+            raise Exception('Job must have either control file or '
+                            'parameterization, but not both')
+
+        parameterized_jobs_enabled = cls.parameterized_jobs_enabled()
+        if control_file and parameterized_jobs_enabled:
+            raise Exception('Control file specified, but parameterized jobs '
+                            'are enabled')
+        if parameterized_job and not parameterized_jobs_enabled:
+            raise Exception('Parameterized job specified, but parameterized '
+                            'jobs are not enabled')
+
+
+    @classmethod
     def create(cls, owner, options, hosts):
         """\
         Creates a job by taking some information (the listed args)
@@ -816,6 +991,11 @@
         """
         AclGroup.check_for_acl_violation_hosts(hosts)
 
+        control_file = options.get('control_file')
+        parameterized_job = options.get('parameterized_job')
+        cls.check_parameterized_job(control_file=control_file,
+                                    parameterized_job=parameterized_job)
+
         user = User.current_user()
         if options.get('reboot_before') is None:
             options['reboot_before'] = user.get_reboot_before_display()
@@ -828,7 +1008,7 @@
             owner=owner,
             name=options['name'],
             priority=options['priority'],
-            control_file=options['control_file'],
+            control_file=control_file,
             control_type=options['control_type'],
             synch_count=options.get('synch_count'),
             timeout=options.get('timeout'),
@@ -839,7 +1019,8 @@
             reboot_after=options.get('reboot_after'),
             parse_failed_repair=options.get('parse_failed_repair'),
             created_on=datetime.now(),
-            drone_set=drone_set)
+            drone_set=drone_set,
+            parameterized_job=parameterized_job)
 
         job.dependency_labels = options['dependencies']
 
@@ -850,6 +1031,12 @@
         return job
 
 
+    def save(self, *args, **kwargs):
+        self.check_parameterized_job(control_file=self.control_file,
+                                     parameterized_job=self.parameterized_job)
+        super(Job, self).save(*args, **kwargs)
+
+
     def queue(self, hosts, atomic_group=None, is_template=False):
         """Enqueue a job on the given hosts."""
         if not hosts:
--- autotest/frontend/afe/models_test.py        2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/models_test.py        2010-07-14 12:49:58.000000000 -0700
@@ -4,8 +4,8 @@
 import common
 from autotest_lib.frontend import setup_django_environment
 from autotest_lib.frontend.afe import frontend_test_utils
-from autotest_lib.frontend.afe import models
-from autotest_lib.frontend.afe import model_logic
+from autotest_lib.frontend.afe import models, model_attributes, model_logic
+from autotest_lib.client.common_lib import global_config
 
 
 class AclGroupTest(unittest.TestCase,
@@ -214,5 +214,94 @@
         self.assertEqual(0, models.Job.objects.all().count())
 
 
+class KernelTest(unittest.TestCase, frontend_test_utils.FrontendTestMixin):
+    def setUp(self):
+        self._frontend_common_setup()
+
+
+    def tearDown(self):
+        self._frontend_common_teardown()
+
+
+    def test_create_kernels_none(self):
+        self.assertEqual(None, models.Kernel.create_kernels(None))
+
+
+    def test_create_kernels(self):
+        self.god.stub_function(models.Kernel, '_create')
+
+        num_kernels = 3
+        kernel_list = [object() for _ in range(num_kernels)]
+        result = [object() for _ in range(num_kernels)]
+
+        for kernel, response in zip(kernel_list, result):
+            models.Kernel._create.expect_call(kernel).and_return(response)
+        self.assertEqual(result, models.Kernel.create_kernels(kernel_list))
+        self.god.check_playback()
+
+
+    def test_create(self):
+        kernel = models.Kernel._create({'version': 'version'})
+        self.assertEqual(kernel.version, 'version')
+        self.assertEqual(kernel.cmdline, '')
+        self.assertEqual(kernel, models.Kernel._create({'version': 'version'}))
+
+
+class ParameterizedJobTest(unittest.TestCase,
+                           frontend_test_utils.FrontendTestMixin):
+    def setUp(self):
+        self._frontend_common_setup()
+
+
+    def tearDown(self):
+        self._frontend_common_teardown()
+
+
+    def test_job(self):
+        global_config.global_config.override_config_value(
+                'AUTOTEST_WEB', 'parameterized_jobs', 'True')
+
+        test = models.Test.objects.create(
+                name='name', author='author', test_class='class',
+                test_category='category',
+                test_type=model_attributes.TestTypes.SERVER, path='path')
+        parameterized_job = models.ParameterizedJob.objects.create(test=test)
+        job = self._create_job(hosts=[1], control_file=None,
+                               parameterized_job=parameterized_job)
+
+        self.assertEqual(job, parameterized_job.job())
+
+
+class JobTest(unittest.TestCase, frontend_test_utils.FrontendTestMixin):
+    def setUp(self):
+        self._frontend_common_setup()
+
+
+    def tearDown(self):
+        self._frontend_common_teardown()
+
+
+    def test_check_parameterized_jobs_no_args(self):
+        self.assertRaises(Exception, models.Job.check_parameterized_job,
+                          control_file=None, parameterized_job=None)
+
+
+    def test_check_parameterized_jobs_both_args(self):
+        self.assertRaises(Exception, models.Job.check_parameterized_job,
+                          control_file=object(), parameterized_job=object())
+
+
+    def test_check_parameterized_jobs_disabled(self):
+        self.assertRaises(Exception, models.Job.check_parameterized_job,
+                          control_file=None, parameterized_job=object())
+
+
+    def test_check_parameterized_jobs_enabled(self):
+        global_config.global_config.override_config_value(
+                'AUTOTEST_WEB', 'parameterized_jobs', 'True')
+        self.assertRaises(Exception, models.Job.check_parameterized_job,
+                          control_file=object(), parameterized_job=None)
+
+
 if __name__ == '__main__':
     unittest.main()
--- autotest/frontend/afe/rpc_interface.py      2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/rpc_interface.py      2010-07-14 12:49:58.000000000 -0700
@@ -397,6 +397,92 @@
     return cf_info
 
 
+def create_parameterized_job(name, priority, test, parameters, kernel=None,
+                             label=None, profilers=(), profiler_parameters=None,
+                             use_container=False, profile_only=None,
+                             upload_kernel_config=False, hosts=(),
+                             meta_hosts=(), one_time_hosts=(),
+                             atomic_group_name=None, synch_count=None,
+                             is_template=False, timeout=None,
+                             max_runtime_hrs=None, run_verify=True,
+                             email_list='', dependencies=(), reboot_before=None,
+                             reboot_after=None, parse_failed_repair=None,
+                             hostless=False, keyvals=None, drone_set=None):
+    """
+    Creates and enqueues a parameterized job.
+
+    Most parameters are a combination of the parameters for generate_control_file()
+    and create_job(), with the exception of:
+
+    @param test name or ID of the test to run
+    @param parameters a map of parameter name ->
+                          tuple of (param value, param type)
+    @param profiler_parameters a dictionary of parameters for the profilers:
+                                   key: profiler name
+                                   value: dict of param name -> tuple of
+                                                                (param value,
+                                                                 param type)
+    """
+    # Save the values of the passed arguments here. What we're going to do with
+    # them is pass them all to rpc_utils.get_create_job_common_args(), which
+    # will extract the subset of these arguments that apply for
+    # rpc_utils.create_job_common(), which we then pass in to that function.
+    args = locals()
+
+    # Set up the parameterized job configs
+    test_obj = models.Test.smart_get(test)
+    if test_obj.test_type == model_attributes.TestTypes.SERVER:
+        control_type = models.Job.ControlType.SERVER
+    else:
+        control_type = models.Job.ControlType.CLIENT
+
+    try:
+        label = models.Label.smart_get(label)
+    except models.Label.DoesNotExist:
+        label = None
+
+    kernel_objs = models.Kernel.create_kernels(kernel)
+    profiler_objs = [models.Profiler.smart_get(profiler)
+                     for profiler in profilers]
+
+    parameterized_job = models.ParameterizedJob.objects.create(
+            test=test_obj, label=label, use_container=use_container,
+            profile_only=profile_only,
+            upload_kernel_config=upload_kernel_config)
+    parameterized_job.kernels.add(*kernel_objs)
+
+    for profiler in profiler_objs:
+        parameterized_profiler = models.ParameterizedJobProfiler.objects.create(
+                parameterized_job=parameterized_job,
+                profiler=profiler)
+        profiler_params = (profiler_parameters or {}).get(profiler.name, {})
+        for name, (value, param_type) in profiler_params.iteritems():
+            models.ParameterizedJobProfilerParameter.objects.create(
+                    parameterized_job_profiler=parameterized_profiler,
+                    parameter_name=name,
+                    parameter_value=value,
+                    parameter_type=param_type)
+
+    try:
+        for parameter in test_obj.testparameter_set.all():
+            if parameter.name in parameters:
+                param_value, param_type = parameters.pop(parameter.name)
+                parameterized_job.parameterizedjobparameter_set.create(
+                        test_parameter=parameter, parameter_value=param_value,
+                        parameter_type=param_type)
+
+        if parameters:
+            raise Exception('Extra parameters remain: %r' % parameters)
+
+        return rpc_utils.create_job_common(
+                parameterized_job=parameterized_job.id,
+                control_type=control_type,
+                **rpc_utils.get_create_job_common_args(args))
+    except:
+        parameterized_job.delete()
+        raise
+
+
 def create_job(name, priority, control_file, control_type,
                hosts=(), meta_hosts=(), one_time_hosts=(),
                atomic_group_name=None, synch_count=None, is_template=False,
@@ -437,110 +523,8 @@
 
     @returns The created Job id number.
     """
-    user = models.User.current_user()
-    owner = user.login
-
-    # Convert metahost names to lower case, to avoid case sensitivity issues
-    meta_hosts = [meta_host.lower() for meta_host in meta_hosts]
-
-    # input validation
-    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name
-            or hostless):
-        raise model_logic.ValidationError({
-            'arguments' : "You must pass at least one of 'hosts', "
-                          "'meta_hosts', 'one_time_hosts', "
-                          "'atomic_group_name', or 'hostless'"
-            })
-
-    if hostless:
-        if hosts or meta_hosts or one_time_hosts or atomic_group_name:
-            raise model_logic.ValidationError({
-                    'hostless': 'Hostless jobs cannot include any hosts!'})
-        server_type = models.Job.ControlType.get_string(
-                models.Job.ControlType.SERVER)
-        if control_type != server_type:
-            raise model_logic.ValidationError({
-                    'control_type': 'Hostless jobs cannot use client-side '
-                                    'control files'})
-
-    labels_by_name = dict((label.name.lower(), label)
-                          for label in models.Label.objects.all())
-    atomic_groups_by_name = dict((ag.name.lower(), ag)
-                                 for ag in models.AtomicGroup.objects.all())
-
-    # Schedule on an atomic group automagically if one of the labels given
-    # is an atomic group label and no explicit atomic_group_name was supplied.
-    if not atomic_group_name:
-        for label_name in meta_hosts or []:
-            label = labels_by_name.get(label_name)
-            if label and label.atomic_group:
-                atomic_group_name = label.atomic_group.name
-                break
-
-    # convert hostnames & meta hosts to host/label objects
-    host_objects = models.Host.smart_get_bulk(hosts)
-    metahost_objects = []
-    for label_name in meta_hosts or []:
-        if label_name in labels_by_name:
-            label = labels_by_name[label_name]
-            metahost_objects.append(label)
-        elif label_name in atomic_groups_by_name:
-            # If given a metahost name that isn't a Label, check to
-            # see if the user was specifying an Atomic Group instead.
-            atomic_group = atomic_groups_by_name[label_name]
-            if atomic_group_name and atomic_group_name != atomic_group.name:
-                raise model_logic.ValidationError({
-                        'meta_hosts': (
-                                'Label "%s" not found.  If assumed to be an '
-                                'atomic group it would conflict with the '
-                                'supplied atomic group "%s".' % (
-                                        label_name, atomic_group_name))})
-            atomic_group_name = atomic_group.name
-        else:
-            raise model_logic.ValidationError(
-                {'meta_hosts' : 'Label "%s" not found' % label_name})
-
-    # Create and sanity check an AtomicGroup object if requested.
-    if atomic_group_name:
-        if one_time_hosts:
-            raise model_logic.ValidationError(
-                    {'one_time_hosts':
-                     'One time hosts cannot be used with an Atomic Group.'})
-        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
-        if synch_count and synch_count > atomic_group.max_number_of_machines:
-            raise model_logic.ValidationError(
-                    {'atomic_group_name' :
-                     'You have requested a synch_count (%d) greater than the '
-                     'maximum machines in the requested Atomic Group (%d).' %
-                     (synch_count, atomic_group.max_number_of_machines)})
-    else:
-        atomic_group = None
-
-    for host in one_time_hosts or []:
-        this_host = models.Host.create_one_time_host(host)
-        host_objects.append(this_host)
-
-    options = dict(name=name,
-                   priority=priority,
-                   control_file=control_file,
-                   control_type=control_type,
-                   is_template=is_template,
-                   timeout=timeout,
-                   max_runtime_hrs=max_runtime_hrs,
-                   synch_count=synch_count,
-                   run_verify=run_verify,
-                   email_list=email_list,
-                   dependencies=dependencies,
-                   reboot_before=reboot_before,
-                   reboot_after=reboot_after,
-                   parse_failed_repair=parse_failed_repair,
-                   keyvals=keyvals,
-                   drone_set=drone_set)
-    return rpc_utils.create_new_job(owner=owner,
-                                    options=options,
-                                    host_objects=host_objects,
-                                    metahost_objects=metahost_objects,
-                                    atomic_group=atomic_group)
+    return rpc_utils.create_job_common(
+            **rpc_utils.get_create_job_common_args(locals()))
 
 
 def abort_host_queue_entries(**filter_data):
@@ -838,6 +822,7 @@
     result['motd'] = rpc_utils.get_motd()
     result['drone_sets_enabled'] = models.DroneSet.drone_sets_enabled()
     result['drone_sets'] = drone_sets
+    result['parameterized_jobs'] = models.Job.parameterized_jobs_enabled()
 
     result['status_dictionary'] = {"Aborted": "Aborted",
                                    "Verifying": "Verifying Host",
--- autotest/frontend/afe/rpc_interface_unittest.py     2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/rpc_interface_unittest.py     2010-07-14 12:49:58.000000000 -0700
@@ -6,7 +6,8 @@
 from autotest_lib.frontend.afe import frontend_test_utils
 from django.db import connection
 from autotest_lib.frontend.afe import models, rpc_interface, frontend_test_utils
-from autotest_lib.frontend.afe import model_logic
+from autotest_lib.frontend.afe import model_logic, model_attributes
+from autotest_lib.client.common_lib import global_config
 
 
 _hqe_status = models.HostQueueEntry.Status
@@ -313,6 +314,55 @@
         self.assertEquals(task['requested_by'], 'autotest_system')
 
 
+    def test_parameterized_job(self):
+        global_config.global_config.override_config_value(
+                'AUTOTEST_WEB', 'parameterized_jobs', 'True')
+
+        string_type = model_attributes.ParameterTypes.STRING
+
+        test = models.Test.objects.create(
+                name='test', test_type=model_attributes.TestTypes.SERVER)
+        test_parameter = test.testparameter_set.create(name='key')
+        profiler = models.Profiler.objects.create(name='profiler')
+
+        kernels = ({'version': 'version', 'cmdline': 'cmdline'},)
+        profilers = ('profiler',)
+        profiler_parameters = {'profiler': {'key': ('value', string_type)}}
+        job_parameters = {'key': ('value', string_type)}
+
+        job_id = rpc_interface.create_parameterized_job(
+                name='job', priority=models.Job.Priority.MEDIUM, test='test',
+                parameters=job_parameters, kernel=kernels, label='label1',
+                profilers=profilers, profiler_parameters=profiler_parameters,
+                profile_only=False, hosts=('host1',))
+        parameterized_job = models.Job.smart_get(job_id).parameterized_job
+
+        self.assertEqual(parameterized_job.test, test)
+        self.assertEqual(parameterized_job.label, self.labels[0])
+        self.assertEqual(parameterized_job.kernels.count(), 1)
+        self.assertEqual(parameterized_job.profilers.count(), 1)
+
+        kernel = models.Kernel.objects.get(**kernels[0])
+        self.assertEqual(parameterized_job.kernels.all()[0], kernel)
+        self.assertEqual(parameterized_job.profilers.all()[0], profiler)
+
+        parameterized_profiler = models.ParameterizedJobProfiler.objects.get(
+                parameterized_job=parameterized_job, profiler=profiler)
+        profiler_parameters_obj = (
+                models.ParameterizedJobProfilerParameter.objects.get(
+                parameterized_job_profiler=parameterized_profiler))
+        self.assertEqual(profiler_parameters_obj.parameter_name, 'key')
+        self.assertEqual(profiler_parameters_obj.parameter_value, 'value')
+        self.assertEqual(profiler_parameters_obj.parameter_type, string_type)
+
+        self.assertEqual(
+                parameterized_job.parameterizedjobparameter_set.count(), 1)
+        parameters_obj = (
+                parameterized_job.parameterizedjobparameter_set.all()[0])
+        self.assertEqual(parameters_obj.test_parameter, test_parameter)
+        self.assertEqual(parameters_obj.parameter_value, 'value')
+        self.assertEqual(parameters_obj.parameter_type, string_type)
+
 
 if __name__ == '__main__':
     unittest.main()
--- autotest/frontend/afe/rpc_utils.py  2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/afe/rpc_utils.py  2010-07-14 12:49:58.000000000 -0700
@@ -5,7 +5,7 @@
 
 __author__ = '[email protected] (Steve Howard)'
 
-import datetime, os, sys
+import datetime, os, sys, inspect
 import django.http
 from autotest_lib.frontend.afe import models, model_logic, model_attributes
 
@@ -620,3 +620,132 @@
             special_task_index += 1
 
     return interleaved_entries
+
+
+def get_create_job_common_args(local_args):
+    """
+    Returns a dict containing only the args that apply for create_job_common
+
+    Returns a subset of local_args, which contains only the arguments that can
+    be passed in to create_job_common().
+    """
+    arg_names, _, _, _ = inspect.getargspec(create_job_common)
+    return dict(item for item in local_args.iteritems() if item[0] in arg_names)
+
+
+def create_job_common(name, priority, control_type, control_file=None,
+                      hosts=(), meta_hosts=(), one_time_hosts=(),
+                      atomic_group_name=None, synch_count=None,
+                      is_template=False, timeout=None, max_runtime_hrs=None,
+                      run_verify=True, email_list='', dependencies=(),
+                      reboot_before=None, reboot_after=None,
+                      parse_failed_repair=None, hostless=False, keyvals=None,
+                      drone_set=None, parameterized_job=None):
+    """
+    Common code between creating "standard" jobs and creating parameterized jobs
+    """
+    user = models.User.current_user()
+    owner = user.login
+
+    # Convert metahost names to lower case, to avoid case sensitivity issues
+    meta_hosts = [meta_host.lower() for meta_host in meta_hosts]
+
+    # input validation
+    if not (hosts or meta_hosts or one_time_hosts or atomic_group_name
+            or hostless):
+        raise model_logic.ValidationError({
+            'arguments' : "You must pass at least one of 'hosts', "
+                          "'meta_hosts', 'one_time_hosts', "
+                          "'atomic_group_name', or 'hostless'"
+            })
+
+    if hostless:
+        if hosts or meta_hosts or one_time_hosts or atomic_group_name:
+            raise model_logic.ValidationError({
+                    'hostless': 'Hostless jobs cannot include any hosts!'})
+        server_type = models.Job.ControlType.get_string(
+                models.Job.ControlType.SERVER)
+        if control_type != server_type:
+            raise model_logic.ValidationError({
+                    'control_type': 'Hostless jobs cannot use client-side '
+                                    'control files'})
+
+    labels_by_name = dict((label.name.lower(), label)
+                          for label in models.Label.objects.all())
+    atomic_groups_by_name = dict((ag.name.lower(), ag)
+                                 for ag in models.AtomicGroup.objects.all())
+
+    # Schedule on an atomic group automagically if one of the labels given
+    # is an atomic group label and no explicit atomic_group_name was supplied.
+    if not atomic_group_name:
+        for label_name in meta_hosts or []:
+            label = labels_by_name.get(label_name)
+            if label and label.atomic_group:
+                atomic_group_name = label.atomic_group.name
+                break
+
+    # convert hostnames & meta hosts to host/label objects
+    host_objects = models.Host.smart_get_bulk(hosts)
+    metahost_objects = []
+    for label_name in meta_hosts or []:
+        if label_name in labels_by_name:
+            label = labels_by_name[label_name]
+            metahost_objects.append(label)
+        elif label_name in atomic_groups_by_name:
+            # If given a metahost name that isn't a Label, check to
+            # see if the user was specifying an Atomic Group instead.
+            atomic_group = atomic_groups_by_name[label_name]
+            if atomic_group_name and atomic_group_name != atomic_group.name:
+                raise model_logic.ValidationError({
+                        'meta_hosts': (
+                                'Label "%s" not found.  If assumed to be an '
+                                'atomic group it would conflict with the '
+                                'supplied atomic group "%s".' % (
+                                        label_name, atomic_group_name))})
+            atomic_group_name = atomic_group.name
+        else:
+            raise model_logic.ValidationError(
+                {'meta_hosts' : 'Label "%s" not found' % label_name})
+
+    # Create and sanity check an AtomicGroup object if requested.
+    if atomic_group_name:
+        if one_time_hosts:
+            raise model_logic.ValidationError(
+                    {'one_time_hosts':
+                     'One time hosts cannot be used with an Atomic Group.'})
+        atomic_group = models.AtomicGroup.smart_get(atomic_group_name)
+        if synch_count and synch_count > atomic_group.max_number_of_machines:
+            raise model_logic.ValidationError(
+                    {'atomic_group_name' :
+                     'You have requested a synch_count (%d) greater than the '
+                     'maximum machines in the requested Atomic Group (%d).' %
+                     (synch_count, atomic_group.max_number_of_machines)})
+    else:
+        atomic_group = None
+
+    for host in one_time_hosts or []:
+        this_host = models.Host.create_one_time_host(host)
+        host_objects.append(this_host)
+
+    options = dict(name=name,
+                   priority=priority,
+                   control_file=control_file,
+                   control_type=control_type,
+                   is_template=is_template,
+                   timeout=timeout,
+                   max_runtime_hrs=max_runtime_hrs,
+                   synch_count=synch_count,
+                   run_verify=run_verify,
+                   email_list=email_list,
+                   dependencies=dependencies,
+                   reboot_before=reboot_before,
+                   reboot_after=reboot_after,
+                   parse_failed_repair=parse_failed_repair,
+                   keyvals=keyvals,
+                   drone_set=drone_set,
+                   parameterized_job=parameterized_job)
+    return create_new_job(owner=owner,
+                          options=options,
+                          host_objects=host_objects,
+                          metahost_objects=metahost_objects,
+                          atomic_group=atomic_group)
--- autotest/frontend/client/src/autotest/afe/CreateJobView.java        2010-07-14 12:49:58.000000000 -0700
+++ autotest/frontend/client/src/autotest/afe/CreateJobView.java        2010-07-14 12:49:58.000000000 -0700
@@ -512,6 +512,10 @@
         controlHeaderPanel.add(viewLink);
         controlHeaderPanel.add(editControlButton);
 
+        if (parameterizedJobsEnabled()) {
+            editControlButton.setEnabled(false);
+        }
+
         controlFilePanel.setHeader(controlHeaderPanel);
         controlFilePanel.add(controlEditPanel);
 
@@ -802,4 +806,8 @@
         selectPreferredDroneSet();
         testSelector.reset();
     }
+
+    private boolean parameterizedJobsEnabled() {
+        return staticData.getData("parameterized_jobs").isBoolean().booleanValue();
+    }
 }
--- /dev/null   2009-12-17 12:29:38.000000000 -0800
+++ autotest/frontend/migrations/063_parameterized_tests.py     2010-07-14 12:49:58.000000000 -0700
@@ -0,0 +1,144 @@
+UP_SQL = """
+CREATE TABLE afe_test_parameters (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  test_id INT NOT NULL,
+  name VARCHAR(255) NOT NULL
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_test_parameters
+ADD CONSTRAINT afe_test_parameters_test_ibfk
+FOREIGN KEY (test_id) REFERENCES afe_autotests (id);
+
+ALTER TABLE afe_test_parameters
+ADD CONSTRAINT afe_test_parameters_unique
+UNIQUE KEY (test_id, name);
+
+
+CREATE TABLE afe_parameterized_jobs (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  test_id INT NOT NULL,
+  label_id INT DEFAULT NULL,
+  use_container TINYINT(1) DEFAULT 0,
+  profile_only TINYINT(1) DEFAULT 0,
+  upload_kernel_config TINYINT(1) DEFAULT 0
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_parameterized_jobs
+ADD CONSTRAINT afe_parameterized_jobs_test_ibfk
+FOREIGN KEY (test_id) REFERENCES afe_autotests (id);
+
+ALTER TABLE afe_parameterized_jobs
+ADD CONSTRAINT afe_parameterized_jobs_label_ibfk
+FOREIGN KEY (label_id) REFERENCES afe_labels (id);
+
+
+CREATE TABLE afe_kernels (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  version VARCHAR(255) NOT NULL,
+  cmdline VARCHAR(255) DEFAULT ''
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_kernels
+ADD CONSTRAINT afe_kernels_unique
+UNIQUE KEY (version, cmdline);
+
+
+CREATE TABLE afe_parameterized_jobs_kernels (
+  parameterized_job_id INT NOT NULL,
+  kernel_id INT NOT NULL,
+  PRIMARY KEY (parameterized_job_id, kernel_id)
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_parameterized_jobs_kernels
+ADD CONSTRAINT afe_parameterized_jobs_kernels_parameterized_job_ibfk
+FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
+
+
+CREATE TABLE afe_parameterized_jobs_profilers (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  parameterized_job_id INT NOT NULL,
+  profiler_id INT NOT NULL
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_parameterized_jobs_profilers
+ADD CONSTRAINT afe_parameterized_jobs_profilers_parameterized_job_ibfk
+FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
+
+ALTER TABLE afe_parameterized_jobs_profilers
+ADD CONSTRAINT afe_parameterized_jobs_profilers_profile_ibfk
+FOREIGN KEY (profiler_id) REFERENCES afe_profilers (id);
+
+ALTER TABLE afe_parameterized_jobs_profilers
+ADD CONSTRAINT afe_parameterized_jobs_profilers_unique
+UNIQUE KEY (parameterized_job_id, profiler_id);
+
+
+CREATE TABLE afe_parameterized_job_profiler_parameters (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  parameterized_job_profiler_id INT NOT NULL,
+  parameter_name VARCHAR(255) NOT NULL,
+  parameter_value TEXT NOT NULL,
+  parameter_type ENUM('int', 'float', 'string')
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_parameterized_job_profiler_parameters
+ADD CONSTRAINT afe_parameterized_job_profiler_parameters_ibfk
+FOREIGN KEY (parameterized_job_profiler_id)
+  REFERENCES afe_parameterized_jobs_profilers (id);
+
+ALTER TABLE afe_parameterized_job_profiler_parameters
+ADD CONSTRAINT afe_parameterized_job_profiler_parameters_unique
+UNIQUE KEY (parameterized_job_profiler_id, parameter_name);
+
+
+CREATE TABLE afe_parameterized_job_parameters (
+  id INT PRIMARY KEY AUTO_INCREMENT,
+  parameterized_job_id INT NOT NULL,
+  test_parameter_id INT NOT NULL,
+  parameter_value TEXT NOT NULL,
+  parameter_type ENUM('int', 'float', 'string')
+) ENGINE = InnoDB;
+
+ALTER TABLE afe_parameterized_job_parameters
+ADD CONSTRAINT afe_parameterized_job_parameters_job_ibfk
+FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
+
+ALTER TABLE afe_parameterized_job_parameters
+ADD CONSTRAINT afe_parameterized_job_parameters_test_parameter_ibfk
+FOREIGN KEY (test_parameter_id) REFERENCES afe_test_parameters (id);
+
+ALTER TABLE afe_parameterized_job_parameters
+ADD CONSTRAINT afe_parameterized_job_parameters_unique
+UNIQUE KEY (parameterized_job_id, test_parameter_id);
+
+
+ALTER TABLE afe_jobs
+MODIFY COLUMN control_file TEXT DEFAULT NULL;
+
+ALTER TABLE afe_jobs
+ADD COLUMN parameterized_job_id INT DEFAULT NULL;
+
+ALTER TABLE afe_jobs
+ADD CONSTRAINT afe_jobs_parameterized_job_ibfk
+FOREIGN KEY (parameterized_job_id) REFERENCES afe_parameterized_jobs (id);
+"""
+
+
+DOWN_SQL = """
+ALTER TABLE afe_jobs
+DROP FOREIGN KEY afe_jobs_parameterized_job_ibfk;
+
+ALTER TABLE afe_jobs
+DROP COLUMN parameterized_job_id;
+
+ALTER TABLE afe_jobs
+MODIFY COLUMN control_file TEXT;
+
+DROP TABLE afe_parameterized_job_parameters;
+DROP TABLE afe_parameterized_job_profiler_parameters;
+DROP TABLE afe_parameterized_jobs_profilers;
+DROP TABLE afe_parameterized_jobs_kernels;
+DROP TABLE afe_kernels;
+DROP TABLE afe_parameterized_jobs;
+DROP TABLE afe_test_parameters;
+"""
--- autotest/global_config.ini  2010-07-14 12:49:58.000000000 -0700
+++ autotest/global_config.ini  2010-07-14 12:49:58.000000000 -0700
@@ -16,6 +16,7 @@
 min_retry_delay: 20
 max_retry_delay: 60
 graph_cache_creation_timeout_minutes: 10
+parameterized_jobs: False
 
 [TKO]
 host: localhost
--- autotest/scheduler/scheduler_models.py      2010-07-14 12:49:58.000000000 -0700
+++ autotest/scheduler/scheduler_models.py      2010-07-14 12:49:58.000000000 -0700
@@ -765,7 +765,8 @@
     _fields = ('id', 'owner', 'name', 'priority', 'control_file',
                'control_type', 'created_on', 'synch_count', 'timeout',
                'run_verify', 'email_list', 'reboot_before', 'reboot_after',
-               'parse_failed_repair', 'max_runtime_hrs', 'drone_set_id')
+               'parse_failed_repair', 'max_runtime_hrs', 'drone_set_id',
+               'parameterized_job_id')
 
     # This does not need to be a column in the DB.  The delays are likely to
     # be configured short.  If the scheduler is stopped and restarted in
_______________________________________________
Autotest mailing list
[email protected]
http://test.kernel.org/cgi-bin/mailman/listinfo/autotest

Reply via email to