This is an automated email from the ASF dual-hosted git repository.

dimuthuupe pushed a commit to branch airavata-v2-refactoring
in repository https://gitbox.apache.org/repos/asf/airavata.git


The following commit(s) were added to refs/heads/airavata-v2-refactoring by 
this push:
     new fb3c2053ff Initial workflow framework
fb3c2053ff is described below

commit fb3c2053ffb1d666ca7ac0905b0e604eca8ae85e
Author: DImuthuUpe <[email protected]>
AuthorDate: Wed Jun 21 04:39:33 2023 -0400

    Initial workflow framework
---
 modules/airavata-apis/airavata-apis-server/pom.xml |  11 +
 .../apache/airavata/apis/config/ConfigBeans.java   |   7 +
 .../apis/db/entity/backend/EC2BackendEntity.java   |  10 +
 .../apis/scheduling/ExperimentLauncher.java        | 189 +++++++++++++++++
 .../airavata/apis/service/ExecutionService.java    |   4 +
 .../apis/service/impl/ExecutionServiceImpl.java    |  12 ++
 .../airavata/apis/workflow/WorkflowExecutor.java   | 225 +++++++++++++++++++++
 .../apis/workflow/task/common/BaseTask.java        | 109 ++++++++++
 .../apis/workflow/task/common/OutPort.java         |  14 ++
 .../apis/workflow/task/common/TaskParamType.java   |   6 +
 .../apis/workflow/task/common/TaskUtil.java        |  91 +++++++++
 .../workflow/task/common/annotation/TaskDef.java   |  12 ++
 .../task/common/annotation/TaskOutPort.java        |  12 ++
 .../workflow/task/common/annotation/TaskParam.java |  14 ++
 .../apis/workflow/task/data/DataMovementTask.java  |  25 +++
 .../task/docker/DestroyDockerContainerTask.java    |   4 +
 .../task/docker/StartDockerContainerTask.java      |   4 +
 .../workflow/task/ec2/CreateEC2InstanceTask.java   |  25 +++
 .../workflow/task/ec2/DestroyEC2InstanceTask.java  |   4 +
 .../workflow/task/runners/RunLocalCommandTask.java |   4 +
 .../task/runners/RunServerCommandTask.java         |   4 +
 .../src/main/resources/api.properties              |  16 ++
 .../src/main/resources/log4j2.xml                  |   2 +-
 .../src/main/resources/workflow.properties         |  20 ++
 .../src/main/proto/execution/experiment_stub.proto |   1 +
 pom.xml                                            | 116 -----------
 26 files changed, 824 insertions(+), 117 deletions(-)

diff --git a/modules/airavata-apis/airavata-apis-server/pom.xml 
b/modules/airavata-apis/airavata-apis-server/pom.xml
index d4642ec899..8660d06d2d 100644
--- a/modules/airavata-apis/airavata-apis-server/pom.xml
+++ b/modules/airavata-apis/airavata-apis-server/pom.xml
@@ -94,6 +94,17 @@
             <artifactId>dozer-proto3</artifactId>
             <version>6.5.2</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.helix</groupId>
+            <artifactId>helix-core</artifactId>
+            <version>0.9.9</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
     <properties>
         <maven.compiler.source>18</maven.compiler.source>
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/config/ConfigBeans.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/config/ConfigBeans.java
index c421330640..31f0c436b3 100644
--- 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/config/ConfigBeans.java
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/config/ConfigBeans.java
@@ -1,5 +1,7 @@
 package org.apache.airavata.apis.config;
 
+import org.apache.airavata.api.execution.ExperimentService;
+import org.apache.airavata.apis.scheduling.ExperimentLauncher;
 import org.apache.airavata.apis.scheduling.MetaScheduler;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -10,4 +12,9 @@ public class ConfigBeans {
     public MetaScheduler metaScheduler() {
         return new MetaScheduler();
     }

    /**
     * Exposes the {@link ExperimentLauncher} as a Spring-managed bean. Returning it from a
     * {@code @Bean} method lets the container post-process the instance, so the launcher's
     * {@code @Autowired} fields (e.g. its ExecutionService) are injected after construction.
     */
    @Bean
    ExperimentLauncher experimentLauncher() {
        return new ExperimentLauncher();
    }
 }
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/db/entity/backend/EC2BackendEntity.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/db/entity/backend/EC2BackendEntity.java
index 5fb599e8f4..cfd8d8b4d2 100644
--- 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/db/entity/backend/EC2BackendEntity.java
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/db/entity/backend/EC2BackendEntity.java
@@ -11,6 +11,9 @@ public class EC2BackendEntity extends ComputeBackendEntity {
     @Column
     String region;
 
+    @Column
+    String imageId;
+
     @Column
     String awsCredentialId;
 
@@ -38,4 +41,11 @@ public class EC2BackendEntity extends ComputeBackendEntity {
         this.awsCredentialId = awsCredentialId;
     }
 
    /** @return id of the machine image used to provision this EC2 backend (presumably an AMI id — confirm) */
    public String getImageId() {
        return imageId;
    }

    /** @param imageId id of the machine image used to provision this EC2 backend */
    public void setImageId(String imageId) {
        this.imageId = imageId;
    }
 }
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/scheduling/ExperimentLauncher.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/scheduling/ExperimentLauncher.java
new file mode 100644
index 0000000000..b1fd6d5846
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/scheduling/ExperimentLauncher.java
@@ -0,0 +1,189 @@
package org.apache.airavata.apis.scheduling;

import org.apache.airavata.api.execution.ExperimentLaunchRequest;
import org.apache.airavata.api.execution.stubs.Experiment;
import org.apache.airavata.apis.service.ExecutionService;
import org.apache.airavata.apis.workflow.task.common.BaseTask;
import org.apache.airavata.apis.workflow.task.common.OutPort;
import org.apache.airavata.apis.workflow.task.common.TaskUtil;
import org.apache.airavata.apis.workflow.task.common.annotation.TaskDef;
import org.apache.airavata.apis.workflow.task.common.annotation.TaskOutPort;
import org.apache.airavata.apis.workflow.task.data.DataMovementTask;
import org.apache.airavata.apis.workflow.task.ec2.CreateEC2InstanceTask;
import org.apache.helix.HelixManager;
import org.apache.helix.HelixManagerFactory;
import org.apache.helix.InstanceType;
import org.apache.helix.task.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import java.lang.reflect.Field;
import java.util.*;

/**
 * Converts a graph of {@link BaseTask} instances (wired together through {@link OutPort}s)
 * into an Apache Helix task-framework {@link Workflow} and submits it for execution.
 *
 * <p>Lifecycle: call {@link #init} once to connect a SPECTATOR Helix manager and create the
 * {@link TaskDriver}, then {@link #buildAndRunWorkflow} once per workflow, and
 * {@link #destroy} on shutdown.</p>
 */
public class ExperimentLauncher {

    // NOTE(review): 1 second is a very aggressive expiry for whole workflows — confirm intended.
    private static final long WORKFLOW_EXPIRY_TIME = 1 * 1000;
    private static final long TASK_EXPIRY_TIME = 24 * 60 * 60 * 1000;
    // NOTE(review): currently unused; buildWorkflowRecursively uses a literal 20 for
    // setNumConcurrentTasksPerInstance — consider consolidating.
    private static final int PARALLEL_JOBS_PER_WORKFLOW = 20;

    private final static Logger logger = LoggerFactory.getLogger(ExperimentLauncher.class);

    private TaskDriver taskDriver;
    private HelixManager helixManager;

    @Autowired
    private ExecutionService executionService;

    /**
     * Entry point for launching a persisted experiment. Currently only validates that the
     * experiment exists; building the task graph for it is not implemented yet.
     *
     * @param experimentLaunchRequest request carrying the experiment id to launch
     * @throws Exception if no experiment exists for the requested id
     */
    public void launchExperiment(ExperimentLaunchRequest experimentLaunchRequest) throws Exception {
        Optional<Experiment> experimentOp =
                executionService.getExperiment(experimentLaunchRequest.getExperimentId());
        if (experimentOp.isEmpty()) {
            // IllegalArgumentException (a RuntimeException) is still caught by callers
            // handling the declared Exception, but names the failure precisely.
            throw new IllegalArgumentException(
                    "No experiment with id " + experimentLaunchRequest.getExperimentId());
        }

        Experiment experiment = experimentOp.get();
        // TODO: build the task graph for this experiment and submit via buildAndRunWorkflow.
    }

    /**
     * Manual smoke test: builds a two-task workflow (data movement -> EC2 create) against a
     * local ZooKeeper and submits it. Not used in production wiring.
     */
    public static void main(String args[]) throws Exception {
        ExperimentLauncher launcher = new ExperimentLauncher();
        launcher.init("airavata", "wm", "localhost:2181");

        Map<String, BaseTask> taskMap = new HashMap<>();

        DataMovementTask dataMovementTask = new DataMovementTask();
        dataMovementTask.setTaskId(UUID.randomUUID().toString());
        taskMap.put(dataMovementTask.getTaskId(), dataMovementTask);

        CreateEC2InstanceTask ec2InstanceTask = new CreateEC2InstanceTask();
        ec2InstanceTask.setTaskId(UUID.randomUUID().toString());
        taskMap.put(ec2InstanceTask.getTaskId(), ec2InstanceTask);

        // Wire: data movement -> EC2 instance creation.
        dataMovementTask.addOutPort(new OutPort().setNextTaskId(ec2InstanceTask.getTaskId()));

        String[] startTaskIds = {dataMovementTask.getTaskId()};
        logger.info("Submitting workflow");
        launcher.buildAndRunWorkflow(taskMap, startTaskIds);
    }

    /**
     * Connects a SPECTATOR-mode Helix manager (submits work, does not execute it) and
     * prepares the {@link TaskDriver}. Registers a shutdown hook that disconnects the
     * manager on JVM exit.
     *
     * @param clusterName         Helix cluster to submit workflows to
     * @param workflowManagerName instance name for this spectator
     * @param zkAddress           ZooKeeper connection string
     */
    public void init(String clusterName, String workflowManagerName, String zkAddress)
            throws Exception {

        helixManager = HelixManagerFactory.getZKHelixManager(clusterName, workflowManagerName,
                InstanceType.SPECTATOR, zkAddress);
        helixManager.connect();

        Runtime.getRuntime().addShutdownHook(
                new Thread(() -> {
                    if (helixManager != null && helixManager.isConnected()) {
                        helixManager.disconnect();
                    }
                })
        );

        taskDriver = new TaskDriver(helixManager);
    }

    /** Disconnects the Helix manager if it was ever connected (mirrors the shutdown hook's guard). */
    public void destroy() {
        if (helixManager != null && helixManager.isConnected()) {
            helixManager.disconnect();
        }
    }

    /**
     * Builds a Helix workflow from the task graph and starts it.
     *
     * @param taskMap      all tasks in the graph, keyed by task id
     * @param startTaskIds ids of the root tasks (no in-edges)
     * @return the generated workflow name
     * @throws Exception if {@link #init} was not called, or the graph is malformed
     */
    public String buildAndRunWorkflow(Map<String, BaseTask> taskMap, String[] startTaskIds) throws Exception {

        if (taskDriver == null) {
            throw new IllegalStateException("Workflow operator needs to be initialized");
        }

        String workflowName = UUID.randomUUID().toString();
        Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName).setExpiry(0);

        // Tracks tasks already expanded so a task reachable through several paths is added
        // as a job exactly once, and a cyclic graph cannot recurse forever.
        Set<String> visitedTaskIds = new HashSet<>();
        for (String startTaskId : startTaskIds) {
            buildWorkflowRecursively(workflowBuilder, workflowName, startTaskId, taskMap, visitedTaskIds);
        }

        WorkflowConfig.Builder config = new WorkflowConfig.Builder()
                .setFailureThreshold(0)
                .setAllowOverlapJobAssignment(true);

        workflowBuilder.setWorkflowConfig(config.build());
        workflowBuilder.setExpiry(WORKFLOW_EXPIRY_TIME);
        Workflow workflow = workflowBuilder.build();

        logger.info("Starting workflow {}", workflowName);
        taskDriver.start(workflow);
        return workflowName;
    }

    /**
     * Depth-first expansion of the task graph: registers {@code nextTaskId} as a
     * single-task Helix job, records parent->child dependencies for each out-port,
     * then recurses into the children.
     *
     * @param visitedTaskIds ids already registered; prevents duplicate addJob calls
     *                       (shared downstream tasks) and infinite recursion (cycles)
     */
    private void buildWorkflowRecursively(Workflow.Builder workflowBuilder, String workflowName,
                                          String nextTaskId, Map<String, BaseTask> taskMap,
                                          Set<String> visitedTaskIds)
            throws Exception {

        // Already expanded through another path — the caller has recorded the dependency edge.
        if (!visitedTaskIds.add(nextTaskId)) {
            return;
        }

        BaseTask currentTask = taskMap.get(nextTaskId);
        if (currentTask == null) {
            logger.error("Couldn't find a task with id {} in the task map", nextTaskId);
            throw new IllegalArgumentException("Couldn't find a task with id " + nextTaskId + " in the task map");
        }

        TaskDef blockingTaskDef = currentTask.getClass().getAnnotation(TaskDef.class);
        if (blockingTaskDef == null) {
            logger.error("Couldn't find the task def annotation in class {}", currentTask.getClass().getName());
            throw new IllegalStateException(
                    "Couldn't find the task def annotation in class " + currentTask.getClass().getName());
        }

        // The @TaskDef name doubles as the Helix task command, which the participant's
        // TaskFactory map resolves back to a task implementation.
        String taskName = blockingTaskDef.name();
        TaskConfig.Builder taskBuilder = new TaskConfig.Builder()
                .setTaskId(currentTask.getTaskId())
                .setCommand(taskName);

        // All @TaskParam fields travel to the worker through the Helix task config map.
        Map<String, String> paramMap = TaskUtil.serializeTaskData(currentTask);
        paramMap.forEach(taskBuilder::addConfig);

        List<TaskConfig> taskBuilds = new ArrayList<>();
        taskBuilds.add(taskBuilder.build());

        JobConfig.Builder job = new JobConfig.Builder()
                .addTaskConfigs(taskBuilds)
                .setFailureThreshold(0)
                .setExpiry(WORKFLOW_EXPIRY_TIME)
                .setTimeoutPerTask(TASK_EXPIRY_TIME)
                .setNumConcurrentTasksPerInstance(20)
                .setMaxAttemptsPerTask(currentTask.getRetryCount());

        workflowBuilder.addJob(currentTask.getTaskId(), job);

        for (OutPort outPort : getOutPortsOfTask(currentTask)) {
            if (outPort != null) {
                workflowBuilder.addParentChildDependency(currentTask.getTaskId(), outPort.getNextTaskId());
                logger.info("Parent to child dependency {} -> {}",
                        currentTask.getTaskId(), outPort.getNextTaskId());
                buildWorkflowRecursively(workflowBuilder, workflowName,
                        outPort.getNextTaskId(), taskMap, visitedTaskIds);
            }
        }
    }

    /**
     * Collects every {@link OutPort} declared on {@code @TaskOutPort}-annotated fields,
     * walking the task's full class hierarchy so ports on superclasses are included.
     */
    @SuppressWarnings("unchecked") // @TaskOutPort fields hold List<OutPort> by convention (see BaseTask)
    private <T extends BaseTask> List<OutPort> getOutPortsOfTask(T taskObj) throws IllegalAccessException {

        List<OutPort> outPorts = new ArrayList<>();
        for (Class<?> c = taskObj.getClass(); c != null; c = c.getSuperclass()) {
            for (Field field : c.getDeclaredFields()) {
                TaskOutPort outPortAnnotation = field.getAnnotation(TaskOutPort.class);
                if (outPortAnnotation != null) {
                    field.setAccessible(true);
                    outPorts.addAll((List<OutPort>) field.get(taskObj));
                }
            }
        }
        return outPorts;
    }
}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/ExecutionService.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/ExecutionService.java
index fe37625491..2831902119 100644
--- 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/ExecutionService.java
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/ExecutionService.java
@@ -2,6 +2,8 @@ package org.apache.airavata.apis.service;
 
 import org.apache.airavata.api.execution.stubs.Experiment;
 
+import java.util.Optional;
+
 /**
  * Transactional service layer for CRUD operations on database.
  */
@@ -10,4 +12,6 @@ public interface ExecutionService {
     Experiment createExperiment(Experiment experiment);
 
     Experiment updateExperiment(Experiment experiment);

    /**
     * Looks up an experiment by id.
     *
     * @param experimentId id of the experiment to fetch
     * @return the experiment, or an empty Optional when no experiment has that id
     */
    Optional<Experiment> getExperiment(String experimentId);
 }
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/impl/ExecutionServiceImpl.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/impl/ExecutionServiceImpl.java
index ed94225800..6eda4da1ce 100644
--- 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/impl/ExecutionServiceImpl.java
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/service/impl/ExecutionServiceImpl.java
@@ -54,4 +54,16 @@ public class ExecutionServiceImpl implements 
ExecutionService {
 
     }
 
+    @Override
+    public Optional<Experiment> getExperiment(String experimentId) {
+        Optional<ExperimentEntity> maybeExperimentEntity = 
experimentRepository.findById(experimentId);
+
+        if (maybeExperimentEntity.isEmpty()) {
+            return Optional.empty();
+        }
+
+        Experiment experiment = Experiment.getDefaultInstance();
+        ExperimentEntity experimentEntity = maybeExperimentEntity.get();
+        return 
Optional.of(experimentMapper.mapEntityToModel(experimentEntity));
+    }
 }
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/WorkflowExecutor.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/WorkflowExecutor.java
new file mode 100644
index 0000000000..29e67f51da
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/WorkflowExecutor.java
@@ -0,0 +1,225 @@
package org.apache.airavata.apis.workflow;

import org.apache.airavata.apis.workflow.task.common.BaseTask;
import org.apache.airavata.apis.workflow.task.common.annotation.TaskDef;
import org.apache.helix.InstanceType;
import org.apache.helix.controller.HelixControllerMain;
import org.apache.helix.examples.OnlineOfflineStateModelFactory;
import org.apache.helix.manager.zk.ZKHelixAdmin;
import org.apache.helix.manager.zk.ZKHelixManager;
import org.apache.helix.manager.zk.ZNRecordSerializer;
import org.apache.helix.manager.zk.ZkClient;
import org.apache.helix.model.BuiltInStateModelDefinitions;
import org.apache.helix.model.InstanceConfig;
import org.apache.helix.participant.StateMachineEngine;
import org.apache.helix.task.TaskFactory;
import org.apache.helix.task.TaskStateModelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.PropertySource;

import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Standalone Spring Boot process hosting both halves of the Helix task runtime:
 * a STANDALONE workflow controller and a PARTICIPANT that executes the tasks
 * submitted by ExperimentLauncher. The participant waits for the controller to
 * come up (via {@code controllerStartLatch}) before registering itself.
 *
 * <p>Configuration comes from {@code workflow.properties} on the classpath.</p>
 */
@SpringBootApplication
@PropertySource(value = "classpath:workflow.properties")
public class WorkflowExecutor implements CommandLineRunner {

    private final static Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class);

    @org.springframework.beans.factory.annotation.Value("${workflow.cluster.name}")
    private String clusterName;

    @org.springframework.beans.factory.annotation.Value("${workflow.controller.name}")
    private String controllerName;

    @org.springframework.beans.factory.annotation.Value("${workflow.participant.name}")
    private String participantName;

    @org.springframework.beans.factory.annotation.Value("${zookeeper.address}")
    private String zkAddress;

    private org.apache.helix.HelixManager zkControllerHelixManager;
    private org.apache.helix.HelixManager zkParticipantHelixManager;

    // Released once the controller is running; the participant blocks on it.
    private CountDownLatch controllerStartLatch = new CountDownLatch(1);
    // Never counted down in this class: controller and participant run until JVM exit.
    private CountDownLatch serverStopLatch = new CountDownLatch(1);

    // NOTE(review): nothing in this class ever adds to runningTasks, so the graceful
    // shutdown loop in disconnectParticipant() is currently a no-op — confirm whether
    // task implementations were meant to register themselves here.
    private final List<String> runningTasks = Collections.synchronizedList(new ArrayList<>());
    private int shutdownGracePeriod = 30000;
    private int shutdownGraceRetries = 2;
    private final ExecutorService pool = Executors.newFixedThreadPool(2);

    /** Spring Boot entry hook: launches controller and participant on the two pool threads. */
    @Override
    public void run(String... args) throws Exception {
        pool.submit(this::startController);
        pool.submit(this::startParticipant);
    }

    /**
     * Ensures the Helix cluster exists, then runs a STANDALONE controller until JVM exit.
     * Releases {@code controllerStartLatch} once the controller is up.
     */
    private void startController() {
        logger.info("Starting Workflow Controller ......");

        try {
            // Close the admin client pair even if cluster creation fails
            // (the original leaked both on exception).
            ZkClient zkClient = new ZkClient(zkAddress, ZkClient.DEFAULT_SESSION_TIMEOUT,
                    ZkClient.DEFAULT_CONNECTION_TIMEOUT, new ZNRecordSerializer());
            try {
                ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkClient);
                try {
                    // Creates the zk cluster if not available
                    if (!zkHelixAdmin.getClusters().contains(clusterName)) {
                        logger.info("Creating the cluster for first time: {}", clusterName);
                        zkHelixAdmin.addCluster(clusterName, true);
                    }
                } finally {
                    zkHelixAdmin.close();
                }
            } finally {
                zkClient.close();
            }

            logger.info("Connection to helix cluster : {}  with name : {}", clusterName, controllerName);
            logger.info("Zookeeper connection string {}", zkAddress);

            zkControllerHelixManager = HelixControllerMain.startHelixController(zkAddress, clusterName,
                    controllerName, HelixControllerMain.STANDALONE);
            controllerStartLatch.countDown();
            logger.info("Workflow Controller Started...");
            serverStopLatch.await();
            logger.info("Workflow Controller Stopping");

        } catch (Exception ex) {
            // NOTE(review): if startup fails before countDown, the participant waits forever
            // on controllerStartLatch — consider a failure signal.
            logger.error("Error in running the Controller: {}", controllerName, ex);

        } finally {
            disconnectController();
        }
    }

    /**
     * Waits for the controller, registers (or re-enables) this participant instance in the
     * cluster, wires the state-model factories, and connects until JVM exit.
     */
    private void startParticipant() {
        try {
            controllerStartLatch.await();
            logger.info("Controller started. Starting the participant...");

            try {
                // Intentionally left open: the shutdown hook below still needs this admin
                // connection to disable the instance when the JVM exits.
                ZkClient zkClient = new ZkClient(zkAddress, ZkClient.DEFAULT_SESSION_TIMEOUT,
                        ZkClient.DEFAULT_CONNECTION_TIMEOUT, new ZNRecordSerializer());
                ZKHelixAdmin zkHelixAdmin = new ZKHelixAdmin(zkClient);

                List<String> nodesInCluster = zkHelixAdmin.getInstancesInCluster(clusterName);

                if (!nodesInCluster.contains(participantName)) {
                    InstanceConfig instanceConfig = new InstanceConfig(participantName);
                    instanceConfig.setHostName("localhost");
                    instanceConfig.setInstanceEnabled(true);
                    instanceConfig.setMaxConcurrentTask(30);
                    zkHelixAdmin.addInstance(clusterName, instanceConfig);
                    logger.info("Participant: " + participantName + " has been added to cluster: " + clusterName);

                } else {
                    zkHelixAdmin.enableInstance(clusterName, participantName, true);
                    logger.debug("Participant: " + participantName + " has been re-enabled at the cluster: " + clusterName);
                }

                Runtime.getRuntime().addShutdownHook(
                        new Thread(() -> {
                            logger.debug("Participant: " + participantName + " shutdown hook called");
                            try {
                                zkHelixAdmin.enableInstance(clusterName, participantName, false);
                            } catch (Exception e) {
                                logger.warn("Participant: " + participantName + " was not disabled normally", e);
                            }
                            disconnectParticipant();
                        })
                );

                zkParticipantHelixManager = new ZKHelixManager(clusterName, participantName, InstanceType.PARTICIPANT, zkAddress);
                // register online-offline model
                StateMachineEngine machineEngine = zkParticipantHelixManager.getStateMachineEngine();
                OnlineOfflineStateModelFactory factory = new OnlineOfflineStateModelFactory(participantName);
                machineEngine.registerStateModelFactory(BuiltInStateModelDefinitions.OnlineOffline.name(), factory);

                // register task model so Helix can dispatch task-framework work to us
                machineEngine.registerStateModelFactory("Task", new TaskStateModelFactory(zkParticipantHelixManager, getTaskFactory()));

                logger.debug("Participant: " + participantName + ", registered state model factories.");

                zkParticipantHelixManager.connect();
                logger.info("Participant: " + participantName + ", has connected to cluster: " + clusterName);
                serverStopLatch.await();
                logger.info("Workflow participant is stopping");
            } catch (Exception e) {
                logger.error("Failed to start the participant", e);
            }

        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            logger.error("Failed waiting for controller to start.", e);
        }
    }

    /**
     * Loads the known task classes and maps each @TaskDef name to a factory. A single task
     * instance is reused per class; each invocation's context is queued on it (see BaseTask).
     *
     * @throws Exception if a task class cannot be loaded, instantiated, or lacks @TaskDef
     */
    private Map<String, TaskFactory> getTaskFactory() throws Exception {

        String[] taskClasses = {
                "org.apache.airavata.apis.workflow.task.data.DataMovementTask",
                "org.apache.airavata.apis.workflow.task.ec2.CreateEC2InstanceTask"};

        Map<String, TaskFactory> taskMap = new HashMap<>();

        for (String className : taskClasses) {
            try {
                logger.info("Loading task {}", className);
                Class<?> taskClz = Class.forName(className);
                Object taskObj = taskClz.getConstructor().newInstance();
                BaseTask baseTask = (BaseTask) taskObj;
                TaskFactory taskFactory = context -> {
                    baseTask.setCallbackContext(context);
                    return baseTask;
                };
                TaskDef btDef = baseTask.getClass().getAnnotation(TaskDef.class);
                if (btDef == null) {
                    // Fail fast with a clear message instead of an NPE on btDef.name().
                    throw new IllegalStateException("Task class " + className + " is missing the @TaskDef annotation");
                }
                taskMap.put(btDef.name(), taskFactory);

            } catch (ClassNotFoundException e) {
                logger.error("Couldn't find a class with name {}", className);
                throw e;
            }
        }

        return taskMap;
    }

    /** Disconnects the controller's Helix manager, if one was ever started. */
    private void disconnectController() {
        if (zkControllerHelixManager != null) {
            zkControllerHelixManager.disconnect();
            // Log after the fact so the message is accurate (original logged before disconnecting).
            logger.info("Controller: {}, has disconnected from cluster: {}", controllerName, clusterName);
        }
    }

    /**
     * Waits (bounded retries) for in-flight tasks to drain, then disconnects the
     * participant's Helix manager. Invoked from the shutdown hook.
     */
    private void disconnectParticipant() {
        logger.info("Shutting down participant. Currently available tasks " + runningTasks.size());
        if (zkParticipantHelixManager != null) {
            if (runningTasks.size() > 0) {
                for (int i = 0; i <= shutdownGraceRetries; i++) {
                    logger.info("Shutting down gracefully [RETRY " + i + "]");
                    try {
                        Thread.sleep(shutdownGracePeriod);
                    } catch (InterruptedException e) {
                        logger.warn("Waiting for running tasks failed [RETRY " + i + "]", e);
                        Thread.currentThread().interrupt(); // restore interrupt and stop waiting
                        break;
                    }
                    if (runningTasks.size() == 0) {
                        break;
                    }
                }
            }
            logger.info("Participant: " + participantName + ", has disconnected from cluster: " + clusterName);
            zkParticipantHelixManager.disconnect();
        }
    }

    /** Boots this class as a non-web Spring application. */
    public static void main(String args[]) throws Exception {
        SpringApplication app = new SpringApplication(WorkflowExecutor.class);
        app.setWebApplicationType(WebApplicationType.NONE);
        logger.info("Starting app"); // was System.out.printf with no newline/format args
        app.run(args);
    }
}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/BaseTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/BaseTask.java
new file mode 100644
index 0000000000..1b3e098ae2
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/BaseTask.java
@@ -0,0 +1,109 @@
package org.apache.airavata.apis.workflow.task.common;

import org.apache.airavata.apis.workflow.task.common.annotation.TaskOutPort;
import org.apache.airavata.apis.workflow.task.common.annotation.TaskParam;
import org.apache.helix.task.Task;
import org.apache.helix.task.TaskCallbackContext;
import org.apache.helix.task.TaskResult;
import org.apache.helix.task.UserContentStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * Base class for workflow tasks executed by the Helix task framework.
 *
 * <p>A single instance can serve multiple concurrent Helix invocations: the TaskFactory
 * (see WorkflowExecutor) queues each invocation's {@link TaskCallbackContext} via
 * {@link #setCallbackContext(TaskCallbackContext)}, and {@link #run()} — called on a
 * Helix worker thread — dequeues one context and binds it, plus the deserialized
 * task parameters, to the calling thread through {@link ThreadLocal}s.
 * NOTE(review): {@code outPorts} is plain instance state while taskId/retryCount are
 * thread-local — confirm out-port wiring is safe under concurrent invocations.</p>
 */
public abstract class BaseTask extends UserContentStore implements Task {

    private final static Logger logger = LoggerFactory.getLogger(BaseTask.class);

    // Context of the invocation currently running on this thread; bound in run().
    private ThreadLocal<TaskCallbackContext> callbackContext = new ThreadLocal<>();
    // Contexts queued by the TaskFactory, one per pending invocation.
    private BlockingQueue<TaskCallbackContext> callbackContextQueue = new LinkedBlockingQueue<>();

    // Downstream edges of this task in the workflow graph (serialized via @TaskOutPort).
    @TaskOutPort(name = "nextTask")
    private List<OutPort> outPorts = new ArrayList<>();

    // Logical task id (distinct from the Helix-level task id in the callback context).
    @TaskParam(name = "taskId")
    private ThreadLocal<String> taskId = new ThreadLocal<>();

    // Max Helix attempts for this task; defaults to 3.
    @TaskParam(name = "retryCount")
    private ThreadLocal<Integer> retryCount = ThreadLocal.withInitial(()-> 3);

    /**
     * Helix entry point. Binds one queued callback context to this thread, restores the
     * {@code @TaskParam} fields from the Helix task config map, then delegates to
     * {@link #onRun()}. Any failure is converted to a FAILED {@link TaskResult} rather
     * than propagated.
     */
    @Override
    public TaskResult run() {
        try {
            // Non-blocking poll: the factory must have queued a context before Helix runs us.
            TaskCallbackContext cbc = callbackContextQueue.poll();

            if (cbc == null) {
                logger.error("No callback context available");
                throw new Exception("No callback context available");
            }

            this.callbackContext.set(cbc);
            String helixTaskId = getCallbackContext().getTaskConfig().getId();
            logger.info("Running task {}", helixTaskId);
            TaskUtil.deserializeTaskData(this, getCallbackContext().getTaskConfig().getConfigMap());
        } catch (Exception e) {
            logger.error("Failed at deserializing task data", e);
            return new TaskResult(TaskResult.Status.FAILED, "Failed in deserializing task data");
        }

        try {
            return onRun();
        } catch (Exception e) {
            logger.error("Unknown error while running task {}", getTaskId(), e);
            return new TaskResult(TaskResult.Status.FAILED, "Failed due to unknown error");
        }
    }

    /** Helix cancellation hook; delegates to {@link #onCancel()} and logs (never throws). */
    @Override
    public void cancel() {
        try {
            onCancel();
        } catch (Exception e) {
            logger.error("Unknown error while cancelling task {}", getTaskId(), e);
        }
    }

    /** Task-specific work; runs after parameters have been deserialized onto this thread. */
    public abstract TaskResult onRun() throws Exception;

    /** Task-specific cancellation handling. */
    public abstract void onCancel() throws Exception;

    public List<OutPort> getOutPorts() {
        return outPorts;
    }

    /** Adds a downstream edge; used when wiring the task graph before submission. */
    public void addOutPort(OutPort outPort) {
        this.outPorts.add(outPort);
    }

    public int getRetryCount() {
        return retryCount.get();
    }

    public void setRetryCount(int retryCount) {
        this.retryCount.set(retryCount);
    }

    /** @return the callback context bound to the current thread (null before run() binds one). */
    public TaskCallbackContext getCallbackContext() {
        return callbackContext.get();
    }

    public String getTaskId() {
        return taskId.get();
    }

    public void setTaskId(String taskId) {
        this.taskId.set(taskId);
    }

    /**
     * Called by the Helix TaskFactory once per invocation; queues the context for the
     * worker thread that will later execute {@link #run()}.
     */
    public void setCallbackContext(TaskCallbackContext callbackContext) {
        logger.info("Setting callback context {}", callbackContext.getJobConfig().getId());
        try {
            this.callbackContextQueue.put(callbackContext);
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is not restored here — consider
            // Thread.currentThread().interrupt().
            logger.error("Failed to put callback context to the queue", e);
        }
    }
}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/OutPort.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/OutPort.java
new file mode 100644
index 0000000000..8d46951717
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/OutPort.java
@@ -0,0 +1,14 @@
+package org.apache.airavata.apis.workflow.task.common;
+
+public class OutPort {
+    private String nextTaskId;
+
+    public String getNextTaskId() {
+        return nextTaskId;
+    }
+
+    public OutPort setNextTaskId(String nextTaskId) {
+        this.nextTaskId = nextTaskId;
+        return this;
+    }
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskParamType.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskParamType.java
new file mode 100644
index 0000000000..e0ba778296
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskParamType.java
@@ -0,0 +1,6 @@
+package org.apache.airavata.apis.workflow.task.common;
+
/**
 * Contract for complex task parameter values that can be round-tripped through
 * the flat string map used to persist task state.
 */
public interface TaskParamType {

    /** Encodes this value as a single string. */
    String serialize();

    /** Restores this value from a string produced by {@link #serialize()}. */
    void deserialize(String content);
}
\ No newline at end of file
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskUtil.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskUtil.java
new file mode 100644
index 0000000000..7aa23b7c5e
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/TaskUtil.java
@@ -0,0 +1,91 @@
+package org.apache.airavata.apis.workflow.task.common;
+
+import org.apache.airavata.apis.workflow.task.common.annotation.TaskParam;
+import org.apache.commons.beanutils.PropertyUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.beans.PropertyDescriptor;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class TaskUtil {
+
+    private final static Logger logger = 
LoggerFactory.getLogger(TaskUtil.class);
+
+    public static <T extends BaseTask> void deserializeTaskData(T instance, 
Map<String, String> params) throws IllegalAccessException, 
NoSuchMethodException, InvocationTargetException, InstantiationException {
+
+        List<Field> allFields = new ArrayList<>();
+        Class genericClass = instance.getClass();
+
+        while (BaseTask.class.isAssignableFrom(genericClass)) {
+            Field[] declaredFields = genericClass.getDeclaredFields();
+            for (Field declaredField : declaredFields) {
+                allFields.add(declaredField);
+            }
+            genericClass = genericClass.getSuperclass();
+        }
+
+        for (Field classField : allFields) {
+            TaskParam param = classField.getAnnotation(TaskParam.class);
+            if (param != null) {
+                if (params.containsKey(param.name())) {
+                    classField.setAccessible(true);
+                    PropertyDescriptor propertyDescriptor = 
PropertyUtils.getPropertyDescriptor(instance, classField.getName());
+                    Method writeMethod = 
PropertyUtils.getWriteMethod(propertyDescriptor);
+                    Class<?>[] methodParamType = 
writeMethod.getParameterTypes();
+                    Class<?> writeParameterType = methodParamType[0];
+
+                    if (writeParameterType.isAssignableFrom(String.class)) {
+                        writeMethod.invoke(instance, params.get(param.name()));
+                    } else if 
(writeParameterType.isAssignableFrom(Integer.class) ||
+                            writeParameterType.isAssignableFrom(Integer.TYPE)) 
{
+                        writeMethod.invoke(instance, 
Integer.parseInt(params.get(param.name())));
+                    } else if (writeParameterType.isAssignableFrom(Long.class) 
||
+                            writeParameterType.isAssignableFrom(Long.TYPE)) {
+                        writeMethod.invoke(instance, 
Long.parseLong(params.get(param.name())));
+                    } else if 
(writeParameterType.isAssignableFrom(Boolean.class) ||
+                            writeParameterType.isAssignableFrom(Boolean.TYPE)) 
{
+                        writeMethod.invoke(instance, 
Boolean.parseBoolean(params.get(param.name())));
+                    } else if 
(TaskParamType.class.isAssignableFrom(writeParameterType)) {
+                        Constructor<?> ctor = 
writeParameterType.getConstructor();
+                        Object obj = ctor.newInstance();
+                        
((TaskParamType)obj).deserialize(params.get(param.name()));
+                        writeMethod.invoke(instance, obj);
+                    }
+                }
+            }
+        }
+    }
+
+    public static <T extends BaseTask> Map<String, String> serializeTaskData(T 
data) throws IllegalAccessException, InvocationTargetException, 
NoSuchMethodException {
+
+        Map<String, String> result = new HashMap<>();
+        for (Class<?> c = data.getClass(); c != null; c = c.getSuperclass()) {
+            Field[] fields = c.getDeclaredFields();
+            for (Field classField : fields) {
+                TaskParam parm = classField.getAnnotation(TaskParam.class);
+                try {
+                    if (parm != null) {
+                        Object propertyValue = PropertyUtils.getProperty(data, 
classField.getName());
+                        if (propertyValue instanceof TaskParamType) {
+                            result.put(parm.name(), 
TaskParamType.class.cast(propertyValue).serialize());
+                        } else {
+                            result.put(parm.name(), propertyValue.toString());
+                        }
+                    }
+                } catch (Exception e) {
+                    logger.error("Failed to serialize task parameter {} in 
class {}", parm.name(), data.getClass().getName());
+                    throw e;
+                }
+            }
+        }
+        return result;
+    }
+}
\ No newline at end of file
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskDef.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskDef.java
new file mode 100644
index 0000000000..2a8f8daa73
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskDef.java
@@ -0,0 +1,12 @@
+package org.apache.airavata.apis.workflow.task.common.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 * Marks a class as a workflow task implementation and supplies the name under
 * which the task type is registered with the workflow engine.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface TaskDef {

    /** Registration name for this task type. */
    String name();
}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskOutPort.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskOutPort.java
new file mode 100644
index 0000000000..e717ff954c
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskOutPort.java
@@ -0,0 +1,12 @@
+package org.apache.airavata.apis.workflow.task.common.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 * Marks an {@code OutPort} field of a task so the workflow engine can discover
 * the task's outgoing edges by reflection.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface TaskOutPort {

    /** Logical name of this out port. */
    String name();
}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskParam.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskParam.java
new file mode 100644
index 0000000000..87da3327b4
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/common/annotation/TaskParam.java
@@ -0,0 +1,14 @@
+package org.apache.airavata.apis.workflow.task.common.annotation;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
/**
 * Marks a task field as a serializable parameter that is persisted to and
 * restored from the workflow engine's string parameter map.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface TaskParam {

    /** Key under which the field is stored in the serialized parameter map. */
    String name();

    /** Fallback value used when the parameter is absent. */
    String defaultValue() default "";

    /** Whether the parameter must be supplied. */
    boolean mandatory() default false;
}
\ No newline at end of file
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/data/DataMovementTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/data/DataMovementTask.java
new file mode 100644
index 0000000000..0a4792626f
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/data/DataMovementTask.java
@@ -0,0 +1,25 @@
+package org.apache.airavata.apis.workflow.task.data;
+
+import org.apache.airavata.apis.workflow.task.common.BaseTask;
+import org.apache.airavata.apis.workflow.task.common.annotation.TaskDef;
+import org.apache.helix.task.TaskResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+// Workflow task intended to move experiment data between storage endpoints.
+// Currently a skeleton: it logs and reports success without transferring
+// anything (TODO: implement the actual data movement logic).
+@TaskDef(name = "DataMovementTask")
+public class DataMovementTask extends BaseTask {
+
+    private final static Logger logger = 
LoggerFactory.getLogger(DataMovementTask.class);
+
+    // Invoked by the workflow engine when this task is scheduled to run.
+    @Override
+    public TaskResult onRun() throws Exception {
+        logger.info("Starting Data Movement task {}", getTaskId());
+
+        // No movement performed yet; unconditionally reports completion.
+        return new TaskResult(TaskResult.Status.COMPLETED, "Completed");
+    }
+
+    // Cancellation hook; nothing to clean up while onRun holds no resources.
+    @Override
+    public void onCancel() throws Exception {
+
+    }
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/DestroyDockerContainerTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/DestroyDockerContainerTask.java
new file mode 100644
index 0000000000..aa2b6a480c
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/DestroyDockerContainerTask.java
@@ -0,0 +1,4 @@
+package org.apache.airavata.apis.workflow.task.docker;
+
+// Placeholder for a task that tears down a Docker container. Not yet
+// implemented — TODO: extend BaseTask and annotate with @TaskDef.
+public class DestroyDockerContainerTask {
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/StartDockerContainerTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/StartDockerContainerTask.java
new file mode 100644
index 0000000000..318d409a5c
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/docker/StartDockerContainerTask.java
@@ -0,0 +1,4 @@
+package org.apache.airavata.apis.workflow.task.docker;
+
+// Placeholder for a task that launches a Docker container. Not yet
+// implemented — TODO: extend BaseTask and annotate with @TaskDef.
+public class StartDockerContainerTask {
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/CreateEC2InstanceTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/CreateEC2InstanceTask.java
new file mode 100644
index 0000000000..537323bd70
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/CreateEC2InstanceTask.java
@@ -0,0 +1,25 @@
+package org.apache.airavata.apis.workflow.task.ec2;
+
+import org.apache.airavata.apis.workflow.task.common.BaseTask;
+import org.apache.airavata.apis.workflow.task.common.annotation.TaskDef;
+import org.apache.helix.task.TaskResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+// Workflow task intended to provision an EC2 instance for an experiment.
+// Currently a skeleton: it logs and reports success without calling AWS
+// (TODO: implement instance provisioning against the EC2 backend config).
+@TaskDef(name = "CreateEC2InstanceTask")
+public class CreateEC2InstanceTask extends BaseTask {
+
+    private final static Logger logger = 
LoggerFactory.getLogger(CreateEC2InstanceTask.class);
+
+    // Invoked by the workflow engine when this task is scheduled to run.
+    @Override
+    public TaskResult onRun() throws Exception {
+        logger.info("Starting Create EC2 Instance Task {}", getTaskId());
+
+        // No instance is created yet; unconditionally reports completion.
+        return new TaskResult(TaskResult.Status.COMPLETED, "Completed");
+    }
+
+    // Cancellation hook; nothing to clean up while onRun holds no resources.
+    @Override
+    public void onCancel() throws Exception {
+
+    }
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/DestroyEC2InstanceTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/DestroyEC2InstanceTask.java
new file mode 100644
index 0000000000..7c4f39674a
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/ec2/DestroyEC2InstanceTask.java
@@ -0,0 +1,4 @@
+package org.apache.airavata.apis.workflow.task.ec2;
+
+// Placeholder for a task that terminates a provisioned EC2 instance. Not yet
+// implemented — TODO: extend BaseTask and annotate with @TaskDef.
+public class DestroyEC2InstanceTask {
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunLocalCommandTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunLocalCommandTask.java
new file mode 100644
index 0000000000..ac6d1a223b
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunLocalCommandTask.java
@@ -0,0 +1,4 @@
+package org.apache.airavata.apis.workflow.task.runners;
+
+// Placeholder for a task that executes a command on the local host. Not yet
+// implemented — TODO: extend BaseTask and annotate with @TaskDef.
+public class RunLocalCommandTask {
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunServerCommandTask.java
 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunServerCommandTask.java
new file mode 100644
index 0000000000..cb06f370a4
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/java/org/apache/airavata/apis/workflow/task/runners/RunServerCommandTask.java
@@ -0,0 +1,4 @@
+package org.apache.airavata.apis.workflow.task.runners;
+
+// Placeholder for a task that executes a command on a remote server backend.
+// Not yet implemented — TODO: extend BaseTask and annotate with @TaskDef.
+public class RunServerCommandTask {
+}
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/resources/api.properties 
b/modules/airavata-apis/airavata-apis-server/src/main/resources/api.properties
index e69de29bb2..a9fd83fea0 100644
--- 
a/modules/airavata-apis/airavata-apis-server/src/main/resources/api.properties
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/resources/api.properties
@@ -0,0 +1,16 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
\ No newline at end of file
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/resources/log4j2.xml 
b/modules/airavata-apis/airavata-apis-server/src/main/resources/log4j2.xml
index 8f3f226373..7d868d1626 100644
--- a/modules/airavata-apis/airavata-apis-server/src/main/resources/log4j2.xml
+++ b/modules/airavata-apis/airavata-apis-server/src/main/resources/log4j2.xml
@@ -40,7 +40,7 @@
         </RollingFile>
     </Appenders>
     <Loggers>
-        <logger name="org.apache.helix" level="WARN"/>
+        <logger name="org.apache.helix" level="ERROR"/>
         <logger name="org.apache.zookeeper" level="ERROR"/>
         <logger name="org.apache.airavata" level="INFO"/>
         <logger name="org.hibernate" level="ERROR"/>
diff --git 
a/modules/airavata-apis/airavata-apis-server/src/main/resources/workflow.properties
 
b/modules/airavata-apis/airavata-apis-server/src/main/resources/workflow.properties
new file mode 100644
index 0000000000..604e79bd92
--- /dev/null
+++ 
b/modules/airavata-apis/airavata-apis-server/src/main/resources/workflow.properties
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+zookeeper.address=localhost:2181
+workflow.cluster.name=airavata
+workflow.controller.name=controller0
+workflow.participant.name=participant0
diff --git 
a/modules/airavata-apis/airavata-apis-stub/src/main/proto/execution/experiment_stub.proto
 
b/modules/airavata-apis/airavata-apis-stub/src/main/proto/execution/experiment_stub.proto
index 3da863c02a..15457f14fe 100644
--- 
a/modules/airavata-apis/airavata-apis-stub/src/main/proto/execution/experiment_stub.proto
+++ 
b/modules/airavata-apis/airavata-apis-stub/src/main/proto/execution/experiment_stub.proto
@@ -137,6 +137,7 @@ message ServerBackend {
 
 message EC2Backend {
   string flavor = 1;
+  string imageId = 2;
   string region = 3;
   string aws_credential_id = 4;
 }
diff --git a/pom.xml b/pom.xml
index d300abaa3f..626f0ff48a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -654,45 +654,6 @@
                             </execution>
                         </executions>
                     </plugin>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-remote-resources-plugin</artifactId>
-                        <executions>
-                            <execution>
-                                <goals>
-                                    <goal>bundle</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <includes>
-                                <include>**/*.*</include>
-                            </includes>
-                        </configuration>
-                    </plugin>
-                    <plugin>
-                        <artifactId>maven-resources-plugin</artifactId>
-                        <version>2.5</version>
-                        <executions>
-                            <execution>
-                                <id>copy-resources</id>
-                                <!-- here the phase you need -->
-                                <phase>validate</phase>
-                                <goals>
-                                    <goal>copy-resources</goal>
-                                </goals>
-                                <configuration>
-                                    
<outputDirectory>${basedir}/target/classes/META-INF</outputDirectory>
-                                    <resources>
-                                        <resource>
-                                            
<directory>${basedir}/src/main/assembly/dist</directory>
-                                            <filtering>true</filtering>
-                                        </resource>
-                                    </resources>
-                                </configuration>
-                            </execution>
-                        </executions>
-                    </plugin>
                     <plugin>
                         <groupId>org.apache.maven.plugins</groupId>
                         <artifactId>maven-compiler-plugin</artifactId>
@@ -762,45 +723,6 @@
                             </execution>
                         </executions>
                     </plugin>
-                    <plugin>
-                        <groupId>org.jacoco</groupId>
-                        <artifactId>jacoco-maven-plugin</artifactId>
-                        <version>${jacoco.version}</version>
-                        <executions>
-                            <execution>
-                                <id>prepare-agent</id>
-                                <goals>
-                                    <goal>prepare-agent</goal>
-                                </goals>
-                            </execution>
-                            <execution>
-                                <id>report</id>
-                                <phase>prepare-package</phase>
-                                <goals>
-                                    <goal>report</goal>
-                                </goals>
-                            </execution>
-                            <execution>
-                                <id>post-unit-test</id>
-                                <phase>test</phase>
-                                <goals>
-                                    <goal>report</goal>
-                                </goals>
-                                <configuration>
-                                    <!-- Sets the path to the file which 
contains the execution data. -->
-
-                                    <dataFile>target/jacoco.exec</dataFile>
-                                    <!-- Sets the output directory for the 
code coverage report. -->
-                                    
<outputDirectory>target/jacoco-ut</outputDirectory>
-                                </configuration>
-                            </execution>
-                        </executions>
-                        <configuration>
-                            <systemPropertyVariables>
-                                
<jacoco-agent.destfile>target/jacoco.exec</jacoco-agent.destfile>
-                            </systemPropertyVariables>
-                        </configuration>
-                    </plugin>
                 </plugins>
             </build>
             <activation>
@@ -846,28 +768,6 @@
                             <!--<fork>true</fork>-->
                         </configuration>
                     </plugin>
-                    <plugin>
-                        <groupId>com.mycila</groupId>
-                        <artifactId>license-maven-plugin</artifactId>
-                        <version>3.0</version>
-                        <configuration>
-                            <header>apache-license-header.txt</header>
-                            <excludes>
-                                <exclude>**/README</exclude>
-                                <exclude>**/src/test/resources/**</exclude>
-                                <exclude>**/src/main/resources/**</exclude>
-                                <exclude>**/vault.yml</exclude>
-                            </excludes>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <phase>process-sources</phase>
-                                <goals>
-                                    <goal>format</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
                     <plugin>
                         <groupId>org.apache.rat</groupId>
                         <artifactId>apache-rat-plugin</artifactId>
@@ -909,20 +809,4 @@
         </profile>
     </profiles>
 
-    <reporting>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-javadoc-plugin</artifactId>
-                <version>2.9.1</version>
-                <configuration>
-                    <aggregate>true</aggregate>
-                    <quiet>true</quiet>
-                    <minmemory>256m</minmemory>
-                    <maxmemory>2g</maxmemory>
-                </configuration>
-            </plugin>
-        </plugins>
-    </reporting>
-
 </project>

Reply via email to