Repository: oozie
Updated Branches:
  refs/heads/master f8cbce617 -> 8a0a6487d


http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestShellAction.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestShellAction.java
 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestShellAction.java
new file mode 100644
index 0000000..552933b
--- /dev/null
+++ 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestShellAction.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.jobs.client.minitest;
+
+import org.apache.oozie.client.OozieClientException;
+import org.apache.oozie.fluentjob.api.GraphVisualization;
+import org.apache.oozie.fluentjob.api.action.Prepare;
+import org.apache.oozie.fluentjob.api.action.PrepareBuilder;
+import org.apache.oozie.fluentjob.api.action.ShellAction;
+import org.apache.oozie.fluentjob.api.action.ShellActionBuilder;
+import org.apache.oozie.fluentjob.api.dag.Graph;
+import org.apache.oozie.fluentjob.api.serialization.WorkflowMarshaller;
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.apache.oozie.test.WorkflowTestCase;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+
+public class TestShellAction extends WorkflowTestCase {
+    public void testForkedShellActions() throws IOException, JAXBException, 
OozieClientException {
+        final Prepare prepare = new PrepareBuilder()
+                
.withDelete("hdfs://localhost:8020/user/${wf:user()}/examples/output")
+                .build();
+
+        final ShellAction parent = ShellActionBuilder.create()
+                .withResourceManager(getJobTrackerUri())
+                .withNameNode(getNameNodeUri())
+                .withPrepare(prepare)
+                .withConfigProperty("mapred.job.queue.name", "default")
+                .withArgument("arg1")
+                .withExecutable("python")
+                .withEnvironmentVariable("PATH=$PATH:/opt/python27/bin")
+                .withCaptureOutput(true)
+                .build();
+
+        //  We are reusing the definition of parent and only modifying and 
adding what is different.
+        final ShellAction leftChild = 
ShellActionBuilder.createFromExistingAction(parent)
+                .withParent(parent)
+                .withoutArgument("arg1")
+                .withArgument("arg2")
+                .withExecutable("python3")
+                .withoutEnvironmentVariable("PATH=$PATH:/opt/python27/bin")
+                .withEnvironmentVariable("PATH=$PATH:/opt/python36/bin")
+                .withCaptureOutput(false)
+                .build();
+
+        ShellActionBuilder.createFromExistingAction(leftChild)
+                .withoutArgument("arg2")
+                .withArgument("arg3")
+                .withExecutable("python4")
+                .withoutEnvironmentVariable("PATH=$PATH:/opt/python36/bin")
+                .withEnvironmentVariable("PATH=$PATH:/opt/python42/bin")
+                .build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                .withName("simple-shell-example")
+                .withDagContainingNode(parent).build();
+
+        final String xml = WorkflowMarshaller.marshal(workflow);
+
+        System.out.println(xml);
+
+        GraphVisualization.workflowToPng(workflow, 
"simple-shell-example-workflow.png");
+
+        final Graph intermediateGraph = new Graph(workflow);
+
+        GraphVisualization.graphToPng(intermediateGraph, 
"simple-shell-example-graph.png");
+
+        log.debug("Workflow XML is:\n{0}", xml);
+
+        validate(xml);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSparkAction.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSparkAction.java
 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSparkAction.java
new file mode 100644
index 0000000..3a1ce3b
--- /dev/null
+++ 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSparkAction.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.jobs.client.minitest;
+
+import org.apache.oozie.client.OozieClientException;
+import org.apache.oozie.fluentjob.api.GraphVisualization;
+import org.apache.oozie.fluentjob.api.action.Prepare;
+import org.apache.oozie.fluentjob.api.action.PrepareBuilder;
+import org.apache.oozie.fluentjob.api.action.SparkAction;
+import org.apache.oozie.fluentjob.api.action.SparkActionBuilder;
+import org.apache.oozie.fluentjob.api.dag.Graph;
+import org.apache.oozie.fluentjob.api.serialization.WorkflowMarshaller;
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.apache.oozie.test.WorkflowTestCase;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+
+public class TestSparkAction extends WorkflowTestCase {
+    public void testForkedSparkActions() throws IOException, JAXBException, 
OozieClientException {
+        final Prepare prepare = new PrepareBuilder()
+                
.withDelete("hdfs://localhost:8020/user/${wf:user()}/examples/output")
+                .build();
+
+        final SparkAction parent = SparkActionBuilder.create()
+                .withResourceManager(getJobTrackerUri())
+                .withNameNode(getNameNodeUri())
+                .withPrepare(prepare)
+                .withConfigProperty("mapred.job.queue.name", "default")
+                .withArg("inputpath=hdfs://localhost/input/file.txt")
+                .withArg("value=1")
+                .withMaster("yarn")
+                .withMode("cluster")
+                .withActionName("Spark Example")
+                .withActionClass("org.apache.spark.examples.mllib.JavaALS")
+                .withJar("/lib/spark-examples_2.10-1.1.0.jar")
+                .withSparkOpts("--executor-memory 20G --num-executors 50")
+                .build();
+
+        //  We are reusing the definition of parent and only modifying and 
adding what is different.
+        final SparkAction leftChild = 
SparkActionBuilder.createFromExistingAction(parent)
+                .withParent(parent)
+                .withoutArg("value=1")
+                .withArg("value=3")
+                .build();
+
+        SparkActionBuilder.createFromExistingAction(leftChild)
+                .withoutArg("value=2")
+                .withArg("value=3")
+                .build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                .withName("simple-spark-example")
+                .withDagContainingNode(parent).build();
+
+        final String xml = WorkflowMarshaller.marshal(workflow);
+
+        System.out.println(xml);
+
+        GraphVisualization.workflowToPng(workflow, 
"simple-spark-example-workflow.png");
+
+        final Graph intermediateGraph = new Graph(workflow);
+
+        GraphVisualization.graphToPng(intermediateGraph, 
"simple-spark-example-graph.png");
+
+        log.debug("Workflow XML is:\n{0}", xml);
+
+        validate(xml);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSqoopAction.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSqoopAction.java
 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSqoopAction.java
new file mode 100644
index 0000000..3d6fec9
--- /dev/null
+++ 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSqoopAction.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.jobs.client.minitest;
+
+import org.apache.oozie.client.OozieClientException;
+import org.apache.oozie.fluentjob.api.GraphVisualization;
+import org.apache.oozie.fluentjob.api.action.Prepare;
+import org.apache.oozie.fluentjob.api.action.PrepareBuilder;
+import org.apache.oozie.fluentjob.api.action.SqoopAction;
+import org.apache.oozie.fluentjob.api.action.SqoopActionBuilder;
+import org.apache.oozie.fluentjob.api.dag.Graph;
+import org.apache.oozie.fluentjob.api.serialization.WorkflowMarshaller;
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.apache.oozie.test.WorkflowTestCase;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+
+public class TestSqoopAction extends WorkflowTestCase {
+    public void testForkedSqoopActions() throws IOException, JAXBException, 
OozieClientException {
+        final Prepare prepare = new PrepareBuilder()
+                
.withDelete("hdfs://localhost:8020/user/${wf:user()}/examples/output")
+                .build();
+
+        final SqoopAction parent = SqoopActionBuilder.create()
+                .withResourceManager(getJobTrackerUri())
+                .withNameNode(getNameNodeUri())
+                .withPrepare(prepare)
+                .withConfigProperty("mapred.job.queue.name", "default")
+                .withCommand("python")
+                .build();
+
+        //  We are reusing the definition of parent and only modifying and 
adding what is different.
+        final SqoopAction leftChild = 
SqoopActionBuilder.createFromExistingAction(parent)
+                .withParent(parent)
+                .withCommand("python3")
+                .build();
+
+        SqoopActionBuilder.createFromExistingAction(leftChild)
+                .withoutArgument("arg2")
+                .withArgument("arg3")
+                .withCommand(null)
+                .build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                .withName("simple-sqoop-example")
+                .withDagContainingNode(parent).build();
+
+        final String xml = WorkflowMarshaller.marshal(workflow);
+
+        System.out.println(xml);
+
+        GraphVisualization.workflowToPng(workflow, 
"simple-sqoop-example-workflow.png");
+
+        final Graph intermediateGraph = new Graph(workflow);
+
+        GraphVisualization.graphToPng(intermediateGraph, 
"simple-sqoop-example-graph.png");
+
+        log.debug("Workflow XML is:\n{0}", xml);
+
+        validate(xml);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSshAction.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSshAction.java
 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSshAction.java
new file mode 100644
index 0000000..20ffdbf
--- /dev/null
+++ 
b/fluent-job/fluent-job-client/src/test/java/org/apache/oozie/jobs/client/minitest/TestSshAction.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.jobs.client.minitest;
+
+import org.apache.oozie.client.OozieClientException;
+import org.apache.oozie.fluentjob.api.GraphVisualization;
+import org.apache.oozie.fluentjob.api.action.SshAction;
+import org.apache.oozie.fluentjob.api.action.SshActionBuilder;
+import org.apache.oozie.fluentjob.api.dag.Graph;
+import org.apache.oozie.fluentjob.api.serialization.WorkflowMarshaller;
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.apache.oozie.test.WorkflowTestCase;
+
+import javax.xml.bind.JAXBException;
+import java.io.IOException;
+
+public class TestSshAction extends WorkflowTestCase {
+    public void testForkedSshActions() throws IOException, JAXBException, 
OozieClientException {
+        final SshAction parent = SshActionBuilder.create()
+                .withArg("\"Hello Oozie!\"")
+                .withHost("localhost")
+                .withCommand("echo")
+                .withCaptureOutput(true)
+                .build();
+
+        //  We are reusing the definition of parent and only modifying and 
adding what is different.
+        final SshAction leftChild = 
SshActionBuilder.createFromExistingAction(parent)
+                .withParent(parent)
+                .withoutArg("\"Hello Oozie!\"")
+                .withArg("\"Hello Oozie!!\"")
+                .withCaptureOutput(false)
+                .build();
+
+        SshActionBuilder.createFromExistingAction(leftChild)
+                .withoutArg("\"Hello Oozie!!\"")
+                .withArg("\"Hello Oozie!!!\"")
+                .build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                .withName("simple-ssh-example")
+                .withDagContainingNode(parent).build();
+
+        final String xml = WorkflowMarshaller.marshal(workflow);
+
+        System.out.println(xml);
+
+        GraphVisualization.workflowToPng(workflow, 
"simple-ssh-example-workflow.png");
+
+        final Graph intermediateGraph = new Graph(workflow);
+
+        GraphVisualization.graphToPng(intermediateGraph, 
"simple-ssh-example-graph.png");
+
+        log.debug("Workflow XML is:\n{0}", xml);
+
+        validate(xml);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/resources/workflow-all-actions.xml
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/resources/workflow-all-actions.xml 
b/fluent-job/fluent-job-client/src/test/resources/workflow-all-actions.xml
new file mode 100644
index 0000000..df66ac8
--- /dev/null
+++ b/fluent-job/fluent-job-client/src/test/resources/workflow-all-actions.xml
@@ -0,0 +1,234 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<workflow-app xmlns="uri:oozie:workflow:1.0"
+              xmlns:sla="uri:oozie:sla:0.2"
+              name="jaxb-example-wf">
+
+    <start to="mr-node"/>
+
+    <action name="mr-node">
+        <map-reduce>
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <prepare>
+                <delete 
path="${nameNode}/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.job.queue.name</name>
+                    <value>${queueName}</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.oozie.example.SampleMapper</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.oozie.example.SampleReducer</value>
+                </property>
+                <property>
+                    <name>mapred.map.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.input.dir</name>
+                    
<value>/user/${wf:user()}/${examplesRoot}/input-data/text</value>
+                </property>
+                <property>
+                    <name>mapred.output.dir</name>
+                    
<value>/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="distcp"/>
+        <error to="fail"/>
+        <!--<sla:info>-->
+            <!--<sla:nominal-time>${nominal_time}</sla:nominal-time>-->
+            <!--<sla:should-start>${5 * MINUTES}</sla:should-start>-->
+            <!--<sla:should-end>${15 * MINUTES}</sla:should-end>-->
+            <!--<sla:max-duration>${15 * MINUTES}</sla:max-duration>-->
+            
<!--<sla:alert-events>start_miss,end_met,end_miss</sla:alert-events>-->
+            <!--<sla:alert-contact>[email protected]</sla:alert-contact>-->
+        <!--</sla:info>-->
+    </action>
+
+    <action name="distcp">
+        <distcp xmlns="uri:oozie:distcp-action:1.0">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <prepare>
+                <delete 
path="${nameNode}/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}"/>
+                <mkdir 
path="${nameNode}/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>a</name>
+                    <value>A</value>
+                </property>
+                <property>
+                    <name>b</name>
+                    <value>B</value>
+                </property>
+            </configuration>
+            
<arg>${nameNode}/user/${wf:user()}/${examplesRoot}/input-data/${inputDir}/data.txt</arg>
+            
<arg>${nameNode}/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}/data.txt</arg>
+        </distcp>
+        <ok to="email"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="email">
+        <email xmlns="uri:oozie:email-action:0.2">
+            <to>[email protected]</to>
+            <subject>foo</subject>
+            <body>bar</body>
+        </email>
+        <ok to="end"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="hive2">
+        <hive2 xmlns="uri:oozie:hive2-action:1.0">
+            <job-tracker>foo:8021</job-tracker>
+            <name-node>bar:8020</name-node>
+            <prepare>
+                <delete path="${jobOutput}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.compress.map.output</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <jdbc-url>jdbc:hive2://localhost:10000/default</jdbc-url>
+            <password>foo</password>
+            <script>myscript.q</script>
+            <param>InputDir=/home/rkanter/input-data</param>
+            <param>OutputDir=${jobOutput}</param>
+        </hive2>
+        <ok to="hive"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="hive">
+        <hive xmlns="uri:oozie:hive-action:1.0">
+            <job-tracker>foo:8021</job-tracker>
+            <name-node>bar:8020</name-node>
+            <prepare>
+                <delete path="${jobOutput}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.compress.map.output</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <script>myscript.q</script>
+            <param>InputDir=/home/tucu/input-data</param>
+            <param>OutputDir=${jobOutput}</param>
+        </hive>
+        <ok to="shell"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="shell">
+        <shell xmlns="uri:oozie:shell-action:1.0">
+            <exec>echo</exec>
+            <argument>foo</argument>
+            <capture-output/>
+        </shell>
+        <ok to="spark"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="spark">
+        <spark xmlns="uri:oozie:spark-action:1.0">
+            <job-tracker>foo:8021</job-tracker>
+            <name-node>bar:8020</name-node>
+            <prepare>
+                <delete path="${jobOutput}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.compress.map.output</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <master>local[*]</master>
+            <mode>client</mode>
+            <name>Spark Example</name>
+            <class>org.apache.spark.examples.mllib.JavaALS</class>
+            <jar>/lib/spark-examples_2.10-1.1.0.jar</jar>
+            <spark-opts>--executor-memory 20G --num-executors 50
+                --conf 
spark.executor.extraJavaOptions="-XX:+HeapDumpOnOutOfMemoryError 
-XX:HeapDumpPath=/tmp"</spark-opts>
+            <arg>inputpath=hdfs://localhost/input/file.txt</arg>
+            <arg>value=2</arg>
+        </spark>
+        <ok to="sqoop"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="sqoop">
+        <sqoop xmlns="uri:oozie:sqoop-action:1.0">
+            <job-tracker>foo:8021</job-tracker>
+            <name-node>bar:8020</name-node>
+            <prepare>
+                <delete path="${jobOutput}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.compress.map.output</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <command>
+                import --connect jdbc:hsqldb:file:db.hsqldb --table TT 
--target-dir hdfs://localhost:8020/user/tucu/foo -m 1
+            </command>
+        </sqoop>
+        <ok to="ssh"/>
+        <error to="fail"/>
+    </action>
+
+    <action name="ssh">
+        <ssh xmlns="uri:oozie:ssh-action:0.2">
+            <host>[email protected]</host>
+            <command>uploaddata</command>
+            <args>jdbc:derby://bar.com:1527/myDB</args>
+            <args>hdfs://foobar.com:8020/usr/tucu/myData</args>
+        </ssh>
+        <ok to="end"/>
+        <error to="fail"/>
+    </action>
+
+    <kill name = "fail">
+        <message>Action failed, error 
message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+
+    <end name="end"/>
+
+    <sla:info>
+        <sla:nominal-time>${nominal_time}</sla:nominal-time>
+        <sla:should-start>${10 * MINUTES}</sla:should-start>
+        <sla:should-end>${30 * MINUTES}</sla:should-end>
+        <sla:max-duration>${30 * MINUTES}</sla:max-duration>
+        <sla:alert-events>start_miss,end_met,end_miss</sla:alert-events>
+        <sla:alert-contact>[email protected]</sla:alert-contact>
+    </sla:info>
+
+</workflow-app>

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-client/src/test/resources/workflow-mapreduce-action.xml
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-client/src/test/resources/workflow-mapreduce-action.xml 
b/fluent-job/fluent-job-client/src/test/resources/workflow-mapreduce-action.xml
new file mode 100644
index 0000000..21c03b1
--- /dev/null
+++ 
b/fluent-job/fluent-job-client/src/test/resources/workflow-mapreduce-action.xml
@@ -0,0 +1,63 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<workflow-app xmlns="uri:oozie:workflow:1.0" name="jaxb-example-wf">
+    <start to="mr-node"/>
+    <action name="mr-node">
+        <map-reduce>
+            <resource-manager>${resourceManager}</resource-manager>
+            <name-node>${nameNode}</name-node>
+            <prepare>
+                <delete 
path="${nameNode}/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}"/>
+            </prepare>
+            <configuration>
+                <property>
+                    <name>mapred.job.queue.name</name>
+                    <value>${queueName}</value>
+                </property>
+                <property>
+                    <name>mapred.mapper.class</name>
+                    <value>org.apache.oozie.example.SampleMapper</value>
+                </property>
+                <property>
+                    <name>mapred.reducer.class</name>
+                    <value>org.apache.oozie.example.SampleReducer</value>
+                </property>
+                <property>
+                    <name>mapred.map.tasks</name>
+                    <value>1</value>
+                </property>
+                <property>
+                    <name>mapred.input.dir</name>
+                    
<value>/user/${wf:user()}/${examplesRoot}/input-data/text</value>
+                </property>
+                <property>
+                    <name>mapred.output.dir</name>
+                    
<value>/user/${wf:user()}/${examplesRoot}/output-data/${outputDir}</value>
+                </property>
+            </configuration>
+        </map-reduce>
+        <ok to="end"/>
+        <error to="fail"/>
+    </action>
+
+    <kill name = "fail">
+        <message>Map/Reduce failed, error 
message[${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/pom.xml
----------------------------------------------------------------------
diff --git a/fluent-job/pom.xml b/fluent-job/pom.xml
new file mode 100644
index 0000000..5b24c91
--- /dev/null
+++ b/fluent-job/pom.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0";
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+    <parent>
+        <artifactId>oozie-main</artifactId>
+        <groupId>org.apache.oozie</groupId>
+        <version>5.1.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>oozie-fluent-job</artifactId>
+    <description>Apache Oozie Fluent Job</description>
+    <name>Apache Oozie Fluent Job</name>
+    <packaging>pom</packaging>
+
+    <modules>
+        <module>fluent-job-api</module>
+        <module>fluent-job-client</module>
+    </modules>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <configuration>
+                    <skip>true</skip>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.rat</groupId>
+                <artifactId>apache-rat-plugin</artifactId>
+                <configuration>
+                    <excludeSubProjects>false</excludeSubProjects>
+                    <excludes>
+                        <!-- excluding all as the root POM does the full check 
-->
+                        <exclude>**</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/minitest/pom.xml
----------------------------------------------------------------------
diff --git a/minitest/pom.xml b/minitest/pom.xml
index 1847838..56afddb 100644
--- a/minitest/pom.xml
+++ b/minitest/pom.xml
@@ -17,7 +17,7 @@
   limitations under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
@@ -80,4 +80,20 @@
             <artifactId>hadoop-minicluster</artifactId>
         </dependency>
     </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/minitest/src/test/java/org/apache/oozie/test/TestWorkflow.java
----------------------------------------------------------------------
diff --git a/minitest/src/test/java/org/apache/oozie/test/TestWorkflow.java 
b/minitest/src/test/java/org/apache/oozie/test/TestWorkflow.java
index 4257b60..3c20bf2 100644
--- a/minitest/src/test/java/org/apache/oozie/test/TestWorkflow.java
+++ b/minitest/src/test/java/org/apache/oozie/test/TestWorkflow.java
@@ -20,20 +20,8 @@ package org.apache.oozie.test;
 
 import com.google.common.base.Strings;
 import org.apache.oozie.action.hadoop.JavaActionExecutor;
-import org.apache.oozie.client.OozieClientException;
-import org.apache.oozie.service.XLogService;
-import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.client.WorkflowJob;
-import org.apache.oozie.local.LocalOozie;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.Writer;
-import java.io.OutputStreamWriter;
 import java.util.Date;
 import java.util.Properties;
 
@@ -42,58 +30,13 @@ import static org.junit.Assume.assumeFalse;
 /**
  * {@code MiniOozie} integration test for different workflow kinds.
  */
-public class TestWorkflow extends MiniOozieTestCase {
-
-    @Override
-    protected void setUp() throws Exception {
-        System.setProperty(XLogService.LOG4J_FILE, "oozie-log4j.properties");
-        super.setUp();
-    }
-
-    @Override
-    protected void tearDown() throws Exception {
-        super.tearDown();
-    }
+public class TestWorkflow extends WorkflowTestCase {
 
     public void testWorkflowWithStartAndEndCompletesSuccessfully() throws 
Exception {
-        final String wfApp = "<workflow-app xmlns='uri:oozie:workflow:0.1' 
name='test-wf'>" + "    <start to='end'/>"
+        final String workflowXml = "<workflow-app 
xmlns='uri:oozie:workflow:0.1' name='test-wf'>" + "    <start to='end'/>"
                 + "    <end name='end'/>" + "</workflow-app>";
 
-        final FileSystem fs = getFileSystem();
-        final Path appPath = new Path(getFsTestCaseDir(), "app");
-        fs.mkdirs(appPath);
-        fs.mkdirs(new Path(appPath, "lib"));
-
-        final Writer writer = new OutputStreamWriter(fs.create(new 
Path(appPath, "workflow.xml")));
-        writer.write(wfApp);
-        writer.close();
-
-        final OozieClient wc = LocalOozie.getClient();
-
-        final Properties conf = wc.createConfiguration();
-        conf.setProperty(OozieClient.APP_PATH, new Path(appPath, 
"workflow.xml").toString());
-        conf.setProperty(OozieClient.USER_NAME, getTestUser());
-
-
-        final String jobId = wc.submit(conf);
-        assertNotNull(jobId);
-
-        WorkflowJob wf = wc.getJobInfo(jobId);
-        assertNotNull(wf);
-        assertEquals(WorkflowJob.Status.PREP, wf.getStatus());
-
-        wc.start(jobId);
-
-        waitFor(1000, new Predicate() {
-            public boolean evaluate() throws Exception {
-                final WorkflowJob wf = wc.getJobInfo(jobId);
-                return wf.getStatus() == WorkflowJob.Status.SUCCEEDED;
-            }
-        });
-
-        wf = wc.getJobInfo(jobId);
-        assertNotNull(wf);
-        assertEquals(WorkflowJob.Status.SUCCEEDED, wf.getStatus());
+        submitAndAssert(workflowXml, WorkflowJob.Status.SUCCEEDED);
     }
 
     public void testFsDecisionWorkflowCompletesSuccessfully() throws Exception 
{
@@ -116,100 +59,4 @@ public class TestWorkflow extends MiniOozieTestCase {
 
         runWorkflowFromFile(workflowFileName, additionalWorkflowProperties);
     }
-
-    private void runWorkflowFromFile(final String workflowFileName, final 
Properties additionalWorkflowProperties)
-            throws IOException, OozieClientException {
-        final FileSystem fs = getFileSystem();
-        final Path appPath = new Path(getFsTestCaseDir(), "app");
-        fs.mkdirs(appPath);
-        fs.mkdirs(new Path(appPath, "lib"));
-
-        final Reader reader = getResourceAsReader(workflowFileName, -1);
-        final Writer writer = new OutputStreamWriter(fs.create(new 
Path(appPath, "workflow.xml")));
-        copyCharStream(reader, writer);
-        writer.close();
-        reader.close();
-
-        final Path path = getFsTestCaseDir();
-
-        final OozieClient oozieClient = LocalOozie.getClient();
-
-        final Properties conf = oozieClient.createConfiguration();
-        conf.setProperty(OozieClient.APP_PATH, new Path(appPath, 
"workflow.xml").toString());
-        conf.setProperty(OozieClient.USER_NAME, getTestUser());
-        conf.setProperty("nameNodeBasePath", path.toString());
-        conf.setProperty("base", path.toUri().getPath());
-        conf.setProperty("nameNode", getNameNodeUri());
-        conf.setProperty("jobTracker", getJobTrackerUri());
-
-        for (final String additionalKey : 
additionalWorkflowProperties.stringPropertyNames()) {
-            conf.setProperty(additionalKey, 
additionalWorkflowProperties.getProperty(additionalKey));
-        }
-
-        final String jobId = oozieClient.submit(conf);
-        assertNotNull(jobId);
-
-        WorkflowJob wf = oozieClient.getJobInfo(jobId);
-        assertNotNull(wf);
-        assertEquals(WorkflowJob.Status.PREP, wf.getStatus());
-
-        oozieClient.start(jobId);
-
-        waitFor(15 * 1000, new Predicate() {
-            public boolean evaluate() throws Exception {
-                final WorkflowJob wf = oozieClient.getJobInfo(jobId);
-                return wf.getStatus() == WorkflowJob.Status.SUCCEEDED;
-            }
-        });
-
-        wf = oozieClient.getJobInfo(jobId);
-        assertNotNull(wf);
-        assertEquals(WorkflowJob.Status.SUCCEEDED, wf.getStatus());
-    }
-
-    /**
-     * Return a classpath resource as a stream.
-     * <p/>
-     *
-     * @param path classpath for the resource.
-     * @param maxLen max content length allowed.
-     * @return the stream for the resource.
-     * @throws IOException thrown if the resource could not be read.
-     */
-    private InputStream getResourceAsStream(final String path, final int 
maxLen) throws IOException {
-        final InputStream is = 
Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
-        if (is == null) {
-            throw new IllegalArgumentException("resource " + path + " not 
found");
-        }
-        return is;
-    }
-
-    /**
-     * Return a classpath resource as a reader.
-     * <p/>
-     * It is assumed that the resource is a text resource.
-     *
-     * @param path classpath for the resource.
-     * @param maxLen max content length allowed.
-     * @return the reader for the resource.
-     * @throws IOException thrown if the resource could not be read.
-     */
-    private Reader getResourceAsReader(final String path, final int maxLen) 
throws IOException {
-        return new InputStreamReader(getResourceAsStream(path, maxLen));
-    }
-
-    /**
-     * Copies an char input stream into an char output stream.
-     *
-     * @param reader reader to copy from.
-     * @param writer writer to copy to.
-     * @throws IOException thrown if the copy failed.
-     */
-    private void copyCharStream(final Reader reader, final Writer writer) 
throws IOException {
-        final char[] buffer = new char[4096];
-        int read;
-        while ((read = reader.read(buffer)) > -1) {
-            writer.write(buffer, 0, read);
-        }
-    }
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/minitest/src/test/java/org/apache/oozie/test/WorkflowTestCase.java
----------------------------------------------------------------------
diff --git a/minitest/src/test/java/org/apache/oozie/test/WorkflowTestCase.java 
b/minitest/src/test/java/org/apache/oozie/test/WorkflowTestCase.java
new file mode 100644
index 0000000..3a47972
--- /dev/null
+++ b/minitest/src/test/java/org/apache/oozie/test/WorkflowTestCase.java
@@ -0,0 +1,259 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.test;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.OozieClientException;
+import org.apache.oozie.client.WorkflowJob;
+import org.apache.oozie.local.LocalOozie;
+import org.apache.oozie.service.XLogService;
+import org.apache.oozie.servlet.V2ValidateServlet;
+
+import java.io.*;
+import java.util.Properties;
+
+public abstract class WorkflowTestCase extends MiniOozieTestCase {
+
+    @Override
+    protected void setUp() throws Exception {
+        System.setProperty(XLogService.LOG4J_FILE, "oozie-log4j.properties");
+        super.setUp();
+    }
+
+    @Override
+    protected void tearDown() throws Exception {
+        super.tearDown();
+    }
+
+    protected void submitAndAssert(final String workflowXml, final 
WorkflowJob.Status terminalStatus)
+            throws OozieClientException, IOException {
+        final WorkflowJob finishedWorkflowJob = new WorkflowJobBuilder()
+                .submit(workflowXml)
+                .start()
+                .waitForSucceeded()
+                .build();
+
+        assertNotNull(finishedWorkflowJob);
+        assertEquals(terminalStatus, finishedWorkflowJob.getStatus());
+    }
+
+    protected void validate(final String workflowXml) throws IOException, 
OozieClientException {
+        new WorkflowJobBuilder()
+                .validate(workflowXml);
+    }
+
+    protected void runWorkflowFromFile(final String workflowFileName, final 
Properties additionalWorkflowProperties)
+            throws IOException, OozieClientException {
+        final FileSystem fs = getFileSystem();
+        final Path appPath = new Path(getFsTestCaseDir(), "app");
+        fs.mkdirs(appPath);
+        fs.mkdirs(new Path(appPath, "lib"));
+
+        final Reader reader = getResourceAsReader(workflowFileName, -1);
+        final Writer writer = new OutputStreamWriter(fs.create(new 
Path(appPath, "workflow.xml")));
+        copyCharStream(reader, writer);
+        writer.close();
+        reader.close();
+
+        final Path path = getFsTestCaseDir();
+
+        final OozieClient oozieClient = LocalOozie.getClient();
+
+        final Properties conf = oozieClient.createConfiguration();
+        conf.setProperty(OozieClient.APP_PATH, new Path(appPath, 
"workflow.xml").toString());
+        conf.setProperty(OozieClient.USER_NAME, getTestUser());
+        conf.setProperty("nameNodeBasePath", path.toString());
+        conf.setProperty("base", path.toUri().getPath());
+        conf.setProperty("nameNode", getNameNodeUri());
+        conf.setProperty("jobTracker", getJobTrackerUri());
+
+        for (final String additionalKey : 
additionalWorkflowProperties.stringPropertyNames()) {
+            conf.setProperty(additionalKey, 
additionalWorkflowProperties.getProperty(additionalKey));
+        }
+
+        final String jobId = oozieClient.submit(conf);
+        assertNotNull(jobId);
+
+        WorkflowJob wf = oozieClient.getJobInfo(jobId);
+        assertNotNull(wf);
+        assertEquals(WorkflowJob.Status.PREP, wf.getStatus());
+
+        oozieClient.start(jobId);
+
+        waitFor(15_000, new Predicate() {
+            public boolean evaluate() throws Exception {
+                final WorkflowJob wf = oozieClient.getJobInfo(jobId);
+                return wf.getStatus() == WorkflowJob.Status.SUCCEEDED;
+            }
+        });
+
+        wf = oozieClient.getJobInfo(jobId);
+        assertNotNull(wf);
+        assertEquals(WorkflowJob.Status.SUCCEEDED, wf.getStatus());
+    }
+
+    /**
+     * Return a classpath resource as a stream.
+     * <p/>
+     *
+     * @param path classpath for the resource.
+     * @param maxLen max content length allowed.
+     * @return the stream for the resource.
+     * @throws IOException thrown if the resource could not be read.
+     */
+    private InputStream getResourceAsStream(final String path, final int 
maxLen) throws IOException {
+        final InputStream is = 
Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+        if (is == null) {
+            throw new IllegalArgumentException("resource " + path + " not 
found");
+        }
+        return is;
+    }
+
+    /**
+     * Return a classpath resource as a reader.
+     * <p/>
+     * It is assumed that the resource is a text resource.
+     *
+     * @param path classpath for the resource.
+     * @param maxLen max content length allowed.
+     * @return the reader for the resource.
+     * @throws IOException thrown if the resource could not be read.
+     */
+    private Reader getResourceAsReader(final String path, final int maxLen) 
throws IOException {
+        return new InputStreamReader(getResourceAsStream(path, maxLen));
+    }
+
+    /**
+     * Copies a char input stream into a char output stream.
+     *
+     * @param reader reader to copy from.
+     * @param writer writer to copy to.
+     * @throws IOException thrown if the copy failed.
+     */
+    private void copyCharStream(final Reader reader, final Writer writer) 
throws IOException {
+        final char[] buffer = new char[4096];
+        int read;
+        while ((read = reader.read(buffer)) > -1) {
+            writer.write(buffer, 0, read);
+        }
+    }
+
+    private class WorkflowJobBuilder {
+        private final FileSystem dfs;
+        private final Path appPath;
+        private final OozieClient oozieClient = LocalOozie.getClient();
+        private String workflowJobId;
+        private WorkflowJob workflowJob;
+        private final Path localPath;
+
+        private WorkflowJobBuilder() throws IOException {
+            this.dfs = getFileSystem();
+            this.appPath = new Path(getFsTestCaseDir(), "app");
+            this.localPath = new Path(File.createTempFile(appPath.getName(), 
"workflow.xml").toString());
+
+            dfs.mkdirs(appPath);
+            dfs.mkdirs(new Path(appPath, "lib"));
+        }
+
+        private WorkflowJobBuilder submit(final String workflowXml) throws 
IOException, OozieClientException {
+            writeToDFS(workflowXml);
+
+            final Properties conf = createAndResolveConfiguration();
+
+            workflowJobId = oozieClient.submit(conf);
+
+            assertNotNull(workflowJobId);
+
+            return this;
+        }
+
+        private WorkflowJobBuilder validate(final String workflowXml) throws 
IOException, OozieClientException {
+            final String result = oozieClient.validateXML(workflowXml);
+
+            assertEquals("not a valid workflow xml", 
V2ValidateServlet.VALID_WORKFLOW_APP, result);
+
+            return this;
+        }
+
+        private void writeToDFS(final String workflowXml) throws IOException {
+            try (final Writer writer = new 
OutputStreamWriter(dfs.create(getDFSWorkflowPath()))) {
+                writer.write(workflowXml);
+                writer.flush();
+            }
+        }
+
+        private Properties createAndResolveConfiguration() {
+            final OozieClient wc = LocalOozie.getClient();
+
+            final Properties conf = wc.createConfiguration();
+            conf.setProperty(OozieClient.APP_PATH, 
getDFSWorkflowPath().toString());
+            conf.setProperty(OozieClient.USER_NAME, getTestUser());
+            conf.setProperty("nameNodeBasePath", 
getFsTestCaseDir().toString());
+            conf.setProperty("base", getFsTestCaseDir().toUri().getPath());
+            conf.setProperty("nameNode", getNameNodeUri());
+            conf.setProperty("jobTracker", getJobTrackerUri());
+            return conf;
+        }
+
+        private void writeToLocalFile(final String workflowXml) throws 
IOException {
+            try (final Writer writer = new FileWriter(localPath.toString())) {
+                writer.write(workflowXml);
+                writer.flush();
+            }
+        }
+
+        private Path getDFSWorkflowPath() {
+            return new Path(appPath, "workflow.xml");
+        }
+
+        private WorkflowJobBuilder start() throws OozieClientException {
+            workflowJob = oozieClient.getJobInfo(workflowJobId);
+
+            assertNotNull(workflowJob);
+            assertEquals(WorkflowJob.Status.PREP, workflowJob.getStatus());
+
+            oozieClient.start(workflowJobId);
+
+            workflowJob = oozieClient.getJobInfo(workflowJobId);
+
+            assertEquals(WorkflowJob.Status.RUNNING, workflowJob.getStatus());
+
+            return this;
+        }
+
+        private WorkflowJobBuilder waitForSucceeded() throws 
OozieClientException {
+            waitFor(15_000, new Predicate() {
+                public boolean evaluate() throws Exception {
+                    final WorkflowJob wf = 
oozieClient.getJobInfo(workflowJobId);
+                    return wf.getStatus() == WorkflowJob.Status.SUCCEEDED;
+                }
+            });
+
+            workflowJob = oozieClient.getJobInfo(workflowJobId);
+
+            return this;
+        }
+
+        private WorkflowJob build() {
+            return workflowJob;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 8c34cf4..7f03e6d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -128,6 +128,7 @@
         <module>docs</module>
         <module>tools</module>
         <module>minitest</module>
+        <module>fluent-job</module>
         <module>server</module>
         <module>distro</module>
         <module>zookeeper-security-tests</module>
@@ -190,6 +191,12 @@
             </dependency>
             <dependency>
                 <groupId>org.apache.oozie</groupId>
+                <artifactId>oozie-core</artifactId>
+                <type>test-jar</type>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.oozie</groupId>
                 <artifactId>oozie-examples</artifactId>
                 <version>${project.version}</version>
             </dependency>
@@ -255,6 +262,28 @@
                 <version>${project.version}</version>
                 <type>war</type>
             </dependency>
+            <dependency>
+                <groupId>org.apache.oozie</groupId>
+                <artifactId>oozie-fluent-job-api</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.oozie</groupId>
+                <artifactId>oozie-fluent-job-api</artifactId>
+                <version>${project.version}</version>
+                <classifier>tests</classifier>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.oozie.test</groupId>
+                <artifactId>oozie-mini</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.oozie.test</groupId>
+                <artifactId>oozie-mini</artifactId>
+                <version>${project.version}</version>
+                <classifier>tests</classifier>
+            </dependency>
 
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
@@ -840,11 +869,19 @@
                 <artifactId>hadoop-yarn-server-web-proxy</artifactId>
                 <version>${hadoop.version}</version>
             </dependency>
+
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-mapreduce-client-core</artifactId>
                 <version>${hadoop.version}</version>
             </dependency>
+
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-annotations</artifactId>
+                <version>${hadoop.version}</version>
+            </dependency>
+
             <dependency>
                 <groupId>org.apache.pig</groupId>
                 <artifactId>pig</artifactId>
@@ -1513,13 +1550,41 @@
                 <version>${dropwizard.metrics.version}</version>
             </dependency>
 
-            <!-- For drawing runtime DAG -->
+            <!-- Draw runtime DAG -->
             <dependency>
                 <groupId>guru.nidi</groupId>
                 <artifactId>graphviz-java</artifactId>
                 <version>0.2.2</version>
             </dependency>
 
+            <!-- Fluent Job API conversion to XML -->
+            <dependency>
+                <groupId>net.sf.dozer</groupId>
+                <artifactId>dozer</artifactId>
+                <version>5.5.1</version>
+            </dependency>
+            <dependency>
+                <groupId>org.jvnet.jaxb2_commons</groupId>
+                <artifactId>jaxb2-basics</artifactId>
+                <version>1.11.1</version>
+            </dependency>
+            <dependency>
+                <groupId>org.jvnet.jaxb2_commons</groupId>
+                <artifactId>jaxb2-namespace-prefix</artifactId>
+                <version>1.3</version>
+            </dependency>
+            <dependency>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>jaxb2-maven-plugin</artifactId>
+                <version>2.2</version>
+            </dependency>
+
+            <dependency>
+                <groupId>org.xmlunit</groupId>
+                <artifactId>xmlunit-core</artifactId>
+                <version>2.3.0</version>
+            </dependency>
+
             <dependency>
                 <groupId>org.eclipse.jgit</groupId>
                 <artifactId>org.eclipse.jgit</artifactId>
@@ -1710,6 +1775,11 @@
                     <artifactId>maven-clean-plugin</artifactId>
                     <version>3.0.0</version>
                 </plugin>
+                <plugin>
+                    <groupId>org.codehaus.mojo</groupId>
+                    <artifactId>jaxb2-maven-plugin</artifactId>
+                    <version>2.3.1</version>
+                </plugin>
             </plugins>
         </pluginManagement>
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index c53d4d1..cfb558c 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.1.0 release (trunk - unreleased)
 
+OOZIE-2339 [fluent-job] Minimum Viable Fluent Job API (daniel.becker, 
andras.piros via rkanter, gezapeti, pbacsko)
 OOZIE-3224 Upgrade Jetty to 9.3 (kmarton via andras.piros)
 OOZIE-3284 Upgrade maven-javadoc-plugin to 3.0.1 (kmarton via pbacsko, 
andras.piros)
 OOZIE-3278 Oozie fails to start with Hadoop 2.6.0 (kmarton via andras.piros)

Reply via email to