http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestGraph.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestGraph.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestGraph.java
new file mode 100644
index 0000000..ff5df72
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestGraph.java
@@ -0,0 +1,941 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.dag;
+
+import org.apache.oozie.fluentjob.api.Condition;
+import org.apache.oozie.fluentjob.api.NodesToPng;
+import org.apache.oozie.fluentjob.api.action.MapReduceActionBuilder;
+import org.apache.oozie.fluentjob.api.action.Node;
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class TestGraph {
    // JUnit 4 rule that lets individual tests declare up front which exception
    // type they expect the code under test to throw.
    @Rule
    public final ExpectedException expectedException = ExpectedException.none();

    // Renders workflows and graphs to PNG files for visual debugging; the call
    // sites below are commented out and kept only for manual use.
    @Rule
    public final NodesToPng nodesToPng = new NodesToPng();
+
+    @Test
+    public void testNameIsCorrect() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+
+        MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        MapReduceActionBuilder.create().withName("C").withParent(a).build();
+
+        final String name = "workflow-name";
+        final Workflow workflow = new 
WorkflowBuilder().withName(name).withDagContainingNode(a).build();
+
+        final Graph graph = new Graph(workflow);
+        assertEquals(name, graph.getName());
+
+//        nodesToPng.withWorkflow(workflow);
+//        nodesToPng.withGraph(graph);
+    }
+
+    @Test
+    public void testDuplicateNamesThrow() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+        MapReduceActionBuilder.create().withName("A").withParent(a).build();
+
+        // The exception will be thrown by the Workflow object,
+        // but if it breaks there, we want to catch duplicates here, too.
+        expectedException.expect(IllegalArgumentException.class);
+        final Workflow workflow = new 
WorkflowBuilder().withDagContainingNode(a).build();
+
+        new Graph(workflow);
+    }
+
+    @Test
+    public void testWorkflowWithoutJoin() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+
+        MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        MapReduceActionBuilder.create().withName("C").withParent(a).build();
+
+        final Workflow workflow = new 
WorkflowBuilder().withName("without-join").withDagContainingNode(a).build();
+        final Graph graph = new Graph(workflow);
+
+        checkDependencies(workflow.getNodes(), graph);
+
+//        nodesToPng.withWorkflow(workflow);
+//        nodesToPng.withGraph(graph);
+    }
+
+    @Test
+    public void testWorkflowWithTrivialJoin() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+
+        final Node b = 
MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        final Node c = 
MapReduceActionBuilder.create().withName("C").withParent(a).build();
+        
MapReduceActionBuilder.create().withName("D").withParent(b).withParent(c).build();
+
+        final Workflow workflow = new 
WorkflowBuilder().withName("trivial-join").withDagContainingNode(a).build();
+        final Graph graph = new Graph(workflow);
+
+        checkDependencies(workflow.getNodes(), graph);
+
+//        nodesToPng.withWorkflow(workflow);
+//        nodesToPng.withGraph(graph);
+    }
+
    /**
     * Input DAG: A -> (B, C); D joins (B, C); E hangs off C only; F joins (D, E).
     * Closing fork1 before opening fork2 requires the converter to insert a join
     * of B and C (join1) even though no original node joins exactly those two —
     * hence "new dependencies needed".
     */
    @Test
    public void testWorkflowNewDependenciesNeeded() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).withParent(c).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(c).build();

        MapReduceActionBuilder.create().withName("F").withParent(d).withParent(e).build();

        final Workflow workflow = new WorkflowBuilder().withName("new-dependencies-needed").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Hand-build the expected structure:
        // start -> A -> fork1 -> (B, C) -> join1 -> fork2 -> (D, E) -> join2 -> F -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);

        end.addParent(F);
        F.addParent(join2);
        join2.addParent(D);
        join2.addParent(E);
        D.addParent(fork2);
        E.addParent(fork2);
        fork2.addParent(join1);
        join1.addParent(B);
        join1.addParent(C);
        B.addParent(fork1);
        C.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D, E, F);

        // Compare the generated graph to the expected structure node by node (by name).
        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * Input DAG with crossing edges: two roots A and B, and both C and D depend
     * on both of them. Expected conversion: fork (A, B), join them, then fork
     * again into (C, D) and join before the end.
     */
    @Test
    public void testCrossingDependencyLines() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").build();
        MapReduceActionBuilder.create().withName("C").withParent(a).withParent(b).build();

        MapReduceActionBuilder.create().withName("D").withParent(a).withParent(b).build();

        final Workflow workflow = new WorkflowBuilder().withName("crossing-dependencies").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Expected structure:
        // start -> fork1 -> (A, B) -> join1 -> fork2 -> (C, D) -> join2 -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);

        end.addParent(join2);
        join2.addParent(C);
        join2.addParent(D);
        C.addParent(fork2);
        D.addParent(fork2);
        fork2.addParent(join1);
        join1.addParent(A);
        join1.addParent(B);
        A.addParent(fork1);
        B.addParent(fork1);
        fork1.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D);
        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * Input DAG: A -> (B, E); B -> (C, D); F joins (C, D, E). The single
     * three-way join on F has to be split in two: (C, D) are joined first
     * (join2, closing B's fork), and only then joined with E (join1).
     */
    @Test
    public void testSplittingJoins() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(b).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(a).build();

        MapReduceActionBuilder.create().withName("F").withParent(c).withParent(d).withParent(e).build();

        final Workflow workflow = new WorkflowBuilder().withName("splitting-joins").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Expected structure: the inner pair (C, D) is joined by join2 before
        // join1 merges that result with E.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);

        end.addParent(F);
        F.addParent(join1);
        join1.addParent(join2);
        join1.addParent(E);
        join2.addParent(C);
        join2.addParent(D);
        C.addParent(fork2);
        D.addParent(fork2);
        fork2.addParent(B);
        B.addParent(fork1);
        E.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D, E, F);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * Input DAG: A -> (B, C, D); E joins (B, C); F joins (E, D). The three-way
     * fork after A must be split: fork1 branches into D and a nested fork2
     * over (B, C), mirroring the asymmetric joins downstream.
     */
    @Test
    public void testSplittingForks() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(a).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(b).withParent(c).build();

        MapReduceActionBuilder.create().withName("F").withParent(e).withParent(d).build();

        final Workflow workflow = new WorkflowBuilder().withName("splitting-forks").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Expected structure: fork2 is nested directly under fork1.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);

        end.addParent(F);
        F.addParent(join1);
        join1.addParent(E);
        join1.addParent(D);
        E.addParent(join2);
        join2.addParent(B);
        join2.addParent(C);
        B.addParent(fork2);
        C.addParent(fork2);
        fork2.addParent(fork1);
        D.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D, E, F);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * Input DAG: A -> (B, C); D <- B; E <- C; F joins (D, E); G <- C; H joins
     * (F, G). G branches off an "uncle" (C) of the join on F, so the converter
     * must close the first fork at (D, E) before forking again for (F, G).
     */
    @Test
    public void testBranchingUncles() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(c).build();

        final Node f = MapReduceActionBuilder.create().withName("F").withParent(d).withParent(e).build();
        final Node g = MapReduceActionBuilder.create().withName("G").withParent(c).build();
        MapReduceActionBuilder.create().withName("H").withParent(f).withParent(g).build();

        final Workflow workflow = new WorkflowBuilder().withName("branching-uncles").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);
        final NodeBase G = new ExplicitNode("G", null);
        final NodeBase H = new ExplicitNode("H", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        // NOTE(review): the second fork/join pair is deliberately named "fork3" /
        // "join3" (not "fork2"/"join2") — presumably the Graph naming counter
        // skips a number for an intermediate fork created and discarded during
        // conversion; confirm against Graph's name-generation logic.
        final Fork fork2 = new Fork("fork3");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join3", fork2);

        end.addParent(H);
        H.addParent(join2);
        join2.addParent(F);
        join2.addParent(G);
        F.addParent(fork2);
        G.addParent(fork2);
        fork2.addParent(join1);
        join1.addParent(D);
        join1.addParent(E);
        D.addParent(B);
        E.addParent(C);
        B.addParent(fork1);
        C.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D, E, F, G, H);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
+    @Test
+    public void testTrivialRedundantEdge() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+
+        final Node b = 
MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        
MapReduceActionBuilder.create().withName("C").withParent(a).withParent(b).build();
+
+        final Workflow workflow = new 
WorkflowBuilder().withName("trivial-redundant-edge").withDagContainingNode(a).build();
+        final Graph graph = new Graph(workflow);
+
+        checkDependencies(workflow.getNodes(), graph);
+
+        final NodeBase A = new ExplicitNode("A", null);
+        final NodeBase B = new ExplicitNode("B", null);
+        final NodeBase C = new ExplicitNode("C", null);
+
+        final Start start = new Start("start");
+        final End end = new End("end");
+
+        end.addParent(C);
+        C.addParent(B);
+        B.addParent(A);
+        A.addParent(start);
+
+        final List<NodeBase> nodes = Arrays.asList(start, end, A, B, C);
+
+        checkEqualStructureByNames(nodes, graph);
+//
+//        nodesToPng.withWorkflow(workflow);
+//        nodesToPng.withGraph(graph);
+    }
+
    /**
     * Same DAG as {@link #testWorkflowNewDependenciesNeeded()} plus a redundant
     * direct edge A -> F (A already reaches F through B/C/D/E). The expected
     * graph is identical to that test's: the redundant edge must be dropped.
     */
    @Test
    public void testRedundantEdge() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).withParent(c).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(c).build();

        // The withParent(a) edge is the redundant one.
        MapReduceActionBuilder.create().withName("F").withParent(d).withParent(e).withParent(a).build();

        final Workflow workflow = new WorkflowBuilder().withName("redundant-edge").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Expected structure:
        // start -> A -> fork1 -> (B, C) -> join1 -> fork2 -> (D, E) -> join2 -> F -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);

        end.addParent(F);
        F.addParent(join2);
        join2.addParent(D);
        join2.addParent(E);
        D.addParent(fork2);
        E.addParent(fork2);
        fork2.addParent(join1);
        join1.addParent(B);
        join1.addParent(C);
        B.addParent(fork1);
        C.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, join1, join2, A, B, C, D, E, F);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * A deeper DAG where a branch (H <- F <- C) joins in "late", two levels
     * below the fork it originates from. The converter must nest join2 (over
     * D, G) inside join1 (over that result and H) before the final fork3/join3
     * pair around (I, J).
     */
    @Test
    public void testLateUncle() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(b).build();

        final Node f = MapReduceActionBuilder.create().withName("F").withParent(c).build();

        final Node g = MapReduceActionBuilder.create().withName("G").withParent(e).build();
        final Node h = MapReduceActionBuilder.create().withName("H").withParent(f).build();
        final Node i = MapReduceActionBuilder.create().withName("I").withParent(d).withParent(g).build();
        final Node j = MapReduceActionBuilder.create().withName("J").withParent(e).withParent(h).build();
        MapReduceActionBuilder.create().withName("K").withParent(i).withParent(j).build();

        final Workflow workflow = new WorkflowBuilder().withName("late-uncle").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Hand-build the expected structure.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);
        final NodeBase G = new ExplicitNode("G", null);
        final NodeBase H = new ExplicitNode("H", null);
        final NodeBase I = new ExplicitNode("I", null);
        final NodeBase J = new ExplicitNode("J", null);
        final NodeBase K = new ExplicitNode("K", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Fork fork3 = new Fork("fork3");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);
        final Join join3 = new Join("join3", fork3);

        end.addParent(K);
        K.addParent(join3);
        join3.addParent(I);
        join3.addParent(J);
        I.addParent(fork3);
        J.addParent(fork3);
        fork3.addParent(join1);
        // join1 merges the nested join2 branch with the "late" H branch.
        join1.addParent(join2);
        join1.addParent(H);
        join2.addParent(D);
        join2.addParent(G);
        G.addParent(E);
        D.addParent(fork2);
        E.addParent(fork2);
        fork2.addParent(B);
        B.addParent(fork1);
        H.addParent(F);
        F.addParent(C);
        C.addParent(fork1);
        fork1.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(start, end, fork1, fork2, fork3, join1, join2, join3,
                                             A, B, C, D, E, F, G, H, I, J, K);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * The DAG has two roots, A and G. The converter must introduce an initial
     * fork1/join1 pair directly after start to merge both roots before the
     * rest of the graph (the new-dependencies-needed shape) follows.
     */
    @Test
    public void testMultipleRoots() {
        final Node a = MapReduceActionBuilder.create().withName("A").build();
        final Node g = MapReduceActionBuilder.create().withName("G").build();

        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).withParent(g).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();

        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b).withParent(c).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(c).build();

        MapReduceActionBuilder.create().withName("F").withParent(d).withParent(e).build();

        final Workflow workflow = new WorkflowBuilder().withName("multiple-roots").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Every original parent/child dependency must still hold in the graph.
        checkDependencies(workflow.getNodes(), graph);

        // Expected structure:
        // start -> fork1 -> (G, A) -> join1 -> fork2 -> (B, C) -> join2
        //       -> fork3 -> (D, E) -> join3 -> F -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);
        final NodeBase G = new ExplicitNode("G", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork1 = new Fork("fork1");
        final Fork fork2 = new Fork("fork2");
        final Fork fork3 = new Fork("fork3");
        final Join join1 = new Join("join1", fork1);
        final Join join2 = new Join("join2", fork2);
        final Join join3 = new Join("join3", fork3);

        end.addParent(F);
        F.addParent(join3);
        join3.addParent(D);
        join3.addParent(E);
        D.addParent(fork3);
        E.addParent(fork3);
        fork3.addParent(join2);
        join2.addParent(B);
        join2.addParent(C);
        B.addParent(fork2);
        C.addParent(fork2);
        fork2.addParent(join1);
        join1.addParent(G);
        join1.addParent(A);
        G.addParent(fork1);
        A.addParent(fork1);
        fork1.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(
                start, end, fork1, fork2, fork3, join1, join2, join3, A, B, C, D, E, F, G);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * Simplest decision: after A, B is taken on a condition and C is the
     * default branch. Expected graph inserts a decision node after A and a
     * decision-join merging B and C before end.
     */
    @Test
    public void testTrivialDecision() {
        final String conditionGotoB = "condition_goto_B";

        final Node a = MapReduceActionBuilder.create().withName("A").build();
        MapReduceActionBuilder.create().withName("B").withParentWithCondition(a, conditionGotoB).build();
        MapReduceActionBuilder.create().withName("C").withParentDefaultConditional(a).build();

        final Workflow workflow = new WorkflowBuilder()
                .withName("Workflow_to_map")
                .withDagContainingNode(a)
                .build();
        final Graph graph = new Graph(workflow);

        // Expected structure:
        // start -> A -> decision1 -> (B | C default) -> decisionJoin1 -> end.
        final Start start = new Start("start");
        final End end = new End("end");
        final Decision decision = new Decision("decision1");
        final DecisionJoin decisionJoin = new DecisionJoin("decisionJoin1", decision);

        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);

        end.addParent(decisionJoin);
        decisionJoin.addParent(B);
        decisionJoin.addParent(C);
        B.addParentWithCondition(decision, Condition.actualCondition(conditionGotoB));
        C.addParentDefaultConditional(decision);
        decision.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(
                start, end, decision, decisionJoin, A, B, C);

        checkEqualStructureByNames(nodes, graph);
    }
+
    /**
     * Decision diamond: A decides between B and C (each on its own condition)
     * and D depends on both. Expected graph closes the decision with a
     * decision-join before D.
     */
    @Test
    public void testTrivialDiamondDecision() {
        final String conditionGotoB = "condition_goto_B";
        final String conditionGotoC = "condition_goto_C";

        final Node a = MapReduceActionBuilder.create().withName("A").build();
        final Node b = MapReduceActionBuilder.create().withName("B").withParentWithCondition(a, conditionGotoB).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParentWithCondition(a, conditionGotoC).build();
        MapReduceActionBuilder.create().withName("D").withParent(b).withParent(c).build();

        final Workflow workflow = new WorkflowBuilder().withName("trivial-decision").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Expected structure:
        // start -> A -> decision1 -> (B | C) -> decisionJoin1 -> D -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Decision decision = new Decision("decision1");
        final DecisionJoin decisionJoin = new DecisionJoin("decisionJoin1", decision);

        end.addParent(D);
        D.addParent(decisionJoin);
        decisionJoin.addParent(B);
        decisionJoin.addParent(C);
        B.addParentWithCondition(decision, Condition.actualCondition(conditionGotoB));
        C.addParentWithCondition(decision, Condition.actualCondition(conditionGotoC));
        decision.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(
                start, end, decision, decisionJoin, A, B, C, D);

        checkEqualStructureByNames(nodes, graph);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);
    }
+
    /**
     * A fork/join (A forks to B, C) followed by a decision: D depends on B and
     * conditionally on C; E conditionally on C. In the expected graph the join
     * of (B, C) is placed before the decision, so D's unconditional dependency
     * on B is satisfied through join1 rather than by a direct edge.
     */
    @Test
    public void testDecisionAndJoin() {
        final String conditionGotoD = "condition_goto_D";
        final String conditionGotoE = "condition_goto_E";

        final Node a = MapReduceActionBuilder.create().withName("A").build();
        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();
        final Node d = MapReduceActionBuilder.create().withName("D").withParent(b)
                .withParentWithCondition(c, conditionGotoD).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParentWithCondition(c, conditionGotoE).build();
        MapReduceActionBuilder.create().withName("F").withParent(d).build();
        MapReduceActionBuilder.create().withName("G").withParent(e).build();

        final Workflow workflow = new WorkflowBuilder().withName("decision-and-join").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Expected structure:
        // start -> A -> fork1 -> (B, C) -> join1 -> decision1
        //       -> (D -> F | E -> G) -> decisionJoin1 -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);
        final NodeBase G = new ExplicitNode("G", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork = new Fork("fork1");
        final Join join = new Join("join1", fork);
        final Decision decision = new Decision("decision1");
        final DecisionJoin decisionJoin = new DecisionJoin("decisionJoin1", decision);

        end.addParent(decisionJoin);
        decisionJoin.addParent(F);
        decisionJoin.addParent(G);
        F.addParent(D);
        D.addParentWithCondition(decision, Condition.actualCondition(conditionGotoD));
        G.addParent(E);
        E.addParentWithCondition(decision, Condition.actualCondition(conditionGotoE));
        decision.addParent(join);
        join.addParent(B);
        join.addParent(C);
        B.addParent(fork);
        C.addParent(fork);
        fork.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(
                start, end, fork, join, decision, decisionJoin, A, B, C, D, E, F, G);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);

        checkEqualStructureByNames(nodes, graph);
    }
+
    /**
     * The decision sits at an "uncle" of a join: A forks to (B, C); the
     * decision on C chooses D or F, and E joins B with D. In the expected
     * graph the (B, C) join is pulled in front of the decision, linearizing
     * E's dependencies as E <- D <- decision <- join.
     */
    @Test
    public void testDecisionAtUncleOfJoin() {
        final String conditionGotoD = "condition_goto_D";
        final String conditionGotoF = "condition_goto_F";

        final Node a = MapReduceActionBuilder.create().withName("A").build();
        final Node b = MapReduceActionBuilder.create().withName("B").withParent(a).build();
        final Node c = MapReduceActionBuilder.create().withName("C").withParent(a).build();
        final Node d = MapReduceActionBuilder.create().withName("D").withParentWithCondition(c, conditionGotoD).build();
        final Node e = MapReduceActionBuilder.create().withName("E").withParent(b).withParent(d).build();
        final Node f = MapReduceActionBuilder.create().withName("F").withParentWithCondition(c, conditionGotoF).build();
        MapReduceActionBuilder.create().withName("G").withParent(e).build();
        MapReduceActionBuilder.create().withName("H").withParent(f).build();

        final Workflow workflow = new WorkflowBuilder().withName("decision-at-uncle-of-join").withDagContainingNode(a).build();
        final Graph graph = new Graph(workflow);

        // Expected structure:
        // start -> A -> fork1 -> (B, C) -> join1 -> decision1
        //       -> (D -> E -> G | F -> H) -> decisionJoin1 -> end.
        final NodeBase A = new ExplicitNode("A", null);
        final NodeBase B = new ExplicitNode("B", null);
        final NodeBase C = new ExplicitNode("C", null);
        final NodeBase D = new ExplicitNode("D", null);
        final NodeBase E = new ExplicitNode("E", null);
        final NodeBase F = new ExplicitNode("F", null);
        final NodeBase G = new ExplicitNode("G", null);
        final NodeBase H = new ExplicitNode("H", null);

        final Start start = new Start("start");
        final End end = new End("end");
        final Fork fork = new Fork("fork1");
        final Join join = new Join("join1", fork);
        final Decision decision = new Decision("decision1");
        final DecisionJoin decisionJoin = new DecisionJoin("decisionJoin1", decision);

        end.addParent(decisionJoin);
        decisionJoin.addParent(G);
        decisionJoin.addParent(H);
        G.addParent(E);
        H.addParent(F);
        E.addParent(D);
        D.addParentWithCondition(decision, Condition.actualCondition(conditionGotoD));
        F.addParentWithCondition(decision, Condition.actualCondition(conditionGotoF));
        decision.addParent(join);
        join.addParent(B);
        join.addParent(C);
        B.addParent(fork);
        C.addParent(fork);
        fork.addParent(A);
        A.addParent(start);

        final List<NodeBase> nodes = Arrays.asList(
                start, end, fork, join, decision, decisionJoin, A, B, C, D, E, F, G, H);

//        nodesToPng.withWorkflow(workflow);
//        nodesToPng.withGraph(graph);

        checkEqualStructureByNames(nodes, graph);
    }
+
+    @Test
+    public void testAlreadyClosedDecisionBranching() {
+        final String conditionGotoD = "condition_goto_D";
+        final String conditionGotoE = "condition_goto_E";
+
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+        final Node b = 
MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        final Node c = 
MapReduceActionBuilder.create().withName("C").withParent(a).build();
+        final Node d = 
MapReduceActionBuilder.create().withName("D").withParentWithCondition(b, 
conditionGotoD).build();
+        final Node e = 
MapReduceActionBuilder.create().withName("E").withParentWithCondition(b, 
conditionGotoE).build();
+
+        final Node f = 
MapReduceActionBuilder.create().withName("F").withParent(d).withParent(e).build();
+        
MapReduceActionBuilder.create().withName("G").withParent(f).withParent(c).build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                .withName("already-closed-decision-branching")
+                .withDagContainingNode(a)
+                .build();
+        final Graph graph = new Graph(workflow);
+
+        final NodeBase A = new ExplicitNode("A", null);
+        final NodeBase B = new ExplicitNode("B", null);
+        final NodeBase C = new ExplicitNode("C", null);
+        final NodeBase D = new ExplicitNode("D", null);
+        final NodeBase E = new ExplicitNode("E", null);
+        final NodeBase F = new ExplicitNode("F", null);
+        final NodeBase G = new ExplicitNode("G", null);
+
+        final Start start = new Start("start");
+        final End end = new End("end");
+        final Fork fork = new Fork("fork1");
+        final Join join = new Join("join1", fork);
+        final Decision decision = new Decision("decision1");
+        final DecisionJoin decisionJoin = new DecisionJoin("decisionJoin1", 
decision);
+
+        end.addParent(G);
+        G.addParent(join);
+        join.addParent(F);
+        join.addParent(C);
+        F.addParent(decisionJoin);
+        decisionJoin.addParent(D);
+        decisionJoin.addParent(E);
+        D.addParentWithCondition(decision, 
Condition.actualCondition(conditionGotoD));
+        E.addParentWithCondition(decision, 
Condition.actualCondition(conditionGotoE));
+        decision.addParent(B);
+        B.addParent(fork);
+        C.addParent(fork);
+        fork.addParent(A);
+        A.addParent(start);
+
+        final List<NodeBase> nodes = Arrays.asList(
+                start, end, fork, join, decision, decisionJoin, A, B, C, D, E, 
F, G);
+
+//        nodesToPng.withWorkflow(workflow);
+//        nodesToPng.withGraph(graph);
+
+        checkEqualStructureByNames(nodes, graph);
+
+    }
+
+    @Test
+    public void testIncomingConditionalBranchesFromDifferentDecisionsThrows() {
+        final Node a = MapReduceActionBuilder.create().withName("A").build();
+
+        final Node b = 
MapReduceActionBuilder.create().withName("B").withParent(a).build();
+        final Node c = 
MapReduceActionBuilder.create().withName("C").withParent(a).build();
+        final Node d = 
MapReduceActionBuilder.create().withName("D").withParent(a).build();
+
+        
MapReduceActionBuilder.create().withName("E").withParentWithCondition(c, 
"condition_goto_E").build();
+        final Node f = 
MapReduceActionBuilder.create().withName("F").withParentDefaultConditional(c).build();
+
+        final Node g = 
MapReduceActionBuilder.create().withName("G").withParentWithCondition(d, 
"condition_goto_G").build();
+        final Node h = 
MapReduceActionBuilder.create().withName("H").withParentDefaultConditional(d).build();
+
+        
MapReduceActionBuilder.create().withName("I").withParent(b).withParent(f).withParent(g).build();
+        MapReduceActionBuilder.create().withName("J").withParent(h).build();
+
+        final Workflow workflow = new WorkflowBuilder()
+                
.withName("incoming-conditional-branches-from-different-decisions")
+                .withDagContainingNode(a)
+                .build();
+
+//        nodesToPng.withWorkflow(workflow);
+
+        // TODO: We might choose to implement it later without an exception.
+        expectedException.expect(IllegalStateException.class);
+        new Graph(workflow);
+    }
+
+    private void checkEqualStructureByNames(final Collection<NodeBase> 
expectedNodes, final Graph graph2) {
+        assertEquals(expectedNodes.size(), graph2.getNodes().size());
+
+        for (final NodeBase expectedNode : expectedNodes) {
+            final NodeBase nodeInOtherGraph = 
graph2.getNodeByName(expectedNode.getName());
+
+            assertNotNull(nodeInOtherGraph);
+
+            final List<NodeBase> expectedChildren = expectedNode.getChildren();
+            final List<NodeBase> actualChildren = 
nodeInOtherGraph.getChildren();
+
+            final List<String> expectedChildrenNames = new ArrayList<>();
+            for (final NodeBase child : expectedChildren) {
+                expectedChildrenNames.add(child.getName());
+            }
+
+            final List<String> actualChildrenNames = new ArrayList<>();
+            for (final NodeBase child : actualChildren) {
+                actualChildrenNames.add(child.getName());
+            }
+
+            if (expectedNode instanceof Fork) {
+                // The order of the children of fork nodes is not important.
+                Collections.sort(expectedChildrenNames);
+                Collections.sort(actualChildrenNames);
+            }
+
+            assertEquals(expectedChildrenNames.size(), 
actualChildrenNames.size());
+
+            for (int i = 0; i < expectedChildren.size(); ++i) {
+                final String expectedName = expectedChildrenNames.get(i);
+                final String actualName = actualChildrenNames.get(i);
+
+                if (graph2.getNodeByName(actualName) instanceof ExplicitNode) {
+                    assertEquals(expectedName, actualName);
+                }
+            }
+        }
+    }
+
+    private void checkDependencies(final Set<Node> originalNodes, final Graph 
graph) {
+        for (final Node originalNode : originalNodes) {
+            for (final Node originalParent : originalNode.getAllParents()) {
+                final NodeBase node = 
graph.getNodeByName(originalNode.getName());
+                final NodeBase parent = 
graph.getNodeByName(originalParent.getName());
+
+                assertTrue(verifyDependency(parent, node));
+            }
+        }
+    }
+
+    private boolean verifyDependency(final NodeBase dependency, final NodeBase 
dependent) {
+        final List<NodeBase> children = dependency.getChildren();
+
+        for (final NodeBase child : children) {
+            if (child == dependent || verifyDependency(child, dependent)) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoin.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoin.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoin.java
new file mode 100644
index 0000000..f915bce
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoin.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.dag;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class TestJoin extends TestJoiningNodeBase<Fork, Join> {
+    @Override
+    protected Join getInstance(final String name) {
+        return getJoiningInstance(name);
+    }
+
+    @Override
+    protected Fork getBranchingInstance(String name) {
+        return new Fork(name);
+    }
+
+    @Override
+    protected Join getJoiningInstance(String name, Fork branchingPair) {
+        return new Join(name, branchingPair);
+    }
+
+    @Test
+    public void testCorrespondingForkIsCorrect() {
+        Fork fork = new Fork("fork");
+        Join join = new Join("join", fork);
+
+        assertEquals(fork, join.getBranchingPair());
+
+        assertEquals(join, fork.getClosingJoin());
+        assertTrue(fork.isClosed());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoiningNodeBase.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoiningNodeBase.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoiningNodeBase.java
new file mode 100644
index 0000000..d498019
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestJoiningNodeBase.java
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.dag;
+
import org.junit.Test;

import java.util.Arrays;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
+
+public abstract class TestJoiningNodeBase<B, J extends JoiningNodeBase<B>> 
extends TestNodeBase<JoiningNodeBase<B>> {
+    protected abstract B getBranchingInstance(final String name);
+    protected abstract J getJoiningInstance(final String name, final B 
branchingPair);
+
+    protected  J getJoiningInstance(final String name) {
+        return getJoiningInstance(name, getBranchingInstance("branching"));
+    }
+
+    @Test
+    public void testCorrespondingBranchingIsCorrect() {
+        B branching = getBranchingInstance("branching");
+        J joining = getJoiningInstance("joining", branching);
+
+        assertEquals(branching, joining.getBranchingPair());
+    }
+
+    @Test
+    public void testAddParentWhenNoneAlreadyExists() {
+        final ExplicitNode parent = new ExplicitNode("parent", null);
+        final J instance = getJoiningInstance("instance");
+
+        instance.addParent(parent);
+        assertEquals(Arrays.asList(parent), instance.getParents());
+        assertEquals(instance, parent.getChild());
+    }
+
+    @Test
+    public void testAddParentWhenSomeAlreadyExist() {
+        final NodeBase parent1 = new ExplicitNode("parent1", null);
+        final NodeBase parent2 = new ExplicitNode("parent2", null);
+
+        final J instance = getJoiningInstance("instance");
+
+        instance.addParent(parent1);
+        instance.addParent(parent2);
+
+        assertEquals(Arrays.asList(parent1, parent2), instance.getParents());
+    }
+
+    @Test
+    public void testRemoveExistingParent() {
+        final ExplicitNode parent1 = new ExplicitNode("parent1", null);
+        final ExplicitNode parent2 = new ExplicitNode("parent2", null);
+
+        final J instance = getJoiningInstance("instance");
+
+        instance.addParent(parent1);
+        instance.addParent(parent2);
+
+        instance.removeParent(parent2);
+        assertEquals(Arrays.asList(parent1), instance.getParents());
+        assertEquals(null, parent2.getChild());
+    }
+
+    @Test
+    public void testRemoveNonexistentParentThrows() {
+        final ExplicitNode parent = new ExplicitNode("parent", null);
+        final J instance = getJoiningInstance("instance");
+
+        expectedException.expect(IllegalArgumentException.class);
+        instance.removeParent(parent);
+    }
+
+    @Test
+    public void testClearExistingParent() {
+        final ExplicitNode parent1 = new ExplicitNode("parent1", null);
+        final ExplicitNode parent2 = new ExplicitNode("parent2", null);
+
+        final J instance = getJoiningInstance("instance");
+
+        instance.addParent(parent1);
+        instance.addParent(parent2);
+
+        instance.clearParents();
+        assertEquals(0, instance.getParents().size());
+        assertEquals(null, parent1.getChild());
+        assertEquals(null, parent2.getChild());
+    }
+
+    @Test
+    public void testClearNonExistentParent() {
+        final J instance = getJoiningInstance("instance");
+
+        instance.clearParents();
+        assertEquals(0, instance.getParents().size());
+    }
+
+    @Test
+    public void testJoinAddedAsParentWhenItHasNoChild() {
+        final J instance = getJoiningInstance("instance");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(instance);
+
+        assertEquals(child, instance.getChild());
+    }
+
+    @Test
+    public void testJoinAddedAsParentWhenItAlreadyHasAChildThrows() {
+        final J instance = getJoiningInstance("instance");
+        final NodeBase child1 = new ExplicitNode("child1", null);
+        final NodeBase child2 = new ExplicitNode("child2", null);
+
+        child1.addParent(instance);
+
+        expectedException.expect(IllegalStateException.class);
+        child2.addParent(instance);
+    }
+
+    @Test
+    public void testJoinRemovedAsParent() {
+        final J instance = getJoiningInstance("instance");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(instance);
+
+        child.removeParent(instance);
+
+        assertEquals(null, instance.getChild());
+    }
+
+    @Test
+    public void testGetChildren() {
+        final J instance = getJoiningInstance("instance");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(instance);
+
+        assertEquals(Arrays.asList(child), instance.getChildren());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestNodeBase.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestNodeBase.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestNodeBase.java
new file mode 100644
index 0000000..5f14c0b
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestNodeBase.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.dag;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+
+public abstract class TestNodeBase<T extends NodeBase> {
+    @Rule
+    public final ExpectedException expectedException = 
ExpectedException.none();
+
+    static final String NAME = "node name";
+
+    protected abstract T getInstance(final String name);
+
+    @Test
+    public void testNameIsCorrect() {
+        final T instance = getInstance(NAME);
+        assertEquals(NAME, instance.getName());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestStart.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestStart.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestStart.java
new file mode 100644
index 0000000..c131db0
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/dag/TestStart.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.dag;
+
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.Arrays;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
+
+public class TestStart extends TestNodeBase<Start> {
+    @Rule
+    public final ExpectedException expectedException = 
ExpectedException.none();
+
+    @Override
+    protected Start getInstance(final String name) {
+        return new Start(name);
+    }
+
+    @Test
+    public void testAddParent() {
+        final ExplicitNode parent = new ExplicitNode("parent", null);
+        final Start start = getInstance("start");
+
+        expectedException.expect(IllegalStateException.class);
+        start.addParent(parent);
+    }
+
+    @Test
+    public void testRemoveParent() {
+        final Start start = getInstance("start");
+
+        expectedException.expect(IllegalStateException.class);
+        start.removeParent(null);
+    }
+
+    @Test
+    public void testClearExistingParent() {
+        new Start("parent");
+        final Start instance = getInstance("instance");
+
+        instance.clearParents();
+    }
+
+    @Test
+    public void testClearNonExistentParent() {
+        new Start("parent");
+        final Start instance = getInstance("instance");
+
+        instance.clearParents();
+    }
+
+    @Test
+    public void testStartAddedAsParentWhenItHasNoChild() {
+        final Start start = getInstance("start");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(start);
+
+        assertEquals(child, start.getChild());
+    }
+
+    @Test
+    public void testStartAddedAsParentWhenItAlreadyHasAChildThrows() {
+        final Start start = getInstance("start");
+        final NodeBase child1 = new ExplicitNode("child1", null);
+        final NodeBase child2 = new ExplicitNode("child2", null);
+
+        child1.addParent(start);
+
+        expectedException.expect(IllegalStateException.class);
+        child2.addParent(start);
+    }
+
+    @Test
+    public void testStartRemovedAsParent() {
+        final Start instance = getInstance("instance");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(instance);
+        child.removeParent(instance);
+
+        assertEquals(null, instance.getChild());
+    }
+
+    @Test
+    public void testGetChildren() {
+        final Start start = getInstance("start");
+        final NodeBase child = new ExplicitNode("child", null);
+
+        child.addParent(start);
+
+        assertEquals(Arrays.asList(child), start.getChildren());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/factory/SimpleWorkflowFactory.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/factory/SimpleWorkflowFactory.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/factory/SimpleWorkflowFactory.java
new file mode 100644
index 0000000..b1a7ffe
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/factory/SimpleWorkflowFactory.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.factory;
+
+import org.apache.oozie.fluentjob.api.workflow.Workflow;
+import org.apache.oozie.fluentjob.api.workflow.WorkflowBuilder;
+import org.apache.oozie.fluentjob.api.action.ShellAction;
+import org.apache.oozie.fluentjob.api.action.ShellActionBuilder;
+
+/**
+ * An easily understandable {@link WorkflowFactory} that creates a {@link 
Workflow} instance consisting of
+ * three {@link ShellAction}s, the two latter depending conditionally on the 
output of the former.
+ * <p>
+ * It demonstrates how the Jobs API can be used to create dynamic {@code 
Workflow} artifacts, as well as
+ * serves as an input for {@code TestOozieCLI} methods that check, submit or 
run Jobs API {@code .jar} files.
+ */
+public class SimpleWorkflowFactory implements WorkflowFactory {
+
+    @Override
+    public Workflow create() {
+        final ShellAction parent = ShellActionBuilder.create()
+                .withName("parent")
+                .withResourceManager("${resourceManager}")
+                .withNameNode("${nameNode}")
+                .withConfigProperty("mapred.job.queue.name", "${queueName}")
+                .withArgument("my_output=Hello Oozie")
+                .withExecutable("echo")
+                .withCaptureOutput(true)
+                .build();
+
+        ShellActionBuilder.createFromExistingAction(parent)
+                .withName("happy-path")
+                .withParentWithCondition(parent, 
"${wf:actionData('parent')['my_output'] eq 'Hello Oozie'}")
+                .withoutArgument("my_output=Hello Oozie")
+                .withArgument("Happy path")
+                .withCaptureOutput(null)
+                .build();
+
+        ShellActionBuilder.createFromExistingAction(parent)
+                .withName("sad-path")
+                .withParentDefaultConditional(parent)
+                .withArgument("Sad path")
+                .build();
+
+        return new WorkflowBuilder()
+                .withName("shell-example")
+                .withDagContainingNode(parent).build();
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/SourceDataFactory.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/SourceDataFactory.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/SourceDataFactory.java
new file mode 100644
index 0000000..4def062
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/SourceDataFactory.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import com.google.common.collect.Lists;
+import org.apache.oozie.fluentjob.api.generated.workflow.CREDENTIALS;
+import org.apache.oozie.fluentjob.api.workflow.ConfigurationEntry;
+import org.apache.oozie.fluentjob.api.workflow.Credentials;
+import org.apache.oozie.fluentjob.api.workflow.CredentialsBuilder;
+
+import static org.junit.Assert.assertEquals;
+
/**
 * Supplies source-side fixture objects for the Dozer mapping tests, together with
 * assertions that verify the mapped (generated JAXB) counterparts.
 */
class SourceDataFactory {

    /**
     * Builds the canonical {@link Credentials} fixture: an "hbase" credential without
     * properties and a "hive2" credential with a single "jdbcUrl" property.
     */
    Credentials createCredentials() {
        return CredentialsBuilder.create()
                .withCredential("hbase", "hbase")
                .withCredential("hive2", "hive2",
                        Lists.newArrayList(new ConfigurationEntry("jdbcUrl", "jdbc://localhost/hive2")))
                .build();
    }

    /**
     * Asserts that {@code destination} matches the fixture built by
     * {@link #createCredentials()}; indices 0 and 1 rely on the mapping preserving
     * the insertion order of the credentials.
     */
    void assertCredentials(final CREDENTIALS destination) {
        assertEquals("hbase", destination.getCredential().get(0).getName());
        assertEquals("hbase", destination.getCredential().get(0).getType());
        // The "hbase" credential was created without properties.
        assertEquals(0, destination.getCredential().get(0).getProperty().size());
        assertEquals("hive2", destination.getCredential().get(1).getName());
        assertEquals("hive2", destination.getCredential().get(1).getType());
        assertEquals("jdbcUrl", destination.getCredential().get(1).getProperty().get(0).getName());
        assertEquals("jdbc://localhost/hive2", destination.getCredential().get(1).getProperty().get(0).getValue());
    }
}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestActionAttributesMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestActionAttributesMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestActionAttributesMapping.java
new file mode 100644
index 0000000..3f9aab5
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestActionAttributesMapping.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import com.google.common.base.Strings;
+import org.apache.oozie.fluentjob.api.action.MapReduceAction;
+import org.apache.oozie.fluentjob.api.action.MapReduceActionBuilder;
+import org.apache.oozie.fluentjob.api.dag.ExplicitNode;
+import org.apache.oozie.fluentjob.api.generated.workflow.ACTION;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public class TestActionAttributesMapping {
+
+    private final SourceDataFactory factory = new SourceDataFactory();
+
+    @Test
+    public void testMappingNoCredentialsToAction() {
+        final MapReduceAction source = MapReduceActionBuilder
+                .create()
+                .build();
+
+        final ACTION target = DozerBeanMapperSingleton.instance().map(new 
ExplicitNode("explicitNode", source), ACTION.class);
+
+        assertTrue(Strings.isNullOrEmpty(target.getCred()));
+    }
+
+    @Test
+    public void testMappingOneCredentialToAction() {
+        final MapReduceAction source = MapReduceActionBuilder
+                .create()
+                
.withCredential(factory.createCredentials().getCredentials().get(0))
+                .build();
+
+        final ACTION target = DozerBeanMapperSingleton.instance().map(new 
ExplicitNode("explicitNode", source), ACTION.class);
+
+        assertEquals("hbase", target.getCred());
+    }
+
+    @Test
+    public void testMappingTwoCredentialsToSameAction() {
+        final MapReduceAction source = MapReduceActionBuilder
+                .create()
+                
.withCredential(factory.createCredentials().getCredentials().get(0))
+                
.withCredential(factory.createCredentials().getCredentials().get(1))
+                .build();
+
+        final ACTION target = DozerBeanMapperSingleton.instance().map(new 
ExplicitNode("explicitNode", source), ACTION.class);
+
+        assertEquals("hbase,hive2", target.getCred());
+    }
+
+    @Test
+    public void testMappingNoRetryAttributesToAction() {
+        final MapReduceAction source = MapReduceActionBuilder
+                .create()
+                .build();
+
+        final ACTION target = DozerBeanMapperSingleton.instance().map(new 
ExplicitNode("explicitNode", source), ACTION.class);
+
+        assertNull(target.getRetryInterval());
+        assertNull(target.getRetryMax());
+        assertNull(target.getRetryPolicy());
+    }
+
+    @Test
+    public void testMappingRetryAttributesToAction() {
+        final MapReduceAction source = MapReduceActionBuilder
+                .create()
+                .withRetryInterval(1)
+                .withRetryMax(3)
+                .withRetryPolicy("retry-policy")
+                .build();
+
+        final ACTION target = DozerBeanMapperSingleton.instance().map(new 
ExplicitNode("explicitNode", source), ACTION.class);
+
+        assertEquals("1", target.getRetryInterval());
+        assertEquals("3", target.getRetryMax());
+        assertEquals("retry-policy", target.getRetryPolicy());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestConfigurationMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestConfigurationMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestConfigurationMapping.java
new file mode 100644
index 0000000..fac4c03
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestConfigurationMapping.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.oozie.fluentjob.api.generated.workflow.CONFIGURATION;
+import org.junit.Test;
+
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestConfigurationMapping {
+    @Test
+    public void testMappingMapToConfiguration() {
+        final String key = "key";
+        final String value = "value";
+        final ImmutableMap<String, String> map = new 
ImmutableMap.Builder<String, String>().put(key, value).build();
+
+        final CONFIGURATION configuration
+                = DozerBeanMapperSingleton.instance().map(map, 
CONFIGURATION.class);
+
+        final List<CONFIGURATION.Property> properties = 
configuration.getProperty();
+        final CONFIGURATION.Property property = properties.get(0);
+
+        assertEquals(key, property.getName());
+        assertEquals(value, property.getValue());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestControlNodeMappingBase.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestControlNodeMappingBase.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestControlNodeMappingBase.java
new file mode 100644
index 0000000..c65943c
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestControlNodeMappingBase.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.junit.Rule;
+import org.junit.rules.ExpectedException;
+
/**
 * Base class for the control-node mapping tests: provides a shared JUnit
 * {@link ExpectedException} rule so subclasses can declare the exception type
 * they expect a mapping operation to throw.
 */
public abstract class TestControlNodeMappingBase {
    // Subclasses call expectedException.expect(...) immediately before the mapping under test.
    @Rule
    public final ExpectedException expectedException = ExpectedException.none();
}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestCredentialsMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestCredentialsMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestCredentialsMapping.java
new file mode 100644
index 0000000..8b08868
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestCredentialsMapping.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.generated.workflow.CREDENTIALS;
+import org.apache.oozie.fluentjob.api.workflow.Credentials;
+import org.junit.Test;
+
+public class TestCredentialsMapping {
+
+    private final SourceDataFactory factory = new SourceDataFactory();
+
+    @Test
+    public void testMappingCredentials() {
+        final Credentials source = factory.createCredentials();
+
+        final CREDENTIALS destination = 
DozerBeanMapperSingleton.instance().map(source, CREDENTIALS.class);
+
+        factory.assertCredentials(destination);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDecisionMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDecisionMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDecisionMapping.java
new file mode 100644
index 0000000..10a7ad9
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDecisionMapping.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.generated.workflow.DECISION;
+import org.apache.oozie.fluentjob.api.generated.workflow.DEFAULT;
+import org.apache.oozie.fluentjob.api.Condition;
+import org.apache.oozie.fluentjob.api.generated.workflow.CASE;
+import org.apache.oozie.fluentjob.api.generated.workflow.SWITCH;
+import org.apache.oozie.fluentjob.api.dag.Decision;
+import org.apache.oozie.fluentjob.api.dag.DecisionJoin;
+import org.apache.oozie.fluentjob.api.dag.ExplicitNode;
+import org.apache.oozie.fluentjob.api.dag.NodeBase;
+import org.junit.Test;
+
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestDecisionMapping extends TestControlNodeMappingBase {
+    @Test
+    public void testMappingDecision() {
+        final String name = "decision";
+        final Decision decision = new Decision(name);
+
+        final NodeBase child1 = new ExplicitNode("child1", null);
+        final NodeBase child2 = new ExplicitNode("child2", null);
+        final NodeBase defaultChild = new ExplicitNode("defaultChild", null);
+
+        final String condition1String = "condition1";
+        final String condition2String = "condition2";
+
+        child1.addParentWithCondition(decision, 
Condition.actualCondition(condition1String));
+        child2.addParentWithCondition(decision, 
Condition.actualCondition(condition2String));
+        defaultChild.addParentDefaultConditional(decision);
+
+        final DECISION mappedDecision = 
DozerBeanMapperSingleton.instance().map(decision, DECISION.class);
+
+        assertEquals(name, mappedDecision.getName());
+
+        final SWITCH decisionSwitch = mappedDecision.getSwitch();
+        final List<CASE> cases = decisionSwitch.getCase();
+
+        assertEquals(2, cases.size());
+
+        assertEquals(child1.getName(), cases.get(0).getTo());
+        assertEquals(condition1String, cases.get(0).getValue());
+
+        assertEquals(child2.getName(), cases.get(1).getTo());
+        assertEquals(condition2String, cases.get(1).getValue());
+
+        final DEFAULT decisionDefault = decisionSwitch.getDefault();
+        assertEquals(defaultChild.getName(), decisionDefault.getTo());
+    }
+
+    @Test
+    public void testMappingDecisionWithoutDefaultThrows() {
+        final String name = "decision";
+        final Decision decision = new Decision(name);
+
+        final NodeBase child1 = new ExplicitNode("child1", null);
+        final NodeBase child2 = new ExplicitNode("child2", null);
+
+        final Condition condition1 = Condition.actualCondition("condition1");
+        final Condition condition2 = Condition.actualCondition("condition2");
+
+        child1.addParentWithCondition(decision, condition1);
+        child2.addParentWithCondition(decision, condition2);
+
+        expectedException.expect(IllegalStateException.class);
+        DozerBeanMapperSingleton.instance().map(decision, DECISION.class);
+    }
+
+    @Test
+    public void testMappingDecisionWithDecisionJoin() {
+        final String child1Name = "child1";
+        final String child2Name = "child2";
+        final Decision decision = new Decision("decision");
+
+        final NodeBase decisionJoin1 = new DecisionJoin("decisionJoin1", new 
Decision("decision"));
+        decisionJoin1.addParentWithCondition(decision, 
Condition.actualCondition("condition"));
+
+        final NodeBase decisionJoin2 = new DecisionJoin("decisionJoin2", new 
Decision("decision2"));
+        decisionJoin2.addParentDefaultConditional(decision);
+
+        final NodeBase child1 = new ExplicitNode(child1Name, null);
+        child1.addParent(decisionJoin1);
+
+        final NodeBase child2 = new ExplicitNode(child2Name, null);
+        child2.addParent(decisionJoin2);
+
+        final DECISION mappedDecision = 
DozerBeanMapperSingleton.instance().map(decision, DECISION.class);
+
+        assertEquals(child1Name, 
mappedDecision.getSwitch().getCase().get(0).getTo());
+        assertEquals(child2Name, 
mappedDecision.getSwitch().getDefault().getTo());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDeleteMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDeleteMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDeleteMapping.java
new file mode 100644
index 0000000..7869237
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDeleteMapping.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.action.Delete;
+import org.apache.oozie.fluentjob.api.generated.workflow.DELETE;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestDeleteMapping {
+    @Test
+    public void testMappingDelete() {
+        final String path = "path/to/location";
+        final Boolean skipTrash = true;
+        final Delete delete = new Delete(path, skipTrash);
+
+        final DELETE deleteJAXB = 
DozerBeanMapperSingleton.instance().map(delete, DELETE.class);
+
+        assertEquals(path, deleteJAXB.getPath());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDistcpActionMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDistcpActionMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDistcpActionMapping.java
new file mode 100644
index 0000000..55d66c2
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestDistcpActionMapping.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.action.DistcpAction;
+import org.apache.oozie.fluentjob.api.action.DistcpActionBuilder;
+import org.apache.oozie.fluentjob.api.action.PrepareBuilder;
+import org.apache.oozie.fluentjob.api.generated.action.distcp.ACTION;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+public class TestDistcpActionMapping {
+    @Test
+    public void testMappingDistcpAction() {
+        final String resourceManager = "${resourceManager}";
+        final String nameNode = "${nameNode}";
+        final String javaOpts = "-Dopt1 -Dopt2";
+        final List<String> args = Arrays.asList("arg1", "arg2");
+
+        final DistcpActionBuilder builder = DistcpActionBuilder.create();
+
+        builder.withResourceManager(resourceManager)
+                .withNameNode(nameNode)
+                .withPrepare(new PrepareBuilder().build())
+                .withJavaOpts(javaOpts);
+
+        for (final String arg : args) {
+            builder.withArg(arg);
+        }
+
+        builder.withConfigProperty("propertyName1", "propertyValue1")
+                .withConfigProperty("propertyName2", "propertyValue2");
+
+        final DistcpAction action = builder.build();
+
+        final ACTION distcp = DozerBeanMapperSingleton.instance().map(action, 
ACTION.class);
+
+        assertEquals(resourceManager, distcp.getResourceManager());
+        assertEquals(nameNode, distcp.getNameNode());
+        assertNotNull(distcp.getPrepare());
+        assertNotNull(distcp.getConfiguration());
+        assertEquals(javaOpts, distcp.getJavaOpts());
+        assertEquals(args, distcp.getArg());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEmailActionMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEmailActionMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEmailActionMapping.java
new file mode 100644
index 0000000..0bea733
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEmailActionMapping.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.action.EmailAction;
+import org.apache.oozie.fluentjob.api.action.EmailActionBuilder;
+import org.apache.oozie.fluentjob.api.generated.action.email.ACTION;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestEmailActionMapping {
+    @Test
+    public void testMappingEmailAction() {
+        final String to = "[email protected]";
+        final String cc = "[email protected]";
+        final String bcc = "[email protected]";
+        final String subject = "Subject";
+        final String body = "Email body.";
+        final String contentType = "content_type";
+        final String attachment = "attachment";
+
+        final EmailAction action = EmailActionBuilder.create()
+                .withName("email-action")
+                .withRecipient(to)
+                .withCc(cc)
+                .withBcc(bcc)
+                .withSubject(subject)
+                .withBody(body)
+                .withContentType(contentType)
+                .withAttachment(attachment)
+                .build();
+
+        final ACTION emailAction = 
DozerBeanMapperSingleton.instance().map(action, ACTION.class);
+
+        assertEquals(to, emailAction.getTo());
+        assertEquals(cc, emailAction.getCc());
+        assertEquals(bcc, emailAction.getBcc());
+        assertEquals(subject, emailAction.getSubject());
+        assertEquals(body, emailAction.getBody());
+        assertEquals(contentType, emailAction.getContentType());
+        assertEquals(attachment, emailAction.getAttachment());
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/8a0a6487/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEndMapping.java
----------------------------------------------------------------------
diff --git 
a/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEndMapping.java
 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEndMapping.java
new file mode 100644
index 0000000..079b8fa
--- /dev/null
+++ 
b/fluent-job/fluent-job-api/src/test/java/org/apache/oozie/fluentjob/api/mapping/TestEndMapping.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.fluentjob.api.mapping;
+
+import org.apache.oozie.fluentjob.api.generated.workflow.END;
+import org.apache.oozie.fluentjob.api.dag.End;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestEndMapping extends TestControlNodeMappingBase {
+    @Test
+    public void testMappingEnd() {
+        final String name = "end";
+        final End end = new End(name);
+
+        final END mappedEnd = DozerBeanMapperSingleton.instance().map(end, 
END.class);
+
+        assertEquals(name, mappedEnd.getName());
+    }
+}

Reply via email to