http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
----------------------------------------------------------------------
diff --git 
a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
 
b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
index 25f7180..9a09f18 100644
--- 
a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
+++ 
b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
@@ -694,9 +694,9 @@ public class OozieWorkflowEngine extends 
AbstractWorkflowEngine {
                 if (action.equals(JobAction.STATUS) && 
Boolean.TRUE.equals(allAttempts)) {
                     try {
                         performAction(cluster, action, coordinatorAction, 
props, instance, isForced);
-                        if (instance.getRunId() > 0) {
-                            instanceList = getAllInstances(cluster, 
coordinatorAction, nominalTimeStr);
-                        } else {
+                        instanceList = getAllInstances(cluster, 
coordinatorAction, nominalTimeStr);
+                        // Happens when the action is in READY/WAITING, when 
no workflow is kicked off yet.
+                        if (instanceList.isEmpty() || 
StringUtils.isBlank(coordinatorAction.getExternalId())) {
                             instanceList.add(instance);
                         }
                     } catch (FalconException e) {
@@ -883,8 +883,8 @@ public class OozieWorkflowEngine extends 
AbstractWorkflowEngine {
     private List<InstancesResult.Instance> getAllInstances(String cluster, 
CoordinatorAction coordinatorAction,
                                                            String 
nominalTimeStr) throws FalconException {
         List<InstancesResult.Instance> instanceList = new ArrayList<>();
-        if (StringUtils.isNotBlank(coordinatorAction.getExternalId())) {
-            List<WorkflowJob> workflowJobList = getWfsForCoordAction(cluster, 
coordinatorAction.getExternalId());
+        if (StringUtils.isNotBlank(coordinatorAction.getId())) {
+            List<WorkflowJob> workflowJobList = getWfsForCoordAction(cluster, 
coordinatorAction.getId());
             if (workflowJobList != null && workflowJobList.size()>0) {
                 for (WorkflowJob workflowJob : workflowJobList) {
                     InstancesResult.Instance newInstance = new 
InstancesResult.Instance(cluster, nominalTimeStr, null);
@@ -892,7 +892,7 @@ public class OozieWorkflowEngine extends 
AbstractWorkflowEngine {
                     if (wfJob!=null) {
                         newInstance.startTime = wfJob.getStartTime();
                         newInstance.endTime = wfJob.getEndTime();
-                        newInstance.logFile = coordinatorAction.getId();
+                        newInstance.logFile = wfJob.getConsoleUrl();
                         populateInstanceActions(cluster, wfJob, newInstance);
                         newInstance.status = 
WorkflowStatus.valueOf(mapActionStatus(wfJob.getStatus().name()));
                         instanceList.add(newInstance);
@@ -912,7 +912,7 @@ public class OozieWorkflowEngine extends 
AbstractWorkflowEngine {
             status = jobInfo.getStatus().name();
             instance.startTime = jobInfo.getStartTime();
             instance.endTime = jobInfo.getEndTime();
-            instance.logFile = coordinatorAction.getId();
+            instance.logFile = jobInfo.getConsoleUrl();
             instance.runId = jobInfo.getRun();
         }
 
@@ -1603,6 +1603,8 @@ public class OozieWorkflowEngine extends 
AbstractWorkflowEngine {
             } else if (jobId.endsWith("-B")) {
                 BundleJob bundle = client.getBundleJobInfo(jobId);
                 return bundle.getStatus().name();
+            } else if (jobId.contains("-C@")) {
+                return client.getCoordActionInfo(jobId).getStatus().name();
             }
             throw new IllegalArgumentException("Unhandled jobs id: " + jobId);
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/main/resources/action/feed/eviction-action.xml
----------------------------------------------------------------------
diff --git a/oozie/src/main/resources/action/feed/eviction-action.xml 
b/oozie/src/main/resources/action/feed/eviction-action.xml
index 4ab67d2..bded1d6 100644
--- a/oozie/src/main/resources/action/feed/eviction-action.xml
+++ b/oozie/src/main/resources/action/feed/eviction-action.xml
@@ -31,7 +31,7 @@
             <!-- HCatalog jars -->
             <property>
                 <name>oozie.action.sharelib.for.java</name>
-                <value>hcatalog</value>
+                <value>hcatalog,hive</value>
             </property>
             <property>
                 <name>oozie.launcher.oozie.libpath</name>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/main/resources/action/feed/table-export.xml
----------------------------------------------------------------------
diff --git a/oozie/src/main/resources/action/feed/table-export.xml 
b/oozie/src/main/resources/action/feed/table-export.xml
index fcf1a1a..6bc214b 100644
--- a/oozie/src/main/resources/action/feed/table-export.xml
+++ b/oozie/src/main/resources/action/feed/table-export.xml
@@ -26,7 +26,6 @@
         <prepare>
             <delete path="${distcpSourcePaths}"/>
         </prepare>
-        <job-xml>${wf:appPath()}/conf/falcon-source-hive-site.xml</job-xml>
         <configuration>
             <property>
                 <name>mapred.job.queue.name</name>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/main/resources/action/feed/table-import.xml
----------------------------------------------------------------------
diff --git a/oozie/src/main/resources/action/feed/table-import.xml 
b/oozie/src/main/resources/action/feed/table-import.xml
index 6e9a073..450e68a 100644
--- a/oozie/src/main/resources/action/feed/table-import.xml
+++ b/oozie/src/main/resources/action/feed/table-import.xml
@@ -20,7 +20,6 @@
     <hive xmlns="uri:oozie:hive-action:0.2">
         <job-tracker>${falconTargetJobTracker}</job-tracker>
         <name-node>${falconTargetNameNode}</name-node>
-        <job-xml>${wf:appPath()}/conf/falcon-target-hive-site.xml</job-xml>
         <configuration>
             <property>
                 <name>mapred.job.queue.name</name>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
----------------------------------------------------------------------
diff --git 
a/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
 
b/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
index b0fc230..2040748 100644
--- 
a/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
+++ 
b/oozie/src/test/java/org/apache/falcon/oozie/feed/OozieFeedWorkflowBuilderTest.java
@@ -561,9 +561,6 @@ public class OozieFeedWorkflowBuilderTest extends 
AbstractTestBase {
         Assert.assertTrue(fs.exists(new Path(wfPath + 
"/scripts/falcon-table-export.hql")));
         Assert.assertTrue(fs.exists(new Path(wfPath + 
"/scripts/falcon-table-import.hql")));
 
-        Assert.assertTrue(fs.exists(new Path(wfPath + "/conf")));
-        Assert.assertTrue(fs.exists(new Path(wfPath + 
"/conf/falcon-source-hive-site.xml")));
-        Assert.assertTrue(fs.exists(new Path(wfPath + 
"/conf/falcon-target-hive-site.xml")));
 
         HashMap<String, String> props = getCoordProperties(coord);
 
@@ -626,11 +623,7 @@ public class OozieFeedWorkflowBuilderTest extends 
AbstractTestBase {
     private void assertReplicationHCatCredentials(WORKFLOWAPP wf, String 
wfPath) throws IOException {
         FileSystem fs = trgMiniDFS.getFileSystem();
 
-        Path hiveConfPath = new Path(wfPath, 
"conf/falcon-source-hive-site.xml");
-        Assert.assertTrue(fs.exists(hiveConfPath));
 
-        hiveConfPath = new Path(wfPath, "conf/falcon-target-hive-site.xml");
-        Assert.assertTrue(fs.exists(hiveConfPath));
 
         boolean isSecurityEnabled = SecurityUtil.isSecurityEnabled();
         if (isSecurityEnabled) {
@@ -651,7 +644,6 @@ public class OozieFeedWorkflowBuilderTest extends 
AbstractTestBase {
             }
 
             if ("recordsize".equals(actionName)) {
-                Assert.assertEquals(action.getJava().getJobXml(), 
"${wf:appPath()}/conf/falcon-source-hive-site.xml");
                 if (isSecurityEnabled) {
                     Assert.assertNotNull(action.getCred());
                     Assert.assertEquals(action.getCred(), 
"falconSourceHiveAuth");

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
----------------------------------------------------------------------
diff --git 
a/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
 
b/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
index 85100e7..a692d0c 100644
--- 
a/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
+++ 
b/oozie/src/test/java/org/apache/falcon/oozie/process/OozieProcessWorkflowBuilderTest.java
@@ -326,6 +326,80 @@ public class OozieProcessWorkflowBuilderTest extends 
AbstractTestBase {
     }
 
     @Test
+    public void testSparkSQLProcess() throws Exception {
+        URL resource = 
this.getClass().getResource("/config/feed/hive-table-feed.xml");
+        Feed inFeed = (Feed) 
EntityType.FEED.getUnmarshaller().unmarshal(resource);
+        ConfigurationStore.get().publish(EntityType.FEED, inFeed);
+
+        resource = 
this.getClass().getResource("/config/feed/hive-table-feed-out.xml");
+        Feed outFeed = (Feed) 
EntityType.FEED.getUnmarshaller().unmarshal(resource);
+        ConfigurationStore.get().publish(EntityType.FEED, outFeed);
+
+        resource = 
this.getClass().getResource("/config/process/spark-sql-process.xml");
+        Process process = (Process) 
EntityType.PROCESS.getUnmarshaller().unmarshal(resource);
+        ConfigurationStore.get().publish(EntityType.PROCESS, process);
+
+        prepare(process);
+        OozieEntityBuilder builder = OozieEntityBuilder.get(process);
+        Path bundlePath = new Path("/falcon/staging/workflows", 
process.getName());
+        builder.build(cluster, bundlePath);
+        assertTrue(fs.exists(bundlePath));
+
+        BUNDLEAPP bundle = getBundle(fs, bundlePath);
+        assertEquals(EntityUtil.getWorkflowName(process).toString(), 
bundle.getName());
+        assertEquals(1, bundle.getCoordinator().size());
+        assertEquals(EntityUtil.getWorkflowName(Tag.DEFAULT, 
process).toString(),
+                bundle.getCoordinator().get(0).getName());
+        String coordPath = 
bundle.getCoordinator().get(0).getAppPath().replace("${nameNode}", "");
+
+        COORDINATORAPP coord = getCoordinator(fs, new Path(coordPath));
+        HashMap<String, String> props = getCoordProperties(coord);
+        HashMap<String, String> wfProps = getWorkflowProperties(fs, coord);
+
+        verifyEntityProperties(process, cluster,
+                WorkflowExecutionContext.EntityOperations.GENERATE, wfProps);
+        verifyBrokerProperties(cluster, wfProps);
+
+        // verify table and hive props
+        Map<String, String> expected = getExpectedProperties(inFeed, outFeed, 
process);
+        expected.putAll(ClusterHelper.getHiveProperties(cluster));
+        for (Map.Entry<String, String> entry : props.entrySet()) {
+            if (expected.containsKey(entry.getKey())) {
+                Assert.assertEquals(entry.getValue(), 
expected.get(entry.getKey()));
+            }
+        }
+
+        String wfPath = 
coord.getAction().getWorkflow().getAppPath().replace("${nameNode}", "");
+        WORKFLOWAPP parentWorkflow = getWorkflowapp(fs, new Path(wfPath, 
"workflow.xml"));
+        testParentWorkflow(process, parentWorkflow);
+        assertEquals(process.getWorkflow().getLib(), 
"/resources/action/lib/falcon-examples.jar");
+
+        ACTION sparkNode = getAction(parentWorkflow, "user-action");
+
+        JAXBElement<org.apache.falcon.oozie.spark.ACTION> actionJaxbElement =
+                OozieUtils.unMarshalSparkAction(sparkNode);
+        org.apache.falcon.oozie.spark.ACTION sparkAction = 
actionJaxbElement.getValue();
+
+        assertEquals(sparkAction.getMaster(), "local");
+        assertEquals(sparkAction.getJar(), "falcon-examples.jar");
+
+        Assert.assertTrue(Storage.TYPE.TABLE == 
ProcessHelper.getStorageType(cluster, process));
+        List<String> argsList = sparkAction.getArg();
+
+        Input input = process.getInputs().getInputs().get(0);
+        Output output = process.getOutputs().getOutputs().get(0);
+
+        assertEquals(argsList.get(0), 
"${falcon_"+input.getName()+"_partition_filter_hive}");
+        assertEquals(argsList.get(1), "${falcon_"+input.getName()+"_table}");
+        assertEquals(argsList.get(2), 
"${falcon_"+input.getName()+"_database}");
+        assertEquals(argsList.get(3), 
"${falcon_"+output.getName()+"_partitions_hive}");
+        assertEquals(argsList.get(4), "${falcon_"+output.getName()+"_table}");
+        assertEquals(argsList.get(5), 
"${falcon_"+output.getName()+"_database}");
+
+        ConfigurationStore.get().remove(EntityType.PROCESS, process.getName());
+    }
+
+    @Test
     public void testSparkProcess() throws Exception {
 
         URL resource = this.getClass().getResource(SPARK_PROCESS_XML);
@@ -357,6 +431,7 @@ public class OozieProcessWorkflowBuilderTest extends 
AbstractTestBase {
         String wfPath = 
coord.getAction().getWorkflow().getAppPath().replace("${nameNode}", "");
         WORKFLOWAPP parentWorkflow = getWorkflowapp(fs, new Path(wfPath, 
"workflow.xml"));
         testParentWorkflow(process, parentWorkflow);
+        assertEquals(process.getWorkflow().getLib(), 
"/resources/action/lib/spark-wordcount.jar");
 
         ACTION sparkNode = getAction(parentWorkflow, "user-action");
 
@@ -364,7 +439,7 @@ public class OozieProcessWorkflowBuilderTest extends 
AbstractTestBase {
                 OozieUtils.unMarshalSparkAction(sparkNode);
         org.apache.falcon.oozie.spark.ACTION sparkAction = 
actionJaxbElement.getValue();
         assertEquals(sparkAction.getMaster(), "local");
-        assertEquals(sparkAction.getJar(), 
"jail://testCluster:00/resources/action/lib/spark-wordcount.jar");
+        assertEquals(sparkAction.getJar(), "spark-wordcount.jar");
         List<String> argsList = sparkAction.getArg();
         Input input = process.getInputs().getInputs().get(0);
         Output output = process.getOutputs().getOutputs().get(0);

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
----------------------------------------------------------------------
diff --git 
a/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
 
b/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
index 1c50a97..4132c3a 100644
--- 
a/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
+++ 
b/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
@@ -100,6 +100,9 @@ public class FalconPostProcessingTest {
 
     @AfterClass
     public void tearDown() throws Exception {
+        if (broker.isStopped()) {
+            broker.start(true);
+        }
         broker.deleteAllMessages();
         broker.stop();
     }

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/oozie/src/test/resources/config/process/spark-sql-process.xml
----------------------------------------------------------------------
diff --git a/oozie/src/test/resources/config/process/spark-sql-process.xml 
b/oozie/src/test/resources/config/process/spark-sql-process.xml
new file mode 100644
index 0000000..55ff89b
--- /dev/null
+++ b/oozie/src/test/resources/config/process/spark-sql-process.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<process name="spark-sql-process" xmlns="uri:falcon:process:0.1">
+    <!-- where -->
+    <clusters>
+        <cluster name="corp">
+            <validity start="2011-11-02T00:00Z" end="2011-12-30T00:00Z"/>
+        </cluster>
+    </clusters>
+
+    <!-- when -->
+    <parallel>1</parallel>
+    <order>LIFO</order>
+    <frequency>hours(1)</frequency>
+    <timezone>UTC</timezone>
+
+    <!-- what -->
+    <inputs>
+        <input name="input" feed="clicks-raw-table" start="yesterday(0,0)" 
end="yesterday(20,0)"/>
+    </inputs>
+
+    <outputs>
+        <output name="output" feed="clicks-summary-table" 
instance="today(0,0)"/>
+    </outputs>
+
+    <workflow engine="spark" path="/resources/action"/>
+    <spark-attributes>
+        <master>local</master>
+        <name>Spark SQL</name>
+        <class>org.apache.falcon.example.spark.SparkSQLProcessTable</class>
+        <jar>/resources/action/lib/falcon-examples.jar</jar>
+        <spark-opts>--num-executors 1 --driver-memory 512m --executor-memory 
512m --executor-cores 1</spark-opts>
+    </spark-attributes>
+
+    <retry policy="periodic" delay="minutes(3)" attempts="3"/>
+
+</process>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0d804b2..8e836da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -27,7 +27,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>org.apache.falcon</groupId>
     <artifactId>falcon-main</artifactId>
-    <version>0.10-SNAPSHOT</version>
+    <version>0.10</version>
     <description>Data Management and Processing Platform over 
Hadoop</description>
     <name>Apache Falcon</name>
     <packaging>pom</packaging>
@@ -98,7 +98,7 @@
         <oozie.version>4.2.0</oozie.version>
         <oozie.buildversion>${oozie.version}-falcon</oozie.buildversion>
         <oozie.forcebuild>false</oozie.forcebuild>
-        <activemq.version>5.12.0</activemq.version>
+        <activemq.version>5.13.3</activemq.version>
         <tinkerpop.version>2.6.0</tinkerpop.version>
         <titan.version>0.5.4</titan.version>
         <hbase.version>1.1.5</hbase.version>
@@ -106,7 +106,9 @@
         <spark.version>1.6.1</spark.version>
         <jetty.version>6.1.26</jetty.version>
         <jersey.version>1.9</jersey.version>
+        <jackson.version>2.2.3</jackson.version>
         <quartz.version>2.2.1</quartz.version>
+        <c3p0.version>0.9.5.2</c3p0.version>
         <joda.version>2.8.2</joda.version>
         <mockito.version>1.9.5</mockito.version>
         <openjpa.version>2.4.0</openjpa.version>
@@ -216,6 +218,7 @@
                                 <exclude>**/db1.script</exclude>
                                 <exclude>**/credential_provider.jceks</exclude>
                                 <exclude>**/*.json</exclude>
+                                <exclude>**/falcon-cli-hist.log</exclude>
                             </excludes>
                         </configuration>
                         <executions>
@@ -630,15 +633,21 @@
             </dependency>
 
             <dependency>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-core-asl</artifactId>
-                <version>1.9.2</version>
+                <groupId>com.fasterxml.jackson.core</groupId>
+                <artifactId>jackson-annotations</artifactId>
+                <version>${jackson.version}</version>
             </dependency>
 
             <dependency>
-                <groupId>org.codehaus.jackson</groupId>
-                <artifactId>jackson-mapper-asl</artifactId>
-                <version>1.9.2</version>
+                <groupId>com.fasterxml.jackson.core</groupId>
+                <artifactId>jackson-core</artifactId>
+                <version>${jackson.version}</version>
+            </dependency>
+
+            <dependency>
+                <groupId>com.fasterxml.jackson.core</groupId>
+                <artifactId>jackson-databind</artifactId>
+                <version>${jackson.version}</version>
             </dependency>
 
             <dependency>
@@ -748,12 +757,6 @@
             </dependency>
 
             <dependency>
-                <groupId>com.vividsolutions</groupId>
-                <artifactId>jts</artifactId>
-                <version>1.13</version>
-            </dependency>
-
-            <dependency>
                 <groupId>org.apache.falcon</groupId>
                 <artifactId>falcon-hadoop-dependencies</artifactId>
                 <version>${project.version}</version>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/prism/pom.xml
----------------------------------------------------------------------
diff --git a/prism/pom.xml b/prism/pom.xml
index 2eddbc1..e7ec531 100644
--- a/prism/pom.xml
+++ b/prism/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.falcon</groupId>
         <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
+        <version>0.10</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <artifactId>falcon-prism</artifactId>
@@ -98,11 +98,13 @@
         <dependency>
             <groupId>org.apache.falcon</groupId>
             <artifactId>falcon-metrics</artifactId>
+            <scope>provided</scope>
         </dependency>
 
         <dependency>
             <groupId>org.apache.falcon</groupId>
             <artifactId>falcon-rerun</artifactId>
+            <scope>provided</scope>
         </dependency>
 
         <dependency>
@@ -249,6 +251,14 @@
                         
<include>org/apache/falcon/plugin/ChainableMonitoringPluginTest.java</include>
                         
<include>org/apache/falcon/aspect/GenericAlert.java</include>
                         
<include>org/apache/falcon/aspect/GenericAlertTest.java</include>
+                        
<include>org/apache/falcon/security/FalconAuditFilter.java</include>
+                        
<include>org/apache/falcon/resource/metadata/LineageMetadataResource</include>
+                        
<include>org/apache/falcon/messaging/JMSMessageConsumer</include>
+                        
<include>org/apache/falcon/service/LogCleanupService</include>
+                        
<include>org/apache/falcon/cleanup/LogCleanupServiceTest</include>
+                        
<include>org/apache/falcon/security/AuthenticationInitializationService</include>
+                        
<include>org/apache/falcon/security/AuthenticationInitializationServiceTest</include>
+                        
<include>org/apache/falcon/messaging/JMSMessageConsumerTest</include>
                     </includes>
                     <weaveDependencies>
                         <weaveDependency>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/prism/src/main/java/org/apache/falcon/plugin/GraphiteNotificationPlugin.java
----------------------------------------------------------------------
diff --git 
a/prism/src/main/java/org/apache/falcon/plugin/GraphiteNotificationPlugin.java 
b/prism/src/main/java/org/apache/falcon/plugin/GraphiteNotificationPlugin.java
index 9d46b0d..abe6777 100644
--- 
a/prism/src/main/java/org/apache/falcon/plugin/GraphiteNotificationPlugin.java
+++ 
b/prism/src/main/java/org/apache/falcon/plugin/GraphiteNotificationPlugin.java
@@ -45,34 +45,39 @@ public class GraphiteNotificationPlugin implements 
MonitoringPlugin {
         MetricNotificationService metricNotificationService =
                 
Services.get().getService(MetricNotificationService.SERVICE_NAME);
         try {
-            String entityType = message.getDimensions().get("entity-type");
-            String entityName = message.getDimensions().get("entity-name");
+            String entityType = 
StringUtils.isNotBlank(message.getDimensions().get("entityType"))
+                    ? message.getDimensions().get("entityType") 
:message.getDimensions().get("entity-type");
+            String entityName = 
StringUtils.isNotBlank(message.getDimensions().get("entityName"))
+                    ? message.getDimensions().get("entityName") 
:message.getDimensions().get("entity-name");
             String prefix = 
StartupProperties.get().getProperty("falcon.graphite.prefix");
-            if (entityType.equals(EntityType.PROCESS.name())) {
+            String separator = ".";
+            LOG.debug("message:" + message.getAction());
+            if (entityType.equalsIgnoreCase(EntityType.PROCESS.name())) {
                 Entity entity = 
ConfigurationStore.get().get(EntityType.PROCESS, entityName);
                 Process process = (Process) entity;
                 String pipeline =  
StringUtils.isNotBlank(process.getPipelines()) ? process.getPipelines() : 
"default";
 
-
                 if ((message.getAction().equals("wf-instance-succeeded"))) {
                     Long timeTaken =  message.getExecutionTime() / 1000000000;
-                    String metricsName = prefix + 
message.getDimensions().get("cluster") + pipeline
-                            + ".GENERATE." + entityName + ".processing_time";
-                    metricNotificationService.publish(metricsName, timeTaken);
+                    StringBuilder processingMetric = new 
StringBuilder(prefix).append(".").append(message.
+                            
getDimensions().get("cluster")).append(".").append(pipeline).append(".GENERATE.")
+                            .append(entityName).append(".processing_time");
+                    
metricNotificationService.publish(processingMetric.toString(), timeTaken);
 
                     DateTime nominalTime = new 
DateTime(message.getDimensions().get("nominal-time"));
                     DateTime startTime = new 
DateTime(message.getDimensions().get("start-time"));
-                    metricsName = prefix + 
message.getDimensions().get("cluster") + pipeline
-                            + ".GENERATE." + entityName + ".start_delay";
-                    metricNotificationService.publish(metricsName,
-                        (long)Seconds.secondsBetween(nominalTime, 
startTime).getSeconds());
+                    StringBuilder startTimeMetric = new 
StringBuilder(prefix).append(".").append(message.
+                            
getDimensions().get("cluster")).append(".").append(pipeline).append(".GENERATE.").
+                            append(entityName).append(".start_delay");
+                    
metricNotificationService.publish(startTimeMetric.toString(),
+                            (long)Seconds.secondsBetween(nominalTime, 
startTime).getSeconds());
                 }
 
                 if (message.getAction().equals("wf-instance-failed")){
-                    String metricName =  prefix + 
message.getDimensions().get("cluster") + pipeline
-                            + ".GENERATE." +  entityName + ".failure"
-                        + message.getDimensions().get("error-message");
-                    metricNotificationService.publish(metricName, (long) 1);
+                    StringBuilder metricName = new 
StringBuilder(prefix).append(".").append(message.
+                            
getDimensions().get("cluster")).append(".").append(pipeline).append(".GENERATE.").
+                            
append(entityName).append(".failure").append(message.getDimensions().get("error-message"));
+                    metricNotificationService.publish(metricName.toString(), 
(long) 1);
                 }
             }
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
----------------------------------------------------------------------
diff --git 
a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java 
b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
index 5fa345d..8ba9b82 100644
--- a/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
+++ b/prism/src/main/java/org/apache/falcon/resource/AbstractEntityManager.java
@@ -168,15 +168,8 @@ public abstract class AbstractEntityManager extends 
AbstractMetadataResource {
             Set<String> clusters = EntityUtil.getClustersDefined(entity);
             Set<String> colos = new HashSet<String>();
             for (String cluster : clusters) {
-                try{
-                    Cluster clusterEntity = 
EntityUtil.getEntity(EntityType.CLUSTER, cluster);
-                    colos.add(clusterEntity.getColo());
-                } catch (EntityNotRegisteredException e){
-                    LOG.warn(e.getMessage(), e);
-                }
-            }
-            if (colos.isEmpty()) {
-                throw new EntityNotRegisteredException(entity.getName()  + " 
(" + type + ") not found");
+                Cluster clusterEntity = 
EntityUtil.getEntity(EntityType.CLUSTER, cluster);
+                colos.add(clusterEntity.getColo());
             }
             return colos;
         } catch (FalconException e) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/prism/src/main/java/org/apache/falcon/util/EmbeddedServer.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/util/EmbeddedServer.java 
b/prism/src/main/java/org/apache/falcon/util/EmbeddedServer.java
index 788df58..f69b566 100644
--- a/prism/src/main/java/org/apache/falcon/util/EmbeddedServer.java
+++ b/prism/src/main/java/org/apache/falcon/util/EmbeddedServer.java
@@ -35,6 +35,7 @@ public class EmbeddedServer {
         server.addConnector(connector);
 
         WebAppContext application = new WebAppContext(path, "/");
+        application.setParentLoaderPriority(true);
         server.setHandler(application);
     }
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/prism/src/test/java/org/apache/falcon/resource/metadata/MetadataTestContext.java
----------------------------------------------------------------------
diff --git 
a/prism/src/test/java/org/apache/falcon/resource/metadata/MetadataTestContext.java
 
b/prism/src/test/java/org/apache/falcon/resource/metadata/MetadataTestContext.java
index 47d6ba1..0fc708d 100644
--- 
a/prism/src/test/java/org/apache/falcon/resource/metadata/MetadataTestContext.java
+++ 
b/prism/src/test/java/org/apache/falcon/resource/metadata/MetadataTestContext.java
@@ -95,6 +95,10 @@ public class MetadataTestContext {
         Services.get().register(new WorkflowJobEndNotificationService());
         
Assert.assertTrue(Services.get().isRegistered(WorkflowJobEndNotificationService.SERVICE_NAME));
 
+        StartupProperties.get().setProperty("falcon.graph.storage.backend", 
"berkeleyje");
+        String graphDBDir = "target/graphdb-" + System.currentTimeMillis();
+        StartupProperties.get().setProperty("falcon.graph.storage.directory", 
graphDBDir);
+        StartupProperties.get().setProperty("falcon.graph.serialize.path", 
graphDBDir);
         StartupProperties.get().setProperty("falcon.graph.preserve.history", 
"true");
         service = new MetadataMappingService();
         service.init();

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/release-docs/0.10/CHANGES.0.10.md
----------------------------------------------------------------------
diff --git a/release-docs/0.10/CHANGES.0.10.md 
b/release-docs/0.10/CHANGES.0.10.md
new file mode 100644
index 0000000..bfe3a27
--- /dev/null
+++ b/release-docs/0.10/CHANGES.0.10.md
@@ -0,0 +1,220 @@
+# Apache Falcon Changelog
+
+## Release 0.10 - 2016-07-26
+
+### INCOMPATIBLE CHANGES:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-1926](https://issues.apache.org/jira/browse/FALCON-1926) | Filter 
out effectively non-falcon related JMS messages from Oozie |  Major | messaging 
| Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1858](https://issues.apache.org/jira/browse/FALCON-1858) | Support 
HBase as a storage backend for Falcon Titan graphDB |  Major | . | Ying Zheng | 
Venkat Ranganathan |
+| [FALCON-1852](https://issues.apache.org/jira/browse/FALCON-1852) | Optional 
Input for a process not truly optional |  Major | . | Pallavi Rao | Pallavi Rao 
|
+| [FALCON-1844](https://issues.apache.org/jira/browse/FALCON-1844) | Falcon 
feed replication leaves behind old files when a feed instance is re-run |  
Major | . | Pallavi Rao | Pallavi Rao |
+| [FALCON-1835](https://issues.apache.org/jira/browse/FALCON-1835) | Falcon 
should do coord rerun rather than workflow rerun to ensure concurrency |  Major 
| . | Pallavi Rao | Pallavi Rao |
+
+
+### NEW FEATURES:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-1956](https://issues.apache.org/jira/browse/FALCON-1956) | Graphite 
Plugin for monitoring |  Major | . | Praveen Adlakha | Praveen Adlakha |
+| [FALCON-1919](https://issues.apache.org/jira/browse/FALCON-1919) | Provide 
user the option to store sensitive information with Hadoop credential provider 
|  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1865](https://issues.apache.org/jira/browse/FALCON-1865) | Persist 
Feed sla data to database |  Major | . | Ajay Yadava | Praveen Adlakha |
+| [FALCON-1861](https://issues.apache.org/jira/browse/FALCON-1861) | Support 
HDFS Snapshot based replication in Falcon |  Major | replication | Balu 
Vellanki | Balu Vellanki |
+| [FALCON-1763](https://issues.apache.org/jira/browse/FALCON-1763) | Create a 
spark execution engine for Falcon |  Major | . | Venkat Ranganathan | Peeyush 
Bishnoi |
+| [FALCON-1627](https://issues.apache.org/jira/browse/FALCON-1627) | Provide 
integration with Azure Data Factory pipelines |  Major | . | Venkat Ranganathan 
| Ying Zheng |
+| [FALCON-1623](https://issues.apache.org/jira/browse/FALCON-1623) | Implement 
Safe Mode in Falcon |  Major | . | sandeep samudrala | Balu Vellanki |
+| [FALCON-1333](https://issues.apache.org/jira/browse/FALCON-1333) | Support 
instance search of a group of entities |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-634](https://issues.apache.org/jira/browse/FALCON-634) | Add server 
side extensions in Falcon |  Major | . | Venkatesh Seetharam | Sowmya Ramesh |
+| [FALCON-141](https://issues.apache.org/jira/browse/FALCON-141) | Support 
cluster updates |  Major | . | Shwetha G S | Balu Vellanki |
+| [FALCON-36](https://issues.apache.org/jira/browse/FALCON-36) | Ability to 
ingest data from databases |  Major | acquisition | Venkatesh Seetharam | 
Venkatesan Ramachandran |
+
+
+### IMPROVEMENTS:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-2020](https://issues.apache.org/jira/browse/FALCON-2020) | Changes 
in Graphite Notification Plugin |  Major | . | Praveen Adlakha | Praveen 
Adlakha |
+| [FALCON-1981](https://issues.apache.org/jira/browse/FALCON-1981) | Remove 
runtime superfluous jar dependencies - pom.xml cleanup |  Major | build-tools | 
Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1979](https://issues.apache.org/jira/browse/FALCON-1979) | Update 
HttpClient versions to close security vulnerabilities |  Major | . | Balu 
Vellanki | Balu Vellanki |
+| [FALCON-1963](https://issues.apache.org/jira/browse/FALCON-1963) | Falcon 
CLI should provide detailed hints if the user's command is invalid |  Major | . 
| Ying Zheng | Ying Zheng |
+| [FALCON-1942](https://issues.apache.org/jira/browse/FALCON-1942) | Allow 
Falcon server and client classpath to be customizable |  Major | . | Venkat 
Ranganathan | Venkat Ranganathan |
+| [FALCON-1916](https://issues.apache.org/jira/browse/FALCON-1916) | Allow RM 
principal to be specified in Cluster entity |  Major | common | Venkat 
Ranganathan | Venkat Ranganathan |
+| [FALCON-1895](https://issues.apache.org/jira/browse/FALCON-1895) | 
Refactoring of FalconCLI and FalconClient |  Major | client | Praveen Adlakha | 
Praveen Adlakha |
+| [FALCON-1841](https://issues.apache.org/jira/browse/FALCON-1841) | Grouping 
test in falcon for running nightly regression |  Major | regression | Pragya 
Mittal | Pragya Mittal |
+| [FALCON-1836](https://issues.apache.org/jira/browse/FALCON-1836) | Ingest to 
Hive |  Major | . | Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1802](https://issues.apache.org/jira/browse/FALCON-1802) | Workflow 
Builder for scheduling based on Data for Process in case of Native Scheduler |  
Major | . | pavan kumar kolamuri | pavan kumar kolamuri |
+| [FALCON-1774](https://issues.apache.org/jira/browse/FALCON-1774) | Better 
message for api not allowed on server |  Major | . | Sanjeev T | Praveen 
Adlakha |
+| [FALCON-1751](https://issues.apache.org/jira/browse/FALCON-1751) | Support 
assembly:single mojo |  Minor | . | ruoyu wang | ruoyu wang |
+| [FALCON-887](https://issues.apache.org/jira/browse/FALCON-887) | Support for 
multiple lib paths in falcon process |  Minor | process | Akshay Goyal | Sowmya 
Ramesh |
+| [FALCON-625](https://issues.apache.org/jira/browse/FALCON-625) | 
Documentation improvements |  Major | . | Paul Isaychuk | Ajay Yadava |
+
+
+### BUG FIXES:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-2107](https://issues.apache.org/jira/browse/FALCON-2107) | NPE in 
FalconWorkflowEngine::isActive() method |  Blocker | . | Balu Vellanki | Balu 
Vellanki |
+| [FALCON-2104](https://issues.apache.org/jira/browse/FALCON-2104) | Loss of 
data in GraphDB when upgrading Falcon from 0.9 to 0.10 |  Blocker | . | Balu 
Vellanki | Balu Vellanki |
+| [FALCON-2100](https://issues.apache.org/jira/browse/FALCON-2100) | Remove 
dependency on com.vividsolutions.jts |  Major | . | Balu Vellanki | Balu 
Vellanki |
+| [FALCON-2090](https://issues.apache.org/jira/browse/FALCON-2090) | HDFS 
Snapshot failed with UnknownHostException when scheduling in HA Mode |  
Critical | replication | Murali Ramasami | Balu Vellanki |
+| [FALCON-2088](https://issues.apache.org/jira/browse/FALCON-2088) | Entity 
submission fails with EntityNotRegisteredException in distributed mode |  
Blocker | feed, prism, process | Pragya Mittal | Praveen Adlakha |
+| [FALCON-2084](https://issues.apache.org/jira/browse/FALCON-2084) | 
HCatReplicationTest are failing in secure mode |  Critical | replication | 
Murali Ramasami | Venkat Ranganathan |
+| [FALCON-2081](https://issues.apache.org/jira/browse/FALCON-2081) | 
ExtensionManagerIT fails occasionally |  Blocker | tests | Balu Vellanki | 
Balu Vellanki |
+| [FALCON-2076](https://issues.apache.org/jira/browse/FALCON-2076) | Server 
fails to start since extension.store.uri is not defined in startup.properties | 
 Major | prism | Pragya Mittal | Balu Vellanki |
+| [FALCON-2075](https://issues.apache.org/jira/browse/FALCON-2075) | Falcon 
HiveDR tasks do not report progress and can get killed |  Critical | . | Venkat 
Ranganathan | Venkat Ranganathan |
+| [FALCON-2071](https://issues.apache.org/jira/browse/FALCON-2071) | Falcon 
Spark SQL failing with Yarn Client Mode |  Critical | process | Murali Ramasami 
| Peeyush Bishnoi |
+| [FALCON-2061](https://issues.apache.org/jira/browse/FALCON-2061) | Falcon 
CLI shows hadoop classpath loading info in the console |  Major | client | 
Murali Ramasami | Balu Vellanki |
+| [FALCON-2060](https://issues.apache.org/jira/browse/FALCON-2060) | Retry 
does not happen if instance timedout |  Major | . | Pragya Mittal | Pallavi Rao 
|
+| [FALCON-2058](https://issues.apache.org/jira/browse/FALCON-2058) | s3 tests 
with dummy url no longer compatible with latest HDFS |  Major | . | Ying Zheng 
| Ying Zheng |
+| [FALCON-2057](https://issues.apache.org/jira/browse/FALCON-2057) | HiveDR 
not working with multiple users and same DB |  Major | replication | Murali 
Ramasami | Balu Vellanki |
+| [FALCON-2056](https://issues.apache.org/jira/browse/FALCON-2056) | HiveDR 
doesn't work with multiple users |  Major | replication | Murali Ramasami | 
Sowmya Ramesh |
+| [FALCON-2051](https://issues.apache.org/jira/browse/FALCON-2051) | Falcon 
post-processing services are not getting invoked |  Blocker | general | Peeyush 
Bishnoi | Venkatesan Ramachandran |
+| [FALCON-2050](https://issues.apache.org/jira/browse/FALCON-2050) | Configure 
jetty parent classloader to be prioritized over webapp classloader |  Major | 
common | Venkat Ranganathan | Venkat Ranganathan |
+| [FALCON-2049](https://issues.apache.org/jira/browse/FALCON-2049) | Feed 
Replication with Empty Directories are failing |  Blocker | feed | Murali 
Ramasami | Balu Vellanki |
+| [FALCON-2048](https://issues.apache.org/jira/browse/FALCON-2048) | Cluster 
submission failed in yarn-cluster mode |  Critical | general | Murali Ramasami 
| Peeyush Bishnoi |
+| [FALCON-2046](https://issues.apache.org/jira/browse/FALCON-2046) | HDFS 
Replication failing in secure Mode |  Critical | replication | Murali Ramasami 
| Sowmya Ramesh |
+| [FALCON-2045](https://issues.apache.org/jira/browse/FALCON-2045) | Enhance 
document on registry point in cluster specification for Hive HA mode |  Major | 
. | Ying Zheng | Ying Zheng |
+| [FALCON-2038](https://issues.apache.org/jira/browse/FALCON-2038) | When all 
Optional input instances are missing, we should not suffix partition |  Major | 
. | Pallavi Rao | Pallavi Rao |
+| [FALCON-2037](https://issues.apache.org/jira/browse/FALCON-2037) | HiveDR 
Extension tests are failed in Secure mode with clusterForJobNNKerberosPrincipal 
not found |  Critical | replication | Murali Ramasami | Sowmya Ramesh |
+| [FALCON-2036](https://issues.apache.org/jira/browse/FALCON-2036) | Update 
twiki on entity list operation with up-to-date REST API path |  Major | . | 
Ying Zheng | Ying Zheng |
+| [FALCON-2035](https://issues.apache.org/jira/browse/FALCON-2035) | Entity 
list operation without type parameter doesn't work when authorization is 
enabled |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-2034](https://issues.apache.org/jira/browse/FALCON-2034) | Make 
numThreads and timeOut configurable In ConfigurationStore init |  Critical | . 
| Pallavi Rao | sandeep samudrala |
+| [FALCON-2032](https://issues.apache.org/jira/browse/FALCON-2032) | Update 
the extension documentation to add ExtensionService before ConfigurationStore 
in startup properties |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-2031](https://issues.apache.org/jira/browse/FALCON-2031) | Hcat 
Retention test cases are failing with NoClassDefFoundError |  Blocker | 
retention | Peeyush Bishnoi | Peeyush Bishnoi |
+| [FALCON-2027](https://issues.apache.org/jira/browse/FALCON-2027) | Enhance 
documentation on data replication from HDP to Azure |  Major | . | Ying Zheng | 
Ying Zheng |
+| [FALCON-2025](https://issues.apache.org/jira/browse/FALCON-2025) | Periodic 
revalidation of kerberos credentials should be done on loginUser |  Major | . | 
Balu Vellanki | Balu Vellanki |
+| [FALCON-2023](https://issues.apache.org/jira/browse/FALCON-2023) | Feed 
eviction fails when feed locations "stats" and "meta" does not have time 
pattern. |  Blocker | feed | Balu Vellanki | Venkatesan Ramachandran |
+| [FALCON-2018](https://issues.apache.org/jira/browse/FALCON-2018) | 
WorkflowJobNotification sends incorrect message for killed instances |  Major | 
. | Pragya Mittal | Praveen Adlakha |
+| [FALCON-2017](https://issues.apache.org/jira/browse/FALCON-2017) | Fix 
HiveDR extension issues |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-2016](https://issues.apache.org/jira/browse/FALCON-2016) | maven 
assembly:single fails on MacOS |  Major | . | Pallavi Rao | Pallavi Rao |
+| [FALCON-2010](https://issues.apache.org/jira/browse/FALCON-2010) | Fix UT 
errors due to ActiveMQ upgrade |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-2007](https://issues.apache.org/jira/browse/FALCON-2007) | Hive DR 
Replication failing with "Can not create a Path from a null string" |  Critical 
| . | Peeyush Bishnoi | Peeyush Bishnoi |
+| [FALCON-1984](https://issues.apache.org/jira/browse/FALCON-1984) | Provide 
proper hint and documentation if required titan storage backend is not 
configured in startup.properties |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1983](https://issues.apache.org/jira/browse/FALCON-1983) | Upgrade 
jackson core and databind versions to fix dependency incompatibility with 
higher-version Hive |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1982](https://issues.apache.org/jira/browse/FALCON-1982) | Document 
use of  HBase in standalone mode for GraphDB |  Major | docs | Venkatesan 
Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1978](https://issues.apache.org/jira/browse/FALCON-1978) | Fix flaky 
unit test - MetadataMappingServiceTest |  Major | tests | Venkatesan 
Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1976](https://issues.apache.org/jira/browse/FALCON-1976) | Remove 
hadoop-2 profile |  Major | . | Venkat Ranganathan | Venkat Ranganathan |
+| [FALCON-1975](https://issues.apache.org/jira/browse/FALCON-1975) | Getting 
NoSuchMethodError when calling isNoneEmpty |  Major | . | Ying Zheng | Ying 
Zheng |
+| [FALCON-1974](https://issues.apache.org/jira/browse/FALCON-1974) | Cluster 
update : Allow superuser to update bundle/coord of dependent entities |  Major 
| . | Balu Vellanki | Balu Vellanki |
+| [FALCON-1973](https://issues.apache.org/jira/browse/FALCON-1973) | Falcon 
build failure due checkstyle issue |  Major | . | Peeyush Bishnoi | Peeyush 
Bishnoi |
+| [FALCON-1972](https://issues.apache.org/jira/browse/FALCON-1972) | Handling 
cases when Extension service or "extension.store.uri" is not present in startup 
properties |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1969](https://issues.apache.org/jira/browse/FALCON-1969) | Provide 
server-side error details on CLI, if any |  Major | . | Ying Zheng | Ying Zheng 
|
+| [FALCON-1965](https://issues.apache.org/jira/browse/FALCON-1965) | Update 
ActiveMQ version to 5.13.3 to avoid Falcon start error after rolling upgrade |  
Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1964](https://issues.apache.org/jira/browse/FALCON-1964) | Should 
delete temporary JKS file after IT tests for credential provider alias |  Major 
| . | Ying Zheng | Ying Zheng |
+| [FALCON-1962](https://issues.apache.org/jira/browse/FALCON-1962) | Extension 
related bugs |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1961](https://issues.apache.org/jira/browse/FALCON-1961) | Should 
return error if an extension job doesn't exist for 
delete/suspend/resume/schedule operations |  Major | . | Ying Zheng | Ying 
Zheng |
+| [FALCON-1957](https://issues.apache.org/jira/browse/FALCON-1957) | 
Documentation on using Hadoop credential provider for sensitive properties |  
Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1953](https://issues.apache.org/jira/browse/FALCON-1953) | Build 
fails when profiles hivedr and test-patch is used together |  Major | . | Balu 
Vellanki | Balu Vellanki |
+| [FALCON-1943](https://issues.apache.org/jira/browse/FALCON-1943) | Extension 
API/CLI fails when authorization is enabled |  Major | . | Sowmya Ramesh | 
Sowmya Ramesh |
+| [FALCON-1941](https://issues.apache.org/jira/browse/FALCON-1941) | HiveDR 
fails with NN-HA enabled on both the source and target clusters |  Critical | . 
| Venkat Ranganathan | Venkat Ranganathan |
+| [FALCON-1939](https://issues.apache.org/jira/browse/FALCON-1939) | Avoid 
creating multiple falcon\*.tar.gz during falcon build |  Major | build-tools | 
Balu Vellanki | Balu Vellanki |
+| [FALCON-1936](https://issues.apache.org/jira/browse/FALCON-1936) | 
Extensions related files are not available in $FALCON\_HOME/extensions/ 
directory |  Critical | . | Peeyush Bishnoi | Sowmya Ramesh |
+| [FALCON-1935](https://issues.apache.org/jira/browse/FALCON-1935) | Falcon 
fails to start with default startup.properties |  Blocker | . | Ying Zheng | 
Praveen Adlakha |
+| [FALCON-1934](https://issues.apache.org/jira/browse/FALCON-1934) | Document 
safemode in Falcon Server |  Major | docs | Balu Vellanki | Balu Vellanki |
+| [FALCON-1932](https://issues.apache.org/jira/browse/FALCON-1932) | Extension 
CLI should support common options |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1931](https://issues.apache.org/jira/browse/FALCON-1931) | 
multiCluster tag is missing for Multiple Cluster scenarios |  Major | 
regression | Murali Ramasami | Murali Ramasami |
+| [FALCON-1928](https://issues.apache.org/jira/browse/FALCON-1928) | 
FalconJPAService missing in default startup.properties |  Major | . | Pallavi 
Rao | Pallavi Rao |
+| [FALCON-1924](https://issues.apache.org/jira/browse/FALCON-1924) | Falcon 
Coordinator rerun return old workflow id |  Major | . | Praveen Adlakha | 
Praveen Adlakha |
+| [FALCON-1909](https://issues.apache.org/jira/browse/FALCON-1909) | Catalog 
instance triage action fails with null pointer exception. |  Major | feed | 
Balu Vellanki | Balu Vellanki |
+| [FALCON-1908](https://issues.apache.org/jira/browse/FALCON-1908) | Document 
HDFS snapshot based mirroring extension |  Major | . | Balu Vellanki | Balu 
Vellanki |
+| [FALCON-1907](https://issues.apache.org/jira/browse/FALCON-1907) | Package 
new CLI module added |  Major | client | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1896](https://issues.apache.org/jira/browse/FALCON-1896) | Failure 
in Falcon build in distro module |  Major | . | Praveen Adlakha | Praveen 
Adlakha |
+| [FALCON-1894](https://issues.apache.org/jira/browse/FALCON-1894) | HDFS Data 
replication cannot be initiated independent of Oozie server location |  Minor | 
general | Alex Bush | Sowmya Ramesh |
+| [FALCON-1886](https://issues.apache.org/jira/browse/FALCON-1886) | Feed sla 
monitoring does not work across restarts |  Major | . | Ajay Yadava | Ajay 
Yadava |
+| [FALCON-1885](https://issues.apache.org/jira/browse/FALCON-1885) | SLA 
monitoring API throws ResultNotFoundException |  Major | feed | Pragya Mittal | 
Praveen Adlakha |
+| [FALCON-1883](https://issues.apache.org/jira/browse/FALCON-1883) | Falcon 
regression build fails with minor checkstyle issues |  Major | regression | 
Murali Ramasami | Murali Ramasami |
+| [FALCON-1882](https://issues.apache.org/jira/browse/FALCON-1882) | Instance 
status api not working via prism |  Major | prism | Pragya Mittal | Praveen 
Adlakha |
+| [FALCON-1881](https://issues.apache.org/jira/browse/FALCON-1881) | Database 
Export should not expect fields list in the feed entity specification |  Major 
| acquisition | Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1880](https://issues.apache.org/jira/browse/FALCON-1880) | To 
support TDE encryption : Add --skipcrccheck to distcp options for HiveDR |  
Major | replication | Balu Vellanki | Balu Vellanki |
+| [FALCON-1877](https://issues.apache.org/jira/browse/FALCON-1877) | Falcon 
webUI returns 413 (Full head - Request entity too large) error when TLS is 
enabled in a secure cluster with AD integration |  Major | . | Venkat 
Ranganathan | Venkat Ranganathan |
+| [FALCON-1874](https://issues.apache.org/jira/browse/FALCON-1874) | Import 
and Export fails with HDFS as src/dest |  Major | . | Pallavi Rao | Pallavi Rao 
|
+| [FALCON-1867](https://issues.apache.org/jira/browse/FALCON-1867) | hardcoded 
query names in JDBCStateStore |  Major | . | Praveen Adlakha | Praveen Adlakha |
+| [FALCON-1866](https://issues.apache.org/jira/browse/FALCON-1866) | Bug in 
JDBCStateStore |  Major | . | Praveen Adlakha | Praveen Adlakha |
+| [FALCON-1864](https://issues.apache.org/jira/browse/FALCON-1864) | Retry 
event does not get removed from delay queue even after the instance succeeds |  
Major | rerun | Pallavi Rao | Pallavi Rao |
+| [FALCON-1859](https://issues.apache.org/jira/browse/FALCON-1859) | Database 
Export instances are not added to graph db for lineage tracking |  Major | general 
| Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1855](https://issues.apache.org/jira/browse/FALCON-1855) | Falcon 
regression build fails with checkstyle issues |  Major | regression | Pragya 
Mittal | Murali Ramasami |
+| [FALCON-1854](https://issues.apache.org/jira/browse/FALCON-1854) | Fixing 
PrismProcessScheduleTest and NoOutputProcessTest |  Major | regression | Murali 
Ramasami | Murali Ramasami |
+| [FALCON-1848](https://issues.apache.org/jira/browse/FALCON-1848) | Late 
rerun is not working due to failnodes set to true |  Major | rerun | Pragya 
Mittal | Pallavi Rao |
+| [FALCON-1847](https://issues.apache.org/jira/browse/FALCON-1847) | Execution 
order not honored when instances are suspended/resumed |  Major | scheduler | 
Pallavi Rao | Pallavi Rao |
+| [FALCON-1846](https://issues.apache.org/jira/browse/FALCON-1846) | Fixing 
EntityDryRunTest |  Major | regression | Pragya Mittal | Pragya Mittal |
+| [FALCON-1845](https://issues.apache.org/jira/browse/FALCON-1845) | Retries 
Stopped happening  for all entities when one entity was deleted during rerun of 
instance |  Major | rerun | pavan kumar kolamuri | pavan kumar kolamuri |
+| [FALCON-1842](https://issues.apache.org/jira/browse/FALCON-1842) | Falcon 
build failed in Jenkins at 
org.apache.falcon.oozie.feed.OozieFeedWorkflowBuilderTest |  Major | 
falcon-unit | Balu Vellanki | Balu Vellanki |
+| [FALCON-1840](https://issues.apache.org/jira/browse/FALCON-1840) | Archive 
older definition in case of update |  Major | . | Praveen Adlakha | Praveen 
Adlakha |
+| [FALCON-1838](https://issues.apache.org/jira/browse/FALCON-1838) | Export 
instances are not added to graph db for lineage tracking |  Major | . | Venkatesan 
Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1826](https://issues.apache.org/jira/browse/FALCON-1826) | Execution 
order not honoured when instances are KILLED |  Major | scheduler | Pragya 
Mittal | Pallavi Rao |
+| [FALCON-1825](https://issues.apache.org/jira/browse/FALCON-1825) | Process 
end time inclusive in case of Native Scheduler |  Major | scheduler | pavan 
kumar kolamuri | pavan kumar kolamuri |
+| [FALCON-1823](https://issues.apache.org/jira/browse/FALCON-1823) |  wrong 
permissions on hadooplibs and conf folder in distributed mode deb |  Major | . | 
Praveen Adlakha | Praveen Adlakha |
+| [FALCON-1819](https://issues.apache.org/jira/browse/FALCON-1819) | Improve 
test class entity cleanup logic |  Major | merlin | Paul Isaychuk | Paul 
Isaychuk |
+| [FALCON-1816](https://issues.apache.org/jira/browse/FALCON-1816) | Fix 
findbugs-exclude.xml path and hadoop version in falcon-regression pom |  Major 
| merlin | Paul Isaychuk | Paul Isaychuk |
+| [FALCON-1811](https://issues.apache.org/jira/browse/FALCON-1811) | Status 
API does not honour start option |  Major | client | Pragya Mittal | Praveen 
Adlakha |
+| [FALCON-1796](https://issues.apache.org/jira/browse/FALCON-1796) | [HOTFIX] 
Incorrect parent pom in distro module |  Major | . | Ajay Yadava | Ajay Yadava |
+| [FALCON-1795](https://issues.apache.org/jira/browse/FALCON-1795) | Kill api 
does not kill waiting/ready instances |  Major | oozie | Pragya Mittal | 
sandeep samudrala |
+| [FALCON-1793](https://issues.apache.org/jira/browse/FALCON-1793) | feed 
element action="archive" is submittable via command line tool falcon |  Major | 
feed | Margus Roo | Deepak Barr |
+| [FALCON-1792](https://issues.apache.org/jira/browse/FALCON-1792) | Upgrade 
hadoop.version to 2.6.2 |  Major | hadoop | Venkatesan Ramachandran | 
Venkatesan Ramachandran |
+| [FALCON-1787](https://issues.apache.org/jira/browse/FALCON-1787) | Oozie 
pig-action.xml requires hive sharelib for HCatalog use |  Major | oozie | Mark 
Greene | Sowmya Ramesh |
+| [FALCON-1784](https://issues.apache.org/jira/browse/FALCON-1784) | Add 
regression test for FALCON-1647 |  Major | merlin | Paul Isaychuk | Paul 
Isaychuk |
+| [FALCON-1783](https://issues.apache.org/jira/browse/FALCON-1783) | Fix 
ProcessUpdateTest and SearchApiTest to use prism |  Major | merlin | Paul 
Isaychuk | Paul Isaychuk |
+| [FALCON-1766](https://issues.apache.org/jira/browse/FALCON-1766) | Add CLI 
metrics check for HiveDR, HDFS and feed replication |  Major | merlin | Paul 
Isaychuk | Paul Isaychuk |
+| [FALCON-1743](https://issues.apache.org/jira/browse/FALCON-1743) | Entity 
summary does not work via prism |  Major | client | Pragya Mittal | Ajay Yadava 
|
+| [FALCON-1724](https://issues.apache.org/jira/browse/FALCON-1724) | Falcon 
CLI.twiki in docs folder is not pointed by index page |  Major | . | Praveen 
Adlakha | Praveen Adlakha |
+| [FALCON-1721](https://issues.apache.org/jira/browse/FALCON-1721) | Move 
checkstyle artifacts under parent |  Major | . | Shwetha G S | sandeep 
samudrala |
+| [FALCON-1621](https://issues.apache.org/jira/browse/FALCON-1621) | Lifecycle 
of entity gets missed when prism and falcon server communicates |  Major | . | 
Praveen Adlakha | Praveen Adlakha |
+| [FALCON-1584](https://issues.apache.org/jira/browse/FALCON-1584) | Falcon 
allows invalid hadoop queue name for schedulable feed entities |  Major | . | 
Venkatesan Ramachandran | Venkatesan Ramachandran |
+
+
+### SUB-TASKS:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-2099](https://issues.apache.org/jira/browse/FALCON-2099) | Update 
Installation-steps.txt and NOTICE.txt for 0.10 release |  Major | ease | Balu 
Vellanki | Balu Vellanki |
+| [FALCON-2000](https://issues.apache.org/jira/browse/FALCON-2000) | Create 
branch 0.10 |  Major | general | Balu Vellanki | Balu Vellanki |
+| [FALCON-1996](https://issues.apache.org/jira/browse/FALCON-1996) | Upgrade 
falcon POM for 0.10 release |  Major | build-tools | Balu Vellanki | Balu 
Vellanki |
+| [FALCON-1993](https://issues.apache.org/jira/browse/FALCON-1993) | Update 
JIRA fix versions |  Major | general | Balu Vellanki | Balu Vellanki |
+| [FALCON-1980](https://issues.apache.org/jira/browse/FALCON-1980) | Change 
input and output argument order for Spark process workflow |  Major | . | 
Peeyush Bishnoi | Peeyush Bishnoi |
+| [FALCON-1954](https://issues.apache.org/jira/browse/FALCON-1954) | Steps to 
configure Oozie JMS for Falcon |  Major | messaging | Venkatesan Ramachandran | 
Venkatesan Ramachandran |
+| [FALCON-1938](https://issues.apache.org/jira/browse/FALCON-1938) | Add 
support to execute Spark SQL process |  Major | . | Peeyush Bishnoi | Peeyush 
Bishnoi |
+| [FALCON-1937](https://issues.apache.org/jira/browse/FALCON-1937) | Add 
documentation for cluster update. |  Major | . | Balu Vellanki | Balu Vellanki |
+| [FALCON-1929](https://issues.apache.org/jira/browse/FALCON-1929) | Extension 
job management: IT tests for CLIs |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1905](https://issues.apache.org/jira/browse/FALCON-1905) | Extension 
Job Management: IT tests for REST APIs and CLIs |  Major | . | Ying Zheng | 
Ying Zheng |
+| [FALCON-1904](https://issues.apache.org/jira/browse/FALCON-1904) | Extension 
Job Management: documentation for REST APIs and CLI |  Major | . | Ying Zheng | 
Ying Zheng |
+| [FALCON-1902](https://issues.apache.org/jira/browse/FALCON-1902) | Server 
side extension repository management CLI support |  Major | . | Sowmya Ramesh | 
Sowmya Ramesh |
+| [FALCON-1897](https://issues.apache.org/jira/browse/FALCON-1897) | Extension 
Job Management: CLI support |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1893](https://issues.apache.org/jira/browse/FALCON-1893) | Add 
documentation and examples for spark workflow engine |  Major | . | Peeyush 
Bishnoi | Peeyush Bishnoi |
+| [FALCON-1892](https://issues.apache.org/jira/browse/FALCON-1892) | Remove 
client side Recipe logic |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1860](https://issues.apache.org/jira/browse/FALCON-1860) | 
ADFProviderService should be optional as default setting |  Major | . | Ying 
Zheng | Ying Zheng |
+| [FALCON-1853](https://issues.apache.org/jira/browse/FALCON-1853) | Add spark 
process workflow builder |  Major | oozie | Peeyush Bishnoi | Peeyush Bishnoi |
+| [FALCON-1839](https://issues.apache.org/jira/browse/FALCON-1839) | Test case 
for APIs for entities scheduled on native scheduler |  Major | scheduler | 
Pragya Mittal | Pragya Mittal |
+| [FALCON-1831](https://issues.apache.org/jira/browse/FALCON-1831) | Flaky 
WorkflowExecutionContextTest.testWorkflowStartEnd |  Major | . | Pallavi Rao | 
Pallavi Rao |
+| [FALCON-1829](https://issues.apache.org/jira/browse/FALCON-1829) | Add 
regression for submit and schedule process on native scheduler (time based) |  
Major | scheduler | Pragya Mittal | Pragya Mittal |
+| [FALCON-1817](https://issues.apache.org/jira/browse/FALCON-1817) | Update 
xsd for Spark execution engine |  Major | . | Peeyush Bishnoi | Peeyush Bishnoi 
|
+| [FALCON-1801](https://issues.apache.org/jira/browse/FALCON-1801) | Update 
CHANGES.txt in trunk to mark 0.9 as released |  Major | . | Pallavi Rao | 
Pallavi Rao |
+| [FALCON-1790](https://issues.apache.org/jira/browse/FALCON-1790) | CLI 
support for instance search |  Major | . | Ying Zheng | Ying Zheng |
+| [FALCON-1789](https://issues.apache.org/jira/browse/FALCON-1789) | Extension 
Job Management: REST API |  Major | . | Sowmya Ramesh | Ying Zheng |
+| [FALCON-1767](https://issues.apache.org/jira/browse/FALCON-1767) | Improve 
Falcon retention policy documentation |  Major | . | Sowmya Ramesh | Sowmya 
Ramesh |
+| [FALCON-1729](https://issues.apache.org/jira/browse/FALCON-1729) | Database 
ingest to support password alias via keystore file |  Major | acquisition | 
Venkatesan Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1646](https://issues.apache.org/jira/browse/FALCON-1646) | Ability 
to export to database - Entity Definition |  Major | acquisition | Venkatesan 
Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1496](https://issues.apache.org/jira/browse/FALCON-1496) | Flaky 
FalconPostProcessingTest |  Major | . | Pallavi Rao | Pallavi Rao |
+| [FALCON-1335](https://issues.apache.org/jira/browse/FALCON-1335) | Backend 
support of instance search of a group of entities |  Major | . | Ying Zheng | 
Ying Zheng |
+| [FALCON-1334](https://issues.apache.org/jira/browse/FALCON-1334) | Improve 
search performance with Titan graph database indexing |  Major | . | Ying Zheng 
| Ying Zheng |
+| [FALCON-1111](https://issues.apache.org/jira/browse/FALCON-1111) | Instance 
update on titan DB based on JMS notifications on workflow jobs |  Major | 
common, messaging | Sowmya Ramesh | Ying Zheng |
+| [FALCON-1107](https://issues.apache.org/jira/browse/FALCON-1107) | Move 
trusted recipe processing to server side |  Major | . | Sowmya Ramesh | Sowmya 
Ramesh |
+| [FALCON-1106](https://issues.apache.org/jira/browse/FALCON-1106) | 
Documentation for extension |  Major | . | Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1105](https://issues.apache.org/jira/browse/FALCON-1105) | Server 
side extension repository management REST API support |  Major | client | 
Sowmya Ramesh | Sowmya Ramesh |
+| [FALCON-1085](https://issues.apache.org/jira/browse/FALCON-1085) | Allow 
cluster entities to be updated |  Major | . | Ajay Yadava | Balu Vellanki |
+
+
+### OTHER:
+
+| JIRA | Summary | Priority | Component | Reporter | Contributor |
+|:---- |:---- | :--- |:---- |:---- |:---- |
+| [FALCON-2063](https://issues.apache.org/jira/browse/FALCON-2063) | Add 
change log for 0.10 |  Major | . | Ajay Yadava | Ajay Yadava |
+| [FALCON-1765](https://issues.apache.org/jira/browse/FALCON-1765) | Move to 
github pull request model |  Major | . | Ajay Yadava | Ajay Yadava |
+| [FALCON-2008](https://issues.apache.org/jira/browse/FALCON-2008) | Add 
documentation for Graphite Notification Plugin |  Major | . | Praveen Adlakha | 
Praveen Adlakha |
+| [FALCON-1948](https://issues.apache.org/jira/browse/FALCON-1948) | Document 
steps to configure Oozie for Falcon |  Major | docs | Venkatesan Ramachandran | 
Venkatesan Ramachandran |
+| [FALCON-1899](https://issues.apache.org/jira/browse/FALCON-1899) | Create 
examples artifact module in Falcon |  Major | . | Peeyush Bishnoi | Peeyush 
Bishnoi |
+| [FALCON-1888](https://issues.apache.org/jira/browse/FALCON-1888) | Falcon 
JMS Notification details and documentation |  Major | docs | Venkatesan 
Ramachandran | Venkatesan Ramachandran |
+| [FALCON-1818](https://issues.apache.org/jira/browse/FALCON-1818) | Minor doc 
update for tar package locations after FALCON-1751 |  Minor | . | Deepak Barr | 
Deepak Barr |
+| [FALCON-1806](https://issues.apache.org/jira/browse/FALCON-1806) | Update 
documentation for Import and Export |  Major | . | Venkatesan Ramachandran | 
Venkatesan Ramachandran |
+| [FALCON-1567](https://issues.apache.org/jira/browse/FALCON-1567) | Test case 
for Lifecycle feature |  Major | merlin | Pragya Mittal | Pragya Mittal |
+| [FALCON-1566](https://issues.apache.org/jira/browse/FALCON-1566) | Add test 
for SLA monitoring API |  Major | merlin | Pragya Mittal | Pragya Mittal |

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/replication/pom.xml
----------------------------------------------------------------------
diff --git a/replication/pom.xml b/replication/pom.xml
index 1e80173..6e8b4b1 100644
--- a/replication/pom.xml
+++ b/replication/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.falcon</groupId>
         <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
+        <version>0.10</version>
     </parent>
     <artifactId>falcon-distcp-replication</artifactId>
     <description>Apache Falcon Distcp Replication Module</description>
@@ -52,6 +52,11 @@
             <groupId>org.apache.falcon</groupId>
             <artifactId>falcon-metrics</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.falcon</groupId>
+            <artifactId>falcon-test-util</artifactId>
+            <scope>test</scope>
+        </dependency>
 
         <dependency>
             <groupId>org.slf4j</groupId>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
----------------------------------------------------------------------
diff --git 
a/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java 
b/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
index a8da51d..0906bd5 100644
--- 
a/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
+++ 
b/replication/src/main/java/org/apache/falcon/replication/FeedReplicator.java
@@ -184,12 +184,13 @@ public class FeedReplicator extends Configured implements 
Tool {
         return new GnuParser().parse(options, args);
     }
 
-    protected DistCpOptions getDistCpOptions(CommandLine cmd) {
+    protected DistCpOptions getDistCpOptions(CommandLine cmd) throws 
FalconException, IOException {
         String[] paths = cmd.getOptionValue("sourcePaths").trim().split(",");
         List<Path> srcPaths = getPaths(paths);
-        String trgPath = cmd.getOptionValue("targetPath").trim();
+        String targetPathString = cmd.getOptionValue("targetPath").trim();
+        Path targetPath = new Path(targetPathString);
 
-        DistCpOptions distcpOptions = new DistCpOptions(srcPaths, new 
Path(trgPath));
+        DistCpOptions distcpOptions = new DistCpOptions(srcPaths, targetPath);
         distcpOptions.setBlocking(true);
         
distcpOptions.setMaxMaps(Integer.parseInt(cmd.getOptionValue("maxMaps")));
         
distcpOptions.setMapBandwidth(Integer.parseInt(cmd.getOptionValue("mapBandwidth")));
@@ -214,8 +215,16 @@ public class FeedReplicator extends Configured implements 
Tool {
         // Removing deleted files by default - FALCON-1844
         String removeDeletedFiles = cmd.getOptionValue(
                 
ReplicationDistCpOption.DISTCP_OPTION_REMOVE_DELETED_FILES.getName(), "true");
-        
distcpOptions.setDeleteMissing(Boolean.parseBoolean(removeDeletedFiles));
-
+        boolean deleteMissing = Boolean.parseBoolean(removeDeletedFiles);
+        distcpOptions.setDeleteMissing(deleteMissing);
+        if (deleteMissing) {
+            // DistCP will fail with InvalidInputException if deleteMissing is 
set to true and
+            // if targetPath does not exist. Create targetPath to avoid 
failures.
+            FileSystem fs = 
HadoopClientFactory.get().createProxiedFileSystem(targetPath.toUri(), 
getConf());
+            if (!fs.exists(targetPath)) {
+                fs.mkdirs(targetPath);
+            }
+        }
 
         String preserveBlockSize = cmd.getOptionValue(
                 
ReplicationDistCpOption.DISTCP_OPTION_PRESERVE_BLOCK_SIZE.getName());

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/replication/src/test/java/org/apache/falcon/replication/FeedReplicatorTest.java
----------------------------------------------------------------------
diff --git 
a/replication/src/test/java/org/apache/falcon/replication/FeedReplicatorTest.java
 
b/replication/src/test/java/org/apache/falcon/replication/FeedReplicatorTest.java
index e7e177e..2662ade 100644
--- 
a/replication/src/test/java/org/apache/falcon/replication/FeedReplicatorTest.java
+++ 
b/replication/src/test/java/org/apache/falcon/replication/FeedReplicatorTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.falcon.replication;
 
+import org.apache.falcon.cluster.util.EmbeddedCluster;
 import org.apache.commons.cli.CommandLine;
 import org.apache.falcon.entity.Storage;
 import org.apache.hadoop.fs.Path;
@@ -32,6 +33,8 @@ import java.util.List;
  */
 public class FeedReplicatorTest {
 
+    private String defaultPath = "jail://FeedReplicatorTest:00/tmp";
+
     @Test
     public void testArguments() throws Exception {
         /*
@@ -42,21 +45,26 @@ public class FeedReplicatorTest {
          * <arg>-sourcePaths</arg><arg>${distcpSourcePaths}</arg>
          * <arg>-targetPath</arg><arg>${distcpTargetPaths}</arg>
          */
+
+        // creates jailed cluster in which DistCpOptions command can be tested.
+        EmbeddedCluster cluster =  
EmbeddedCluster.newCluster("FeedReplicatorTest");
+
         final String[] args = {
             "true",
             "-maxMaps", "3",
             "-mapBandwidth", "4",
-            "-sourcePaths", "hdfs://localhost:8020/tmp/",
-            "-targetPath", "hdfs://localhost1:8020/tmp/",
+            "-sourcePaths", defaultPath,
+            "-targetPath", defaultPath,
             "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
         };
 
         FeedReplicator replicator = new FeedReplicator();
         CommandLine cmd = replicator.getCommand(args);
+        replicator.setConf(cluster.getConf());
         DistCpOptions options = replicator.getDistCpOptions(cmd);
 
         List<Path> srcPaths = new ArrayList<Path>();
-        srcPaths.add(new Path("hdfs://localhost:8020/tmp/"));
+        srcPaths.add(new Path(defaultPath));
         validateMandatoryArguments(options, srcPaths, true);
         Assert.assertTrue(options.shouldDeleteMissing());
     }
@@ -82,8 +90,8 @@ public class FeedReplicatorTest {
             "true",
             "-maxMaps", "3",
             "-mapBandwidth", "4",
-            "-sourcePaths", "hdfs://localhost:8020/tmp/",
-            "-targetPath", "hdfs://localhost1:8020/tmp/",
+            "-sourcePaths", defaultPath,
+            "-targetPath", defaultPath,
             "-falconFeedStorageType", Storage.TYPE.FILESYSTEM.name(),
             "-overwrite", "true",
             "-ignoreErrors", "false",
@@ -99,7 +107,7 @@ public class FeedReplicatorTest {
         DistCpOptions options = replicator.getDistCpOptions(cmd);
 
         List<Path> srcPaths = new ArrayList<Path>();
-        srcPaths.add(new Path("hdfs://localhost:8020/tmp/"));
+        srcPaths.add(new Path(defaultPath));
         validateMandatoryArguments(options, srcPaths, false);
         validateOptionalArguments(options);
     }
@@ -108,7 +116,7 @@ public class FeedReplicatorTest {
         Assert.assertEquals(options.getMaxMaps(), 3);
         Assert.assertEquals(options.getMapBandwidth(), 4);
         Assert.assertEquals(options.getSourcePaths(), srcPaths);
-        Assert.assertEquals(options.getTargetPath(), new 
Path("hdfs://localhost1:8020/tmp/"));
+        Assert.assertEquals(options.getTargetPath(), new Path(defaultPath));
         Assert.assertEquals(options.shouldSyncFolder(), shouldSyncFolder);
     }
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/rerun/pom.xml
----------------------------------------------------------------------
diff --git a/rerun/pom.xml b/rerun/pom.xml
index 8694e1d..c61dfa4 100644
--- a/rerun/pom.xml
+++ b/rerun/pom.xml
@@ -25,7 +25,7 @@
     <parent>
         <groupId>org.apache.falcon</groupId>
         <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
+        <version>0.10</version>
     </parent>
     <artifactId>falcon-rerun</artifactId>
     <description>Apache Falcon Rerun Handler</description>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
----------------------------------------------------------------------
diff --git 
a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java 
b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
index 4c763c2..3cad362 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
@@ -59,9 +59,9 @@ public class RetryConsumer<T extends 
RetryHandler<DelayedQueue<RetryEvent>>>
                     (message.getRunId() + 1), message.getAttempts(), 
message.getEntityName(), message.getInstance(),
                     message.getWfId(), SchemaHelper.formatDateUTC(new 
Date(System.currentTimeMillis())));
             // Use coord action id for rerun if available
-            String id = message.getParentId();
-            if (StringUtils.isBlank(id)) {
-                id = message.getWfId();
+            String id = message.getWfId();
+            if (!id.contains("-C@") && 
StringUtils.isNotBlank(message.getParentId())) {
+                id = message.getParentId();
             }
             handler.getWfEngine(entityType, 
entityName).reRun(message.getClusterName(), id, null, false);
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
----------------------------------------------------------------------
diff --git 
a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java 
b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
index c691922..b8adeef 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
@@ -110,17 +110,17 @@ public class RetryHandler<M extends 
DelayedQueue<RetryEvent>> extends
 
     @Override
     public void onFailure(WorkflowExecutionContext context) throws 
FalconException {
-        // Re-run does not make sense when killed by user.
-        if (context.isWorkflowKilledManually()) {
-            LOG.debug("Workflow: {} Instance: {} Entity: {}, killed manually 
by user. Will not retry.",
-                    context.getWorkflowId(), 
context.getNominalTimeAsISO8601(), context.getEntityName());
-            return;
-        } else if (context.hasWorkflowTimedOut()) {
+        if (context.hasWorkflowTimedOut()) {
             Entity entity = EntityUtil.getEntity(context.getEntityType(), 
context.getEntityName());
             Retry retry = getRetry(entity);
             if (!retry.isOnTimeout()) {
                 return;
             }
+        // Re-run does not make sense when killed by user.
+        } else if (context.isWorkflowKilledManually()) {
+            LOG.debug("Workflow: {} Instance: {} Entity: {}, killed manually 
by user. Will not retry.",
+                    context.getWorkflowId(), 
context.getNominalTimeAsISO8601(), context.getEntityName());
+            return;
         }
         handleRerun(context.getClusterName(), context.getEntityType(),
                 context.getEntityName(), context.getNominalTimeAsISO8601(),

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/retention/pom.xml
----------------------------------------------------------------------
diff --git a/retention/pom.xml b/retention/pom.xml
index d59ed81..0eb19d4 100644
--- a/retention/pom.xml
+++ b/retention/pom.xml
@@ -25,7 +25,7 @@
     <parent>
         <groupId>org.apache.falcon</groupId>
         <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
+        <version>0.10</version>
     </parent>
     <artifactId>falcon-retention</artifactId>
     <description>Apache Falcon Retention Module</description>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
----------------------------------------------------------------------
diff --git 
a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java 
b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
index 72447da..98936ae 100644
--- a/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
+++ b/retention/src/test/java/org/apache/falcon/retention/FeedEvictorTest.java
@@ -462,6 +462,43 @@ public class FeedEvictorTest {
         }
     }
 
+    @Test
+    public void testEvictionStatsMetaWithNoPattern() throws Exception {
+        try {
+            Configuration conf = cluster.getConf();
+            FileSystem fs = FileSystem.get(conf);
+            fs.delete(new Path("/"), true);
+            stream.clear();
+
+            Pair<List<String>, List<String>> pair = createTestData("/data");
+            createDir("/stats");
+            createDir("/meta");
+            createTestData("/tmp");
+            final String storageUrl = 
cluster.getConf().get(HadoopClientFactory.FS_DEFAULT_NAME_KEY);
+            FeedEvictor.main(new String[] {
+                "-feedBasePath",
+                getFeedBasePath(LocationType.DATA, storageUrl) + "#"
+                    + getStatsOrMetaPath(LocationType.STATS, storageUrl)
+                    + "#" + getStatsOrMetaPath(LocationType.META, storageUrl)
+                    + "#" + getFeedBasePath(LocationType.TMP, storageUrl),
+                "-retentionType", "instance",
+                "-retentionLimit", "months(5)",
+                "-timeZone", "UTC",
+                "-frequency", "hourly",
+                "-logFile", conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY)
+                + "/falcon/staging/feed/2012-01-01-04-00", 
"-falconFeedStorageType",
+                Storage.TYPE.FILESYSTEM.name(),
+            });
+
+            // should not throw exception
+            // stats and meta dir should not be deleted
+            Assert.assertTrue(isDirPresent("/stats"));
+            Assert.assertTrue(isDirPresent("/meta"));
+        } catch (Exception e) {
+            Assert.fail("Unknown exception", e);
+        }
+    }
+
 
     private Pair<List<String>, List<String>> createTestData(String 
locationType) throws Exception {
         Configuration conf = cluster.getConf();
@@ -482,6 +519,12 @@ public class FeedEvictorTest {
         return Pair.of(inRange, outOfRange);
     }
 
+    private void createDir(String locationType) throws Exception {
+        Configuration conf = cluster.getConf();
+        FileSystem fs = FileSystem.get(conf);
+        touch(fs, locationType, false);
+    }
+
     private Pair<List<String>, List<String>> createTestData(String feed, 
String mask,
                                                             int period, 
TimeUnit timeUnit,
                                                             String 
locationType) throws Exception {
@@ -542,11 +585,21 @@ public class FeedEvictorTest {
         }
     }
 
+    private boolean isDirPresent(String path) throws Exception {
+        FileSystem fs = FileSystem.get(cluster.getConf());
+        return fs.exists(new Path(path));
+    }
+
     private String getFeedBasePath(LocationType locationType, String 
storageUrl) {
         return locationType.name() + "=" + storageUrl
                 + "/" + locationType.name().toLowerCase() + 
"/data/YYYY/feed3/dd/MM/?{MONTH}/more/?{HOUR}";
     }
 
+    private String getStatsOrMetaPath(LocationType locationType, String 
storageUrl) {
+        return locationType.name() + "=" + storageUrl
+                + "/" + locationType.name().toLowerCase();
+    }
+
     private static class InMemoryWriter extends PrintStream {
 
         private final StringBuffer buffer = new StringBuffer();

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/scheduler/pom.xml
----------------------------------------------------------------------
diff --git a/scheduler/pom.xml b/scheduler/pom.xml
index f69dc93..62dd290 100644
--- a/scheduler/pom.xml
+++ b/scheduler/pom.xml
@@ -24,7 +24,7 @@
     <parent>
         <groupId>org.apache.falcon</groupId>
         <artifactId>falcon-main</artifactId>
-        <version>0.10-SNAPSHOT</version>
+        <version>0.10</version>
     </parent>
     <artifactId>falcon-scheduler</artifactId>
     <description>Apache Falcon Scheduler Module</description>
@@ -75,6 +75,18 @@
            <groupId>org.quartz-scheduler</groupId>
            <artifactId>quartz</artifactId>
            <version>${quartz.version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>c3p0</groupId>
+                <artifactId>c3p0</artifactId>
+              </exclusion>
+            </exclusions>
+       </dependency>
+
+       <dependency>
+           <groupId>com.mchange</groupId>
+           <artifactId>c3p0</artifactId>
+           <version>${c3p0.version}</version>
        </dependency>
 
         <dependency>

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/scheduler/src/main/java/org/apache/falcon/workflow/engine/FalconWorkflowEngine.java
----------------------------------------------------------------------
diff --git 
a/scheduler/src/main/java/org/apache/falcon/workflow/engine/FalconWorkflowEngine.java
 
b/scheduler/src/main/java/org/apache/falcon/workflow/engine/FalconWorkflowEngine.java
index 6dbec0c..82a1bdf 100644
--- 
a/scheduler/src/main/java/org/apache/falcon/workflow/engine/FalconWorkflowEngine.java
+++ 
b/scheduler/src/main/java/org/apache/falcon/workflow/engine/FalconWorkflowEngine.java
@@ -108,12 +108,17 @@ public class FalconWorkflowEngine extends 
AbstractWorkflowEngine {
 
     @Override
     public boolean isActive(Entity entity) throws FalconException {
-        EntityID id = new EntityID(entity);
-        // Ideally state store should have all entities, but, check anyway.
-        if (STATE_STORE.entityExists(id)) {
-            return STATE_STORE.getEntity(id).getCurrentState() != 
EntityState.STATE.SUBMITTED;
+        try {
+            EntityID id = new EntityID(entity);
+            // Ideally state store should have all entities, but, check anyway.
+            if (STATE_STORE.entityExists(id)) {
+                return STATE_STORE.getEntity(id).getCurrentState() != 
EntityState.STATE.SUBMITTED;
+            }
+            return false;
+        } catch (NullPointerException npe) {
+            // FalconJPAService is not always used, so catch NPE and return 
false
+            return false;
         }
-        return false;
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/src/bin/graphdbutil.sh
----------------------------------------------------------------------
diff --git a/src/bin/graphdbutil.sh b/src/bin/graphdbutil.sh
new file mode 100644
index 0000000..151ec2f
--- /dev/null
+++ b/src/bin/graphdbutil.sh
@@ -0,0 +1,118 @@
+#!/bin/sh
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License. See accompanying LICENSE file.
+#
+
+
+usage() {
+  echo "usage: $0  operation java-home hadoop-home falcon-home 
falcon-common-jar input/out-dir"
+  echo "  where operation is either export OR import"
+  echo "        java-home is the java installation location"
+  echo "        hadoop-home is the hadoop installation location"
+  echo "        falcon-home is the falcon home installation location"
+  echo "        falcon-common-jar is the falcon-common-<version>.jar location 
with GraphUtils"
+  echo "        input/output dir is the directory for the graph data"
+  exit 1
+}
+
+if [ $# != 6 ]; then
+  usage
+fi
+
+operation=$1
+java_home=$2
+hadoop_home=$3
+falcon_home=$4
+falcon_common_jar=$5
+util_dir=$6
+
+export=0
+import=0
+keep_temp=Y
+
+case $operation in
+   import) import=1
+           ;;
+   export) export=1
+           ;;
+   *)     echo "Unknown operation $operation"
+          usage
+esac
+
+if [ -d  $java_home -a -f $java_home/bin/java -a -f $java_home/bin/jar ] ; then
+  :
+else
+  echo "Invalid java home directory $java_home"
+  usage
+fi
+
+if [ -d  $hadoop_home -a -f $hadoop_home/bin/hadoop ] ; then
+  :
+else
+  echo "Invalid hadoop home directory $hadoop_home"
+  usage
+fi
+
+if [ -d  $falcon_home -a -f $falcon_home/bin/falcon ] ; then
+  :
+else
+  echo "Invalid falcon home directory $falcon_home"
+  usage
+fi
+
+falcon_war=$falcon_home/server/webapp/falcon.war
+if [ ! -f $falcon_war ]; then
+  echo "Falcon war file $falcon_war not available"
+  usage
+fi
+
+if [ ! -f $falcon_common_jar ]; then
+  echo "Falcon commons jar file $falcon_common_jar not available"
+  usage
+fi
+
+
+util_tmpdir=/tmp/falcon-graphutil-tmp-$$
+echo "Using $util_tmpdir as temporary directory"
+trap "rm -rf $util_tmpdir" 0 2 3 15
+rm -rf $util_tmpdir
+mkdir -p $util_tmpdir
+
+if [ ! -d $util_dir ]; then
+   echo "Directory $util_dir does not exist"
+   usage
+fi
+
+if [ x$import = x1 ]; then
+   if [ ! -f $metadata_file ]; then
+      echo "Directory $util_dir does not exist or $metadata_file not present"
+      usage
+   fi
+fi
+
+cd $util_tmpdir
+jar -xf $falcon_war
+rm ./WEB-INF/lib/jackson*  ./WEB-INF/lib/falcon-common*.jar 
./WEB-INF/lib/slf4j* ./WEB-INF/lib/activemq*
+cp $falcon_common_jar ./WEB-INF/lib/
+
+JAVA_HOME=$java_home
+export PATH=$JAVA_HOME/bin:$PATH
+export CLASSPATH="$falcon_home/conf:./WEB-INF/lib/*:`$hadoop_home/bin/hadoop 
classpath`"
+echo "Using classpath $CLASSPATH"
+java -Dfalcon.log.dir=/tmp/ org.apache.falcon.metadata.GraphUpdateUtils 
$operation $util_dir
+
+if [ x$keep_temp = xY ]; then
+  :
+else
+  rm -rf $util_tmpdir
+fi
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/falcon/blob/01a303e3/src/conf/hbase-site.xml.template
----------------------------------------------------------------------
diff --git a/src/conf/hbase-site.xml.template b/src/conf/hbase-site.xml.template
index 2c72617..aa83889 100644
--- a/src/conf/hbase-site.xml.template
+++ b/src/conf/hbase-site.xml.template
@@ -19,7 +19,7 @@
 <configuration>
   <property>
     <name>hbase.rootdir</name>
-    <value>file://${hbase_home}/root</value>
+    <value>file:///${hbase_home}/root</value>
   </property>
   <property>
     <name>hbase.zookeeper.property.dataDir</name>

Reply via email to