Repository: falcon
Updated Branches:
  refs/heads/master a49133aaf -> c52961c6a


FALCON-1892 Remove client side Recipe logic

Please use this pull request for review. Ignore any documentation issues, as 
they will be addressed in https://issues.apache.org/jira/browse/FALCON-1106. Thanks!
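
For context, the removed client-side command was invoked roughly as follows 
(illustrative values; the option names follow the constants in the deleted 
FalconRecipeCLI, and the -skipDryRun flag string is assumed from the inherited 
SKIPDRYRUN_OPT constant):

    falcon recipe -name hdfs-replication -operation HDFS_REPLICATION \
        -properties /path/to/hdfs-replication.properties -skipDryRun

The deleted RecipeOperation enum shows the two operations this supported: 
HDFS_REPLICATION and HIVE_DISASTER_RECOVERY.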

Author: Sowmya Ramesh <[email protected]>

Reviewers: "Balu Vellanki <[email protected]>, Venkat Ranganathan 
<[email protected]>"

Closes #91 from sowmyaramesh/FALCON-1892


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/c52961c6
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/c52961c6
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/c52961c6

Branch: refs/heads/master
Commit: c52961c6a3625a7fc617b62e3c3e3fa43534e74e
Parents: a49133a
Author: Sowmya Ramesh <[email protected]>
Authored: Tue Apr 12 16:04:39 2016 -0700
Committer: bvellanki <[email protected]>
Committed: Tue Apr 12 16:04:39 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/falcon/cli/FalconCLI.java   |  13 -
 .../org/apache/falcon/cli/FalconRecipeCLI.java  | 121 --------
 .../org/apache/falcon/client/FalconClient.java  |  74 -----
 .../recipe/HdfsReplicationRecipeTool.java       |  70 -----
 .../HdfsReplicationRecipeToolOptions.java       |  62 ----
 .../recipe/HiveReplicationRecipeTool.java       | 200 -------------
 .../HiveReplicationRecipeToolOptions.java       |  90 ------
 .../java/org/apache/falcon/recipe/Recipe.java   |  29 --
 .../org/apache/falcon/recipe/RecipeFactory.java |  44 ---
 .../org/apache/falcon/recipe/RecipeTool.java    | 285 ------------------
 .../apache/falcon/recipe/RecipeToolArgs.java    |  71 -----
 .../apache/falcon/recipe/RecipeToolOptions.java |  91 ------
 .../recipe/util/RecipeProcessBuilderUtils.java  | 293 -------------------
 .../java/org/apache/falcon/cli/FalconCLIIT.java |  56 ----
 webapp/src/test/resources/client.properties     |   1 -
 .../resources/hdfs-replication-template.xml     |  44 ---
 .../test/resources/hdfs-replication.properties  |  47 ---
 17 files changed, 1591 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/cli/FalconCLI.java 
b/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
index a1f42ce..7d0f2f6 100644
--- a/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
+++ b/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
@@ -56,7 +56,6 @@ public class FalconCLI {
     public static final String METADATA_CMD = "metadata";
     public static final String ENTITY_CMD = "entity";
     public static final String INSTANCE_CMD = "instance";
-    public static final String RECIPE_CMD = "recipe";
 
     public static final String TYPE_OPT = "type";
     public static final String COLO_OPT = "colo";
@@ -92,14 +91,6 @@ public class FalconCLI {
     }
 
     /**
-     * Recipe operation enum.
-     */
-    public enum RecipeOperation {
-        HDFS_REPLICATION,
-        HIVE_DISASTER_RECOVERY
-    }
-
-    /**
      * Entry point for the Falcon CLI when invoked from the command line. Upon
      * completion this method exits the JVM with '0' (success) or '-1'
      * (failure).
@@ -134,7 +125,6 @@ public class FalconCLI {
         FalconEntityCLI entityCLI = new FalconEntityCLI();
         FalconInstanceCLI instanceCLI = new FalconInstanceCLI();
         FalconMetadataCLI metadataCLI = new FalconMetadataCLI();
-        FalconRecipeCLI recipeCLI = new FalconRecipeCLI();
 
         parser.addCommand(ADMIN_CMD, "", "admin operations", 
adminCLI.createAdminOptions(), true);
         parser.addCommand(HELP_CMD, "", "display usage", new Options(), false);
@@ -146,7 +136,6 @@ public class FalconCLI {
                 instanceCLI.createInstanceOptions(), false);
         parser.addCommand(METADATA_CMD, "", "Metadata operations like list, 
relations",
                 metadataCLI.createMetadataOptions(), true);
-        parser.addCommand(RECIPE_CMD, "", "recipe operations", 
recipeCLI.createRecipeOptions(), true);
         parser.addCommand(VERSION_OPT, "", "show client version", new 
Options(), false);
 
         try {
@@ -168,8 +157,6 @@ public class FalconCLI {
                     instanceCLI.instanceCommand(commandLine, client);
                 } else if (command.getName().equals(METADATA_CMD)) {
                     metadataCLI.metadataCommand(commandLine, client);
-                } else if (command.getName().equals(RECIPE_CMD)) {
-                    recipeCLI.recipeCommand(commandLine, client);
                 }
             }
             return exitValue;

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/cli/FalconRecipeCLI.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/cli/FalconRecipeCLI.java 
b/client/src/main/java/org/apache/falcon/cli/FalconRecipeCLI.java
deleted file mode 100644
index 82053f9..0000000
--- a/client/src/main/java/org/apache/falcon/cli/FalconRecipeCLI.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.cli;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.client.FalconCLIException;
-import org.apache.falcon.client.FalconClient;
-
-import java.util.HashSet;
-import java.util.Set;
-
-/**
- * Recipe extension to Falcon Command Line Interface - wraps the RESTful API 
for Recipe.
- */
-public class FalconRecipeCLI extends FalconCLI {
-
-    public FalconRecipeCLI() throws Exception {
-        super();
-    }
-
-    private static final String RECIPE_NAME = "name";
-    private static final String RECIPE_OPERATION = "operation";
-    private static final String RECIPE_TOOL_CLASS_NAME = "tool";
-    private static final String RECIPE_PROPERTIES_FILE = "properties";
-
-    public Options createRecipeOptions() {
-        Options recipeOptions = new Options();
-        Option url = new Option(URL_OPTION, true, "Falcon URL");
-        recipeOptions.addOption(url);
-
-        Option recipeFileOpt = new Option(RECIPE_NAME, true, "recipe name");
-        recipeOptions.addOption(recipeFileOpt);
-
-        Option recipeToolClassName = new Option(RECIPE_TOOL_CLASS_NAME, true, 
"recipe class");
-        recipeOptions.addOption(recipeToolClassName);
-
-        Option recipeOperation = new Option(RECIPE_OPERATION, true, "recipe 
operation");
-        recipeOptions.addOption(recipeOperation);
-
-        Option recipeProperties = new Option(RECIPE_PROPERTIES_FILE, true, 
"recipe properties file path");
-        recipeOptions.addOption(recipeProperties);
-
-        Option skipDryRunOperation = new Option(SKIPDRYRUN_OPT, false, "skip 
dryrun operation");
-        recipeOptions.addOption(skipDryRunOperation);
-
-        Option doAs = new Option(DO_AS_OPT, true, "doAs user");
-        recipeOptions.addOption(doAs);
-
-        return recipeOptions;
-    }
-
-    public void recipeCommand(CommandLine commandLine, FalconClient client) 
throws FalconCLIException {
-        Set<String> optionsList = new HashSet<String>();
-        for (Option option : commandLine.getOptions()) {
-            optionsList.add(option.getOpt());
-        }
-
-        String recipeName = commandLine.getOptionValue(RECIPE_NAME);
-        String recipeToolClass = 
commandLine.getOptionValue(RECIPE_TOOL_CLASS_NAME);
-        String recipeOperation = commandLine.getOptionValue(RECIPE_OPERATION);
-        String recipePropertiesFile = 
commandLine.getOptionValue(RECIPE_PROPERTIES_FILE);
-        String doAsUser = commandLine.getOptionValue(DO_AS_OPT);
-
-        validateNotEmpty(recipeName, RECIPE_NAME);
-        validateNotEmpty(recipeOperation, RECIPE_OPERATION);
-        validateRecipeOperations(recipeOperation);
-        validateRecipePropertiesFile(recipePropertiesFile, recipeName);
-        Boolean skipDryRun = null;
-        if (optionsList.contains(SKIPDRYRUN_OPT)) {
-            skipDryRun = true;
-        }
-
-        String result = client.submitRecipe(recipeName, recipeToolClass,
-                recipeOperation, recipePropertiesFile, skipDryRun, 
doAsUser).toString();
-        OUT.get().println(result);
-    }
-
-    private static void validateRecipeOperations(String recipeOperation) 
throws FalconCLIException {
-        for(RecipeOperation operation : RecipeOperation.values()) {
-            if (operation.toString().equalsIgnoreCase(recipeOperation)) {
-                return;
-            }
-        }
-        throw new FalconCLIException("Allowed Recipe operations: "
-                + java.util.Arrays.asList((RecipeOperation.values())));
-    }
-
-    private static void validateRecipePropertiesFile(String 
recipePropertiesFile, String recipeName)
-        throws FalconCLIException {
-        if (StringUtils.isBlank(recipePropertiesFile)) {
-            return;
-        }
-
-        String []fileSplits = recipePropertiesFile.split("/");
-        String recipePropertiesName = 
(fileSplits[fileSplits.length-1]).split("\\.")[0];
-        if (recipePropertiesName.equals(recipeName)) {
-            return;
-        }
-
-        throw new FalconCLIException("Provided properties file name do match 
with recipe name: " +recipeName);
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/client/FalconClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/client/FalconClient.java 
b/client/src/main/java/org/apache/falcon/client/FalconClient.java
index 597f608..10243f0 100644
--- a/client/src/main/java/org/apache/falcon/client/FalconClient.java
+++ b/client/src/main/java/org/apache/falcon/client/FalconClient.java
@@ -19,7 +19,6 @@
 package org.apache.falcon.client;
 
 import java.io.BufferedReader;
-import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileReader;
@@ -27,7 +26,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
-import java.lang.reflect.Method;
 import java.net.URL;
 import java.security.SecureRandom;
 import java.util.List;
@@ -54,8 +52,6 @@ import org.apache.falcon.entity.v0.DateValidator;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.metadata.RelationshipType;
-import org.apache.falcon.recipe.RecipeTool;
-import org.apache.falcon.recipe.RecipeToolArgs;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.EntityList;
 import org.apache.falcon.resource.EntitySummaryResult;
@@ -951,76 +947,6 @@ public class FalconClient extends AbstractFalconClient {
         return sendMetadataLineageRequest(MetadataOperations.EDGES, id, 
doAsUser);
     }
 
-    private String getRecipePath(String recipePropertiesFile) throws 
FalconCLIException {
-        String recipePath = null;
-        if (StringUtils.isNotBlank(recipePropertiesFile)) {
-            File file = new File(recipePropertiesFile);
-            if (file.exists()) {
-                recipePath = 
file.getAbsoluteFile().getParentFile().getAbsolutePath();
-            }
-        } else {
-            recipePath = clientProperties.getProperty("falcon.recipe.path");
-        }
-
-        return recipePath;
-    }
-
-    public APIResult submitRecipe(String recipeName, String 
recipeToolClassName,
-                                  final String recipeOperation, String 
recipePropertiesFile, Boolean skipDryRun,
-                                  final String doAsUser) throws 
FalconCLIException {
-        String recipePath = getRecipePath(recipePropertiesFile);
-
-        if (StringUtils.isEmpty(recipePath)) {
-            throw new FalconCLIException("falcon.recipe.path is not set in 
client.properties or properties "
-                    + " file is not provided");
-        }
-
-        String templateFilePath = recipePath + File.separator + recipeName + 
TEMPLATE_SUFFIX;
-        File file = new File(templateFilePath);
-        if (!file.exists()) {
-            throw new FalconCLIException("Recipe template file does not exist 
: " + templateFilePath);
-        }
-
-        String propertiesFilePath = recipePath + File.separator + recipeName + 
PROPERTIES_SUFFIX;
-        file = new File(propertiesFilePath);
-        if (!file.exists()) {
-            throw new FalconCLIException("Recipe properties file does not 
exist : " + propertiesFilePath);
-        }
-
-        String processFile;
-        try {
-            String prefix =  "falcon-recipe" + "-" + 
System.currentTimeMillis();
-            File tmpPath = new File("/tmp");
-            if (!tmpPath.exists()) {
-                if (!tmpPath.mkdir()) {
-                    throw new FalconCLIException("Creating directory failed: " 
+ tmpPath.getAbsolutePath());
-                }
-            }
-            File f = File.createTempFile(prefix, ".xml", tmpPath);
-            f.deleteOnExit();
-
-            processFile = f.getAbsolutePath();
-            String[] args = {
-                "-" + RecipeToolArgs.RECIPE_FILE_ARG.getName(), 
templateFilePath,
-                "-" + RecipeToolArgs.RECIPE_PROPERTIES_FILE_ARG.getName(), 
propertiesFilePath,
-                "-" + 
RecipeToolArgs.RECIPE_PROCESS_XML_FILE_PATH_ARG.getName(), processFile,
-                "-" + RecipeToolArgs.RECIPE_OPERATION_ARG.getName(), 
recipeOperation,
-            };
-
-            if (recipeToolClassName != null) {
-                Class<?> clz = Class.forName(recipeToolClassName);
-                Method method = clz.getMethod("main", String[].class);
-                method.invoke(null, args);
-            } else {
-                RecipeTool.main(args);
-            }
-            validate(EntityType.PROCESS.toString(), processFile, skipDryRun, 
doAsUser);
-            return submitAndSchedule(EntityType.PROCESS.toString(), 
processFile, skipDryRun, doAsUser, null);
-        } catch (Exception e) {
-            throw new FalconCLIException(e.getMessage(), e);
-        }
-    }
-
     private String sendMetadataLineageRequest(MetadataOperations job, String 
id,
                                               String doAsUser) throws 
FalconCLIException {
         ClientResponse clientResponse = new ResourceBuilder().path(job.path, 
id).addQueryParam(DO_AS_OPT, doAsUser)

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeTool.java
----------------------------------------------------------------------
diff --git 
a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeTool.java 
b/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeTool.java
deleted file mode 100644
index cf24078..0000000
--- 
a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeTool.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Properties;
-import java.io.File;
-
-/**
- * Hdfs Replication recipe tool for Falcon recipes.
- */
-public class HdfsReplicationRecipeTool implements Recipe {
-
-    private static final String COMMA_SEPARATOR = ",";
-
-    @Override
-    public void validate(final Properties recipeProperties) {
-        for (HdfsReplicationRecipeToolOptions option : 
HdfsReplicationRecipeToolOptions.values()) {
-            if (recipeProperties.getProperty(option.getName()) == null && 
option.isRequired()) {
-                throw new IllegalArgumentException("Missing argument: " + 
option.getName());
-            }
-        }
-    }
-
-    @Override
-    public Properties getAdditionalSystemProperties(final Properties 
recipeProperties) {
-        Properties additionalProperties = new Properties();
-
-        // Construct fully qualified hdfs src path
-        String srcPaths = 
recipeProperties.getProperty(HdfsReplicationRecipeToolOptions
-                .REPLICATION_SOURCE_DIR.getName());
-        StringBuilder absoluteSrcPaths = new StringBuilder();
-        String srcFsPath = recipeProperties.getProperty(
-                
HdfsReplicationRecipeToolOptions.REPLICATION_SOURCE_CLUSTER_FS_WRITE_ENDPOINT.getName());
-        if (StringUtils.isNotEmpty(srcFsPath)) {
-            srcFsPath = StringUtils.removeEnd(srcFsPath, File.separator);
-        }
-        if (StringUtils.isNotEmpty(srcPaths)) {
-            String[] paths = srcPaths.split(COMMA_SEPARATOR);
-
-            for (String path : paths) {
-                StringBuilder srcpath = new StringBuilder(srcFsPath);
-                srcpath.append(path.trim());
-                srcpath.append(COMMA_SEPARATOR);
-                absoluteSrcPaths.append(srcpath);
-            }
-        }
-
-        
additionalProperties.put(HdfsReplicationRecipeToolOptions.REPLICATION_SOURCE_DIR.getName(),
-                StringUtils.removeEnd(absoluteSrcPaths.toString(), 
COMMA_SEPARATOR));
-        return additionalProperties;
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java
----------------------------------------------------------------------
diff --git 
a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java
 
b/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java
deleted file mode 100644
index 4c3b543..0000000
--- 
a/client/src/main/java/org/apache/falcon/recipe/HdfsReplicationRecipeToolOptions.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-/**
- * Hdfs Recipe tool options.
- */
-public enum HdfsReplicationRecipeToolOptions {
-    REPLICATION_SOURCE_DIR("drSourceDir", "Location of source data to 
replicate"),
-    REPLICATION_SOURCE_CLUSTER_FS_WRITE_ENDPOINT("drSourceClusterFS", "Source 
replication cluster end point"),
-    REPLICATION_TARGET_DIR("drTargetDir", "Location on target cluster for 
replication"),
-    REPLICATION_TARGET_CLUSTER_FS_WRITE_ENDPOINT("drTargetClusterFS", "Target 
replication cluster end point"),
-    REPLICATION_MAX_MAPS("distcpMaxMaps", "Maximum number of maps used during 
replication"),
-    REPLICATION_MAP_BANDWIDTH_IN_MB("distcpMapBandwidth", "Bandwidth in MB/s 
used by each mapper during replication");
-
-    private final String name;
-    private final String description;
-    private final boolean isRequired;
-
-    HdfsReplicationRecipeToolOptions(String name, String description) {
-        this(name, description, true);
-    }
-
-    HdfsReplicationRecipeToolOptions(String name, String description, boolean 
isRequired) {
-        this.name = name;
-        this.description = description;
-        this.isRequired = isRequired;
-    }
-
-    public String getName() {
-        return this.name;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public boolean isRequired() {
-        return isRequired;
-    }
-
-    @Override
-    public String toString() {
-        return getName();
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeTool.java
----------------------------------------------------------------------
diff --git 
a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeTool.java 
b/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeTool.java
deleted file mode 100644
index 3df89d3..0000000
--- 
a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeTool.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.hcatalog.api.HCatClient;
-import org.apache.hive.hcatalog.api.HCatDatabase;
-import org.apache.hive.hcatalog.api.HCatTable;
-import org.apache.hive.hcatalog.api.ObjectNotFoundException;
-import org.apache.hive.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
-import org.apache.hive.hcatalog.common.HCatException;
-
-import java.io.IOException;
-import java.util.Properties;
-
-/**
- * Hive Replication recipe tool for Falcon recipes.
- */
-public class HiveReplicationRecipeTool implements Recipe {
-    private static final String ALL_TABLES = "*";
-
-    @Override
-    public void validate(final Properties recipeProperties) throws Exception {
-        for (HiveReplicationRecipeToolOptions option : 
HiveReplicationRecipeToolOptions.values()) {
-            if (recipeProperties.getProperty(option.getName()) == null && 
option.isRequired()) {
-                throw new IllegalArgumentException("Missing argument: " + 
option.getName());
-            }
-        }
-
-        HCatClient sourceMetastoreClient = null;
-        HCatClient targetMetastoreClient = null;
-        try {
-            // Validate if DB exists - source and target
-            sourceMetastoreClient = getHiveMetaStoreClient(
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            .REPLICATION_SOURCE_METASTORE_URI.getName()),
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            
.REPLICATION_SOURCE_HIVE_METASTORE_KERBEROS_PRINCIPAL.getName()),
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            
.REPLICATION_SOURCE_HIVE2_KERBEROS_PRINCIPAL.getName()));
-
-            String sourceDbList = recipeProperties.getProperty(
-                    
HiveReplicationRecipeToolOptions.REPLICATION_SOURCE_DATABASE.getName());
-
-            if (StringUtils.isEmpty(sourceDbList)) {
-                throw new Exception("No source DB specified in property file");
-            }
-
-            String sourceTableList = recipeProperties.getProperty(
-                    
HiveReplicationRecipeToolOptions.REPLICATION_SOURCE_TABLE.getName());
-            if (StringUtils.isEmpty(sourceTableList)) {
-                throw new Exception("No source table specified in property 
file. For DB replication please specify * "
-                        + "for sourceTable");
-            }
-
-            String[] srcDbs = sourceDbList.split(",");
-            if (srcDbs.length <= 0) {
-                throw new Exception("No source DB specified in property file");
-            }
-            for (String db : srcDbs) {
-                if (!dbExists(sourceMetastoreClient, db)) {
-                    throw new Exception("Database " + db + " doesn't exist on 
source cluster");
-                }
-            }
-
-            if (!sourceTableList.equals(ALL_TABLES)) {
-                String[] srcTables = sourceTableList.split(",");
-                if (srcTables.length > 0) {
-                    for (String table : srcTables) {
-                        if (!tableExists(sourceMetastoreClient, srcDbs[0], 
table)) {
-                            throw new Exception("Table " + table + " doesn't 
exist on source cluster");
-                        }
-                    }
-                }
-            }
-
-            targetMetastoreClient = getHiveMetaStoreClient(
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            .REPLICATION_TARGET_METASTORE_URI.getName()),
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            
.REPLICATION_TARGET_HIVE_METASTORE_KERBEROS_PRINCIPAL.getName()),
-                    
recipeProperties.getProperty(HiveReplicationRecipeToolOptions
-                            
.REPLICATION_TARGET_HIVE2_KERBEROS_PRINCIPAL.getName()));
-            // Verify db exists on target
-            for (String db : srcDbs) {
-                if (!dbExists(targetMetastoreClient, db)) {
-                    throw new Exception("Database " + db + " doesn't exist on 
target cluster");
-                }
-            }
-        } finally {
-            if (sourceMetastoreClient != null) {
-                sourceMetastoreClient.close();
-            }
-            if (targetMetastoreClient != null) {
-                targetMetastoreClient.close();
-            }
-        }
-    }
-
-    @Override
-    public Properties getAdditionalSystemProperties(final Properties 
recipeProperties) {
-        Properties additionalProperties = new Properties();
-        String recipeName = 
recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName());
-        // Add recipe name as Hive DR job
-        
additionalProperties.put(HiveReplicationRecipeToolOptions.HIVE_DR_JOB_NAME.getName(),
 recipeName);
-        
additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_RUN.getName(),
-                
recipeProperties.getProperty(RecipeToolOptions.CLUSTER_NAME.getName()));
-        
additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_RUN_WRITE_EP.getName(),
-                
recipeProperties.getProperty(RecipeToolOptions.CLUSTER_HDFS_WRITE_ENDPOINT.getName()));
-        if 
(StringUtils.isNotEmpty(recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName())))
 {
-            
additionalProperties.put(HiveReplicationRecipeToolOptions.CLUSTER_FOR_JOB_NN_KERBEROS_PRINCIPAL.getName(),
-                    
recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName()));
-        }
-        if (StringUtils.isEmpty(
-                
recipeProperties.getProperty(HiveReplicationRecipeToolOptions.TDE_ENCRYPTION_ENABLED.getName())))
 {
-            
additionalProperties.put(HiveReplicationRecipeToolOptions.TDE_ENCRYPTION_ENABLED.getName(),
 "false");
-        }
-        return additionalProperties;
-    }
-
-    private HCatClient getHiveMetaStoreClient(String metastoreUrl, String 
metastorePrincipal,
-                                              String hive2Principal) throws 
Exception {
-        try {
-            HiveConf hcatConf = createHiveConf(new Configuration(false), 
metastoreUrl,
-                    metastorePrincipal, hive2Principal);
-            return HCatClient.create(hcatConf);
-        } catch (IOException e) {
-            throw new Exception("Exception creating HCatClient: " + 
e.getMessage(), e);
-        }
-    }
-
-    private static HiveConf createHiveConf(Configuration conf, String 
metastoreUrl, String metastorePrincipal,
-                                           String hive2Principal) throws 
IOException {
-        HiveConf hcatConf = new HiveConf(conf, HiveConf.class);
-
-        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUrl);
-        hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 
3);
-        hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                HCatSemanticAnalyzer.class.getName());
-        hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, 
"false");
-
-        hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-        hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-        if (StringUtils.isNotEmpty(metastorePrincipal)) {
-            
hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, 
metastorePrincipal);
-            hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, 
"true");
-            hcatConf.set(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname, 
"true");
-        }
-        if (StringUtils.isNotEmpty(hive2Principal)) {
-            
hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, 
hive2Principal);
-            
hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, "kerberos");
-        }
-
-        return hcatConf;
-    }
-
-    private static boolean tableExists(HCatClient client, final String 
database, final String tableName)
-        throws Exception {
-        try {
-            HCatTable table = client.getTable(database, tableName);
-            return table != null;
-        } catch (ObjectNotFoundException e) {
-            System.out.println(e.getMessage());
-            return false;
-        } catch (HCatException e) {
-            throw new Exception("Exception checking if the table exists:" + 
e.getMessage(), e);
-        }
-    }
-
-    private static boolean dbExists(HCatClient client, final String database)
-        throws Exception {
-        try {
-            HCatDatabase db = client.getDatabase(database);
-            return db != null;
-        } catch (ObjectNotFoundException e) {
-            System.out.println(e.getMessage());
-            return false;
-        } catch (HCatException e) {
-            throw new Exception("Exception checking if the db exists:" + 
e.getMessage(), e);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java
----------------------------------------------------------------------
diff --git 
a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java
 
b/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java
deleted file mode 100644
index 3d69d6e..0000000
--- 
a/client/src/main/java/org/apache/falcon/recipe/HiveReplicationRecipeToolOptions.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-/**
- * Hive Recipe tool options.
- */
-public enum HiveReplicationRecipeToolOptions {
-    REPLICATION_SOURCE_CLUSTER("sourceCluster", "Replication source cluster 
name"),
-    REPLICATION_SOURCE_METASTORE_URI("sourceMetastoreUri", "Source Hive 
metastore uri"),
-    REPLICATION_SOURCE_HS2_URI("sourceHiveServer2Uri", "Source HS2 uri"),
-    REPLICATION_SOURCE_DATABASE("sourceDatabase", "List of databases to 
replicate"),
-    REPLICATION_SOURCE_TABLE("sourceTable", "List of tables to replicate"),
-    REPLICATION_SOURCE_STAGING_PATH("sourceStagingPath", "Location of source 
staging path"),
-    REPLICATION_SOURCE_NN("sourceNN", "Source name node"),
-    REPLICATION_SOURCE_NN_KERBEROS_PRINCIPAL("sourceNNKerberosPrincipal", 
"Source name node kerberos principal", false),
-    
REPLICATION_SOURCE_HIVE_METASTORE_KERBEROS_PRINCIPAL("sourceHiveMetastoreKerberosPrincipal",
-            "Source hive metastore kerberos principal", false),
-    REPLICATION_SOURCE_HIVE2_KERBEROS_PRINCIPAL("sourceHive2KerberosPrincipal",
-            "Source hiveserver2 kerberos principal", false),
-
-    REPLICATION_TARGET_CLUSTER("targetCluster", "Replication target cluster 
name"),
-    REPLICATION_TARGET_METASTORE_URI("targetMetastoreUri", "Target Hive 
metastore uri"),
-    REPLICATION_TARGET_HS2_URI("targetHiveServer2Uri", "Target HS2 uri"),
-    REPLICATION_TARGET_STAGING_PATH("targetStagingPath", "Location of target 
staging path"),
-    REPLICATION_TARGET_NN("targetNN", "Target name node"),
-    REPLICATION_TARGET_NN_KERBEROS_PRINCIPAL("targetNNKerberosPrincipal", 
"Target name node kerberos principal", false),
-    
REPLICATION_TARGET_HIVE_METASTORE_KERBEROS_PRINCIPAL("targetHiveMetastoreKerberosPrincipal",
-            "Target hive metastore kerberos principal", false),
-    REPLICATION_TARGET_HIVE2_KERBEROS_PRINCIPAL("targetHive2KerberosPrincipal",
-            "Target hiveserver2 kerberos principal", false),
-
-    REPLICATION_MAX_EVENTS("maxEvents", "Maximum events to replicate"),
-    REPLICATION_MAX_MAPS("replicationMaxMaps", "Maximum number of maps used 
during replication"),
-    DISTCP_MAX_MAPS("distcpMaxMaps", "Maximum number of maps used during 
distcp"),
-    REPLICATION_MAP_BANDWIDTH_IN_MB("distcpMapBandwidth", "Bandwidth in MB/s 
used by each mapper during replication"),
-    CLUSTER_FOR_JOB_RUN("clusterForJobRun", "Cluster on which replication job 
runs", false),
-    CLUSTER_FOR_JOB_NN_KERBEROS_PRINCIPAL("clusterForJobNNKerberosPrincipal",
-            "Write EP of cluster on which replication job runs", false),
-    CLUSTER_FOR_JOB_RUN_WRITE_EP("clusterForJobRunWriteEP", "Write EP of 
cluster on which replication job runs", false),
-    TDE_ENCRYPTION_ENABLED("tdeEncryptionEnabled", "Set to true if TDE 
encryption is enabled", false),
-    HIVE_DR_JOB_NAME("drJobName", "Unique hive DR job name", false);
-
-    private final String name;
-    private final String description;
-    private final boolean isRequired;
-
-    HiveReplicationRecipeToolOptions(String name, String description) {
-        this(name, description, true);
-    }
-
-    HiveReplicationRecipeToolOptions(String name, String description, boolean 
isRequired) {
-        this.name = name;
-        this.description = description;
-        this.isRequired = isRequired;
-    }
-
-    public String getName() {
-        return this.name;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public boolean isRequired() {
-        return isRequired;
-    }
-
-    @Override
-    public String toString() {
-        return getName();
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/Recipe.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/Recipe.java 
b/client/src/main/java/org/apache/falcon/recipe/Recipe.java
deleted file mode 100644
index 609131d..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/Recipe.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import java.util.Properties;
-
-/**
- * Recipe interface.
- */
-public interface Recipe {
-    void validate(final Properties recipeProperties) throws Exception;
-    Properties getAdditionalSystemProperties(final Properties 
recipeProperties);
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java 
b/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java
deleted file mode 100644
index 32b0871..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/RecipeFactory.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import org.apache.falcon.cli.FalconCLI.RecipeOperation;
-
-/**
- * Recipe factory.
- */
-public final class RecipeFactory {
-
-    private RecipeFactory() {
-    }
-
-    public static Recipe getRecipeToolType(String recipeType) {
-        if (recipeType == null) {
-            return null;
-        }
-
-        if 
(RecipeOperation.HDFS_REPLICATION.toString().equalsIgnoreCase(recipeType)) {
-            return new HdfsReplicationRecipeTool();
-        } else if 
(RecipeOperation.HIVE_DISASTER_RECOVERY.toString().equalsIgnoreCase(recipeType))
 {
-            return new HiveReplicationRecipeTool();
-        }
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java 
b/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
deleted file mode 100644
index 243ff4d..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/RecipeTool.java
+++ /dev/null
@@ -1,285 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.recipe.util.RecipeProcessBuilderUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Base recipe tool for Falcon recipes.
- */
-public class RecipeTool extends Configured implements Tool {
-    private static final String HDFS_WF_PATH = "falcon" + File.separator + 
"recipes" + File.separator;
-    private static final FsPermission FS_PERMISSION =
-            new FsPermission(FsAction.ALL, FsAction.READ, FsAction.NONE);
-    private static final String FS_DEFAULT_NAME_KEY = "fs.defaultFS";
-    private static final String NN_PRINCIPAL = 
"dfs.namenode.kerberos.principal";
-
-    public static void main(String[] args) throws Exception {
-        ToolRunner.run(new Configuration(), new RecipeTool(), args);
-    }
-
-    @Override
-    public int run(String[] arguments) throws Exception {
-
-        Map<RecipeToolArgs, String> argMap = setupArgs(arguments);
-        if (argMap == null || argMap.isEmpty()) {
-            throw new Exception("Arguments passed to recipe is null");
-        }
-        Configuration conf = getConf();
-        String recipePropertiesFilePath = 
argMap.get(RecipeToolArgs.RECIPE_PROPERTIES_FILE_ARG);
-        Properties recipeProperties = loadProperties(recipePropertiesFilePath);
-        validateProperties(recipeProperties);
-
-        String recipeOperation = 
argMap.get(RecipeToolArgs.RECIPE_OPERATION_ARG);
-        Recipe recipeType = RecipeFactory.getRecipeToolType(recipeOperation);
-        if (recipeType != null) {
-            recipeType.validate(recipeProperties);
-            Properties props = 
recipeType.getAdditionalSystemProperties(recipeProperties);
-            if (props != null && !props.isEmpty()) {
-                recipeProperties.putAll(props);
-            }
-        }
-
-        String processFilename;
-
-        FileSystem fs = getFileSystemForHdfs(recipeProperties, conf);
-        validateArtifacts(recipeProperties, fs);
-
-        String recipeName = 
recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName());
-        copyFilesToHdfsIfRequired(recipeProperties, fs, recipeName);
-
-        processFilename = 
RecipeProcessBuilderUtils.createProcessFromTemplate(argMap.get(RecipeToolArgs
-                .RECIPE_FILE_ARG), recipeProperties, 
argMap.get(RecipeToolArgs.RECIPE_PROCESS_XML_FILE_PATH_ARG));
-
-
-        System.out.println("Generated process file to be scheduled: ");
-        System.out.println(FileUtils.readFileToString(new 
File(processFilename)));
-
-        System.out.println("Completed recipe processing");
-        return 0;
-    }
-
-    private Map<RecipeToolArgs, String> setupArgs(final String[] arguments) 
throws ParseException {
-        Options options = new Options();
-        Map<RecipeToolArgs, String> argMap = new HashMap<RecipeToolArgs, 
String>();
-
-        for (RecipeToolArgs arg : RecipeToolArgs.values()) {
-            addOption(options, arg, arg.isRequired());
-        }
-
-        CommandLine cmd = new GnuParser().parse(options, arguments);
-        for (RecipeToolArgs arg : RecipeToolArgs.values()) {
-            String optionValue = arg.getOptionValue(cmd);
-            if (StringUtils.isNotEmpty(optionValue)) {
-                argMap.put(arg, optionValue);
-            }
-        }
-        return argMap;
-    }
-
-    private static void addOption(final Options options, final RecipeToolArgs 
arg,
-                                  final boolean isRequired) {
-        Option option = arg.getOption();
-        option.setRequired(isRequired);
-        options.addOption(option);
-    }
-
-    private static void validateProperties(final Properties recipeProperties) {
-        for (RecipeToolOptions option : RecipeToolOptions.values()) {
-            if (recipeProperties.getProperty(option.getName()) == null && 
option.isRequired()) {
-                throw new IllegalArgumentException("Missing argument: " + 
option.getName());
-            }
-        }
-    }
-
-    private static Properties loadProperties(final String propertiesFilePath) 
throws Exception {
-        InputStream inputStream = null;
-        try {
-            inputStream = new FileInputStream(propertiesFilePath);
-            Properties prop = new Properties();
-            prop.load(inputStream);
-            return prop;
-        } finally {
-            IOUtils.closeQuietly(inputStream);
-        }
-    }
-
-    private static void validateArtifacts(final Properties recipeProperties, 
final FileSystem fs) throws Exception {
-        // validate the WF path
-        String wfPath = 
recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_PATH.getName());
-
-        // Check if file exists on HDFS
-        if (StringUtils.isNotEmpty(wfPath) && !fs.exists(new Path(wfPath))) {
-            // If the file doesn't exist locally throw exception
-            if (!doesFileExist(wfPath)) {
-                throw new Exception("Recipe workflow file does not exist : " + 
wfPath + " on local FS or HDFS");
-            }
-        }
-
-        // validate lib path
-        String libPath = 
recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_LIB_PATH.getName());
-        if (StringUtils.isNotEmpty(libPath) && !fs.exists(new Path(libPath))) {
-            if (!doesFileExist(libPath)) {
-                throw new Exception("Recipe lib file path does not exist : " + 
libPath + " on local FS or HDFS");
-            }
-        }
-    }
-
-    private static void copyFilesToHdfsIfRequired(final Properties 
recipeProperties,
-                                                  final FileSystem fs,
-                                                  final String recipeName) 
throws Exception {
-
-        String hdfsPath = HDFS_WF_PATH + recipeName + File.separator;
-
-        String recipeWfPathName = RecipeToolOptions.WORKFLOW_PATH.getName();
-        String wfPath = recipeProperties.getProperty(recipeWfPathName);
-        String wfPathValue;
-
-        // Copy only if files are on local FS
-        if (StringUtils.isNotEmpty(wfPath) && !fs.exists(new Path(wfPath))) {
-            createDirOnHdfs(hdfsPath, fs);
-            if (new File(wfPath).isDirectory()) {
-                wfPathValue = hdfsPath + getLastPartOfPath(wfPath);
-                copyFileFromLocalToHdfs(wfPath, hdfsPath, true, wfPathValue, 
fs);
-            } else {
-                wfPathValue = hdfsPath + new File(wfPath).getName();
-                copyFileFromLocalToHdfs(wfPath, hdfsPath, false, null, fs);
-            }
-            // Update the property with the hdfs path
-            recipeProperties.setProperty(recipeWfPathName,
-                    fs.getFileStatus(new 
Path(wfPathValue)).getPath().toString());
-            System.out.println("Copied WF to: " + 
recipeProperties.getProperty(recipeWfPathName));
-        }
-
-        String recipeWfLibPathName = 
RecipeToolOptions.WORKFLOW_LIB_PATH.getName();
-        String libPath = recipeProperties.getProperty(recipeWfLibPathName);
-        String libPathValue;
-        // Copy only if files are on local FS
-        boolean isLibPathEmpty = StringUtils.isEmpty(libPath);
-        if (!isLibPathEmpty && !fs.exists(new Path(libPath))) {
-            if (new File(libPath).isDirectory()) {
-                libPathValue = hdfsPath + getLastPartOfPath(libPath);
-                copyFileFromLocalToHdfs(libPath, hdfsPath, true, libPathValue, 
fs);
-            } else {
-                libPathValue = hdfsPath + "lib" + File.separator + new 
File(libPath).getName();
-                copyFileFromLocalToHdfs(libPath, libPathValue, false, null, 
fs);
-            }
-
-            // Update the property with the hdfs path
-            recipeProperties.setProperty(recipeWfLibPathName,
-                    fs.getFileStatus(new 
Path(libPathValue)).getPath().toString());
-            System.out.println("Copied WF libs to: " + 
recipeProperties.getProperty(recipeWfLibPathName));
-        } else if (isLibPathEmpty) {
-            // Replace ##workflow.lib.path## with "" to ignore lib in workflow 
template
-            recipeProperties.setProperty(recipeWfLibPathName, "");
-        }
-    }
-
-    private static String getLastPartOfPath(final String path) {
-        String normalizedWfPath = FilenameUtils.normalizeNoEndSeparator(path);
-        return (normalizedWfPath == null) ? FilenameUtils.getName(path)
-                : FilenameUtils.getName(normalizedWfPath);
-    }
-
-    private static void createDirOnHdfs(String path, FileSystem fs) throws 
IOException {
-        Path hdfsPath = new Path(path);
-        if (!fs.exists(hdfsPath)) {
-            FileSystem.mkdirs(fs, hdfsPath, FS_PERMISSION);
-        }
-    }
-
-    private static boolean doesFileExist(final String filename) {
-        return new File(filename).exists();
-    }
-
-    private static void copyFileFromLocalToHdfs(final String localFilePath,
-                                                final String hdfsFilePath,
-                                                final boolean copyDir,
-                                                final String hdfsFileDirPath,
-                                                final FileSystem fs) throws 
IOException {
-        /* If directory already exists and has contents, copyFromLocalFile 
with overwrite set to yes will fail with
-         * "Target is a directory". Delete the directory */
-        if (copyDir) {
-            Path hdfsPath = new Path(hdfsFileDirPath);
-            fs.delete(hdfsPath, true);
-        }
-
-        /* For cases where validation of process entity file fails, the 
artifacts would have been already copied to
-         * HDFS. Set overwrite to true so that next submit recipe copies 
updated artifacts from local FS to HDFS */
-        fs.copyFromLocalFile(false, true, new Path(localFilePath), new 
Path(hdfsFilePath));
-    }
-
-    private FileSystem getFileSystemForHdfs(final Properties recipeProperties,
-                                            final Configuration conf) throws 
Exception {
-        String storageEndpoint = 
RecipeToolOptions.CLUSTER_HDFS_WRITE_ENDPOINT.getName();
-        String nameNode = recipeProperties.getProperty(storageEndpoint);
-        conf.set(FS_DEFAULT_NAME_KEY, nameNode);
-        if (UserGroupInformation.isSecurityEnabled()) {
-            String nameNodePrincipal = 
recipeProperties.getProperty(RecipeToolOptions.RECIPE_NN_PRINCIPAL.getName());
-            conf.set(NN_PRINCIPAL, nameNodePrincipal);
-        }
-        return createFileSystem(UserGroupInformation.getLoginUser(), new 
URI(nameNode), conf);
-    }
-
-    private FileSystem createFileSystem(UserGroupInformation ugi, final URI 
uri,
-                                       final Configuration conf) throws 
Exception {
-        try {
-            final String proxyUserName = ugi.getShortUserName();
-            if 
(proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) {
-                return FileSystem.get(uri, conf);
-            }
-
-            return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
-                public FileSystem run() throws Exception {
-                    return FileSystem.get(uri, conf);
-                }
-            });
-        } catch (InterruptedException ex) {
-            throw new IOException("Exception creating FileSystem:" + 
ex.getMessage(), ex);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java 
b/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java
deleted file mode 100644
index 79d8f18..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/RecipeToolArgs.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
-
-/**
- * Recipe tool args.
- */
-public enum RecipeToolArgs {
-    RECIPE_FILE_ARG("file", "recipe template file path"),
-    RECIPE_PROPERTIES_FILE_ARG("propertiesFile", "recipe properties file 
path"),
-    RECIPE_PROCESS_XML_FILE_PATH_ARG(
-            "recipeProcessFilePath", "file path of recipe process to be 
submitted"),
-    RECIPE_OPERATION_ARG("recipeOperation", "recipe operation");
-
-    private final String name;
-    private final String description;
-    private final boolean isRequired;
-    RecipeToolArgs(String name, String description) {
-        this(name, description, true);
-    }
-
-    RecipeToolArgs(String name, String description, boolean isRequired) {
-        this.name = name;
-        this.description = description;
-        this.isRequired = isRequired;
-    }
-
-    public Option getOption() {
-        return new Option(this.name, true, this.description);
-    }
-
-    public String getName() {
-        return this.name;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public boolean isRequired() {
-        return isRequired;
-    }
-
-    public String getOptionValue(CommandLine cmd) {
-        return cmd.getOptionValue(this.name);
-    }
-
-    @Override
-    public String toString() {
-        return getName();
-    }
-}
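
The RecipeToolArgs enum above was the bridge to org.apache.commons.cli: each constant
contributed one Option, and callers read parsed values back through the enum. A hedged
sketch of how such an enum is typically wired to a parser (GnuParser matches the
commons-cli 1.2-era API; the wrapper class here is illustrative only):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.GnuParser;
    import org.apache.commons.cli.Options;

    public final class RecipeArgsParserSketch {
        public static CommandLine parse(String[] argv) throws Exception {
            Options options = new Options();
            // One Option per enum constant, built from its name and description.
            for (RecipeToolArgs arg : RecipeToolArgs.values()) {
                options.addOption(arg.getOption());
            }
            return new GnuParser().parse(options, argv);
        }
    }

A caller would then retrieve values with, e.g.,
RecipeToolArgs.RECIPE_FILE_ARG.getOptionValue(cmd).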

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java b/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java
deleted file mode 100644
index 2a7a7a0..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/RecipeToolOptions.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * Recipe tool options.
- */
-public enum RecipeToolOptions {
-    RECIPE_NAME("falcon.recipe.name", "Recipe name", false),
-    CLUSTER_NAME("falcon.recipe.cluster.name", "Cluster name where replication job should run", false),
-    CLUSTER_HDFS_WRITE_ENDPOINT(
-            "falcon.recipe.cluster.hdfs.writeEndPoint", "Cluster HDFS write endpoint"),
-    CLUSTER_VALIDITY_START("falcon.recipe.cluster.validity.start", "Source cluster validity start", false),
-    CLUSTER_VALIDITY_END("falcon.recipe.cluster.validity.end", "Source cluster validity end", false),
-    WORKFLOW_NAME("falcon.recipe.workflow.name", "Workflow name", false),
-    WORKFLOW_PATH("falcon.recipe.workflow.path", "Workflow path", false),
-    WORKFLOW_LIB_PATH("falcon.recipe.workflow.lib.path", "WF lib path", false),
-    PROCESS_FREQUENCY("falcon.recipe.process.frequency", "Process frequency", false),
-    RETRY_POLICY("falcon.recipe.retry.policy", "Retry policy", false),
-    RETRY_DELAY("falcon.recipe.retry.delay", "Retry delay", false),
-    RETRY_ATTEMPTS("falcon.recipe.retry.attempts", "Retry attempts", false),
-    RETRY_ON_TIMEOUT("falcon.recipe.retry.onTimeout", "Retry onTimeout", false),
-    RECIPE_TAGS("falcon.recipe.tags", "Recipe tags", false),
-    RECIPE_ACL_OWNER("falcon.recipe.acl.owner", "Recipe acl owner", false),
-    RECIPE_ACL_GROUP("falcon.recipe.acl.group", "Recipe acl group", false),
-    RECIPE_ACL_PERMISSION("falcon.recipe.acl.permission", "Recipe acl permission", false),
-    RECIPE_NN_PRINCIPAL("falcon.recipe.nn.principal", "Recipe DFS NN principal", false),
-    RECIPE_NOTIFICATION_TYPE("falcon.recipe.notification.type", "Recipe Notification Type", false),
-    RECIPE_NOTIFICATION_ADDRESS("falcon.recipe.notification.receivers", "Recipe Email Notification receivers", false);
-
-    private final String name;
-    private final String description;
-    private final boolean isRequired;
-
-    private static Map<String, RecipeToolOptions> optionsMap = new HashMap<>();
-    static {
-        for (RecipeToolOptions c : RecipeToolOptions.values()) {
-            optionsMap.put(c.getName(), c);
-        }
-    }
-
-    public static Map<String, RecipeToolOptions> getOptionsMap() {
-        return optionsMap;
-    }
-
-    RecipeToolOptions(String name, String description) {
-        this(name, description, true);
-    }
-
-    RecipeToolOptions(String name, String description, boolean isRequired) {
-        this.name = name;
-        this.description = description;
-        this.isRequired = isRequired;
-    }
-
-    public String getName() {
-        return this.name;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public boolean isRequired() {
-        return isRequired;
-    }
-
-    @Override
-    public String toString() {
-        return getName();
-    }
-}
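
The static optionsMap above is what lets the builder code in the next file separate
reserved recipe options from pass-through custom properties. A small illustrative
check, assuming a loaded java.util.Properties named recipeProperties:

    import java.util.Map;
    import java.util.Properties;

    public final class CustomPropertyFilterSketch {
        // Any key absent from RecipeToolOptions.getOptionsMap() counts as a
        // custom property and is copied onto the generated process verbatim.
        public static void printCustom(Properties recipeProperties) {
            for (Map.Entry<Object, Object> e : recipeProperties.entrySet()) {
                String key = e.getKey().toString();
                if (!RecipeToolOptions.getOptionsMap().containsKey(key)) {
                    System.out.println("custom property: " + key + "=" + e.getValue());
                }
            }
        }
    }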

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java b/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java
deleted file mode 100644
index 550ca1b..0000000
--- a/client/src/main/java/org/apache/falcon/recipe/util/RecipeProcessBuilderUtils.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.recipe.util;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.entity.v0.Entity;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.SchemaHelper;
-import org.apache.falcon.entity.v0.process.ACL;
-import org.apache.falcon.entity.v0.process.Cluster;
-import org.apache.falcon.entity.v0.process.Notification;
-import org.apache.falcon.entity.v0.process.PolicyType;
-import org.apache.falcon.entity.v0.process.Property;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.entity.v0.process.Workflow;
-import org.apache.falcon.recipe.RecipeToolOptions;
-
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Unmarshaller;
-import javax.xml.bind.ValidationEvent;
-import javax.xml.bind.ValidationEventHandler;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.OutputStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Recipe builder utility.
- */
-public final class RecipeProcessBuilderUtils {
-
-    private static final Pattern RECIPE_VAR_PATTERN = Pattern.compile("##[A-Za-z0-9_.]*##");
-
-    private RecipeProcessBuilderUtils() {
-    }
-
-    public static String createProcessFromTemplate(final String processTemplateFile, final Properties recipeProperties,
-                                                   final String processFilename) throws Exception {
-        org.apache.falcon.entity.v0.process.Process process = bindAttributesInTemplate(
-                processTemplateFile, recipeProperties);
-        String recipeProcessFilename = createProcessXmlFile(processFilename, process);
-
-        validateProcessXmlFile(recipeProcessFilename);
-        return recipeProcessFilename;
-    }
-
-    private static org.apache.falcon.entity.v0.process.Process
-    bindAttributesInTemplate(final String templateFile, final Properties recipeProperties)
-        throws Exception {
-        if (templateFile == null || recipeProperties == null) {
-            throw new IllegalArgumentException("Invalid arguments passed");
-        }
-
-        Unmarshaller unmarshaller = EntityType.PROCESS.getUnmarshaller();
-        // Validation can be skipped for unmarshalling as we want to bind the template with the properties.
-        // Validation is handled as part of marshalling.
-        unmarshaller.setSchema(null);
-        unmarshaller.setEventHandler(new ValidationEventHandler() {
-                public boolean handleEvent(ValidationEvent validationEvent) {
-                    return true;
-                }
-            }
-        );
-
-        URL processResourceUrl = new File(templateFile).toURI().toURL();
-        org.apache.falcon.entity.v0.process.Process process =
-                (org.apache.falcon.entity.v0.process.Process) unmarshaller.unmarshal(processResourceUrl);
-
-        /* For optional properties the user might set them directly in the process xml and not in the properties
-           file. Before submission, validation is done to confirm the process xml doesn't contain RECIPE_VAR_PATTERN.
-        */
-
-        String processName = recipeProperties.getProperty(RecipeToolOptions.RECIPE_NAME.getName());
-        if (StringUtils.isNotEmpty(processName)) {
-            process.setName(processName);
-        }
-
-        // DR process template has only one cluster
-        bindClusterProperties(process.getClusters().getClusters().get(0), recipeProperties);
-
-        // bind scheduling properties
-        String processFrequency = recipeProperties.getProperty(RecipeToolOptions.PROCESS_FREQUENCY.getName());
-        if (StringUtils.isNotEmpty(processFrequency)) {
-            process.setFrequency(Frequency.fromString(processFrequency));
-        }
-
-        bindWorkflowProperties(process.getWorkflow(), recipeProperties);
-        bindRetryProperties(process.getRetry(), recipeProperties);
-        bindNotificationProperties(process.getNotification(), recipeProperties);
-        bindACLProperties(process.getACL(), recipeProperties);
-        bindTagsProperties(process, recipeProperties);
-        bindCustomProperties(process.getProperties(), recipeProperties);
-
-        return process;
-    }
-
-    private static void bindClusterProperties(final Cluster cluster,
-                                              final Properties recipeProperties) {
-        // DR process template has only one cluster
-        String clusterName = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_NAME.getName());
-        if (StringUtils.isNotEmpty(clusterName)) {
-            cluster.setName(clusterName);
-        }
-
-        String clusterStartValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_START.getName());
-        if (StringUtils.isNotEmpty(clusterStartValidity)) {
-            cluster.getValidity().setStart(SchemaHelper.parseDateUTC(clusterStartValidity));
-        }
-
-        String clusterEndValidity = recipeProperties.getProperty(RecipeToolOptions.CLUSTER_VALIDITY_END.getName());
-        if (StringUtils.isNotEmpty(clusterEndValidity)) {
-            cluster.getValidity().setEnd(SchemaHelper.parseDateUTC(clusterEndValidity));
-        }
-    }
-
-    private static void bindWorkflowProperties(final Workflow wf,
-                                               final Properties recipeProperties) {
-        String wfName = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_NAME.getName());
-        if (StringUtils.isNotEmpty(wfName)) {
-            wf.setName(wfName);
-        }
-
-        String wfLibPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_LIB_PATH.getName());
-        if (StringUtils.isNotEmpty(wfLibPath)) {
-            wf.setLib(wfLibPath);
-        } else if (wf.getLib().startsWith("##")) {
-            wf.setLib("");
-        }
-
-        String wfPath = recipeProperties.getProperty(RecipeToolOptions.WORKFLOW_PATH.getName());
-        if (StringUtils.isNotEmpty(wfPath)) {
-            wf.setPath(wfPath);
-        }
-    }
-
-    private static void bindRetryProperties(final Retry processRetry,
-                                            final Properties recipeProperties) {
-        String retryPolicy = recipeProperties.getProperty(RecipeToolOptions.RETRY_POLICY.getName());
-        if (StringUtils.isNotEmpty(retryPolicy)) {
-            processRetry.setPolicy(PolicyType.fromValue(retryPolicy));
-        }
-
-        String retryAttempts = recipeProperties.getProperty(RecipeToolOptions.RETRY_ATTEMPTS.getName());
-        if (StringUtils.isNotEmpty(retryAttempts)) {
-            processRetry.setAttempts(Integer.parseInt(retryAttempts));
-        }
-
-        String retryDelay = recipeProperties.getProperty(RecipeToolOptions.RETRY_DELAY.getName());
-        if (StringUtils.isNotEmpty(retryDelay)) {
-            processRetry.setDelay(Frequency.fromString(retryDelay));
-        }
-
-        String retryOnTimeout = recipeProperties.getProperty(RecipeToolOptions.RETRY_ON_TIMEOUT.getName());
-        if (StringUtils.isNotEmpty(retryOnTimeout)) {
-            processRetry.setOnTimeout(Boolean.valueOf(retryOnTimeout));
-        }
-    }
-
-    private static void bindNotificationProperties(final Notification processNotification,
-                                                   final Properties recipeProperties) {
-        processNotification.setType(recipeProperties.getProperty(
-                RecipeToolOptions.RECIPE_NOTIFICATION_TYPE.getName()));
-
-        String notificationAddress = recipeProperties.getProperty(
-                RecipeToolOptions.RECIPE_NOTIFICATION_ADDRESS.getName());
-        if (StringUtils.isNotBlank(notificationAddress)) {
-            processNotification.setTo(notificationAddress);
-        } else {
-            processNotification.setTo("NA");
-        }
-    }
-
-    private static void bindACLProperties(final ACL acl,
-                                          final Properties recipeProperties) {
-        String aclowner = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_OWNER.getName());
-        if (StringUtils.isNotEmpty(aclowner)) {
-            acl.setOwner(aclowner);
-        }
-
-        String aclGroup = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_GROUP.getName());
-        if (StringUtils.isNotEmpty(aclGroup)) {
-            acl.setGroup(aclGroup);
-        }
-
-        String aclPermission = recipeProperties.getProperty(RecipeToolOptions.RECIPE_ACL_PERMISSION.getName());
-        if (StringUtils.isNotEmpty(aclPermission)) {
-            acl.setPermission(aclPermission);
-        }
-    }
-
-    private static void bindTagsProperties(final org.apache.falcon.entity.v0.process.Process process,
-                                           final Properties recipeProperties) {
-        String falconSystemTags = process.getTags();
-        String tags = recipeProperties.getProperty(RecipeToolOptions.RECIPE_TAGS.getName());
-        if (StringUtils.isNotEmpty(tags)) {
-            if (StringUtils.isNotEmpty(falconSystemTags)) {
-                tags += ", " + falconSystemTags;
-            }
-            process.setTags(tags);
-        }
-    }
-
-
-    private static void bindCustomProperties(final org.apache.falcon.entity.v0.process.Properties customProperties,
-                                             final Properties recipeProperties) {
-        List<Property> propertyList = new ArrayList<>();
-
-        for (Map.Entry<Object, Object> recipeProperty : recipeProperties.entrySet()) {
-            if (RecipeToolOptions.getOptionsMap().get(recipeProperty.getKey().toString()) == null) {
-                addProperty(propertyList, (String) recipeProperty.getKey(), (String) recipeProperty.getValue());
-            }
-        }
-
-        customProperties.getProperties().addAll(propertyList);
-    }
-
-    private static void addProperty(List<Property> propertyList, String name, String value) {
-        Property prop = new Property();
-        prop.setName(name);
-        prop.setValue(value);
-        propertyList.add(prop);
-    }
-
-    private static String createProcessXmlFile(final String outFilename,
-                                               final Entity entity) throws Exception {
-        if (outFilename == null || entity == null) {
-            throw new IllegalArgumentException("Invalid arguments passed");
-        }
-
-        EntityType type = EntityType.PROCESS;
-        OutputStream out = null;
-        try {
-            out = new FileOutputStream(outFilename);
-            type.getMarshaller().marshal(entity, out);
-        } catch (JAXBException e) {
-            throw new Exception("Unable to serialize the entity object " + 
type + "/" + entity.getName(), e);
-        } finally {
-            IOUtils.closeQuietly(out);
-        }
-        return outFilename;
-    }
-
-    private static void validateProcessXmlFile(final String processFileName) throws Exception {
-        if (processFileName == null) {
-            throw new IllegalArgumentException("Invalid arguments passed");
-        }
-
-        String line;
-        BufferedReader reader = null;
-
-        try {
-            reader = new BufferedReader(new FileReader(processFileName));
-            while ((line = reader.readLine()) != null) {
-                Matcher matcher = RECIPE_VAR_PATTERN.matcher(line);
-                if (matcher.find()) {
-                    String variable = line.substring(matcher.start(), matcher.end());
-                    throw new Exception("Match not found for the template variable: " + variable
-                            + " in recipe template file. Please add it to the recipe properties file");
-                }
-            }
-        } finally {
-            IOUtils.closeQuietly(reader);
-        }
-
-    }
-
-}
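
The ##name## placeholders seen in the template files are resolved from the properties
file, and the RECIPE_VAR_PATTERN scan above fails the run if any survive. The removed
code binds known options through JAXB setters rather than raw string substitution; the
sketch below shows only that bind-then-verify contract, with illustrative names:

    import java.util.Properties;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class PlaceholderBindSketch {
        private static final Pattern VAR = Pattern.compile("##[A-Za-z0-9_.]*##");

        // Replaces every ##key## with the matching property value and fails fast
        // when a placeholder is unbound, like validateProcessXmlFile() above.
        public static String bind(String template, Properties props) {
            StringBuffer out = new StringBuffer();
            Matcher m = VAR.matcher(template);
            while (m.find()) {
                String key = m.group().replace("##", "");
                String value = props.getProperty(key);
                if (value == null) {
                    throw new IllegalStateException("No value for template variable: " + m.group());
                }
                m.appendReplacement(out, Matcher.quoteReplacement(value));
            }
            m.appendTail(out);
            return out.toString();
        }
    }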

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
index a1668c1..4805597 100644
--- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
@@ -18,7 +18,6 @@
 
 package org.apache.falcon.cli;
 
-import org.apache.commons.io.FilenameUtils;
 import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.metadata.RelationshipType;
 import org.apache.falcon.resource.TestContext;
@@ -32,12 +31,10 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.util.Date;
 import java.util.Map;
-import java.util.Properties;
 
 /**
  * Test for Falcon CLI.
@@ -46,10 +43,7 @@ import java.util.Properties;
  */
 @Test(groups = {"exhaustive"})
 public class FalconCLIIT {
-    private static final String RECIPE_PROPERTIES_FILE_XML = "/hdfs-replication.properties";
-
     private InMemoryWriter stream = new InMemoryWriter(System.out);
-    private String recipePropertiesFilePath;
 
     @BeforeClass
     public void prepare() throws Exception {
@@ -955,56 +949,6 @@ public class FalconCLIIT {
                 + " -filterBy STATUS:SUCCEEDED -orderBy wrongOrder -offset 0 
-numResults 1"), -1);
     }
 
-    @SuppressWarnings("ResultOfMethodCallIgnored")
-    @Test(enabled = false)
-    public void testRecipeCommand() throws Exception {
-        recipeSetup();
-        try {
-            Assert.assertEquals(executeWithURL("recipe -name " + 
"hdfs-replication"
-                    + " -operation HDFS_REPLICATION"), 0);
-        } finally {
-            if (recipePropertiesFilePath != null) {
-                new File(recipePropertiesFilePath).delete();
-            }
-        }
-    }
-
-    private void recipeSetup() throws Exception {
-        TestContext context = new TestContext();
-        Map<String, String> overlay = context.getUniqueOverlay();
-
-        createPropertiesFile(context);
-        String filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(),
-                overlay);
-        Assert.assertEquals(executeWithURL("entity -submit -type cluster -file 
" + filePath), 0);
-        context.setCluster(overlay.get("cluster"));
-    }
-
-    private void createPropertiesFile(TestContext context) throws Exception  {
-        InputStream in = this.getClass().getResourceAsStream(RECIPE_PROPERTIES_FILE_XML);
-        Properties props = new Properties();
-        props.load(in);
-        in.close();
-
-        String wfFile = TestContext.class.getResource("/fs-workflow.xml").getPath();
-        String resourcePath = FilenameUtils.getFullPathNoEndSeparator(wfFile);
-        String libPath = TestContext.getTempFile("target/lib", "recipe", 
".jar").getAbsolutePath();
-
-        File file = new File(resourcePath, "hdfs-replication.properties");
-        OutputStream out = new FileOutputStream(file);
-        props.setProperty("falcon.recipe.name", context.getProcessName());
-        props.setProperty("falcon.recipe.cluster.name", 
context.getClusterName());
-        props.setProperty("falcon.recipe.cluster.validity.end", 
context.getProcessEndTime());
-        props.setProperty("falcon.recipe.workflow.path", 
TestContext.class.getResource("/fs-workflow.xml").getPath());
-        props.setProperty("falcon.recipe.workflow.lib.path", new 
File(libPath).getParent());
-        props.setProperty("falcon.recipe.cluster.hdfs.writeEndPoint", 
"jail://global:00");
-
-        props.store(out, null);
-        out.close();
-
-        recipePropertiesFilePath = file.getAbsolutePath();
-    }
-
     private int executeWithURL(String command) throws Exception {
         //System.out.println("COMMAND IS "+command + " -url " + 
TestContext.BASE_URL);
         return new FalconCLI()

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/webapp/src/test/resources/client.properties
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/client.properties b/webapp/src/test/resources/client.properties
index 3f5c361..038b339 100644
--- a/webapp/src/test/resources/client.properties
+++ b/webapp/src/test/resources/client.properties
@@ -17,5 +17,4 @@
 #
 
 falcon.url=http://localhost:41000/
-falcon.recipe.path=target/test-classes
 

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/webapp/src/test/resources/hdfs-replication-template.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/hdfs-replication-template.xml b/webapp/src/test/resources/hdfs-replication-template.xml
deleted file mode 100644
index c038484..0000000
--- a/webapp/src/test/resources/hdfs-replication-template.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  -->
-
-<process name="hdfs-replication" xmlns="uri:falcon:process:0.1">
-    <clusters>
-        <!--  source  -->
-        <cluster name="##falcon.recipe.cluster.name##">
-            <validity end="##falcon.recipe.cluster.validity.end##" start="##falcon.recipe.cluster.validity.start##"/>
-        </cluster>
-    </clusters>
-
-    <tags>_falcon_mirroring_type=HDFS</tags>
-
-    <parallel>1</parallel>
-    <!-- Dir replication needs to run only once to catch up -->
-    <order>LAST_ONLY</order>
-    <frequency>##falcon.recipe.frequency##</frequency>
-    <timezone>UTC</timezone>
-
-    <properties>
-        <property name="oozie.wf.subworkflow.classpath.inheritance" 
value="true"/>
-    </properties>
-
-    <workflow name="##falcon.recipe.workflow.name##" engine="oozie" 
path="/apps/data-mirroring/workflows/hdfs-replication-workflow.xml" 
lib="##workflow.lib.path##"/>
-    <retry policy="##falcon.recipe.retry.policy##" 
delay="##falcon.recipe.retry.delay##" attempts="3"/>
-    <notification type="##falcon.recipe.notification.type##" 
to="##falcon.recipe.notification.receivers##"/>
-    <ACL/>
-</process>

http://git-wip-us.apache.org/repos/asf/falcon/blob/c52961c6/webapp/src/test/resources/hdfs-replication.properties
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/hdfs-replication.properties b/webapp/src/test/resources/hdfs-replication.properties
deleted file mode 100644
index 09930b8..0000000
--- a/webapp/src/test/resources/hdfs-replication.properties
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-falcon.recipe.cluster.hdfs.writeEndPoint=jail://global:00
-falcon.recipe.workflow.path=
-falcon.recipe.processName=
-falcon.recipe.cluster.name=
-falcon.recipe.cluster.validity.end=
-falcon.recipe.cluster.validity.start=2012-04-20T00:00Z
-falcon.recipe.workflow.name=hdfs-dr-workflow
-falcon.recipe.process.frequency=minutes(5)
-
-##### Retry policy properties
-
-falcon.recipe.retry.policy=periodic
-falcon.recipe.retry.delay=minutes(30)
-falcon.recipe.retry.attempts=3
-falcon.recipe.retry.onTimeout=false
-
-drSourceDir=/tmp/test1
-drSourceClusterFS=jail://global:00
-drTargetDir=/tmp/test1
-drTargetClusterFS=jail://global:00
-
-# Change it to specify the maximum number of mappers for DistCP
-distcpMaxMaps=1
-# Change it to specify the bandwidth in MB for each mapper in DistCP
-distcpMapBandwidth=100
-
-##### Email Notification for Falcon instance completion ####
-falcon.recipe.notification.type=email
-falcon.recipe.notification.receivers=NA
\ No newline at end of file
