http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/src/main/assemblies/distributed-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/distributed-package.xml b/src/main/assemblies/distributed-package.xml
index 1e48c94..0b56572 100644
--- a/src/main/assemblies/distributed-package.xml
+++ b/src/main/assemblies/distributed-package.xml
@@ -21,6 +21,7 @@
         xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
     <formats>
         <format>tar.gz</format>
+        <format>zip</format>
     </formats>
     <id>server</id>
     <baseDirectory>falcon-distributed-${project.version}</baseDirectory>
@@ -126,6 +127,20 @@
             <source>oozie-el-extensions/src/main/conf/oozie-site.xml</source>
             <outputDirectory>oozie/conf</outputDirectory>
         </file>
+
+        <file>
+            <source>addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
+
+        <file>
+            <source>addons/recipes/hive-disaster-recovery/src/main/resources/hive-disaster-recovery-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
+        <file>
+            <source>addons/recipes/hive-disaster-recovery/src/main/resources/hive-disaster-recovery-secure-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
     </files>
</assembly>
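Note: with the zip format added above, the Maven assembly plugin emits a .zip of the
distributed package alongside the existing tar.gz, and the three recipe workflow XMLs
are bundled under data-mirroring/workflows inside the assembly's
falcon-distributed-${project.version} base directory.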
http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/src/main/assemblies/standalone-package.xml
----------------------------------------------------------------------
diff --git a/src/main/assemblies/standalone-package.xml b/src/main/assemblies/standalone-package.xml
index 682d52f..40e6b99 100644
--- a/src/main/assemblies/standalone-package.xml
+++ b/src/main/assemblies/standalone-package.xml
@@ -21,6 +21,7 @@
         xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
     <formats>
         <format>tar.gz</format>
+        <format>zip</format>
     </formats>
     <id>bin</id>
     <baseDirectory>falcon-${project.version}</baseDirectory>
@@ -115,6 +116,21 @@
         </file>

         <file>
+            <source>addons/recipes/hdfs-replication/src/main/resources/hdfs-replication-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
+
+        <file>
+            <source>addons/recipes/hive-disaster-recovery/src/main/resources/hive-disaster-recovery-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
+
+        <file>
+            <source>addons/recipes/hive-disaster-recovery/src/main/resources/hive-disaster-recovery-secure-workflow.xml</source>
+            <outputDirectory>data-mirroring/workflows</outputDirectory>
+        </file>
+
+        <file>
             <source>webapp/target/falcon-webapp-${project.version}.war</source>
             <outputDirectory>server/webapp</outputDirectory>
             <destName>falcon.war</destName>

http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/test-tools/hadoop-webapp/pom.xml
----------------------------------------------------------------------
diff --git a/test-tools/hadoop-webapp/pom.xml b/test-tools/hadoop-webapp/pom.xml
index cadc7f5..c122a89 100644
--- a/test-tools/hadoop-webapp/pom.xml
+++ b/test-tools/hadoop-webapp/pom.xml
@@ -53,6 +53,12 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <classifier>tests</classifier>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-hdfs</artifactId>
             <scope>compile</scope>
         </dependency>
@@ -71,12 +77,24 @@
         <dependency>
             <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-distcp</artifactId>
+            <scope>compile</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-mapreduce-client-common</artifactId>
             <scope>compile</scope>
         </dependency>

         <dependency>
             <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <scope>compile</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
             <scope>compile</scope>
         </dependency>
@@ -192,6 +210,11 @@
         <!-- Oozie dependencies -->
         <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.apache.hive.hcatalog</groupId>
             <artifactId>hive-webhcat-java-client</artifactId>
         </dependency>
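Note: the new hadoop-common dependency with the tests classifier pulls in Hadoop's
hadoop-common test jar; no version is declared here, so it is presumably managed in
the parent pom's dependencyManagement, like the neighbouring hadoop-hdfs,
hadoop-distcp and hive-exec entries.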
http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 063d42c..5a9e1da 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -45,7 +45,6 @@
                     <webResources>
                         <resource>
                             <directory>../falcon-ui/dist</directory>
-                            <targetPath>public</targetPath>
                         </resource>
                         <resource>
                             <directory>src/main/webapp/WEB-INF/distributed</directory>
@@ -70,6 +69,16 @@
                 </dependency>
             </dependencies>
         </profile>
+        <profile>
+            <id>hivedr</id>
+            <dependencies>
+                <dependency>
+                    <groupId>org.apache.falcon</groupId>
+                    <artifactId>falcon-hive-replication</artifactId>
+                    <version>${project.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
     </profiles>

     <dependencies>
@@ -113,7 +122,8 @@
         <dependency>
             <groupId>org.apache.falcon</groupId>
-            <artifactId>falcon-replication</artifactId>
+            <artifactId>falcon-distcp-replication</artifactId>
+            <version>${project.version}</version>
         </dependency>

         <dependency>
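Note: the new optional hivedr profile wires the falcon-hive-replication module into
the webapp (activated with mvn -Phivedr), while the unconditional replication
dependency is renamed from falcon-replication to falcon-distcp-replication,
presumably to distinguish the DistCp-based replication from the new Hive DR path.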
http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
index 90acb59..1e0a096 100644
--- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
@@ -44,7 +44,7 @@ import java.util.Properties;
  */
 @Test(groups = {"exhaustive"})
 public class FalconCLIIT {
-    private static final String RECIPE_PROPERTIES_FILE_XML = "/process.properties";
+    private static final String RECIPE_PROPERTIES_FILE_XML = "/hdfs-replication.properties";
     private InMemoryWriter stream = new InMemoryWriter(System.out);
     private String recipePropertiesFilePath;
@@ -217,23 +217,23 @@
         Assert.assertEquals(executeWithURL("entity -status -type process -name "
                 + overlay.get("processName")), 0);
         Assert.assertEquals(executeWithURL("entity -summary -type feed -cluster "+ overlay.get("cluster")
-            + " -fields status,tags -start " + START_INSTANCE
-            + " -filterBy TYPE:FEED -orderBy name -sortOrder asc "
-            + " -offset 0 -numResults 1 -numInstances 5"), 0);
+                + " -fields status,tags -start " + START_INSTANCE
+                + " -filterBy TYPE:FEED -orderBy name -sortOrder asc "
+                + " -offset 0 -numResults 1 -numInstances 5"), 0);
         Assert.assertEquals(executeWithURL("entity -summary -type process -fields status,pipelines"
-            + " -cluster " + overlay.get("cluster")
-            + " -start " + SchemaHelper.getDateFormat().format(new Date(0))
-            + " -end " + SchemaHelper.getDateFormat().format(new Date())
-            + " -filterBy TYPE:PROCESS -orderBy name -sortOrder desc "
-            + " -offset 0 -numResults 1 -numInstances 7"), 0);
+                + " -cluster " + overlay.get("cluster")
+                + " -start " + SchemaHelper.getDateFormat().format(new Date(0))
+                + " -end " + SchemaHelper.getDateFormat().format(new Date())
+                + " -filterBy TYPE:PROCESS -orderBy name -sortOrder desc "
+                + " -offset 0 -numResults 1 -numInstances 7"), 0);
         Assert.assertEquals(executeWithURL("entity -summary -type process -fields status,pipelines"
-            + " -cluster " + overlay.get("cluster")
-            + " -start " + SchemaHelper.getDateFormat().format(new Date(0))
-            + " -end " + SchemaHelper.getDateFormat().format(new Date())
-            + " -filterBy TYPE:PROCESS -orderBy name -sortOrder invalid "
-            + " -offset 0 -numResults 1 -numInstances 7"), -1);
+                + " -cluster " + overlay.get("cluster")
+                + " -start " + SchemaHelper.getDateFormat().format(new Date(0))
+                + " -end " + SchemaHelper.getDateFormat().format(new Date())
+                + " -filterBy TYPE:PROCESS -orderBy name -sortOrder invalid "
+                + " -offset 0 -numResults 1 -numInstances 7"), -1);

         // No start or end date
         Assert.assertEquals(executeWithURL("entity -summary -type process -fields status,pipelines"
@@ -329,9 +329,9 @@
                 + " -orderBy startTime -sortOrder asc -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start " + SchemaHelper.getDateFormat().format(new Date())
-            + " -orderBy INVALID -offset 0 -numResults 1"), -1);
+                + overlay.get("outputFeedName")
+                + " -start " + SchemaHelper.getDateFormat().format(new Date())
+                + " -orderBy INVALID -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name "
                 + overlay.get("outputFeedName")
@@ -339,14 +339,14 @@
                 + " -orderBy startTime -sortOrder desc -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start " + startTimeString
-            + " -orderBy startTime -sortOrder invalid -offset 0 -numResults 1"), -1);
+                + overlay.get("outputFeedName")
+                + " -start " + startTimeString
+                + " -orderBy startTime -sortOrder invalid -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start " + SchemaHelper.getDateFormat().format(new Date())
-            + " -filterBy INVALID:FILTER -offset 0 -numResults 1"), -1);
+                + overlay.get("outputFeedName")
+                + " -start " + SchemaHelper.getDateFormat().format(new Date())
+                + " -filterBy INVALID:FILTER -offset 0 -numResults 1"), -1);

         // testcase : start str is older than entity schedule time.
         Assert.assertEquals(executeWithURL("instance -running -type feed -lifecycle eviction -name "
                 + overlay.get("outputFeedName")
@@ -389,14 +389,14 @@
                 + " -orderBy startTime -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())
-            +" -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())
+                +" -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -list -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())
-            +" -filterBy STATUS:SUCCEEDED -orderBy INVALID -offset 0 -numResults 1"), -1);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())
+                +" -filterBy STATUS:SUCCEEDED -orderBy INVALID -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -status -type feed -lifecycle eviction -name "
                 + overlay.get("outputFeedName")
                 + " -start " + SchemaHelper.getDateFormat().format(new Date())
@@ -453,8 +453,8 @@
         // No end date, should fail.
         Assert.assertEquals(executeWithURL("instance -suspend -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1);
         Assert.assertEquals(executeWithURL("instance -resume -type process -name "
                 + overlay.get("processName")
@@ -479,13 +479,13 @@
         OozieTestUtils.waitForProcessWFtoStart(context);

         Assert.assertEquals(executeWithURL("instance -kill -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);

         // Fail due to no end date
         Assert.assertEquals(executeWithURL("instance -kill -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())), -1);

         Assert.assertEquals(executeWithURL("instance -rerun -type process -name "
                 + overlay.get("processName")
@@ -575,13 +575,13 @@
                 + " -tags [email protected],department=forecasting "
                 + " -filterBy STATUS:SUBMITTED,type:process -orderBy name -offset 1 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("entity -list -type process -fields status "
-            + " -filterBy STATUS:SUCCEEDED,TYPE:process -orderBy INVALID -offset 0 -numResults 1"), -1);
+                + " -filterBy STATUS:SUCCEEDED,TYPE:process -orderBy INVALID -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("entity -list -type process -fields INVALID "
-            + " -filterBy STATUS:SUCCEEDED,TYPE:process -orderBy name -offset 1 -numResults 1"), -1);
+                + " -filterBy STATUS:SUCCEEDED,TYPE:process -orderBy name -offset 1 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("entity -list -type process -fields status "
-            + " -filterBy INVALID:FILTER,TYPE:process -orderBy name -offset 1 -numResults 1"), -1);
+                + " -filterBy INVALID:FILTER,TYPE:process -orderBy name -offset 1 -numResults 1"), -1);

         Assert.assertEquals(executeWithURL("entity -definition -type cluster -name "
                 + overlay.get("cluster")), 0);
@@ -678,26 +678,26 @@
         OozieTestUtils.waitForProcessWFtoStart(context);

         Assert.assertEquals(executeWithURL("instance -kill -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);

         Assert.assertEquals(executeWithURL("instance -kill -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())
-            + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())
+                + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0);

         Assert.assertEquals(executeWithURL("instance -rerun -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE), -1);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE), -1);

         Assert.assertEquals(executeWithURL("instance -rerun -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);

         Assert.assertEquals(executeWithURL("instance -rerun -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())
-            + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())
+                + " -end " + SchemaHelper.getDateFormat().format(new Date())), 0);
     }

     public void testInvalidCLIInstanceCommands() throws Exception {
@@ -712,7 +712,7 @@
                 + " -end 2010-01-01T03:00Z"), -1);

         Assert.assertEquals(executeWithURL("instance -kill -type process -name "
-            + " -start 2010-01-01T01:00Z -end 2010-01-01T03:00Z"), -1);
+                + " -start 2010-01-01T01:00Z -end 2010-01-01T03:00Z"), -1);
     }

     public void testFalconURL() throws Exception {
@@ -734,12 +734,12 @@
         submitTestFiles(context, overlay);

         Assert.assertEquals(new FalconCLI().run(("entity -schedule -type feed -name "
-            + overlay.get("outputFeedName") + " -url "
-            + TestContext.BASE_URL).split("\\s+")), 0);
+                + overlay.get("outputFeedName") + " -url "
+                + TestContext.BASE_URL).split("\\s+")), 0);

         Assert.assertEquals(new FalconCLI().run(("entity -schedule -type process -name "
-            + overlay.get("processName")+ " -url "
-            + TestContext.BASE_URL).split("\\s+")), 0);
+                + overlay.get("processName")+ " -url "
+                + TestContext.BASE_URL).split("\\s+")), 0);
     }

     public void testGetVersion() throws Exception {
@@ -766,52 +766,52 @@
         Thread.sleep(500);

         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE), 0);

         Assert.assertEquals(executeWithURL("instance -logs -type feed -lifecycle eviction -name "
-            + overlay.get("outputFeedName")
-            + " -start "+ SchemaHelper.getDateFormat().format(new Date())), 0);
+                + overlay.get("outputFeedName")
+                + " -start "+ SchemaHelper.getDateFormat().format(new Date())), 0);

         // test filterBy, orderBy, offset, numResults
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED -orderBy endtime "
-            + " -sortOrder asc -offset 0 -numResults 1"), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED -orderBy endtime "
+                + " -sortOrder asc -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED -orderBy starttime "
-            + " -sortOrder asc -offset 0 -numResults 1"), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED -orderBy starttime "
+                + " -sortOrder asc -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED -orderBy cluster "
-            + " -sortOrder asc -offset 0 -numResults 1"), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED -orderBy cluster "
+                + " -sortOrder asc -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:WAITING -orderBy startTime -offset 0 -numResults 1"), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:WAITING -orderBy startTime -offset 0 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED -orderBy endtime "
-            + " -sortOrder invalid -offset 0 -numResults 1"), -1);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED -orderBy endtime "
+                + " -sortOrder invalid -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED,STARTEDAFTER:"+START_INSTANCE+" -offset 1 -numResults 1"), 0);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED,STARTEDAFTER:"+START_INSTANCE+" -offset 1 -numResults 1"), 0);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy INVALID:FILTER -orderBy startTime -offset 0 -numResults 1"), -1);
         Assert.assertEquals(executeWithURL("instance -logs -type process -name "
-            + overlay.get("processName")
-            + " -start " + START_INSTANCE + " -end " + START_INSTANCE
-            + " -filterBy STATUS:SUCCEEDED -orderBy wrongOrder -offset 0 -numResults 1"), -1);
+                + overlay.get("processName")
+                + " -start " + START_INSTANCE + " -end " + START_INSTANCE
+                + " -filterBy STATUS:SUCCEEDED -orderBy wrongOrder -offset 0 -numResults 1"), -1);
     }

     @SuppressWarnings("ResultOfMethodCallIgnored")
@@ -819,7 +819,8 @@
     public void testRecipeCommand() throws Exception {
         recipeSetup();
         try {
-            Assert.assertEquals(executeWithURL("recipe -name " + "process"), 0);
+            Assert.assertEquals(executeWithURL("recipe -name " + "hdfs-replication"
+                    + " -operation HDFS_REPLICATION"), 0);
         } finally {
             if (recipePropertiesFilePath != null) {
                 new File(recipePropertiesFilePath).delete();
@@ -836,12 +837,6 @@
                 overlay);
         Assert.assertEquals(executeWithURL("entity -submit -type cluster -file " + filePath), 0);
         context.setCluster(overlay.get("cluster"));
-
-        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
-        Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0);
-
-        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
-        Assert.assertEquals(executeWithURL("entity -submit -type feed -file " + filePath), 0);
     }

     private void createPropertiesFile(TestContext context) throws Exception {
@@ -854,16 +849,14 @@
         String resourcePath = FilenameUtils.getFullPathNoEndSeparator(wfFile);
         String libPath = TestContext.getTempFile("target/lib", "recipe", ".jar").getAbsolutePath();

-        File file = new File(resourcePath, "process.properties");
+        File file = new File(resourcePath, "hdfs-replication.properties");
         OutputStream out = new FileOutputStream(file);
-        props.setProperty("falcon.recipe.processName", context.getProcessName());
-        props.setProperty("falcon.recipe.src.cluster.name", context.getClusterName());
-        props.setProperty("falcon.recipe.processEndDate", context.getProcessEndTime());
-        props.setProperty("falcon.recipe.inputFeedName", context.getInputFeedName());
-        props.setProperty("falcon.recipe.outputFeedName", context.getOutputFeedName());
+        props.setProperty("falcon.recipe.name", context.getProcessName());
+        props.setProperty("falcon.recipe.cluster.name", context.getClusterName());
+        props.setProperty("falcon.recipe.cluster.validity.end", context.getProcessEndTime());
         props.setProperty("falcon.recipe.workflow.path", TestContext.class.getResource("/fs-workflow.xml").getPath());
         props.setProperty("falcon.recipe.workflow.lib.path", new File(libPath).getParent());
-        props.setProperty("falcon.recipe.src.cluster.hdfs.writeEndPoint", "jail://global:00");
+        props.setProperty("falcon.recipe.cluster.hdfs.writeEndPoint", "jail://global:00");
         props.store(out, null);
         out.close();

http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/webapp/src/test/resources/hdfs-replication-template.xml
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/hdfs-replication-template.xml b/webapp/src/test/resources/hdfs-replication-template.xml
new file mode 100644
index 0000000..b93cc0b
--- /dev/null
+++ b/webapp/src/test/resources/hdfs-replication-template.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<process name="hdfs-replication" xmlns="uri:falcon:process:0.1">
+    <clusters>
+        <!-- source -->
+        <cluster name="##falcon.recipe.cluster.name##">
+            <validity end="##falcon.recipe.cluster.validity.end##" start="##falcon.recipe.cluster.validity.start##"/>
+        </cluster>
+    </clusters>
+
+    <tags>_falcon_mirroring_type=HDFS</tags>
+
+    <parallel>1</parallel>
+    <!-- Dir replication needs to run only once to catch up -->
+    <order>LAST_ONLY</order>
+    <frequency>##falcon.recipe.frequency##</frequency>
+    <timezone>UTC</timezone>
+
+    <properties>
+        <property name="oozie.wf.subworkflow.classpath.inheritance" value="true"/>
+    </properties>
+
+    <workflow name="##falcon.recipe.workflow.name##" engine="oozie" path="/apps/data-mirroring/workflows/hdfs-replication-workflow.xml" lib="##workflow.lib.path##"/>
+    <retry policy="##falcon.recipe.retry.policy##" delay="##falcon.recipe.retry.delay##" attempts="3"/>
+    <ACL/>
+</process>
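For context, a minimal sketch (not part of this commit) of how the ##...## tokens in
the template above are meant to be resolved, modeled on the
TestContext.overlayParametersOverTemplate calls in FalconCLIIT; the map keys mirror
the hdfs-replication.properties entries below and the values are illustrative
assumptions:

    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("falcon.recipe.cluster.name", "primary-cluster");            // illustrative value
    overlay.put("falcon.recipe.cluster.validity.start", "2012-04-20T00:00Z");
    overlay.put("falcon.recipe.cluster.validity.end", "2020-04-20T00:00Z");  // illustrative value
    overlay.put("falcon.recipe.frequency", "minutes(5)");
    // Writes a copy of the template with every ##key## placeholder replaced by its
    // map value and returns the path to the materialized file.
    String filePath = TestContext.overlayParametersOverTemplate("/hdfs-replication-template.xml", overlay);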
http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/webapp/src/test/resources/hdfs-replication.properties
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/hdfs-replication.properties b/webapp/src/test/resources/hdfs-replication.properties
new file mode 100644
index 0000000..1b485b4
--- /dev/null
+++ b/webapp/src/test/resources/hdfs-replication.properties
@@ -0,0 +1,45 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+falcon.recipe.cluster.hdfs.writeEndPoint=jail://global:00
+falcon.recipe.workflow.path=
+falcon.recipe.processName=
+falcon.recipe.cluster.name=
+falcon.recipe.cluster.validity.end=
+falcon.recipe.cluster.validity.start=2012-04-20T00:00Z
+falcon.recipe.workflow.name=hdfs-dr-workflow
+falcon.recipe.process.frequency=minutes(5)
+
+##### Retry policy properties
+
+falcon.recipe.retry.policy=periodic
+falcon.recipe.retry.delay=minutes(30)
+falcon.recipe.retry.attempts=3
+
+drSourceDir=/tmp/test1
+drSourceClusterFS=jail://global:00
+drTargetDir=/tmp/test1
+drTargetClusterFS=jail://global:00
+
+# Change it to specify the maximum number of mappers for DistCP
+distcpMaxMaps=1
+# Change it to specify the bandwidth in MB for each mapper in DistCP
+distcpMapBandwidth=100
+
+##### Email on failure
+drNotificationReceivers=NA
\ No newline at end of file
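For context, the new properties file above is what testRecipeCommand() feeds to the
recipe command; a minimal sketch of the equivalent direct invocation, with the URL
spelled out instead of going through the executeWithURL helper (the -url form
mirrors the FalconCLI().run(...) calls in testFalconURL):

    // Exercises the new hdfs-replication recipe end-to-end against the test server.
    Assert.assertEquals(new FalconCLI().run(
            ("recipe -name hdfs-replication -operation HDFS_REPLICATION"
                    + " -url " + TestContext.BASE_URL).split("\\s+")), 0);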
http://git-wip-us.apache.org/repos/asf/falcon/blob/cc1d3840/webapp/src/test/resources/process.properties
----------------------------------------------------------------------
diff --git a/webapp/src/test/resources/process.properties b/webapp/src/test/resources/process.properties
deleted file mode 100644
index 1eb282c..0000000
--- a/webapp/src/test/resources/process.properties
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-falcon.recipe.src.cluster.hdfs.writeEndPoint=jail://global:00
-falcon.recipe.workflow.path=
-falcon.recipe.processName=
-falcon.recipe.src.cluster.name=
-falcon.recipe.inputFeedName=
-falcon.recipe.outputFeedName=
-
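Note: taken together with the FalconCLIIT changes above, the deleted file shows the
recipe property renames in this commit: falcon.recipe.processName becomes
falcon.recipe.name, falcon.recipe.src.cluster.* becomes falcon.recipe.cluster.*,
processEndDate becomes cluster.validity.end, and the inputFeedName/outputFeedName
keys are dropped.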
