FALCON-896 Upgrade oozie to 4.1.0 and make it the default for falcon. Contributed by Peeyush Bishnoi
Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/d2fd49e5
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/d2fd49e5
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/d2fd49e5

Branch: refs/heads/master
Commit: d2fd49e5c4aeae7046afef1a8d23fa696dd9a4eb
Parents: 4a9151e
Author: Srikanth Sundarrajan <[email protected]>
Authored: Sat Jan 10 23:09:30 2015 +0530
Committer: Srikanth Sundarrajan <[email protected]>
Committed: Sat Jan 10 23:09:30 2015 +0530

----------------------------------------------------------------------
 Installation-steps.txt                          | 2 +-
 build-tools/src/bin/build-oozie.sh              | 9 +-
 hadoop-webapp/pom.xml                           | 265 ------------------
 .../org/apache/falcon/JobTrackerService.java    | 28 --
 .../falcon/listener/HadoopStartupListener.java  | 90 -------
 hadoop-webapp/src/main/resources/core-site.xml  | 55 ----
 hadoop-webapp/src/main/resources/hdfs-site.xml  | 64 -----
 hadoop-webapp/src/main/resources/hive-site.xml  | 42 ---
 hadoop-webapp/src/main/resources/log4j.xml      | 38 ---
 .../src/main/resources/mapred-site.xml          | 68 -----
 hadoop-webapp/src/main/resources/yarn-site.xml  | 64 -----
 hadoop-webapp/src/main/webapp/WEB-INF/web.xml   | 32 ---
 .../apache/hadoop/mapred/LocalRunnerTest.java   | 37 ---
 .../org/apache/hadoop/mapred/LocalRunnerV2.java | 242 -----------------
 .../workflow/engine/OozieWorkflowEngine.java    | 4 +-
 pom.xml                                         | 6 +-
 test-tools/hadoop-webapp/pom.xml                | 266 +++++++++++++++++++
 .../org/apache/falcon/JobTrackerService.java    | 28 ++
 .../falcon/listener/HadoopStartupListener.java  | 135 ++++++++++
 .../src/main/resources/core-site.xml            | 55 ++++
 .../src/main/resources/hdfs-site.xml            | 64 +++++
 .../src/main/resources/hive-site.xml            | 60 +++++
 .../hadoop-webapp/src/main/resources/log4j.xml  | 38 +++
 .../src/main/resources/mapred-site.xml          | 68 +++++
 .../src/main/resources/yarn-site.xml            | 64 +++++
 .../src/main/webapp/WEB-INF/web.xml             | 32 +++
 .../apache/hadoop/mapred/LocalRunnerTest.java   | 37 +++
 .../org/apache/hadoop/mapred/LocalRunnerV2.java | 242 +++++++++++++++++
 test-tools/hcatalog-sharelib/pom.xml            | 64 +++++
 test-tools/hive-sharelib/pom.xml                | 64 +++++
 test-tools/oozie-sharelib/pom.xml               | 63 +++++
 test-tools/pig-sharelib/pom.xml                 | 63 +++++
 test-tools/pom.xml                              | 42 +++
 webapp/pom.xml                                  | 10 +-
 .../conf/oozie/conf/hadoop-conf/core-site.xml   | 5 +
 .../conf/oozie/conf/hadoop-conf/mapred-site.xml | 28 ++
 webapp/src/conf/oozie/conf/oozie-site.xml       | 4 +-
 37 files changed, 1433 insertions(+), 1045 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/Installation-steps.txt
----------------------------------------------------------------------
diff --git a/Installation-steps.txt b/Installation-steps.txt
index 2e15805..2cd1f11 100644
--- a/Installation-steps.txt
+++ b/Installation-steps.txt
@@ -125,4 +125,4 @@ d. Stopping Falcon Server
 * cd <<project home>>
 * src/bin/pacakge.sh <<hadoop-version>>
 >> ex.
src/bin/pacakge.sh 2.5.0 - >> oozie bundle available in target/package/oozie-4.0.0/distro/target/oozie-4.0.0-distro.tar.gz + >> oozie bundle available in target/package/oozie-4.1.0/distro/target/oozie-4.1.0-distro.tar.gz http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/build-tools/src/bin/build-oozie.sh ---------------------------------------------------------------------- diff --git a/build-tools/src/bin/build-oozie.sh b/build-tools/src/bin/build-oozie.sh index 5474761..b5f3179 100755 --- a/build-tools/src/bin/build-oozie.sh +++ b/build-tools/src/bin/build-oozie.sh @@ -55,17 +55,12 @@ sed -i.bak s/$VERSION\<\\/version\>/$BUILD_VERSION\<\\/version\>/g pom.xml */pom patch -p0 < ../../build-tools/src/patches/oozie-site.patch case $VERSION in -4.0.0 ) - patch -p1 --verbose < ../../build-tools/src/patches/OOZIE-1551-4.0.patch - patch -p0 < ../../build-tools/src/patches/OOZIE-1741.patch - ;; -4.0.1 ) - patch -p0 < ../../build-tools/src/patches/OOZIE-1741.patch +4.1.0 ) ;; esac rm `find . -name 'pom.xml.bak'` -$MVN_CMD clean install -DskipTests +$MVN_CMD clean install -DskipTests -Phadoop-2 popd http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/pom.xml b/hadoop-webapp/pom.xml deleted file mode 100644 index 7a2f905..0000000 --- a/hadoop-webapp/pom.xml +++ /dev/null @@ -1,265 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> - -<!-- - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- --> - -<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" - xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> - - <modelVersion>4.0.0</modelVersion> - <parent> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-main</artifactId> - <version>0.7-SNAPSHOT</version> - </parent> - <artifactId>falcon-hadoop-webapp</artifactId> - <description>Apache Falcon Embedded Hadoop - Test Cluster</description> - <name>Apache Falcon Embedded Hadoop - Test Cluster</name> - <packaging>war</packaging> - - <profiles> - <profile> - <id>hadoop-2</id> - <activation> - <activeByDefault>true</activeByDefault> - </activation> - <dependencies> - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-client</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <classifier>tests</classifier> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-auth</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-mapreduce-client-common</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-server-resourcemanager</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-yarn-server-nodemanager</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-distcp</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-client</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-server</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-json</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>com.sun.jersey</groupId> - <artifactId>jersey-core</artifactId> - <scope>compile</scope> - </dependency> - - <dependency> - <groupId>org.codehaus.jettison</groupId> - <artifactId>jettison</artifactId> - <scope>compile</scope> - </dependency> - </dependencies> - </profile> - </profiles> - - <dependencies> - <dependency> - <groupId>org.apache.oozie</groupId> - <artifactId>oozie-core</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-oozie-el-extension</artifactId> - <exclusions> - <exclusion> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-log4j12</artifactId> - </exclusion> - <exclusion> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - </exclusion> - </exclusions> - </dependency> - - <dependency> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-hadoop-dependencies</artifactId> - <version>${project.version}</version> - </dependency> - - <dependency> - <groupId>org.apache.falcon</groupId> - <artifactId>falcon-test-util</artifactId> - <scope>compile</scope> - <exclusions> - <exclusion> - 
<groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-core</artifactId> - </exclusion> - <exclusion> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-test</artifactId> - </exclusion> - </exclusions> - </dependency> - - <dependency> - <groupId>org.apache.activemq</groupId> - <artifactId>activemq-core</artifactId> - </dependency> - - <dependency> - <groupId>org.apache.hive</groupId> - <artifactId>hive-metastore</artifactId> - </dependency> - - <!-- Hive Metastore and WebHcat fails with out these dependencies --> - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-api</artifactId> - </dependency> - - <dependency> - <groupId>org.slf4j</groupId> - <artifactId>slf4j-log4j12</artifactId> - </dependency> - - <!-- Oozie dependencies --> - <dependency> - <groupId>org.apache.hcatalog</groupId> - <artifactId>webhcat-java-client</artifactId> - </dependency> - - <dependency> - <groupId>org.testng</groupId> - <artifactId>testng</artifactId> - </dependency> - </dependencies> - - <build> - <plugins> - <plugin> - <artifactId>maven-dependency-plugin</artifactId> - <version>2.8</version> - <executions> - <execution> - <id>explode-war</id> - <phase>prepare-package</phase> - <goals> - <goal>unpack</goal> - </goals> - <configuration> - <artifactItems> - <artifactItem> - <groupId>org.apache.oozie</groupId> - <artifactId>oozie-webapp</artifactId> - <type>war</type> - <outputDirectory>${project.build.directory}/oozie</outputDirectory> - </artifactItem> - </artifactItems> - </configuration> - </execution> - <execution> - <id>add-jars</id> - <phase>prepare-package</phase> - <goals> - <goal>copy-dependencies</goal> - </goals> - <configuration> - <outputDirectory>${project.build.directory}/oozie/WEB-INF/lib</outputDirectory> - </configuration> - </execution> - </executions> - </plugin> - - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-war-plugin</artifactId> - </plugin> - - <plugin> - <groupId>org.apache.maven.plugins</groupId> - <artifactId>maven-antrun-plugin</artifactId> - <version>1.6</version> - <executions> - <execution> - <configuration> - <target> - <jar destfile="target/oozie.war" basedir="target/oozie"/> - </target> - </configuration> - <goals> - <goal>run</goal> - </goals> - <phase>package</phase> - </execution> - </executions> - </plugin> - </plugins> - </build> -</project> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java b/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java deleted file mode 100644 index fa3a84a..0000000 --- a/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.falcon; - -/** - * Job Tracker service interface. - */ -public interface JobTrackerService { - - void start() throws Exception; - - void stop() throws Exception; -} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java deleted file mode 100644 index a203290..0000000 --- a/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.falcon.listener; - -import org.apache.activemq.broker.BrokerService; -import org.apache.hadoop.hive.metastore.HiveMetaStore; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; - -/** - * Listener for bootstrapping embedded hadoop cluster for integration tests. 
- */ -public class HadoopStartupListener implements ServletContextListener { - private static final Logger LOG = LoggerFactory.getLogger(HadoopStartupListener.class); - private BrokerService broker; - - @Override - public void contextInitialized(ServletContextEvent sce) { - try { - startBroker(); - startHiveMetaStore(); - - } catch (Exception e) { - LOG.error("Unable to start daemons", e); - throw new RuntimeException("Unable to start daemons", e); - } - } - - private void startBroker() throws Exception { - broker = new BrokerService(); - broker.setUseJmx(false); - broker.setDataDirectory("target/data"); - broker.addConnector("vm://localhost"); - broker.addConnector("tcp://0.0.0.0:61616"); - broker.start(); - } - - public static final String META_STORE_PORT = "49083"; - private void startHiveMetaStore() { - try { - new Thread(new Runnable() { - @Override - public void run() { - try { - String[] args = new String[]{ - "-v", - "-p", META_STORE_PORT, - }; - - HiveMetaStore.main(args); - } catch (Throwable t) { - throw new RuntimeException(t); - } - } - }).start(); - } catch (Exception e) { - throw new RuntimeException("Unable to start hive metastore server.", e); - } - } - - @Override - public void contextDestroyed(ServletContextEvent sce) { - try { - if (broker != null) { - broker.stop(); - } - } catch(Exception e) { - LOG.warn("Failed to stop activemq", e); - } - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/core-site.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/core-site.xml b/hadoop-webapp/src/main/resources/core-site.xml deleted file mode 100644 index ef1558e..0000000 --- a/hadoop-webapp/src/main/resources/core-site.xml +++ /dev/null @@ -1,55 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
---> - -<configuration> - <property> - <name>fs.fsext.impl</name> - <value>org.apache.falcon.hadoop.FileSystemExtension</value> - </property> - - <property> - <name>fs.jail.impl</name> - <value>org.apache.falcon.hadoop.JailedFileSystem</value> - </property> - - <property> - <name>fs.defaultFS</name> - <value>jail://global:00</value> - </property> - - <property> - <name>hadoop.proxyuser.${user.name}.groups</name> - <value>*</value> - <description>Allow the superuser oozie to impersonate any members of the group group1 and group2</description> - </property> - - <property> - <name>hadoop.proxyuser.${user.name}.hosts</name> - <value>*</value> - <description>The superuser can connect only from host1 and host2 to impersonate a user</description> - </property> - - <property> - <name>hadoop.tmp.dir</name> - <value>${project.build.directory}/tmp-hadoop-${user.name}</value> - <description>A base for other temporary directories.</description> - </property> - -</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/hdfs-site.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/hdfs-site.xml b/hadoop-webapp/src/main/resources/hdfs-site.xml deleted file mode 100644 index 0b929e6..0000000 --- a/hadoop-webapp/src/main/resources/hdfs-site.xml +++ /dev/null @@ -1,64 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<!-- Put site-specific property overrides in this file. --> - -<configuration> - <property> - <name>dfs.replication</name> - <value>1</value> - </property> - - <property> - <name>dfs.datanode.address</name> - <value>0.0.0.0:41110</value> - <description> - The datanode server address and port for data transfer. - If the port is 0 then the server will start on a free port. - </description> - </property> - - <property> - <name>dfs.datanode.http.address</name> - <value>0.0.0.0:41175</value> - <description> - The datanode http server address and port. - If the port is 0 then the server will start on a free port. - </description> - </property> - - <property> - <name>dfs.datanode.ipc.address</name> - <value>0.0.0.0:41120</value> - <description> - The datanode ipc server address and port. - If the port is 0 then the server will start on a free port. - </description> - </property> - - <property> - <name>dfs.http.address</name> - <value>0.0.0.0:41170</value> - <description> - The address and the base port where the dfs namenode web ui will listen on. - If the port is 0 then the server will start on a free port. 
- </description> - </property> -</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/hive-site.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/hive-site.xml b/hadoop-webapp/src/main/resources/hive-site.xml deleted file mode 100644 index 6b39592..0000000 --- a/hadoop-webapp/src/main/resources/hive-site.xml +++ /dev/null @@ -1,42 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<configuration> - <property> - <name>hive.metastore.uris</name> - <value>thrift://localhost:49083</value> - </property> - - <property> - <name>hive.metastore.local</name> - <value>false</value> - </property> - - <property> - <name>fs.defaultFS</name> - <value>jail://global:00</value> - </property> - - <!-- Forcing the creation of the db dir under target --> - <property> - <name>javax.jdo.option.ConnectionURL</name> - <value>jdbc:derby:;databaseName=./target/metastore_db;create=true</value> - </property> -</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/log4j.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/log4j.xml b/hadoop-webapp/src/main/resources/log4j.xml deleted file mode 100644 index d69e921..0000000 --- a/hadoop-webapp/src/main/resources/log4j.xml +++ /dev/null @@ -1,38 +0,0 @@ -<?xml version="1.0" encoding="UTF-8" ?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- --> - -<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> - -<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> - - <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> - <param name="File" value="${user.dir}/target/logs/jetty.log"/> - <param name="Append" value="true"/> - <param name="Threshold" value="debug"/> - <layout class="org.apache.log4j.PatternLayout"> - <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> - </layout> - </appender> - - <root> - <priority value="info"/> - <appender-ref ref="FILE"/> - </root> - -</log4j:configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/mapred-site.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/mapred-site.xml b/hadoop-webapp/src/main/resources/mapred-site.xml deleted file mode 100644 index cf297de..0000000 --- a/hadoop-webapp/src/main/resources/mapred-site.xml +++ /dev/null @@ -1,68 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<!-- Put site-specific property overrides in this file. --> - -<configuration> - <property> - <name>mapreduce.jobtracker.address</name> - <value>localhost:41021</value> - </property> - - <property> - <name>mapred.tasktracker.map.tasks.maximum</name> - <value>2</value> - <description>The maximum number of map tasks that will be run - simultaneously by a task tracker. - </description> - </property> - - <property> - <name>mapred.tasktracker.reduce.tasks.maximum</name> - <value>1</value> - <description>The maximum number of reduce tasks that will be run - simultaneously by a task tracker. - </description> - </property> - - <property> - <name>mapreduce.jobtracker.staging.root.dir</name> - <value>/user</value> - </property> - - <property> - <name>mapred.job.tracker.http.address</name> - <value>0.0.0.0:41130</value> - <description> - The job tracker http server address and port the server will listen on. - If the port is 0 then the server will start on a free port. - </description> - </property> - - <property> - <name>mapred.task.tracker.http.address</name> - <value>0.0.0.0:41160</value> - <description> - The task tracker http server address and port. - If the port is 0 then the server will start on a free port. 
- </description> - </property> - -</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/resources/yarn-site.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/resources/yarn-site.xml b/hadoop-webapp/src/main/resources/yarn-site.xml deleted file mode 100644 index 658752b..0000000 --- a/hadoop-webapp/src/main/resources/yarn-site.xml +++ /dev/null @@ -1,64 +0,0 @@ -<?xml version="1.0"?> -<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---> - -<!-- Put site-specific property overrides in this file. --> - -<configuration> - <property> - <description>The address of the applications manager interface in the RM.</description> - <name>yarn.resourcemanager.address</name> - <value>0.0.0.0:41021</value> - </property> - - <property> - <name>mapreduce.framework.name</name> - <value>unittests</value> - </property> - - <property> - <name>yarn.resourcemanager.resource-tracker.address</name> - <value>0.0.0.0:41025</value> - </property> - - <property> - <description>The address of the RM web application.</description> - <name>yarn.resourcemanager.webapp.address</name> - <value>0.0.0.0:41130</value> - </property> - - <property> - <description>The address of the scheduler interface.</description> - <name>yarn.resourcemanager.scheduler.address</name> - <value>0.0.0.0:41030</value> - </property> - - <property> - <description>Address where the localizer IPC is.</description> - <name>yarn.nodemanager.localizer.address</name> - <value>0.0.0.0:41040</value> - </property> - - <property> - <description>The address of the RM admin interface.</description> - <name>yarn.resourcemanager.admin.address</name> - <value>0.0.0.0:41140</value> - </property> - -</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/main/webapp/WEB-INF/web.xml ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/main/webapp/WEB-INF/web.xml b/hadoop-webapp/src/main/webapp/WEB-INF/web.xml deleted file mode 100644 index 8b186ad..0000000 --- a/hadoop-webapp/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,32 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?> -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --> - -<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" - "http://java.sun.com/dtd/web-app_2_3.dtd"> - -<web-app> - - <display-name>Apache Falcon Embedded Hadoop - Test Cluster</display-name> - <description>Apache Falcon Embedded Hadoop - Test Cluster</description> - - <listener> - <listener-class>org.apache.falcon.listener.HadoopStartupListener</listener-class> - </listener> - -</web-app> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java b/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java deleted file mode 100644 index 193fab0..0000000 --- a/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.mapred; - -import org.apache.hadoop.conf.Configuration; -import org.testng.annotations.Test; - -/** - * Test for LocalRunner. - */ -@Test (enabled = false) -public class LocalRunnerTest { - - @SuppressWarnings("unchecked") - public void testLocalRunner() throws Exception { - Configuration conf = new Configuration(); - conf.set("mapreduce.jobtracker.address", "localhost:41021"); - conf.set("mapreduce.framework.name", "unittests"); - JobClient client = new JobClient(new JobConf(conf)); - System.out.println(client.getSystemDir()); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/hadoop-webapp/src/versioned-src/v2/java/org/apache/hadoop/mapred/LocalRunnerV2.java ---------------------------------------------------------------------- diff --git a/hadoop-webapp/src/versioned-src/v2/java/org/apache/hadoop/mapred/LocalRunnerV2.java b/hadoop-webapp/src/versioned-src/v2/java/org/apache/hadoop/mapred/LocalRunnerV2.java deleted file mode 100644 index ccb1bd5..0000000 --- a/hadoop-webapp/src/versioned-src/v2/java/org/apache/hadoop/mapred/LocalRunnerV2.java +++ /dev/null @@ -1,242 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.mapred; - -import org.apache.falcon.JobTrackerService; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtocolSignature; -import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.ipc.Server; -import org.apache.hadoop.mapreduce.Cluster; -import org.apache.hadoop.mapreduce.ClusterMetrics; -import org.apache.hadoop.mapreduce.Counters; -import org.apache.hadoop.mapreduce.JobID; -import org.apache.hadoop.mapreduce.JobStatus; -import org.apache.hadoop.mapreduce.QueueAclsInfo; -import org.apache.hadoop.mapreduce.QueueInfo; -import org.apache.hadoop.mapreduce.TaskAttemptID; -import org.apache.hadoop.mapreduce.TaskCompletionEvent; -import org.apache.hadoop.mapreduce.TaskReport; -import org.apache.hadoop.mapreduce.TaskTrackerInfo; -import org.apache.hadoop.mapreduce.TaskType; -import org.apache.hadoop.mapreduce.protocol.ClientProtocol; -import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier; -import org.apache.hadoop.mapreduce.v2.LogParams; -import org.apache.hadoop.security.Credentials; -import org.apache.hadoop.security.authorize.AccessControlList; -import org.apache.hadoop.security.token.Token; - -import java.io.IOException; - -/** - * Local Job Runner for Hadoop v2. - * Please note that one of org.apache.hadoop.mapred.LocalRunnerV2 or - * org.apache.hadoop.mapred.LocalRunnerV2 is active in the project depending - * on the profile chosen. - */ -public class LocalRunnerV2 implements ClientProtocol, JobTrackerService { - - private final ClientProtocol localProxy; - private final Configuration conf; - private Server server; - - public LocalRunnerV2() { - try { - conf = new Configuration(); - localProxy = new LocalJobRunner(conf); - } catch (IOException e) { - throw new RuntimeException("Unable to initialize localRunner"); - } - } - - @Override - public void start() throws Exception { - server = new RPC.Builder(conf).setBindAddress("0.0.0.0").setPort(41021).setInstance(this). 
- setProtocol(ClientProtocol.class).build(); - server.start(); - } - - public void stop() throws Exception { - server.stop(); - } - - @Override - public JobID getNewJobID() throws IOException, InterruptedException { - return localProxy.getNewJobID(); - } - - @Override - public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts) - throws IOException, InterruptedException { - return localProxy.submitJob(jobId, jobSubmitDir, ts); - } - - @Override - public ClusterMetrics getClusterMetrics() throws IOException, InterruptedException { - return localProxy.getClusterMetrics(); - } - - @Override - public Cluster.JobTrackerStatus getJobTrackerStatus() throws IOException, InterruptedException { - return localProxy.getJobTrackerStatus(); - } - - @Override - public long getTaskTrackerExpiryInterval() throws IOException, InterruptedException { - return localProxy.getTaskTrackerExpiryInterval(); - } - - @Override - public AccessControlList getQueueAdmins(String queueName) throws IOException { - return localProxy.getQueueAdmins(queueName); - } - - @Override - public void killJob(JobID jobid) throws IOException, InterruptedException { - localProxy.killJob(jobid); - } - - @Override - public void setJobPriority(JobID jobid, String priority) throws IOException, InterruptedException { - localProxy.setJobPriority(jobid, priority); - } - - @Override - public boolean killTask(TaskAttemptID taskId, boolean shouldFail) throws IOException, InterruptedException { - return localProxy.killTask(taskId, shouldFail); - } - - @Override - public JobStatus getJobStatus(JobID jobid) throws IOException, InterruptedException { - return localProxy.getJobStatus(jobid); - } - - @Override - public Counters getJobCounters(JobID jobid) throws IOException, InterruptedException { - return localProxy.getJobCounters(jobid); - } - - @Override - public TaskReport[] getTaskReports(JobID jobid, TaskType type) throws IOException, InterruptedException { - return localProxy.getTaskReports(jobid, type); - } - - @Override - public String getFilesystemName() throws IOException, InterruptedException { - return localProxy.getFilesystemName(); - } - - @Override - public JobStatus[] getAllJobs() throws IOException, InterruptedException { - return localProxy.getAllJobs(); - } - - @Override - public TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid, int fromEventId, int maxEvents) - throws IOException, InterruptedException { - return localProxy.getTaskCompletionEvents(jobid, fromEventId, maxEvents); - } - - @Override - public String[] getTaskDiagnostics(TaskAttemptID taskId) throws IOException, InterruptedException { - return localProxy.getTaskDiagnostics(taskId); - } - - @Override - public TaskTrackerInfo[] getActiveTrackers() throws IOException, InterruptedException { - return localProxy.getActiveTrackers(); - } - - @Override - public TaskTrackerInfo[] getBlacklistedTrackers() throws IOException, InterruptedException { - return localProxy.getBlacklistedTrackers(); - } - - @Override - public String getSystemDir() throws IOException, InterruptedException { - return localProxy.getSystemDir(); - } - - @Override - public String getStagingAreaDir() throws IOException, InterruptedException { - return localProxy.getStagingAreaDir(); - } - - @Override - public String getJobHistoryDir() throws IOException, InterruptedException { - return localProxy.getJobHistoryDir(); - } - - @Override - public QueueInfo[] getQueues() throws IOException, InterruptedException { - return localProxy.getQueues(); - } - - @Override - public QueueInfo 
getQueue(String queueName) throws IOException, InterruptedException { - return localProxy.getQueue(queueName); - } - - @Override - public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException, InterruptedException { - return localProxy.getQueueAclsForCurrentUser(); - } - - @Override - public QueueInfo[] getRootQueues() throws IOException, InterruptedException { - return localProxy.getRootQueues(); - } - - @Override - public QueueInfo[] getChildQueues(String queueName) throws IOException, InterruptedException { - return localProxy.getChildQueues(queueName); - } - - @Override - public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer) throws IOException, InterruptedException { - return localProxy.getDelegationToken(renewer); - } - - @Override - public long renewDelegationToken(Token<DelegationTokenIdentifier> token) throws IOException, InterruptedException { - return localProxy.renewDelegationToken(token); - } - - @Override - public void cancelDelegationToken(Token<DelegationTokenIdentifier> token) throws IOException, InterruptedException { - localProxy.cancelDelegationToken(token); - } - - @Override - public LogParams getLogFileParams(JobID jobID, TaskAttemptID taskAttemptID) - throws IOException, InterruptedException { - return localProxy.getLogFileParams(jobID, taskAttemptID); - } - - @Override - public long getProtocolVersion(String protocol, long clientVersion) throws IOException { - return localProxy.getProtocolVersion(protocol, clientVersion); - } - - @Override - public ProtocolSignature getProtocolSignature(String protocol, long clientVersion, int clientMethodsHash) - throws IOException { - return localProxy.getProtocolSignature(protocol, clientVersion, clientMethodsHash); - } -} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java ---------------------------------------------------------------------- diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java index f1f96b1..ce292bd 100644 --- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java +++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java @@ -809,7 +809,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine { private void reRunCoordAction(String cluster, CoordinatorAction coordinatorAction) throws FalconException { try { OozieClient client = OozieClientFactory.get(cluster); - client.reRunCoord(coordinatorAction.getJobId(), RestConstants.JOB_COORD_RERUN_ACTION, + client.reRunCoord(coordinatorAction.getJobId(), RestConstants.JOB_COORD_ACTION_RERUN, Integer.toString(coordinatorAction.getActionNumber()), true, true); assertCoordActionStatus(cluster, coordinatorAction.getId(), org.apache.oozie.client.CoordinatorAction.Status.RUNNING, @@ -869,7 +869,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine { || CoordinatorAction.Status.WAITING.toString().equals(status) || CoordinatorAction.Status.SUBMITTED.toString().equals(status)) { return InstancesResult.WorkflowStatus.WAITING.name(); - } else if (CoordinatorAction.Status.DISCARDED.toString().equals(status)) { + } else if (CoordinatorAction.Status.IGNORED.toString().equals(status)) { return InstancesResult.WorkflowStatus.KILLED.name(); } else if (CoordinatorAction.Status.TIMEDOUT.toString().equals(status)) { return InstancesResult.WorkflowStatus.FAILED.name(); 
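
For context, the OozieWorkflowEngine change above tracks two API renames in Oozie 4.1.0: the coordinator rerun type is now referenced as RestConstants.JOB_COORD_ACTION_RERUN, and the engine maps the IGNORED coordinator-action status (rather than DISCARDED) to KILLED. A minimal, hypothetical sketch of the rerun call as the engine now issues it against the Oozie client API — the server URL and coordinator job id below are placeholders, not values from this commit:

    import java.util.List;

    import org.apache.oozie.client.CoordinatorAction;
    import org.apache.oozie.client.OozieClient;
    import org.apache.oozie.client.OozieClientException;
    import org.apache.oozie.client.rest.RestConstants;

    /** Illustrative sketch only; mirrors the reRunCoord(...) call in OozieWorkflowEngine. */
    public final class CoordActionRerunSketch {

        public static void main(String[] args) throws OozieClientException {
            // Placeholder endpoint and coordinator job id -- substitute real values.
            OozieClient client = new OozieClient("http://localhost:11000/oozie");
            String coordJobId = "0000001-150110000000000-oozie-oozi-C";

            // Rerun action #1 of the coordinator. Oozie 4.1.0 exposes the rerun type as
            // RestConstants.JOB_COORD_ACTION_RERUN; the trailing flags ask Oozie to refresh
            // the action's inputs and to skip output cleanup, as the engine does above.
            List<CoordinatorAction> rerun = client.reRunCoord(
                    coordJobId, RestConstants.JOB_COORD_ACTION_RERUN, "1", true, true);

            for (CoordinatorAction action : rerun) {
                System.out.println(action.getId() + " -> " + action.getStatus());
            }
        }
    }

The last two boolean arguments match the engine's call pattern: refresh the action's input dependencies and do not clean up existing output before the rerun.
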
http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/pom.xml ---------------------------------------------------------------------- diff --git a/pom.xml b/pom.xml index e39f250..cd1a8fb 100644 --- a/pom.xml +++ b/pom.xml @@ -103,7 +103,7 @@ <include.prism>true</include.prism> <slf4j.version>1.7.5</slf4j.version> - <oozie.version>4.0.1</oozie.version> + <oozie.version>4.1.0</oozie.version> <oozie.buildversion>${oozie.version}-falcon</oozie.buildversion> <oozie.forcebuild>false</oozie.forcebuild> <activemq.version>5.4.3</activemq.version> @@ -343,7 +343,7 @@ <configuration> <!--debug>true</debug --> <xmlOutput>true</xmlOutput> - <excludeFilterFile>${basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml</excludeFilterFile> + <excludeFilterFile>checkstyle/src/main/resources/falcon/findbugs-exclude.xml</excludeFilterFile> <failOnError>true</failOnError> </configuration> <executions> @@ -377,7 +377,7 @@ <module>common</module> <module>test-util</module> <module>hadoop-dependencies</module> - <module>hadoop-webapp</module> + <module>test-tools</module> <module>messaging</module> <module>oozie-el-extensions</module> <module>oozie</module> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/pom.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/pom.xml b/test-tools/hadoop-webapp/pom.xml new file mode 100644 index 0000000..71df231 --- /dev/null +++ b/test-tools/hadoop-webapp/pom.xml @@ -0,0 +1,266 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ --> + +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> + + <modelVersion>4.0.0</modelVersion> + <parent> + <groupId>org.apache.falcon</groupId> + <artifactId>falcon-main</artifactId> + <version>0.7-SNAPSHOT</version> + <relativePath>../..</relativePath> + </parent> + <artifactId>falcon-hadoop-webapp</artifactId> + <description>Apache Falcon Embedded Hadoop - Test Cluster</description> + <name>Apache Falcon Embedded Hadoop - Test Cluster</name> + <packaging>war</packaging> + + <profiles> + <profile> + <id>hadoop-2</id> + <activation> + <activeByDefault>true</activeByDefault> + </activation> + <dependencies> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-client</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <classifier>tests</classifier> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-common</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-server-resourcemanager</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-server-nodemanager</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-distcp</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-client</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-server</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-json</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>com.sun.jersey</groupId> + <artifactId>jersey-core</artifactId> + <scope>compile</scope> + </dependency> + + <dependency> + <groupId>org.codehaus.jettison</groupId> + <artifactId>jettison</artifactId> + <scope>compile</scope> + </dependency> + </dependencies> + </profile> + </profiles> + + <dependencies> + <dependency> + <groupId>org.apache.oozie</groupId> + <artifactId>oozie-core</artifactId> + </dependency> + + <dependency> + <groupId>org.apache.falcon</groupId> + <artifactId>falcon-oozie-el-extension</artifactId> + <exclusions> + <exclusion> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </exclusion> + <exclusion> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.falcon</groupId> + <artifactId>falcon-hadoop-dependencies</artifactId> + <version>${project.version}</version> + </dependency> + + <dependency> + <groupId>org.apache.falcon</groupId> + <artifactId>falcon-test-util</artifactId> + 
<scope>compile</scope> + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-core</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-test</artifactId> + </exclusion> + </exclusions> + </dependency> + + <dependency> + <groupId>org.apache.activemq</groupId> + <artifactId>activemq-core</artifactId> + </dependency> + + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-metastore</artifactId> + </dependency> + + <!-- Hive Metastore and WebHcat fails with out these dependencies --> + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-api</artifactId> + </dependency> + + <dependency> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </dependency> + + <!-- Oozie dependencies --> + <dependency> + <groupId>org.apache.hcatalog</groupId> + <artifactId>webhcat-java-client</artifactId> + </dependency> + + <dependency> + <groupId>org.testng</groupId> + <artifactId>testng</artifactId> + </dependency> + </dependencies> + + <build> + <plugins> + <plugin> + <artifactId>maven-dependency-plugin</artifactId> + <version>2.8</version> + <executions> + <execution> + <id>explode-war</id> + <phase>prepare-package</phase> + <goals> + <goal>unpack</goal> + </goals> + <configuration> + <artifactItems> + <artifactItem> + <groupId>org.apache.oozie</groupId> + <artifactId>oozie-webapp</artifactId> + <type>war</type> + <outputDirectory>${project.build.directory}/oozie</outputDirectory> + </artifactItem> + </artifactItems> + </configuration> + </execution> + <execution> + <id>add-jars</id> + <phase>prepare-package</phase> + <goals> + <goal>copy-dependencies</goal> + </goals> + <configuration> + <outputDirectory>${project.build.directory}/oozie/WEB-INF/lib</outputDirectory> + </configuration> + </execution> + </executions> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-war-plugin</artifactId> + </plugin> + + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-antrun-plugin</artifactId> + <version>1.6</version> + <executions> + <execution> + <configuration> + <target> + <jar destfile="target/oozie.war" basedir="target/oozie"/> + </target> + </configuration> + <goals> + <goal>run</goal> + </goals> + <phase>package</phase> + </execution> + </executions> + </plugin> + </plugins> + </build> +</project> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java b/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java new file mode 100644 index 0000000..fa3a84a --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/JobTrackerService.java @@ -0,0 +1,28 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.falcon; + +/** + * Job Tracker service interface. + */ +public interface JobTrackerService { + + void start() throws Exception; + + void stop() throws Exception; +} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java b/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java new file mode 100644 index 0000000..4298558 --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/java/org/apache/falcon/listener/HadoopStartupListener.java @@ -0,0 +1,135 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.falcon.listener; + +import org.apache.activemq.broker.BrokerService; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; +import java.io.File; +import java.text.SimpleDateFormat; +import java.util.Date; + +/** + * Listener for bootstrapping embedded hadoop cluster for integration tests. 
+ */ +public class HadoopStartupListener implements ServletContextListener { + private static final Logger LOG = LoggerFactory.getLogger(HadoopStartupListener.class); + private BrokerService broker; + private final String shareLibPath = "target/share/lib"; + private static final String SHARE_LIB_PREFIX = "lib_"; + private static final String USER = System.getProperty("user.name"); + + @Override + public void contextInitialized(ServletContextEvent sce) { + try { + copyShareLib(); + startBroker(); + startHiveMetaStore(); + } catch (Exception e) { + LOG.error("Unable to start daemons", e); + throw new RuntimeException("Unable to start daemons", e); + } + } + + private void copyShareLib() throws Exception { + String shareLibHDFSPath = getShareLibPath() + File.separator + SHARE_LIB_PREFIX + + getTimestampDirectory(); + Configuration conf = new Configuration(); + FileSystem fs = FileSystem.get(new Path(shareLibHDFSPath).toUri(), conf); + String[] actionDirectories = getLibActionDirectories(); + for(String actionDirectory : actionDirectories) { + LOG.info("Copying Action Directory {0}", actionDirectory); + fs.copyFromLocalFile(new Path(shareLibPath, actionDirectory), new Path(shareLibHDFSPath)); + } + } + + private void startBroker() throws Exception { + broker = new BrokerService(); + broker.setUseJmx(false); + broker.setDataDirectory("target/data"); + broker.addConnector("vm://localhost"); + broker.addConnector("tcp://0.0.0.0:61616"); + broker.start(); + } + + public static final String META_STORE_PORT = "49083"; + private void startHiveMetaStore() { + try { + new Thread(new Runnable() { + @Override + public void run() { + try { + String[] args = new String[]{ + "-v", + "-p", META_STORE_PORT, + }; + + HiveMetaStore.main(args); + } catch (Throwable t) { + throw new RuntimeException(t); + } + } + }).start(); + } catch (Exception e) { + throw new RuntimeException("Unable to start hive metastore server.", e); + } + } + + @Override + public void contextDestroyed(ServletContextEvent sce) { + try { + if (broker != null) { + broker.stop(); + } + } catch(Exception e) { + LOG.warn("Failed to stop activemq", e); + } + } + + private String getShareLibPath() { + return File.separator + "user" + File.separator + USER + File.separator + "share/lib"; + } + + private String getTimestampDirectory() { + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss"); + Date date = new Date(); + return dateFormat.format(date).toString(); + } + + private String[] getLibActionDirectories() { + StringBuilder libActionDirectories = new StringBuilder(); + File f = new File(shareLibPath); + + for(File libDir : f.listFiles()) { + if (libDir.isDirectory()) { + libActionDirectories.append(libDir.getName()).append("\t"); + } + } + String actionDirectories = libActionDirectories.toString(); + return (actionDirectories).substring(0, actionDirectories.lastIndexOf('\t')) + .split("\t"); + } +} http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/core-site.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/core-site.xml b/test-tools/hadoop-webapp/src/main/resources/core-site.xml new file mode 100644 index 0000000..ef1558e --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/core-site.xml @@ -0,0 +1,55 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license 
agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<configuration> + <property> + <name>fs.fsext.impl</name> + <value>org.apache.falcon.hadoop.FileSystemExtension</value> + </property> + + <property> + <name>fs.jail.impl</name> + <value>org.apache.falcon.hadoop.JailedFileSystem</value> + </property> + + <property> + <name>fs.defaultFS</name> + <value>jail://global:00</value> + </property> + + <property> + <name>hadoop.proxyuser.${user.name}.groups</name> + <value>*</value> + <description>Allow the superuser oozie to impersonate any members of the group group1 and group2</description> + </property> + + <property> + <name>hadoop.proxyuser.${user.name}.hosts</name> + <value>*</value> + <description>The superuser can connect only from host1 and host2 to impersonate a user</description> + </property> + + <property> + <name>hadoop.tmp.dir</name> + <value>${project.build.directory}/tmp-hadoop-${user.name}</value> + <description>A base for other temporary directories.</description> + </property> + +</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/hdfs-site.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/hdfs-site.xml b/test-tools/hadoop-webapp/src/main/resources/hdfs-site.xml new file mode 100644 index 0000000..0b929e6 --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/hdfs-site.xml @@ -0,0 +1,64 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<!-- Put site-specific property overrides in this file. --> + +<configuration> + <property> + <name>dfs.replication</name> + <value>1</value> + </property> + + <property> + <name>dfs.datanode.address</name> + <value>0.0.0.0:41110</value> + <description> + The datanode server address and port for data transfer. + If the port is 0 then the server will start on a free port. + </description> + </property> + + <property> + <name>dfs.datanode.http.address</name> + <value>0.0.0.0:41175</value> + <description> + The datanode http server address and port. 
+ If the port is 0 then the server will start on a free port. + </description> + </property> + + <property> + <name>dfs.datanode.ipc.address</name> + <value>0.0.0.0:41120</value> + <description> + The datanode ipc server address and port. + If the port is 0 then the server will start on a free port. + </description> + </property> + + <property> + <name>dfs.http.address</name> + <value>0.0.0.0:41170</value> + <description> + The address and the base port where the dfs namenode web ui will listen on. + If the port is 0 then the server will start on a free port. + </description> + </property> +</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/hive-site.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/hive-site.xml b/test-tools/hadoop-webapp/src/main/resources/hive-site.xml new file mode 100644 index 0000000..1bc8596 --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/hive-site.xml @@ -0,0 +1,60 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> + +<configuration> + <property> + <name>hive.metastore.uris</name> + <value>thrift://localhost:49083</value> + </property> + + <property> + <name>hive.metastore.local</name> + <value>false</value> + </property> + + <property> + <name>fs.defaultFS</name> + <value>jail://global:00</value> + </property> + + <property> + <name>hive.metastore.client.socket.timeout</name> + <value>600</value> + <description>MetaStore Client socket timeout in seconds</description> + </property> + + <property> + <name>hive.hmshandler.retry.attempts</name> + <value>3</value> + <description>The number of times to retry a HMSHandler call if there were a connection error</description> + </property> + + <property> + <name>hive.hmshandler.retry.interval</name> + <value>1000</value> + <description>The number of milliseconds between HMSHandler retry attempts</description> + </property> + + <!-- Forcing the creation of the db dir under target --> + <property> + <name>javax.jdo.option.ConnectionURL</name> + <value>jdbc:derby:;databaseName=./target/metastore_db;create=true</value> + </property> +</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/log4j.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/log4j.xml b/test-tools/hadoop-webapp/src/main/resources/log4j.xml new file mode 100644 index 0000000..d69e921 --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/log4j.xml @@ -0,0 +1,38 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ --> + +<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> + +<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/"> + + <appender name="FILE" class="org.apache.log4j.DailyRollingFileAppender"> + <param name="File" value="${user.dir}/target/logs/jetty.log"/> + <param name="Append" value="true"/> + <param name="Threshold" value="debug"/> + <layout class="org.apache.log4j.PatternLayout"> + <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n"/> + </layout> + </appender> + + <root> + <priority value="info"/> + <appender-ref ref="FILE"/> + </root> + +</log4j:configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/mapred-site.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/mapred-site.xml b/test-tools/hadoop-webapp/src/main/resources/mapred-site.xml new file mode 100644 index 0000000..cf297de --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/mapred-site.xml @@ -0,0 +1,68 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<!-- Put site-specific property overrides in this file. --> + +<configuration> + <property> + <name>mapreduce.jobtracker.address</name> + <value>localhost:41021</value> + </property> + + <property> + <name>mapred.tasktracker.map.tasks.maximum</name> + <value>2</value> + <description>The maximum number of map tasks that will be run + simultaneously by a task tracker. + </description> + </property> + + <property> + <name>mapred.tasktracker.reduce.tasks.maximum</name> + <value>1</value> + <description>The maximum number of reduce tasks that will be run + simultaneously by a task tracker. + </description> + </property> + + <property> + <name>mapreduce.jobtracker.staging.root.dir</name> + <value>/user</value> + </property> + + <property> + <name>mapred.job.tracker.http.address</name> + <value>0.0.0.0:41130</value> + <description> + The job tracker http server address and port the server will listen on. + If the port is 0 then the server will start on a free port. + </description> + </property> + + <property> + <name>mapred.task.tracker.http.address</name> + <value>0.0.0.0:41160</value> + <description> + The task tracker http server address and port. + If the port is 0 then the server will start on a free port. 
+ </description> + </property> + +</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/resources/yarn-site.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/resources/yarn-site.xml b/test-tools/hadoop-webapp/src/main/resources/yarn-site.xml new file mode 100644 index 0000000..658752b --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/resources/yarn-site.xml @@ -0,0 +1,64 @@ +<?xml version="1.0"?> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--> + +<!-- Put site-specific property overrides in this file. --> + +<configuration> + <property> + <description>The address of the applications manager interface in the RM.</description> + <name>yarn.resourcemanager.address</name> + <value>0.0.0.0:41021</value> + </property> + + <property> + <name>mapreduce.framework.name</name> + <value>unittests</value> + </property> + + <property> + <name>yarn.resourcemanager.resource-tracker.address</name> + <value>0.0.0.0:41025</value> + </property> + + <property> + <description>The address of the RM web application.</description> + <name>yarn.resourcemanager.webapp.address</name> + <value>0.0.0.0:41130</value> + </property> + + <property> + <description>The address of the scheduler interface.</description> + <name>yarn.resourcemanager.scheduler.address</name> + <value>0.0.0.0:41030</value> + </property> + + <property> + <description>Address where the localizer IPC is.</description> + <name>yarn.nodemanager.localizer.address</name> + <value>0.0.0.0:41040</value> + </property> + + <property> + <description>The address of the RM admin interface.</description> + <name>yarn.resourcemanager.admin.address</name> + <value>0.0.0.0:41140</value> + </property> + +</configuration> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/main/webapp/WEB-INF/web.xml ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/main/webapp/WEB-INF/web.xml b/test-tools/hadoop-webapp/src/main/webapp/WEB-INF/web.xml new file mode 100644 index 0000000..8b186ad --- /dev/null +++ b/test-tools/hadoop-webapp/src/main/webapp/WEB-INF/web.xml @@ -0,0 +1,32 @@ +<?xml version="1.0" encoding="UTF-8"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + --> + +<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" + "http://java.sun.com/dtd/web-app_2_3.dtd"> + +<web-app> + + <display-name>Apache Falcon Embedded Hadoop - Test Cluster</display-name> + <description>Apache Falcon Embedded Hadoop - Test Cluster</description> + + <listener> + <listener-class>org.apache.falcon.listener.HadoopStartupListener</listener-class> + </listener> + +</web-app> http://git-wip-us.apache.org/repos/asf/falcon/blob/d2fd49e5/test-tools/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java ---------------------------------------------------------------------- diff --git a/test-tools/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java b/test-tools/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java new file mode 100644 index 0000000..193fab0 --- /dev/null +++ b/test-tools/hadoop-webapp/src/test/java/org/apache/hadoop/mapred/LocalRunnerTest.java @@ -0,0 +1,37 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.mapred; + +import org.apache.hadoop.conf.Configuration; +import org.testng.annotations.Test; + +/** + * Test for LocalRunner. + */ +@Test (enabled = false) +public class LocalRunnerTest { + + @SuppressWarnings("unchecked") + public void testLocalRunner() throws Exception { + Configuration conf = new Configuration(); + conf.set("mapreduce.jobtracker.address", "localhost:41021"); + conf.set("mapreduce.framework.name", "unittests"); + JobClient client = new JobClient(new JobConf(conf)); + System.out.println(client.getSystemDir()); + } +}
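----------------------------------------------------------------------
For reference only, and not part of this commit: a minimal sketch of how an integration test might exercise the embedded services that HadoopStartupListener brings up, namely the ActiveMQ broker on tcp://localhost:61616 and the Hive metastore on thrift://localhost:49083 configured above. The class name and package are illustrative, and the metastore client constructor assumed here is the HiveConf-based one available in the Hive versions of this era.

package org.apache.falcon.test;  // illustrative package, not part of this commit

import javax.jms.Connection;

import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

/**
 * Illustrative smoke check against the embedded test cluster services
 * started by HadoopStartupListener.
 */
public final class EmbeddedClusterSmokeCheck {

    private EmbeddedClusterSmokeCheck() {}

    public static void main(String[] args) throws Exception {
        // JMS connection to the embedded ActiveMQ broker opened in startBroker().
        ActiveMQConnectionFactory factory =
                new ActiveMQConnectionFactory("tcp://localhost:61616");
        Connection connection = factory.createConnection();
        connection.start();
        connection.close();

        // Thrift client against the embedded metastore launched in startHiveMetaStore()
        // on META_STORE_PORT (49083), matching hive.metastore.uris in hive-site.xml.
        HiveConf hiveConf = new HiveConf();
        hiveConf.set("hive.metastore.uris", "thrift://localhost:49083");
        HiveMetaStoreClient metaStore = new HiveMetaStoreClient(hiveConf);
        System.out.println("Databases: " + metaStore.getAllDatabases());
        metaStore.close();
    }
}
----------------------------------------------------------------------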
