Author: cos
Date: Fri May 14 23:56:34 2010
New Revision: 944521
URL: http://svn.apache.org/viewvc?rev=944521&view=rev
Log:
HADOOP-6332. Large-scale Automated Test Framework. Contributed by Sharad
Agarwal, Sreekanth Ramakrishnan, Konstantin Boudnik, et al.
Added:
hadoop/common/trunk/ivy/hadoop-core-system-template.xml
hadoop/common/trunk/ivy/hadoop-core-system.pom
hadoop/common/trunk/ivy/hadoop-core-system.xml
hadoop/common/trunk/src/test/all-tests
hadoop/common/trunk/src/test/system/
hadoop/common/trunk/src/test/system/aop/
hadoop/common/trunk/src/test/system/aop/org/
hadoop/common/trunk/src/test/system/aop/org/apache/
hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/
hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/
hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/
hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj
hadoop/common/trunk/src/test/system/c++/
hadoop/common/trunk/src/test/system/c++/runAs/
hadoop/common/trunk/src/test/system/c++/runAs/Makefile.in
hadoop/common/trunk/src/test/system/c++/runAs/configure
hadoop/common/trunk/src/test/system/c++/runAs/configure.ac
hadoop/common/trunk/src/test/system/c++/runAs/main.c
hadoop/common/trunk/src/test/system/c++/runAs/runAs.c
hadoop/common/trunk/src/test/system/c++/runAs/runAs.h.in
hadoop/common/trunk/src/test/system/conf/
hadoop/common/trunk/src/test/system/conf/hadoop-policy-system-test.xml
hadoop/common/trunk/src/test/system/java/
hadoop/common/trunk/src/test/system/java/org/
hadoop/common/trunk/src/test/system/java/org/apache/
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonClient.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/AbstractDaemonCluster.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/ControlAction.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/DaemonProtocol.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/ProcessInfo.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/ProcessInfoImpl.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/ClusterProcessManager.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/MultiUserHadoopDaemonRemoteCluster.java
hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/RemoteProcess.java
hadoop/common/trunk/src/test/system/scripts/
hadoop/common/trunk/src/test/system/scripts/pushConfig.sh
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/build.xml
hadoop/common/trunk/src/test/aop/build/aop.xml
Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=944521&r1=944520&r2=944521&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Fri May 14 23:56:34 2010
@@ -125,6 +125,9 @@ Release 0.21.0 - Unreleased
NEW FEATURES
+    HADOOP-6332. Large-scale Automated Test Framework. (sharad, Sreekanth
+    Ramakrishnan, et al. via cos)
+
HADOOP-4268. Change fsck to use ClientProtocol methods so that the
corresponding permission requirement for running the ClientProtocol
methods will be enforced. (szetszwo)
Modified: hadoop/common/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/build.xml?rev=944521&r1=944520&r2=944521&view=diff
==============================================================================
--- hadoop/common/trunk/build.xml (original)
+++ hadoop/common/trunk/build.xml Fri May 14 23:56:34 2010
@@ -92,6 +92,7 @@
<property name="test.core.build.classes"
value="${test.build.dir}/core/classes"/>
+ <property name="test.all.tests.file" value="${test.src.dir}/all-tests"/>
<property name="javadoc.link.java"
value="http://java.sun.com/javase/6/docs/api/"/>
@@ -565,6 +566,7 @@
description="Make hadoop-fi.jar">
<macro-jar-fault-inject
target.name="jar"
+ build.dir="${build-fi.dir}"
jar.final.name="final.name"
jar.final.value="${final.name}-fi" />
</target>
@@ -618,71 +620,96 @@
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
-  <target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
+ <macrodef name="macro-test-runner">
+ <attribute name="test.file" />
+ <attribute name="classpath" />
+ <attribute name="test.dir" />
+ <attribute name="fileset.dir" />
+ <attribute name="hadoop.conf.dir.deployed" default="" />
+ <sequential>
+ <delete file="${test.build.dir}/testsfailed"/>
+ <delete dir="@{test.dir}/data" />
+ <mkdir dir="@{test.dir}/data" />
+ <delete dir="@{test.dir}/logs" />
+ <mkdir dir="@{test.dir}/logs" />
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="@{test.dir}/extraconf" />
+ <copy file="${test.src.dir}/fi-site.xml"
+ todir="@{test.dir}/extraconf" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}"
+ timeout="${test.timeout}"
+ errorProperty="tests.failed"
+ failureProperty="tests.failed">
+ <jvmarg value="-ea" />
+ <sysproperty key="test.build.data" value="${test.build.data}" />
+ <sysproperty key="test.cache.data" value="${test.cache.data}" />
+ <sysproperty key="test.debug.data" value="${test.debug.data}" />
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}" />
+ <sysproperty key="test.src.dir" value="${test.src.dir}" />
+ <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf"
/>
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples"
value="${install.c++.examples}"/>
+        <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="io.compression.codec.lzo.class"/>
+ </syspropertyset>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="@{classpath}" />
+ <!-- Pass probability specifications to the spawn JVM -->
+ <syspropertyset id="FaultProbabilityProperties">
+ <propertyref regex="fi.*"/>
+ </syspropertyset>
+ <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
+ value="@{hadoop.conf.dir.deployed}" />
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="@{test.dir}" if="tests.notestcase">
+ <fileset dir="@{fileset.dir}/core"
+ excludes="**/${test.exclude}.java aop/** system/**">
+ <patternset>
+ <includesfile name="@{test.file}"/>
+ </patternset>
+ </fileset>
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
+ <fileset dir="@{fileset.dir}/aop"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="@{test.dir}" if="tests.testcase">
+ <fileset dir="@{fileset.dir}/core"
+ includes="**/${testcase}.java" excludes="aop/** system/**"/>
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
+ <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java" />
+ </batchtest>
+        <!-- The following batch is for very special occasions only, when
+             non-FI tests need to be executed against the FI environment -->
+ <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
+ <fileset dir="@{fileset.dir}/core" includes="**/${testcase}.java" />
+ </batchtest>
+ </junit>
+ <antcall target="checkfailure"/>
+ </sequential>
+ </macrodef>
- <delete dir="${test.build.data}"/>
- <mkdir dir="${test.build.data}"/>
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
- <copy file="${test.src.dir}/hadoop-policy.xml"
- todir="${test.build.extraconf}" />
- <copy file="${test.src.dir}/fi-site.xml"
- todir="${test.build.extraconf}" />
- <junit showoutput="${test.output}"
- printsummary="${test.junit.printsummary}"
- haltonfailure="${test.junit.haltonfailure}"
- fork="yes"
- forkmode="${test.junit.fork.mode}"
- maxmemory="${test.junit.maxmemory}"
- dir="${basedir}" timeout="${test.timeout}"
- errorProperty="tests.failed" failureProperty="tests.failed">
- <jvmarg value="-ea" />
- <sysproperty key="test.build.data" value="${test.build.data}"/>
- <sysproperty key="test.cache.data" value="${test.cache.data}"/>
- <sysproperty key="test.debug.data" value="${test.debug.data}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}/core"/>
- <sysproperty key="test.build.extraconf" value="${test.build.extraconf}"
/>
- <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
- <sysproperty key="java.library.path"
- value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
- <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
-      <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="io.compression.codec.lzo.class"/>
- </syspropertyset>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++"/>
- </syspropertyset>
- <classpath refid="test.classpath"/>
- <syspropertyset id="FaultProbabilityProperties">
- <propertyref regex="fi.*"/>
- </syspropertyset>
- <formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" if="tests.notestcase">
- <fileset dir="${test.src.dir}/core"
- includes="**/${test.include}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
- <fileset dir="${test.src.dir}/aop"
- includes="**/${test.include}.java"
- excludes="**/${test.exclude}.java" />
- </batchtest>
- <batchtest todir="${test.build.dir}" if="tests.testcase">
- <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
- </batchtest>
- <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
- <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
- </batchtest>
- <!--The following batch is for very special occasions only when
- a non-FI tests are needed to be executed against FI-environment -->
- <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
- <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
- </batchtest>
- </junit>
- <antcall target="checkfailure"/>
+  <target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
+ <macro-test-runner test.file="${test.all.tests.file}"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+                       fileset.dir="${test.src.dir}" />
</target>
<target name="checkfailure" if="tests.failed">
@@ -1175,7 +1202,8 @@
classpathref="mvn-ant-task.classpath"/>
</target>
- <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,set-version"
+ <target name="mvn-install" depends="mvn-taskdef,jar,jar-test,
+ -mvn-system-install,set-version"
description="To install hadoop core and test jars to local filesystem's
m2 cache">
<artifact:pom file="${hadoop-core.pom}" id="hadoop.core"/>
<artifact:pom file="${hadoop-core-test.pom}" id="hadoop.core.test"/>
Added: hadoop/common/trunk/ivy/hadoop-core-system-template.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/hadoop-core-system-template.xml?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/ivy/hadoop-core-system-template.xml (added)
+++ hadoop/common/trunk/ivy/hadoop-core-system-template.xml Fri May 14 23:56:34 2010
@@ -0,0 +1,127 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core</artifactId>
+ <packaging>jar</packaging>
+ <version>@version</version>
+ <dependencies>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.2</version>
+ </dependency>
+ <dependency>
+ <groupId>xmlenc</groupId>
+ <artifactId>xmlenc</artifactId>
+ <version>0.52</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.0.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>1.4</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-net</groupId>
+ <artifactId>commons-net</artifactId>
+ <version>1.4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-runtime</artifactId>
+ <version>5.5.12</version>
+ </dependency>
+ <dependency>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-compiler</artifactId>
+ <version>5.5.12</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-api-2.1</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-2.1</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ <version>1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>net.java.dev.jets3t</groupId>
+ <artifactId>jets3t</artifactId>
+ <version>0.7.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-net</groupId>
+ <artifactId>commons-net</artifactId>
+ <version>1.4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>servlet-api-2.5</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>net.sf.kosmosfs</groupId>
+ <artifactId>kfs</artifactId>
+ <version>0.3</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.5</version>
+ </dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.8.0.10</version>
+ </dependency>
+ <dependency>
+ <groupId>oro</groupId>
+ <artifactId>oro</artifactId>
+ <version>2.0.8</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>avro</artifactId>
+ <version>1.3.0</version>
+ </dependency>
+ </dependencies>
+</project>
Added: hadoop/common/trunk/ivy/hadoop-core-system.pom
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/hadoop-core-system.pom?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/ivy/hadoop-core-system.pom (added)
+++ hadoop/common/trunk/ivy/hadoop-core-system.pom Fri May 14 23:56:34 2010
@@ -0,0 +1,258 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ -->
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core-system</artifactId>
+ <packaging>jar</packaging>
+ <version>${hadoop.version}</version>
+ <description>
+ Hadoop is the distributed computing framework of Apache;
+    hadoop-core-system contains the shared classes of the embedded
+    Hadoop test framework used for system testing.
+ </description>
+ <licenses>
+ <license>
+ <name>Apache License, Version 2.0</name>
+ <url>http://apache.org/licenses/LICENSE-2.0</url>
+ </license>
+ </licenses>
+ <dependencies>
+
+
+ <!-- always include commons-logging and log4J -->
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>${commons-logging.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>avalon-framework</groupId>
+ <artifactId>avalon-framework</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>logkit</groupId>
+ <artifactId>logkit</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ <version>${log4j.version}</version>
+ <scope>optional</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>javax.mail</groupId>
+ <artifactId>mail</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.jms</groupId>
+ <artifactId>jms</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jdmk</groupId>
+ <artifactId>jmxtools</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jmx</groupId>
+ <artifactId>jmxri</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <!--SLF4J is a JAR-based dependency; this POM binds it to log4J-->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>${slf4j-api.version}</version>
+ <scope>optional</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>${slf4j-log4j12.version}</version>
+ <scope>optional</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <!--Httpclient and its components are optional-->
+
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.1</version>
+ <scope>optional</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>1.3</version>
+ <scope>optional</scope>
+ </dependency>
+
+    <!--CLI is needed to scan the command line, but only the 1.0 branch is released -->
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>2.0-20070823</version>
+ <scope>optional</scope>
+ </dependency>
+
+
+ <!-- this is used for the ftp:// filesystem-->
+ <dependency>
+ <groupId>commons-net</groupId>
+ <artifactId>commons-net</artifactId>
+ <version>1.4.1</version>
+ <scope>optional</scope>
+ </dependency>
+
+    <!-- Jetty is used to serve up the application. It is marked as optional
+         because clients do not need it. All server-side deployments will
+         need all of these files. -->
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>${servlet-api.version}</version>
+ <scope>optional</scope>
+ </dependency>
+ <dependency>
+ <groupId>jetty</groupId>
+ <artifactId>org.mortbay.jetty</artifactId>
+ <version>${jetty.version}</version>
+ <scope>optional</scope>
+ </dependency>
+
+
+ <!--JSP support -->
+
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-2.1</artifactId>
+ <version>${jetty.version}</version>
+ <scope>optional</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-api-2.1</artifactId>
+ <version>${jetty.version}</version>
+ <scope>optional</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ <version>${commons-el.version}</version>
+ <scope>optional</scope>
+ </dependency>
+
+
+ <!--JSPC assistance-->
+
+ <dependency>
+ <groupId>org.eclipse.jdt</groupId>
+ <artifactId>core</artifactId>
+ <version>${core.version}</version>
+ <scope>optional</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.ant</groupId>
+ <artifactId>ant</artifactId>
+ <version>${apacheant.version}</version>
+ <scope>optional</scope>
+ </dependency>
+
+ <!-- JetS3t is a client library for S3.
+ -It is only needed if you want to work with S3 filesystems
+    -It pulls in commons-logging 1.1.1 and does not exclude all the cruft that comes with it.
+ By excluding it we stay in control of versions and dependencies
+ -->
+
+ <dependency>
+ <groupId>net.java.dev.jets3t</groupId>
+ <artifactId>jets3t</artifactId>
+ <version>${jets3t.version}</version>
+ <scope>optional</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <!--Kosmos filesystem
+ http://kosmosfs.sourceforge.net/
+ This is not in the central repository
+ -->
+ <!--
+ <dependency>
+ <groupId>org.kosmix</groupId>
+ <artifactId>kfs</artifactId>
+ <version>0.1</version>
+ <scope>optional</scope>
+ </dependency>
+ -->
+
+ <!--
+ http://xmlenc.sourceforge.net/
+ "The xmlenc library is a fast stream-based XML output library for Java."
+ -->
+ <dependency>
+ <groupId>xmlenc</groupId>
+ <artifactId>xmlenc</artifactId>
+ <version>0.52</version>
+ <scope>optional</scope>
+ </dependency>
+ </dependencies>
+</project>
Added: hadoop/common/trunk/ivy/hadoop-core-system.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/hadoop-core-system.xml?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/ivy/hadoop-core-system.xml (added)
+++ hadoop/common/trunk/ivy/hadoop-core-system.xml Fri May 14 23:56:34 2010
@@ -0,0 +1,127 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-core-system</artifactId>
+ <packaging>jar</packaging>
+ <version>0.22.0-SNAPSHOT</version>
+ <dependencies>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.2</version>
+ </dependency>
+ <dependency>
+ <groupId>xmlenc</groupId>
+ <artifactId>xmlenc</artifactId>
+ <version>0.52</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.0.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ <version>1.4</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-net</groupId>
+ <artifactId>commons-net</artifactId>
+ <version>1.4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jetty-util</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-runtime</artifactId>
+ <version>5.5.12</version>
+ </dependency>
+ <dependency>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-compiler</artifactId>
+ <version>5.5.12</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-api-2.1</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-2.1</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ <version>1.0</version>
+ </dependency>
+ <dependency>
+ <groupId>net.java.dev.jets3t</groupId>
+ <artifactId>jets3t</artifactId>
+ <version>0.7.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-net</groupId>
+ <artifactId>commons-net</artifactId>
+ <version>1.4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>servlet-api-2.5</artifactId>
+ <version>6.1.14</version>
+ </dependency>
+ <dependency>
+ <groupId>net.sf.kosmosfs</groupId>
+ <artifactId>kfs</artifactId>
+ <version>0.3</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.5</version>
+ </dependency>
+ <dependency>
+ <groupId>hsqldb</groupId>
+ <artifactId>hsqldb</artifactId>
+ <version>1.8.0.10</version>
+ </dependency>
+ <dependency>
+ <groupId>oro</groupId>
+ <artifactId>oro</artifactId>
+ <version>2.0.8</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>avro</artifactId>
+ <version>1.3.0</version>
+ </dependency>
+ </dependencies>
+</project>
Added: hadoop/common/trunk/src/test/all-tests
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/all-tests?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/all-tests (added)
+++ hadoop/common/trunk/src/test/all-tests Fri May 14 23:56:34 2010
@@ -0,0 +1 @@
+**/Test*.java
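(This single pattern is what macro-test-runner reads through its
<includesfile name="@{test.file}"/> element, so run-test-core picks up every
Test*.java under src/test/core by default; pointing the test.file attribute
at a different list file narrows the run.)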
Modified: hadoop/common/trunk/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/aop/build/aop.xml?rev=944521&r1=944520&r2=944521&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/aop/build/aop.xml (original)
+++ hadoop/common/trunk/src/test/aop/build/aop.xml Fri May 14 23:56:34 2010
@@ -14,13 +14,27 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<project name="aspects">
+<project name="aspects"
+ xmlns:artifact="urn:maven-artifact-ant">
+ <!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar"
/>
<property name="compile-inject.output"
value="${build-fi.dir}/compile-fi.log"/>
<property name="aspectversion" value="1.6.5"/>
<property file="${basedir}/build.properties"/>
+ <!-- Properties related to system fault injection and tests -->
+ <property name="system-test-build-dir" value="${build-fi.dir}/system"/>
+
+ <!-- Properties specifically for system fault-injections and system tests -->
+
+ <property name="hadoop-core-system.pom"
+ location="${ivy.dir}/hadoop-core-system.xml" />
+ <property name="hadoop-core-system.jar"
+ location="${system-test-build-dir}/${final.name}-system.jar" />
+ <property name="hadoop-core-system-sources.jar"
+            location="${system-test-build-dir}/${final.name}-system-sources.jar" />
+
<!--All Fault Injection (FI) related targets are located in this session -->
<target name="clean-fi">
@@ -46,15 +60,19 @@
<echo message="Start weaving aspects in place"/>
<iajc
encoding="${build.encoding}"
- srcdir="${java.src.dir};${build.src};${test.src.dir}/aop"
- includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
+ srcdir="${java.src.dir};${build.src};${src.dir.path}"
+ includes="**/org/apache/hadoop/**/*.java, **/org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/classification/tools/**/*,
org/apache/hadoop/record/**/*"
- destDir="${build.classes}"
+ destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
- <classpath refid="test.classpath"/>
+
+ <classpath>
+ <path refid="test.classpath"/>
+ </classpath>
+
</iajc>
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
<filterchain>
@@ -69,15 +87,76 @@
<echo message="Weaving of aspects is finished"/>
</target>
- <target name="injectfaults"
- description="Instrument classes with faults and other AOP advices">
+ <!-- Classpath for running system tests -->
+ <path id="test.system.classpath">
+ <pathelement location="${hadoop.conf.dir.deployed}" />
+ <pathelement location="${system-test-build-dir}/test/extraconf" />
+ <pathelement location="${system-test-build-dir}/test/classes" />
+ <pathelement location="${system-test-build-dir}/classes" />
+ <pathelement location="${test.src.dir}" />
+ <pathelement location="${build-fi.dir}" />
+ <pathelement location="${build-fi.dir}/tools" />
+ <pathelement path="${clover.jar}" />
+ <fileset dir="${test.lib.dir}">
+ <include name="**/*.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <fileset dir="${system-test-build-dir}">
+ <include name="**/*.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <path refid="classpath" />
+ </path>
+
+ <target name="injectfaults"
+ description="Instrument classes with faults and other AOP advices">
    <!-- mkdir to prevent <subant> failure in case the folder has been removed -->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
- <subant buildpath="${basedir}" target="compile-fault-inject"
- output="${compile-inject.output}">
- <property name="build.dir" value="${build-fi.dir}"/>
- </subant>
+ <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
+ src.dir="${test.src.dir}/aop">
+ </weave-injectfault-aspects>
+ </target>
+
+ <!-- =============================================================== -->
+ <!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
+ <!-- cluster for system tests -->
+ <!-- =============================================================== -->
+ <target name="jar-system"
+ depends="inject-system-faults"
+ description="make hadoop.jar">
+ <macro-jar-fault-inject target.name="jar"
+ build.dir="${system-test-build-dir}"
+ jar.final.name="final.name"
+ jar.final.value="${final.name}-system">
+ </macro-jar-fault-inject>
+ <jar jarfile="${system-test-build-dir}/${final.name}-system-sources.jar"
+ update="yes">
+ <fileset dir="${test.src.dir}/system/java"
includes="org/apache/hadoop/**/*.java"/>
+ <fileset dir="${test.src.dir}/system/aop"
includes="org/apache/hadoop/**/*.aj"/>
+ </jar>
+ </target>
+
+ <macrodef name="weave-injectfault-aspects">
+ <attribute name="dest.dir" />
+ <attribute name="src.dir" />
+ <sequential>
+ <subant buildpath="build.xml" target="compile-fault-inject"
+ output="${compile-inject.output}">
+ <property name="build.dir" value="${build-fi.dir}" />
+ <property name="src.dir.path" value="@{src.dir}" />
+ <property name="dest.dir" value="@{dest.dir}" />
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <target name="inject-system-faults" description="Inject system faults">
+ <property name="build-fi.dir" value="${system-test-build-dir}" />
+ <mkdir dir="${build-fi.dir}"/>
+ <delete file="${compile-inject.output}"/>
+ <weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
+ src.dir="${test.src.dir}/system">
+ </weave-injectfault-aspects>
</target>
<macrodef name="macro-run-tests-fault-inject">
@@ -99,11 +178,12 @@
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
+ <attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
- <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
@@ -129,4 +209,53 @@
</macrodef>
<!--End of Fault Injection (FI) related session-->
+
+ <!-- Start of cluster controller binary target -->
+ <property name="runAs.src"
+ value ="${test.src.dir}/system/c++/runAs"/>
+ <property name="runAs.build.dir"
+ value="${system-test-build-dir}/c++-build"/>
+ <property name="runAs.configure.script"
+ value="${runAs.build.dir}/configure"/>
+ <target name="init-runAs-build">
+ <condition property="runAs.parameters.passed">
+ <not>
+ <equals arg1="${run-as.hadoop.home.dir}"
+ arg2="$${run-as.hadoop.home.dir}"/>
+ </not>
+ </condition>
+    <fail unless="runAs.parameters.passed"
+          message="Required parameter run-as.hadoop.home.dir was not passed to the build"/>
+ <mkdir dir="${runAs.build.dir}"/>
+ <copy todir="${runAs.build.dir}" overwrite="true">
+ <fileset dir="${runAs.src}" includes="**/*"/>
+ </copy>
+ <chmod perm="+x" file="${runAs.configure.script}">
+ </chmod>
+ </target>
+
+ <target name="configure-runAs"
+ depends="init-runAs-build">
+ <exec executable="${runAs.configure.script}"
+ dir="${runAs.build.dir}" failonerror="true">
+ <arg value="--with-home=${run-as.hadoop.home.dir}"/>
+ </exec>
+ </target>
+ <target name="run-as" depends="configure-runAs">
+ <exec executable="${make.cmd}" dir="${runAs.build.dir}"
+ searchpath="yes" failonerror="yes">
+ <arg value="all" />
+ </exec>
+ </target>
+ <!-- End of cluster controller binary target -->
+ <!-- Maven -->
+
+ <target name="-mvn-system-install" depends="mvn-taskdef, jar-system">
+ <artifact:pom file="${hadoop-core-system.pom}" id="hadoop.core.system"/>
+ <artifact:install file="${hadoop-core-system.jar}">
+ <pom refid="hadoop.core.system"/>
+ <attach file="${hadoop-core-system-sources.jar}" classifier="sources" />
+ </artifact:install>
+ </target>
+
</project>
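For reference, init-runAs-build uses the <equals> comparison above to detect
whether run-as.hadoop.home.dir was actually supplied: an unset Ant property
stays as its literal ${run-as.hadoop.home.dir} text, which is exactly what
the escaped $${...} form expands to. The controller binary can therefore be
built with something like

    ant run-as -Drun-as.hadoop.home.dir=/path/to/deployed/hadoop

where the path value is only illustrative; it must point at the Hadoop home
directory deployed on the cluster.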
Added:
hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj (added)
+++ hadoop/common/trunk/src/test/system/aop/org/apache/hadoop/test/system/DaemonProtocolAspect.aj Fri May 14 23:56:34 2010
@@ -0,0 +1,287 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.test.system;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.Properties;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Default DaemonProtocolAspect, used to provide the default implementation
+ * of all the common daemon methods. If a daemon requires a more specialized
+ * version of a method, it is the responsibility of the DaemonClient to
+ * introduce the same in the woven classes.
+ *
+ */
+public aspect DaemonProtocolAspect {
+
+ private boolean DaemonProtocol.ready;
+
+ @SuppressWarnings("unchecked")
+ private HashMap<Object, List<ControlAction>> DaemonProtocol.actions =
+ new HashMap<Object, List<ControlAction>>();
+ private static final Log LOG = LogFactory.getLog(
+ DaemonProtocolAspect.class.getName());
+ /**
+   * Sets whether the daemon process is ready or not. A concrete daemon
+   * protocol should implement pointcuts to determine when the daemon is
+   * ready and use this setter to set the ready state.
+ *
+ * @param ready
+ * true if the Daemon is ready.
+ */
+ public void DaemonProtocol.setReady(boolean ready) {
+ this.ready = ready;
+ }
+
+ /**
+ * Checks if the daemon process is alive or not.
+ *
+ * @throws IOException
+ * if daemon is not alive.
+ */
+ public void DaemonProtocol.ping() throws IOException {
+ }
+
+ /**
+   * Checks if the daemon process is ready to accept RPC connections after it
+ * finishes initialization. <br/>
+ *
+ * @return true if ready to accept connection.
+ *
+ * @throws IOException
+ */
+ public boolean DaemonProtocol.isReady() throws IOException {
+ return ready;
+ }
+
+ /**
+   * Returns the process-related information regarding the daemon process. <br/>
+ *
+ * @return process information.
+ * @throws IOException
+ */
+ public ProcessInfo DaemonProtocol.getProcessInfo() throws IOException {
+ int activeThreadCount = Thread.activeCount();
+ long currentTime = System.currentTimeMillis();
+ long maxmem = Runtime.getRuntime().maxMemory();
+ long freemem = Runtime.getRuntime().freeMemory();
+ long totalmem = Runtime.getRuntime().totalMemory();
+ Map<String, String> envMap = System.getenv();
+ Properties sysProps = System.getProperties();
+ Map<String, String> props = new HashMap<String, String>();
+ for (Map.Entry entry : sysProps.entrySet()) {
+ props.put((String) entry.getKey(), (String) entry.getValue());
+ }
+ ProcessInfo info = new ProcessInfoImpl(activeThreadCount, currentTime,
+ freemem, maxmem, totalmem, envMap, props);
+ return info;
+ }
+
+ public void DaemonProtocol.enable(List<Enum<?>> faults) throws IOException {
+ }
+
+ public void DaemonProtocol.disableAll() throws IOException {
+ }
+
+ public abstract Configuration DaemonProtocol.getDaemonConf()
+ throws IOException;
+
+ public FileStatus DaemonProtocol.getFileStatus(String path, boolean local)
+ throws IOException {
+ Path p = new Path(path);
+ FileSystem fs = getFS(p, local);
+    p = p.makeQualified(fs);
+ FileStatus fileStatus = fs.getFileStatus(p);
+ return cloneFileStatus(fileStatus);
+ }
+
+ public FileStatus[] DaemonProtocol.listStatus(String path, boolean local)
+ throws IOException {
+ Path p = new Path(path);
+ FileSystem fs = getFS(p, local);
+ FileStatus[] status = fs.listStatus(p);
+ if (status != null) {
+ FileStatus[] result = new FileStatus[status.length];
+ int i = 0;
+ for (FileStatus fileStatus : status) {
+ result[i++] = cloneFileStatus(fileStatus);
+ }
+ return result;
+ }
+ return status;
+ }
+
+ /**
+ * FileStatus object may not be serializable. Clone it into raw FileStatus
+ * object.
+ */
+ private FileStatus DaemonProtocol.cloneFileStatus(FileStatus fileStatus) {
+ return new FileStatus(fileStatus.getLen(),
+ fileStatus.isDir(),
+ fileStatus.getReplication(),
+ fileStatus.getBlockSize(),
+ fileStatus.getModificationTime(),
+ fileStatus.getAccessTime(),
+ fileStatus.getPermission(),
+ fileStatus.getOwner(),
+ fileStatus.getGroup(),
+ fileStatus.getPath());
+ }
+
+ private FileSystem DaemonProtocol.getFS(final Path path, final boolean local)
+ throws IOException {
+ FileSystem ret = null;
+ try {
+ ret = UserGroupInformation.getLoginUser().doAs (
+ new PrivilegedExceptionAction<FileSystem>() {
+ public FileSystem run() throws IOException {
+ FileSystem fs = null;
+ if (local) {
+ fs = FileSystem.getLocal(getDaemonConf());
+ } else {
+ fs = path.getFileSystem(getDaemonConf());
+ }
+ return fs;
+ }
+ });
+    } catch (InterruptedException ie) {
+      // ignore the interrupt; the caller gets a null FileSystem in this case
+    }
+ return ret;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ControlAction[] DaemonProtocol.getActions(Writable key)
+ throws IOException {
+ synchronized (actions) {
+ List<ControlAction> actionList = actions.get(key);
+ if(actionList == null) {
+ return new ControlAction[0];
+ } else {
+        return (ControlAction[]) actionList.toArray(
+            new ControlAction[actionList.size()]);
+ }
+ }
+ }
+
+
+ @SuppressWarnings("unchecked")
+ public void DaemonProtocol.sendAction(ControlAction action)
+ throws IOException {
+ synchronized (actions) {
+ List<ControlAction> actionList = actions.get(action.getTarget());
+ if(actionList == null) {
+ actionList = new ArrayList<ControlAction>();
+ actions.put(action.getTarget(), actionList);
+ }
+ actionList.add(action);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public boolean DaemonProtocol.isActionPending(ControlAction action)
+ throws IOException{
+ synchronized (actions) {
+ List<ControlAction> actionList = actions.get(action.getTarget());
+ if(actionList == null) {
+ return false;
+ } else {
+ return actionList.contains(action);
+ }
+ }
+ }
+
+
+ @SuppressWarnings("unchecked")
+ public void DaemonProtocol.removeAction(ControlAction action)
+ throws IOException {
+ synchronized (actions) {
+ List<ControlAction> actionList = actions.get(action.getTarget());
+ if(actionList == null) {
+ return;
+ } else {
+ actionList.remove(action);
+ }
+ }
+ }
+
+ public void DaemonProtocol.clearActions() throws IOException {
+ synchronized (actions) {
+ actions.clear();
+ }
+ }
+
+ public String DaemonProtocol.getFilePattern() {
+    // We use the HADOOP_LOG_DIR and HADOOP_LOGFILE environment variables
+    // to build the file pattern used in the search.
+ String logDir = System.getenv("HADOOP_LOG_DIR");
+ String daemonLogPattern = System.getenv("HADOOP_LOGFILE");
+    if (daemonLogPattern == null || daemonLogPattern.isEmpty()) {
+ return "*";
+ }
+ return logDir+File.separator+daemonLogPattern+"*";
+ }
+
+ public int DaemonProtocol.getNumberOfMatchesInLogFile(String pattern,
+ String[] list) throws IOException {
+ StringBuffer filePattern = new StringBuffer(getFilePattern());
+    if (list != null) {
+      for (int i = 0; i < list.length; ++i) {
+        filePattern.append(" | grep -v " + list[i]);
+      }
+    }
+ String[] cmd =
+ new String[] {
+ "bash",
+ "-c",
+ "grep -c "
+ + pattern + " " + filePattern
+ + " | awk -F: '{s+=$2} END {print s}'" };
+ ShellCommandExecutor shexec = new ShellCommandExecutor(cmd);
+ shexec.execute();
+ String output = shexec.getOutput();
+ return Integer.parseInt(output.replaceAll("\n", "").trim());
+ }
+
+ private String DaemonProtocol.user = null;
+
+ public String DaemonProtocol.getDaemonUser() {
+ return user;
+ }
+
+ public void DaemonProtocol.setUser(String user) {
+ this.user = user;
+ }
+}
+
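As the setReady() javadoc above notes, a concrete daemon is expected to
contribute its own pointcut that flips the ready flag once start-up completes.
A minimal AspectJ sketch, assuming a hypothetical daemon class FooDaemon with
an initialize() method (neither name is part of this commit):

    package org.apache.hadoop.test.system;

    public aspect FooDaemonReadyAspect {
      // Fires once FooDaemon.initialize(..) returns normally, i.e. when
      // the daemon has finished its start-up sequence.
      after(FooDaemon daemon) returning :
          execution(void FooDaemon.initialize(..)) && this(daemon) {
        // FooDaemon is assumed to be woven to implement DaemonProtocol,
        // so the inter-type setReady(boolean) declared above is available.
        ((DaemonProtocol) daemon).setReady(true);
      }
    }
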
Added: hadoop/common/trunk/src/test/system/c++/runAs/Makefile.in
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/system/c%2B%2B/runAs/Makefile.in?rev=944521&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/system/c++/runAs/Makefile.in (added)
+++ hadoop/common/trunk/src/test/system/c++/runAs/Makefile.in Fri May 14 23:56:34 2010
@@ -0,0 +1,41 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+OBJS=main.o runAs.o
+CC = @CC@
+CFLAGS = @CFLAGS@
+BINARY=runAs
+installdir = @prefix@
+
+all: $(OBJS)
+	$(CC) $(CFLAGS) -o $(BINARY) $(OBJS)
+
+main.o: runAs.o main.c
+	$(CC) $(CFLAGS) -o main.o -c main.c
+
+runAs.o: runAs.h runAs.c
+	$(CC) $(CFLAGS) -o runAs.o -c runAs.c
+
+clean:
+ rm -rf $(BINARY) $(OBJS) $(TESTOBJS)
+
+install: all
+ cp $(BINARY) $(installdir)
+
+uninstall:
+ rm -rf $(installdir)/$(BINARY)
+ rm -rf $(BINARY)
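
Note that this Makefile is an autoconf template: the configure script
generated from configure.ac substitutes @CC@, @CFLAGS@ and @prefix@ before
make runs, which is what the configure-runAs and run-as targets in aop.xml
drive (the --with-home value passed by configure-runAs presumably feeds
runAs.h.in rather than this file).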