Author: suresh
Date: Thu Jan 3 21:23:58 2013
New Revision: 1428601
URL: http://svn.apache.org/viewvc?rev=1428601&view=rev
Log:
Merging trunk changes to branch-trunk-win
Added:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/HttpAuthentication.apt.vm
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/HttpAuthentication.apt.vm
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
- copied unchanged from r1428155,
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeLibraryChecker.java
Removed:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/HttpAuthentication.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/cluster_setup.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/commands_manual.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/src/documentation/content/xdocs/file_system_shell.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
(contents, props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
(props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
(props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/site/apt/ClusterSetup.apt.vm
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
(props changed)
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDU.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemBaseTest.java
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
Thu Jan 3 21:23:58 2013
@@ -132,9 +132,6 @@ Trunk (Unreleased)
HADOOP-8776. Provide an option in test-patch that can enable/disable
compiling native code. (Chris Nauroth via suresh)
- HADOOP-9004. Allow security unit tests to use external KDC. (Stephen Chu
- via suresh)
-
HADOOP-6616. Improve documentation for rack awareness. (Adam Faris via
jghoman)
@@ -144,6 +141,11 @@ Trunk (Unreleased)
HADOOP-9093. Move all the Exception in PathExceptions to o.a.h.fs package.
(suresh)
+ HADOOP-9140 Cleanup rpc PB protos (sanjay Radia)
+
+ HADOOP-9162. Add utility to check native library availability.
+ (Binglin Chang via suresh)
+
BUG FIXES
HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in
@@ -410,6 +412,14 @@ Release 2.0.3-alpha - Unreleased
HADOOP-9127. Update documentation for ZooKeeper Failover Controller.
(Daisuke Kobayashi via atm)
+ HADOOP-9004. Allow security unit tests to use external KDC. (Stephen Chu
+ via suresh)
+
+ HADOOP-9147. Add missing fields to FileStatus.toString.
+ (Jonathan Allen via suresh)
+
+ HADOOP-8427. Convert Forrest docs to APT, incremental. (adi2 via tucu)
+
OPTIMIZATIONS
HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -500,6 +510,12 @@ Release 2.0.3-alpha - Unreleased
HADOOP-9135. JniBasedUnixGroupsMappingWithFallback should log at debug
rather than info during fallback. (Colin Patrick McCabe via todd)
+ HADOOP-9152. HDFS can report negative DFS Used on clusters with very small
+ amounts of data. (Brock Noland via atm)
+
+ HADOOP-9153. Support createNonRecursive in ViewFileSystem.
+ (Sandy Ryza via tomwhite)
+
Release 2.0.2-alpha - 2012-09-07
INCOMPATIBLE CHANGES
@@ -1211,6 +1227,8 @@ Release 0.23.6 - UNRELEASED
HADOOP-9038. unit-tests for AllocatorPerContext.PathIterator (Ivan A.
Veselovsky via bobby)
+ HADOOP-9105. FsShell -moveFromLocal erroneously fails (daryn via bobby)
+
Release 0.23.5 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1423068-1428155
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
Thu Jan 3 21:23:58 2013
@@ -260,7 +260,7 @@
</Match>
<Match>
<!-- protobuf generated code -->
- <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.HadoopRpcProtos.*"/>
+ <Class
name="~org\.apache\.hadoop\.ipc\.protobuf\.ProtobufRpcEngineProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
@@ -272,7 +272,7 @@
</Match>
<Match>
<!-- protobuf generated code -->
- <Class
name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcPayloadHeaderProtos.*"/>
+ <Class name="~org\.apache\.hadoop\.ipc\.protobuf\.RpcHeaderProtos.*"/>
</Match>
<Match>
<!-- protobuf generated code -->
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/pom.xml
Thu Jan 3 21:23:58 2013
@@ -402,9 +402,9 @@
<argument>src/main/proto/HAServiceProtocol.proto</argument>
<argument>src/main/proto/IpcConnectionContext.proto</argument>
<argument>src/main/proto/ProtocolInfo.proto</argument>
- <argument>src/main/proto/RpcPayloadHeader.proto</argument>
+ <argument>src/main/proto/RpcHeader.proto</argument>
<argument>src/main/proto/ZKFCProtocol.proto</argument>
- <argument>src/main/proto/hadoop_rpc.proto</argument>
+ <argument>src/main/proto/ProtobufRpcEngine.proto</argument>
</arguments>
</configuration>
</execution>
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/bin/hadoop
Thu Jan 3 21:23:58 2013
@@ -31,6 +31,7 @@ function print_usage(){
echo " fs run a generic filesystem user client"
echo " version print the version"
echo " jar <jar> run a jar file"
+ echo " checknative [-a|-h] check native hadoop and compression libraries
availability"
echo " distcp <srcurl> <desturl> copy file or directories recursively"
echo " archive -archiveName NAME -p <parent path> <src>* <dest> create a
hadoop archive"
echo " classpath prints the class path needed to get the"
@@ -100,6 +101,8 @@ case $COMMAND in
CLASS=org.apache.hadoop.util.VersionInfo
elif [ "$COMMAND" = "jar" ] ; then
CLASS=org.apache.hadoop.util.RunJar
+ elif [ "$COMMAND" = "checknative" ] ; then
+ CLASS=org.apache.hadoop.util.NativeLibraryChecker
elif [ "$COMMAND" = "distcp" ] ; then
CLASS=org.apache.hadoop.tools.DistCp
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
Propchange:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1423068-1428155
Propchange:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged
/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1423068-1428155
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DU.java
Thu Jan 3 21:23:58 2013
@@ -136,7 +136,7 @@ public class DU extends Shell {
}
}
- return used.longValue();
+ return Math.max(used.longValue(), 0L);
}
/**
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
Thu Jan 3 21:23:58 2013
@@ -349,9 +349,15 @@ public class FileStatus implements Writa
sb.append("; replication=" + block_replication);
sb.append("; blocksize=" + blocksize);
}
+ sb.append("; modification_time=" + modification_time);
+ sb.append("; access_time=" + access_time);
sb.append("; owner=" + owner);
sb.append("; group=" + group);
sb.append("; permission=" + permission);
+ sb.append("; isSymlink=" + isSymlink());
+ if(isSymlink()) {
+ sb.append("; symlink=" + symlink);
+ }
sb.append("}");
return sb.toString();
}
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
Thu Jan 3 21:23:58 2013
@@ -166,6 +166,18 @@ public class FilterFileSystem extends Fi
return fs.create(f, permission,
overwrite, bufferSize, replication, blockSize, progress);
}
+
+
+
+ @Override
+ @Deprecated
+ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+ EnumSet<CreateFlag> flags, int bufferSize, short replication, long
blockSize,
+ Progressable progress) throws IOException {
+
+ return fs.createNonRecursive(f, permission, flags, bufferSize,
replication, blockSize,
+ progress);
+ }
/**
* Set replication for an existing file.
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
Thu Jan 3 21:23:58 2013
@@ -30,6 +30,7 @@ import java.io.FileDescriptor;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.EnumSet;
import java.util.StringTokenizer;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -281,6 +282,18 @@ public class RawLocalFileSystem extends
return new FSDataOutputStream(new BufferedOutputStream(
new LocalFSFileOutputStream(f, false), bufferSize), statistics);
}
+
+ @Override
+ @Deprecated
+ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+ EnumSet<CreateFlag> flags, int bufferSize, short replication, long
blockSize,
+ Progressable progress) throws IOException {
+ if (exists(f) && !flags.contains(CreateFlag.OVERWRITE)) {
+ throw new IOException("File already exists: "+f);
+ }
+ return new FSDataOutputStream(new BufferedOutputStream(
+ new LocalFSFileOutputStream(f, false), bufferSize), statistics);
+ }
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
Thu Jan 3 21:23:58 2013
@@ -311,6 +311,7 @@ abstract public class Command extends Co
if (recursive && item.stat.isDirectory()) {
recursePath(item);
}
+ postProcessPath(item);
} catch (IOException e) {
displayError(e);
}
@@ -330,6 +331,15 @@ abstract public class Command extends Co
}
/**
+ * Hook for commands to implement an operation to be applied on each
+ * path for the command after being processed successfully
+ * @param item a {@link PathData} object
+ * @throws IOException if anything goes wrong...
+ */
+ protected void postProcessPath(PathData item) throws IOException {
+ }
+
+ /**
* Gets the directory listing for a path and invokes
* {@link #processPaths(PathData, PathData...)}
* @param item {@link PathData} for directory to recurse into
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/MoveCommands.java
Thu Jan 3 21:23:58 2013
@@ -24,6 +24,7 @@ import java.util.LinkedList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.fs.PathExistsException;
import org.apache.hadoop.fs.shell.CopyCommands.CopyFromLocal;
/** Various commands for moving files */
@@ -49,7 +50,21 @@ class MoveCommands {
@Override
protected void processPath(PathData src, PathData target) throws
IOException {
- target.fs.moveFromLocalFile(src.path, target.path);
+ // unlike copy, don't merge existing dirs during move
+ if (target.exists && target.stat.isDirectory()) {
+ throw new PathExistsException(target.toString());
+ }
+ super.processPath(src, target);
+ }
+
+ @Override
+ protected void postProcessPath(PathData src) throws IOException {
+ if (!src.fs.delete(src.path, false)) {
+ // we have no way to know the actual error...
+ PathIOException e = new PathIOException(src.toString());
+ e.setOperation("remove");
+ throw e;
+ }
}
}
@@ -95,4 +110,4 @@ class MoveCommands {
}
}
}
-}
\ No newline at end of file
+}
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
Thu Jan 3 21:23:58 2013
@@ -19,11 +19,14 @@ package org.apache.hadoop.fs.viewfs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.util.EnumSet;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
@@ -171,6 +174,16 @@ class ChRootedFileSystem extends FilterF
return super.create(fullPath(f), permission, overwrite, bufferSize,
replication, blockSize, progress);
}
+
+ @Override
+ @Deprecated
+ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+ EnumSet<CreateFlag> flags, int bufferSize, short replication, long
blockSize,
+ Progressable progress) throws IOException {
+
+ return super.createNonRecursive(fullPath(f), permission, flags,
bufferSize, replication, blockSize,
+ progress);
+ }
@Override
public boolean delete(final Path f, final boolean recursive)
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
Thu Jan 3 21:23:58 2013
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
+import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -35,6 +36,7 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -265,6 +267,21 @@ public class ViewFileSystem extends File
}
@Override
+ public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
+ EnumSet<CreateFlag> flags, int bufferSize, short replication, long
blockSize,
+ Progressable progress) throws IOException {
+ InodeTree.ResolveResult<FileSystem> res;
+ try {
+ res = fsState.resolve(getUriPath(f), false);
+ } catch (FileNotFoundException e) {
+ throw readOnlyMountTable("create", f);
+ }
+ assert(res.remainingPath != null);
+ return res.targetFileSystem.createNonRecursive(res.remainingPath,
permission,
+ flags, bufferSize, replication, blockSize, progress);
+ }
+
+ @Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
final boolean overwrite, final int bufferSize, final short replication,
final long blockSize, final Progressable progress) throws IOException {
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
Thu Jan 3 21:23:58 2013
@@ -63,11 +63,10 @@ import org.apache.hadoop.io.WritableUtil
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
-import
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
-import
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadHeaderProto;
-import
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcPayloadOperationProto;
-import
org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcResponseHeaderProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.RpcStatusProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
+import
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto.OperationProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto;
+import
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.SaslRpcClient;
@@ -191,7 +190,7 @@ public class Client {
*/
private class Call {
final int id; // call id
- final Writable rpcRequest; // the serialized rpc request - RpcPayload
+ final Writable rpcRequest; // the serialized rpc request
Writable rpcResponse; // null if rpc has error
IOException error; // exception, null if success
final RPC.RpcKind rpcKind; // Rpc EngineKind
@@ -266,7 +265,7 @@ public class Client {
private AtomicBoolean shouldCloseConnection = new AtomicBoolean(); //
indicate if the connection is closed
private IOException closeException; // close reason
- private final Object sendParamsLock = new Object();
+ private final Object sendRpcRequestLock = new Object();
public Connection(ConnectionId remoteId) throws IOException {
this.remoteId = remoteId;
@@ -768,7 +767,7 @@ public class Client {
remoteId.getTicket(),
authMethod).writeTo(buf);
- // Write out the payload length
+ // Write out the packet length
int bufLen = buf.getLength();
out.writeInt(bufLen);
@@ -832,7 +831,7 @@ public class Client {
try {
while (waitForWork()) {//wait here for work - read or close connection
- receiveResponse();
+ receiveRpcResponse();
}
} catch (Throwable t) {
// This truly is unexpected, since we catch IOException in
receiveResponse
@@ -849,11 +848,12 @@ public class Client {
+ connections.size());
}
- /** Initiates a call by sending the parameter to the remote server.
+ /** Initiates a rpc call by sending the rpc request to the remote server.
* Note: this is not called from the Connection thread, but by other
* threads.
+ * @param call - the rpc request
*/
- public void sendParam(final Call call)
+ public void sendRpcRequest(final Call call)
throws InterruptedException, IOException {
if (shouldCloseConnection.get()) {
return;
@@ -866,17 +866,17 @@ public class Client {
//
// Format of a call on the wire:
// 0) Length of rest below (1 + 2)
- // 1) PayloadHeader - is serialized Delimited hence contains length
- // 2) the Payload - the RpcRequest
+ // 1) RpcRequestHeader - is serialized Delimited hence contains length
+ // 2) RpcRequest
//
// Items '1' and '2' are prepared here.
final DataOutputBuffer d = new DataOutputBuffer();
- RpcPayloadHeaderProto header = ProtoUtil.makeRpcPayloadHeader(
- call.rpcKind, RpcPayloadOperationProto.RPC_FINAL_PAYLOAD, call.id);
+ RpcRequestHeaderProto header = ProtoUtil.makeRpcRequestHeader(
+ call.rpcKind, OperationProto.RPC_FINAL_PACKET, call.id);
header.writeDelimitedTo(d);
call.rpcRequest.write(d);
- synchronized (sendParamsLock) {
+ synchronized (sendRpcRequestLock) {
Future<?> senderFuture = SEND_PARAMS_EXECUTOR.submit(new Runnable() {
@Override
public void run() {
@@ -892,7 +892,7 @@ public class Client {
byte[] data = d.getData();
int totalLength = d.getLength();
out.writeInt(totalLength); // Total Length
- out.write(data, 0, totalLength);//PayloadHeader + RpcRequest
+ out.write(data, 0, totalLength);// RpcRequestHeader +
RpcRequest
out.flush();
}
} catch (IOException e) {
@@ -927,7 +927,7 @@ public class Client {
/* Receive a response.
* Because only one receiver, so no synchronization on in.
*/
- private void receiveResponse() {
+ private void receiveRpcResponse() {
if (shouldCloseConnection.get()) {
return;
}
@@ -1194,12 +1194,12 @@ public class Client {
Call call = new Call(rpcKind, rpcRequest);
Connection connection = getConnection(remoteId, call);
try {
- connection.sendParam(call); // send the parameter
+ connection.sendRpcRequest(call); // send the rpc request
} catch (RejectedExecutionException e) {
throw new IOException("connection has been closed", e);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
- LOG.warn("interrupted waiting to send params to server", e);
+ LOG.warn("interrupted waiting to send rpc request to server", e);
throw new IOException(e);
}
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
Thu Jan 3 21:23:58 2013
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.ipc.Client.ConnectionId;
import org.apache.hadoop.ipc.RPC.RpcInvoker;
-import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto;
+import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestProto;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -128,10 +128,10 @@ public class ProtobufRpcEngine implement
.getProtocolVersion(protocol);
}
- private HadoopRpcRequestProto constructRpcRequest(Method method,
+ private RequestProto constructRpcRequest(Method method,
Object[] params) throws ServiceException {
- HadoopRpcRequestProto rpcRequest;
- HadoopRpcRequestProto.Builder builder = HadoopRpcRequestProto
+ RequestProto rpcRequest;
+ RequestProto.Builder builder = RequestProto
.newBuilder();
builder.setMethodName(method.getName());
@@ -190,7 +190,7 @@ public class ProtobufRpcEngine implement
startTime = Time.now();
}
- HadoopRpcRequestProto rpcRequest = constructRpcRequest(method, args);
+ RequestProto rpcRequest = constructRpcRequest(method, args);
RpcResponseWritable val = null;
if (LOG.isTraceEnabled()) {
@@ -271,13 +271,13 @@ public class ProtobufRpcEngine implement
* Writable Wrapper for Protocol Buffer Requests
*/
private static class RpcRequestWritable implements Writable {
- HadoopRpcRequestProto message;
+ RequestProto message;
@SuppressWarnings("unused")
public RpcRequestWritable() {
}
- RpcRequestWritable(HadoopRpcRequestProto message) {
+ RpcRequestWritable(RequestProto message) {
this.message = message;
}
@@ -292,7 +292,7 @@ public class ProtobufRpcEngine implement
int length = ProtoUtil.readRawVarint32(in);
byte[] bytes = new byte[length];
in.readFully(bytes);
- message = HadoopRpcRequestProto.parseFrom(bytes);
+ message = RequestProto.parseFrom(bytes);
}
@Override
@@ -426,7 +426,7 @@ public class ProtobufRpcEngine implement
public Writable call(RPC.Server server, String connectionProtocolName,
Writable writableRequest, long receiveTime) throws Exception {
RpcRequestWritable request = (RpcRequestWritable) writableRequest;
- HadoopRpcRequestProto rpcRequest = request.message;
+ RequestProto rpcRequest = request.message;
String methodName = rpcRequest.getMethodName();
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
Thu Jan 3 21:23:58 2013
@@ -80,7 +80,8 @@ import org.apache.hadoop.ipc.RPC.Version
import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
import org.apache.hadoop.ipc.metrics.RpcMetrics;
import
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
+import
org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.SaslRpcServer;
@@ -160,7 +161,7 @@ public abstract class Server {
public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
/**
- * Serialization type for ConnectionContext and RpcPayloadHeader
+ * Serialization type for ConnectionContext and RpcRequestHeader
*/
public enum IpcSerializationType {
// Add new serialization type to the end without affecting the enum order
@@ -197,7 +198,7 @@ public abstract class Server {
// 4 : Introduced SASL security layer
// 5 : Introduced use of {@link ArrayPrimitiveWritable$Internal}
// in ObjectWritable to efficiently transmit arrays of primitives
- // 6 : Made RPC payload header explicit
+ // 6 : Made RPC Request header explicit
// 7 : Changed Ipc Connection Header to use Protocol buffers
// 8 : SASL server always sends a final response
public static final byte CURRENT_VERSION = 8;
@@ -1637,14 +1638,15 @@ public abstract class Server {
private void processData(byte[] buf) throws IOException,
InterruptedException {
DataInputStream dis =
new DataInputStream(new ByteArrayInputStream(buf));
- RpcPayloadHeaderProto header =
RpcPayloadHeaderProto.parseDelimitedFrom(dis);
+ RpcRequestHeaderProto header =
RpcRequestHeaderProto.parseDelimitedFrom(dis);
if (LOG.isDebugEnabled())
LOG.debug(" got #" + header.getCallId());
if (!header.hasRpcOp()) {
- throw new IOException(" IPC Server: No rpc op in rpcPayloadHeader");
+ throw new IOException(" IPC Server: No rpc op in rpcRequestHeader");
}
- if (header.getRpcOp() != RpcPayloadOperationProto.RPC_FINAL_PAYLOAD) {
+ if (header.getRpcOp() !=
+ RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET) {
throw new IOException("IPC Server does not implement operation" +
header.getRpcOp());
}
@@ -1652,7 +1654,7 @@ public abstract class Server {
// (Note it would make more sense to have the handler deserialize but
// we continue with this original design.
if (!header.hasRpcKind()) {
- throw new IOException(" IPC Server: No rpc kind in rpcPayloadHeader");
+ throw new IOException(" IPC Server: No rpc kind in rpcRequestHeader");
}
Class<? extends Writable> rpcRequestClass =
getRpcRequestWrapper(header.getRpcKind());
Modified:
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
URL:
http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java?rev=1428601&r1=1428600&r2=1428601&view=diff
==============================================================================
---
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
(original)
+++
hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ProtoUtil.java
Thu Jan 3 21:23:58 2013
@@ -24,7 +24,7 @@ import java.io.IOException;
import org.apache.hadoop.ipc.RPC;
import
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
import
org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
-import org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos.*;
+import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.UserGroupInformation;
@@ -157,9 +157,9 @@ public abstract class ProtoUtil {
return null;
}
- public static RpcPayloadHeaderProto makeRpcPayloadHeader(RPC.RpcKind rpcKind,
- RpcPayloadOperationProto operation, int callId) {
- RpcPayloadHeaderProto.Builder result = RpcPayloadHeaderProto.newBuilder();
+ public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
+ RpcRequestHeaderProto.OperationProto operation, int callId) {
+ RpcRequestHeaderProto.Builder result = RpcRequestHeaderProto.newBuilder();
result.setRpcKind(convert(rpcKind)).setRpcOp(operation).setCallId(callId);
return result.build();
}