Author: cmccabe
Date: Fri Aug 1 20:41:05 2014
New Revision: 1615223
URL: http://svn.apache.org/r1615223
Log:
HDFS-6482. Use block ID-based block layout on datanodes (James Thomas via Colin Patrick McCabe)
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1615223&r1=1615222&r2=1615223&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Fri Aug 1 20:41:05 2014
@@ -33,6 +33,7 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.HardLink;
import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.Shell;
@@ -823,6 +824,14 @@ public class NativeIO {
}
}
+  public static void link(File src, File dst) throws IOException {
+    if (!nativeLoaded) {
+      HardLink.createHardLink(src, dst);
+    } else {
+      link0(src.getAbsolutePath(), dst.getAbsolutePath());
+    }
+  }
+
/**
* A version of renameTo that throws a descriptive exception when it fails.
*
@@ -833,4 +842,7 @@ public class NativeIO {
*/
private static native void renameTo0(String src, String dst)
throws NativeIOException;
+
+  private static native void link0(String src, String dst)
+      throws NativeIOException;
}
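For context, the new NativeIO.link(src, dst) helper uses the JNI-backed link0() when the native library is loaded and falls back to the pure-Java HardLink.createHardLink() otherwise. A minimal usage sketch follows; the file paths and wrapper class name are made up for illustration, and only the NativeIO.link call itself comes from this change:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.io.nativeio.NativeIO;

public class NativeLinkExample {
  public static void main(String[] args) throws IOException {
    // Illustrative paths; the destination's parent directory must already exist.
    File src = new File("/data/dn/current/blk_1073741825");
    File dst = new File("/data/dn/current/subdir0/blk_1073741825");

    // Hard-links dst to src: native link(2)/CreateHardLink when libhadoop
    // is loaded, HardLink.createHardLink(src, dst) otherwise.
    NativeIO.link(src, dst);
  }
}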
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1615223&r1=1615222&r2=1615223&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java Fri Aug 1 20:41:05 2014
@@ -78,6 +78,20 @@ public class DiskChecker {
(mkdirsWithExistsCheck(new File(parent)) &&
(canonDir.mkdir() || canonDir.exists()));
}
+
+  /**
+   * Recurse down a directory tree, checking all child directories.
+   * @param dir the root directory to check
+   * @throws DiskErrorException if any directory in the tree fails the check
+   */
+  public static void checkDirs(File dir) throws DiskErrorException {
+    checkDir(dir);
+    for (File child : dir.listFiles()) {
+      if (child.isDirectory()) {
+        checkDirs(child);
+      }
+    }
+  }
/**
* Create the directory if it doesn't exist and check that dir is readable,
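For context, DiskChecker.checkDirs(dir) runs the existing checkDir() on dir and then recurses into every child directory, so an entire directory tree can be validated in one call. A rough usage sketch; the volume path and wrapper class are made up for illustration:

import java.io.File;

import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class CheckDirsExample {
  public static void main(String[] args) {
    // Illustrative datanode volume directory.
    File volumeDir = new File("/data/dn/current");
    try {
      // Checks volumeDir itself, then every subdirectory below it;
      // the first directory that fails checkDir() raises DiskErrorException.
      DiskChecker.checkDirs(volumeDir);
    } catch (DiskErrorException e) {
      System.err.println("disk check failed: " + e.getMessage());
    }
  }
}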
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1615223&r1=1615222&r2=1615223&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Aug 1 20:41:05 2014
@@ -1054,6 +1054,43 @@ done:
#endif
}
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_link0(JNIEnv *env,
+  jclass clazz, jstring jsrc, jstring jdst)
+{
+#ifdef UNIX
+  const char *src = NULL, *dst = NULL;
+
+  src = (*env)->GetStringUTFChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (*env)->GetStringUTFChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (link(src, dst)) {
+    throw_ioe(env, errno);
+  }
+
+done:
+  if (src) (*env)->ReleaseStringUTFChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringUTFChars(env, jdst, dst);
+#endif
+
+#ifdef WINDOWS
+  LPCTSTR src = NULL, dst = NULL;
+
+  src = (LPCTSTR) (*env)->GetStringChars(env, jsrc, NULL);
+  if (!src) goto done; // exception was thrown
+  dst = (LPCTSTR) (*env)->GetStringChars(env, jdst, NULL);
+  if (!dst) goto done; // exception was thrown
+  if (!CreateHardLink(dst, src)) {
+    throw_ioe(env, GetLastError());
+  }
+
+done:
+  if (src) (*env)->ReleaseStringChars(env, jsrc, src);
+  if (dst) (*env)->ReleaseStringChars(env, jdst, dst);
+#endif
+}
+
JNIEXPORT jlong JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_getMemlockLimit0(
JNIEnv *env, jclass clazz)
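In both the UNIX and Windows branches above, a failed link(2) or CreateHardLink() call is converted by throw_ioe() into a NativeIOException, which Java callers see as an IOException. A hypothetical caller-side sketch of that error path; the paths and class name are illustrative:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.io.nativeio.NativeIO;

public class LinkErrorHandlingExample {
  public static void main(String[] args) {
    // Illustrative paths; assume dst already exists, so the link call fails.
    File src = new File("/data/dn/current/blk_1073741825");
    File dst = new File("/data/dn/current/blk_1073741825.copy");

    try {
      NativeIO.link(src, dst);
    } catch (IOException e) {
      // throw_ioe() fills in errno (e.g. EEXIST) or GetLastError(), so the
      // message explains why the hard link could not be created.
      System.err.println("hard link failed: " + e.getMessage());
    }
  }
}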