This is an automated email from the ASF dual-hosted git repository.
ayushsaxena pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/branch-3.3 by this push:
new 54c40cb HADOOP-16878. FileUtil.copy() to throw IOException if the
source and destination are the same (#2383)
54c40cb is described below
commit 54c40cbf49f2ebf4bbc1976279a6eba7a2c5fe23
Author: Ayush Saxena <[email protected]>
AuthorDate: Sat Oct 17 01:34:01 2020 +0530
HADOOP-16878. FileUtil.copy() to throw IOException if the source and
destination are the same (#2383)
Contributed by Gabor Bota.
---
.../src/main/java/org/apache/hadoop/fs/FileUtil.java | 6 ++++++
.../test/java/org/apache/hadoop/fs/TestFsShellCopy.java | 16 +++++++++++++++-
.../apache/hadoop/hdfs/TestDistributedFileSystem.java | 16 ++++++++++++++++
3 files changed, 37 insertions(+), 1 deletion(-)
diff --git
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 7bc93f9..73ca6e6 100644
---
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -398,6 +398,12 @@ public class FileUtil {
Configuration conf) throws IOException {
Path src = srcStatus.getPath();
dst = checkDest(src.getName(), dstFS, dst, overwrite);
+
+ if (srcFS.makeQualified(src).equals(dstFS.makeQualified(dst))) {
+ throw new PathOperationException("Source (" + src + ") and destination "
+     + "(" + dst + ") are equal in the copy command.");
+ }
+
if (srcStatus.isDirectory()) {
checkDependencies(srcFS, src, dstFS, dst);
if (!dstFS.mkdirs(dst)) {
diff --git
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
index 72ae296..117fad2 100644
---
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
+++
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java
@@ -34,6 +34,7 @@ import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -175,7 +176,20 @@ public class TestFsShellCopy {
checkPut(dirPath, targetDir, true);
}
-
+ @Test
+ public void testCopyBetweenFsEqualPath() throws Exception {
+ Path testRoot = new Path(testRootDir, "testPutFile");
+ lfs.delete(testRoot, true);
+ lfs.mkdirs(testRoot);
+
+ Path filePath = new Path(testRoot, "sameSourceTarget");
+ lfs.create(filePath).close();
+ final FileStatus status = lfs.getFileStatus(filePath);
+ LambdaTestUtils.intercept(PathOperationException.class, () ->
+ FileUtil.copy(lfs, status, lfs, filePath, false, true, conf)
+ );
+ }
+
private void checkPut(Path srcPath, Path targetDir, boolean useWindowsPath)
throws Exception {
lfs.delete(targetDir, true);
diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 628a618..3c90356 100644
---
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem.Statistics.StatisticsData;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
@@ -75,6 +76,7 @@ import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
+import org.apache.hadoop.fs.PathOperationException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
import org.apache.hadoop.fs.StorageType;
@@ -2090,4 +2092,18 @@ public class TestDistributedFileSystem {
assertFalse(result.isSupported());
}
}
+
+ @Test
+ public void testCopyBetweenFsEqualPath() throws Exception {
+ Configuration conf = getTestConfiguration();
+ try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
+ cluster.waitActive();
+ final DistributedFileSystem dfs = cluster.getFileSystem();
+ Path filePath = new Path("/dir/file");
+ dfs.create(filePath).close();
+ FileStatus fstatus = dfs.getFileStatus(filePath);
+ LambdaTestUtils.intercept(PathOperationException.class,
+ () -> FileUtil.copy(dfs, fstatus, dfs, filePath, false, true, conf));
+ }
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]