steveloughran commented on a change in pull request #1993:
URL: https://github.com/apache/hadoop/pull/1993#discussion_r454350102



##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      Path target = (Path)args[0];
+      Path[] src = (Path[]) args[1];
+      mockConcat(target, src);
+      return null;
+    }).when(mockFs).concat(any(Path.class), any(Path[].class));
+    Concat.setTstFs(mockFs);
+    shellRun(0, "-concat", dstPath.toString(), testRootDir+"/file-*");
+
+    assertTrue(lfs.exists(dstPath));

Review comment:
       use the relevant ContractTestUtils assertion here, for better error 
reporting.
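
   e.g. (sketch; assumes an import of org.apache.hadoop.fs.contract.ContractTestUtils):

   ```java
   // gives a more useful failure message than a bare assertTrue
   ContractTestUtils.assertPathExists(lfs,
       "destination file should exist after concat", dstPath);
   ```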

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      Path target = (Path)args[0];
+      Path[] src = (Path[]) args[1];
+      mockConcat(target, src);
+      return null;
+    }).when(mockFs).concat(any(Path.class), any(Path[].class));
+    Concat.setTstFs(mockFs);
+    shellRun(0, "-concat", dstPath.toString(), testRootDir+"/file-*");
+
+    assertTrue(lfs.exists(dstPath));
+    assertEquals(1, lfs.listStatus(testRootDir).length);
+  }
+
+  @Test
+  public void testUnsupportedFs() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doThrow(
+        new UnsupportedOperationException("Mock unsupported exception."))
+        .when(mockFs).concat(any(Path.class), any(Path[].class));
+    Mockito.doAnswer(invocationOnMock -> new URI("mockfs:///")).when(mockFs)
+        .getUri();
+    Concat.setTstFs(mockFs);
+    PrintStream oldErr = System.err;
+    final ByteArrayOutputStream err = new ByteArrayOutputStream();
+    System.setErr(new PrintStream(err));
+    shellRun(1, "-concat", dstPath.toString(), testRootDir + "/file-*");
+    System.setErr(oldErr);
+    System.err.print(err.toString());
+    assertTrue(err.toString()

Review comment:
       Prefer Assertions.assertThat for new tests, as it will generate a 
meaningful error message
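
   e.g. (sketch, assuming AssertJ is on the test classpath):

   ```java
   // prints the actual stderr contents if the match fails
   Assertions.assertThat(err.toString())
       .contains("Dest filesystem 'mockfs' doesn't support concat");
   ```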

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {

Review comment:
       make a subclass of AbstractHadoopTestBase for test timeout and thread 
naming
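
   i.e. (sketch):

   ```java
   import org.apache.hadoop.test.AbstractHadoopTestBase;

   public class TestFsShellConcat extends AbstractHadoopTestBase {
     // the base class contributes a per-test timeout rule and names the
     // worker thread after the currently running test method
   }
   ```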

##########
File path: 
hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
##########
@@ -824,6 +824,19 @@ Example:
 * `hadoop fs -truncate 55 /user/hadoop/file1 /user/hadoop/file2`
 * `hadoop fs -truncate -w 127 hdfs://nn1.example.com/user/hadoop/file1`
 
+concat
+--------
+
+Usage: `hadoop fs -concat <target file> <source files>`
+
+Concatenate existing source files into the target file. Target file and source
+files should be in the same directory.
+
+Example:
+
+* `hadoop fs -concat /user/hadoop/target-file /user/hadoop/file-0 
/user/hadoop/file-1`
+* `hadoop fs -concat /user/hadoop/target-file /user/hadoop/file-*`

Review comment:
       one of the examples should include the hdfs:// reference; for the -*
one, maybe escape the glob as \* for those shells which expand it locally
(e.g. bash), otherwise things don't work right
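
   e.g. (just a sketch of possible wording):

   * `hadoop fs -concat hdfs://nn1.example.com/user/hadoop/target-file /user/hadoop/file-0 /user/hadoop/file-1`
   * `hadoop fs -concat /user/hadoop/target-file /user/hadoop/file-\*`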

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;

Review comment:
       prefer an import ordering of
   
   ```
    java.*
   
   anything-not-org-apache (here junit and mockito)
   
   org.apache
   
   all static imports
   ```
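
   applied to this file, that would look roughly like:

   ```java
   import java.io.ByteArrayOutputStream;
   import java.io.IOException;
   import java.io.InputStream;
   import java.io.OutputStream;
   import java.io.PrintStream;
   import java.net.URI;
   import java.util.Random;

   import org.junit.Before;
   import org.junit.Test;
   import org.mockito.Mockito;

   import org.apache.hadoop.conf.Configuration;
   import org.apache.hadoop.fs.FileSystem;
   import org.apache.hadoop.fs.FsShell;
   import org.apache.hadoop.fs.LocalFileSystem;
   import org.apache.hadoop.fs.Path;
   import org.apache.hadoop.io.IOUtils;
   import org.apache.hadoop.test.GenericTestUtils;

   import static org.junit.Assert.assertEquals;
   import static org.junit.Assert.assertTrue;
   import static org.mockito.ArgumentMatchers.any;
   ```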
   

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      Path target = (Path)args[0];
+      Path[] src = (Path[]) args[1];
+      mockConcat(target, src);
+      return null;
+    }).when(mockFs).concat(any(Path.class), any(Path[].class));
+    Concat.setTstFs(mockFs);
+    shellRun(0, "-concat", dstPath.toString(), testRootDir+"/file-*");
+
+    assertTrue(lfs.exists(dstPath));
+    assertEquals(1, lfs.listStatus(testRootDir).length);
+  }
+
+  @Test
+  public void testUnsupportedFs() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doThrow(
+        new UnsupportedOperationException("Mock unsupported exception."))
+        .when(mockFs).concat(any(Path.class), any(Path[].class));
+    Mockito.doAnswer(invocationOnMock -> new URI("mockfs:///")).when(mockFs)
+        .getUri();
+    Concat.setTstFs(mockFs);
+    PrintStream oldErr = System.err;
+    final ByteArrayOutputStream err = new ByteArrayOutputStream();
+    System.setErr(new PrintStream(err));

Review comment:
       try/finally here to always restore the error stream
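
   i.e. roughly:

   ```java
   PrintStream oldErr = System.err;
   final ByteArrayOutputStream err = new ByteArrayOutputStream();
   System.setErr(new PrintStream(err));
   try {
     shellRun(1, "-concat", dstPath.toString(), testRootDir + "/file-*");
   } finally {
     // restore stderr even if the shell run or its assertion fails
     System.setErr(oldErr);
   }
   System.err.print(err.toString());
   ```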

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {

Review comment:
       just call delete with no probe, or use the `ContractTestUtils` method
which checks the delete() return value
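
   e.g. (delete() of a missing path just returns false, so the probe isn't needed):

   ```java
   lfs.delete(testRootDir, true);
   lfs.mkdirs(testRootDir);
   ```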

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));

Review comment:
       use String.format with a zero-padded index, e.g. `%02d`, so the file
names sort in creation order
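
   e.g.:

   ```java
   for (int i = 0; i < 10; i++) {
     // zero-padded names keep listing/glob order aligned with creation order
     OutputStream out = lfs.create(
         new Path(testRootDir, String.format("file-%02d", i)));
     out.write(random.nextInt());
     out.close();
   }
   ```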

##########
File path: 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Concat.java
##########
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * Concat the given files.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class Concat extends FsCommand {
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(Concat.class, "-concat");
+  }
+
+  public static final String NAME = "concat";
+  public static final String USAGE = "<target path> <src path> <src path> ...";
+  public static final String DESCRIPTION = "Concatenate existing source files"
+      + " into the target file. Target file and source files should be in the"
+      + " same directory.";
+  private static FileSystem tstFs; // test only.
+
+  @Override
+  protected void processArguments(LinkedList<PathData> args)
+      throws IOException {
+    if (args.size() < 1) {
+      throw new IOException("Target path not specified.");
+    }
+    if (args.size() < 3) {
+      throw new IOException("The number of source paths is less than 2.");

Review comment:
       include Usage text
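
   e.g., covering both argument checks (the same applies to the identical comment further down):

   ```java
   if (args.size() < 1) {
     throw new IOException("Target path not specified. Usage: " + NAME + " " + USAGE);
   }
   if (args.size() < 3) {
     throw new IOException(
         "The number of source paths is less than 2. Usage: " + NAME + " " + USAGE);
   }
   ```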

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      Path target = (Path)args[0];
+      Path[] src = (Path[]) args[1];
+      mockConcat(target, src);
+      return null;
+    }).when(mockFs).concat(any(Path.class), any(Path[].class));
+    Concat.setTstFs(mockFs);
+    shellRun(0, "-concat", dstPath.toString(), testRootDir+"/file-*");
+
+    assertTrue(lfs.exists(dstPath));
+    assertEquals(1, lfs.listStatus(testRootDir).length);
+  }
+
+  @Test
+  public void testUnsupportedFs() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doThrow(
+        new UnsupportedOperationException("Mock unsupported exception."))
+        .when(mockFs).concat(any(Path.class), any(Path[].class));
+    Mockito.doAnswer(invocationOnMock -> new URI("mockfs:///")).when(mockFs)
+        .getUri();
+    Concat.setTstFs(mockFs);
+    PrintStream oldErr = System.err;
+    final ByteArrayOutputStream err = new ByteArrayOutputStream();
+    System.setErr(new PrintStream(err));
+    shellRun(1, "-concat", dstPath.toString(), testRootDir + "/file-*");
+    System.setErr(oldErr);
+    System.err.print(err.toString());
+    assertTrue(err.toString()
+        .contains("Dest filesystem 'mockfs' doesn't support concat"));
+  }
+
+  private void shellRun(int n, String... args) {
+    assertEquals(n, shell.run(args));
+  }
+
+  /**
+   * Simple simulation of concat.
+   */
+  private void mockConcat(Path target, Path[] srcArray) throws IOException {
+    Path tmp = new Path(target.getParent(), target.getName() + ".bak");
+    lfs.rename(target, tmp);
+    OutputStream out = lfs.create(target);

Review comment:
       there may be a `ContractTestUtils` helper method for writing files;
if not, use try-with-resources
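
   if there isn't a suitable helper, a try-with-resources shape could look roughly like this (sketch only; the copy-and-delete details past the shown lines are assumptions, based on the test expecting the source files to disappear):

   ```java
   private void mockConcat(Path target, Path[] srcArray) throws IOException {
     Path tmp = new Path(target.getParent(), target.getName() + ".bak");
     lfs.rename(target, tmp);
     try (OutputStream out = lfs.create(target)) {
       try (InputStream in = lfs.open(tmp)) {
         IOUtils.copyBytes(in, out, 4096, false);
       }
       for (Path src : srcArray) {
         try (InputStream in = lfs.open(src)) {
           IOUtils.copyBytes(in, out, 4096, false);
         }
         lfs.delete(src, false);
       }
     }
     lfs.delete(tmp, false);
   }
   ```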

##########
File path: 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Concat.java
##########
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * Concat the given files.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class Concat extends FsCommand {
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(Concat.class, "-concat");
+  }
+
+  public static final String NAME = "concat";
+  public static final String USAGE = "<target path> <src path> <src path> ...";
+  public static final String DESCRIPTION = "Concatenate existing source files"
+      + " into the target file. Target file and source files should be in the"
+      + " same directory.";
+  private static FileSystem tstFs; // test only.
+
+  @Override
+  protected void processArguments(LinkedList<PathData> args)
+      throws IOException {
+    if (args.size() < 1) {
+      throw new IOException("Target path not specified.");

Review comment:
       include Usage text

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);

Review comment:
       why can't localfs be used directly? is it because it doesn't actually
support concat()?
   

##########
File path: 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Concat.java
##########
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * Concat the given files.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class Concat extends FsCommand {
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(Concat.class, "-concat");
+  }
+
+  public static final String NAME = "concat";
+  public static final String USAGE = "<target path> <src path> <src path> ...";
+  public static final String DESCRIPTION = "Concatenate existing source files"
+      + " into the target file. Target file and source files should be in the"
+      + " same directory.";
+  private static FileSystem tstFs; // test only.
+
+  @Override
+  protected void processArguments(LinkedList<PathData> args)
+      throws IOException {
+    if (args.size() < 1) {
+      throw new IOException("Target path not specified.");
+    }
+    if (args.size() < 3) {
+      throw new IOException("The number of source paths is less than 2.");
+    }
+    PathData target = args.removeFirst();
+    LinkedList<PathData> srcList = args;
+    if (!target.exists || !target.stat.isFile()) {
+      throw new IOException(String.format("Target path %s does not exist or is"
+              + " not file.", target.path));
+    }
+    Path[] srcArray = new Path[srcList.size()];
+    for (int i = 0; i < args.size(); i++) {
+      PathData src = srcList.get(i);
+      if (!src.exists || !src.stat.isFile()) {
+        throw new IOException(

Review comment:
       raise FileNotFoundException
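
   e.g. (sketch; PathIsNotFileException for the second case is just a suggestion):

   ```java
   if (!src.exists) {
     throw new FileNotFoundException("Source file " + src + " does not exist.");
   }
   if (!src.stat.isFile()) {
     throw new PathIsNotFileException(src.toString());
   }
   ```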

##########
File path: 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Concat.java
##########
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * Concat the given files.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class Concat extends FsCommand {
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(Concat.class, "-concat");
+  }
+
+  public static final String NAME = "concat";
+  public static final String USAGE = "<target path> <src path> <src path> ...";
+  public static final String DESCRIPTION = "Concatenate existing source files"
+      + " into the target file. Target file and source files should be in the"
+      + " same directory.";
+  private static FileSystem tstFs; // test only.
+
+  @Override
+  protected void processArguments(LinkedList<PathData> args)
+      throws IOException {
+    if (args.size() < 1) {
+      throw new IOException("Target path not specified.");
+    }
+    if (args.size() < 3) {
+      throw new IOException("The number of source paths is less than 2.");
+    }
+    PathData target = args.removeFirst();
+    LinkedList<PathData> srcList = args;
+    if (!target.exists || !target.stat.isFile()) {
+      throw new IOException(String.format("Target path %s does not exist or is"
+              + " not file.", target.path));
+    }
+    Path[] srcArray = new Path[srcList.size()];
+    for (int i = 0; i < args.size(); i++) {
+      PathData src = srcList.get(i);
+      if (!src.exists || !src.stat.isFile()) {
+        throw new IOException(
+            String.format("%s does not exist or is not file.", src.path));
+      }
+      srcArray[i] = src.path;
+    }
+    FileSystem fs = target.fs;
+    if (tstFs != null) {
+      fs = tstFs;
+    }
+    try {
+      fs.concat(target.path, srcArray);
+    } catch (UnsupportedOperationException exception) {
+      throw new IOException("Dest filesystem '" + fs.getUri().getScheme()

Review comment:
       * throw PathIOException with the full target path; bits of a mounted FS
may have different support for concat
       * include the inner stack trace
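
   roughly:

   ```java
   try {
     fs.concat(target.path, srcArray);
   } catch (UnsupportedOperationException exception) {
     // PathIOException carries the full path; keep the original as the cause
     PathIOException pe = new PathIOException(target.path.toString(),
         "Filesystem does not support concat");
     pe.initCause(exception);
     throw pe;
   }
   ```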

##########
File path: 
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestFsShellConcat.java
##########
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.util.Random;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Test Concat.
+ */
+public class TestFsShellConcat {
+
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
+  private static Path dstPath;
+
+  @Before
+  public void before() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+        "testFsShellCopy")));
+
+    if (lfs.exists(testRootDir)) {
+      lfs.delete(testRootDir, true);
+    }
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+    dstPath = new Path(testRootDir, "dstFile");
+    lfs.create(dstPath).close();
+
+    Random random = new Random();
+    for (int i = 0; i < 10; i++) {
+      OutputStream out = lfs.create(new Path(testRootDir, "file-" + i));
+      out.write(random.nextInt());
+      out.close();
+    }
+  }
+
+  @Test
+  public void testConcat() throws Exception {
+    FileSystem mockFs = Mockito.mock(FileSystem.class);
+    Mockito.doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      Path target = (Path)args[0];
+      Path[] src = (Path[]) args[1];
+      mockConcat(target, src);

Review comment:
       
   save the list of concatenated paths and then verify that the list passed in 
is of the correct length and order. 
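
   e.g. (sketch; needs java.util.ArrayList/Arrays/List imports):

   ```java
   final List<Path> capturedSrcs = new ArrayList<>();
   Mockito.doAnswer(invocation -> {
     Path target = invocation.getArgument(0);
     Path[] src = invocation.getArgument(1);
     capturedSrcs.addAll(Arrays.asList(src));
     mockConcat(target, src);
     return null;
   }).when(mockFs).concat(any(Path.class), any(Path[].class));
   Concat.setTstFs(mockFs);
   shellRun(0, "-concat", dstPath.toString(), testRootDir + "/file-*");

   // all ten sources should be passed through, in glob (sorted) order
   assertEquals(10, capturedSrcs.size());
   assertEquals("file-0", capturedSrcs.get(0).getName());
   assertEquals("file-9", capturedSrcs.get(9).getName());
   ```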

##########
File path: 
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Concat.java
##########
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.shell;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * Concat the given files.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class Concat extends FsCommand {
+  public static void registerCommands(CommandFactory factory) {
+    factory.addClass(Concat.class, "-concat");
+  }
+
+  public static final String NAME = "concat";
+  public static final String USAGE = "<target path> <src path> <src path> ...";
+  public static final String DESCRIPTION = "Concatenate existing source files"
+      + " into the target file. Target file and source files should be in the"
+      + " same directory.";
+  private static FileSystem tstFs; // test only.
+
+  @Override
+  protected void processArguments(LinkedList<PathData> args)
+      throws IOException {
+    if (args.size() < 1) {
+      throw new IOException("Target path not specified.");
+    }
+    if (args.size() < 3) {
+      throw new IOException("The number of source paths is less than 2.");
+    }
+    PathData target = args.removeFirst();
+    LinkedList<PathData> srcList = args;
+    if (!target.exists || !target.stat.isFile()) {
+      throw new IOException(String.format("Target path %s does not exist or is"

Review comment:
       raise FileNotFoundException



