[ 
https://issues.apache.org/jira/browse/HADOOP-19254?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17912344#comment-17912344
 ] 

ASF GitHub Bot commented on HADOOP-19254:
-----------------------------------------

HarshitGupta11 commented on code in PR #7197:
URL: https://github.com/apache/hadoop/pull/7197#discussion_r1912702454


##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -1,45 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.fs.shell;
 
+import java.io.*;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.HadoopTestBase;
+import org.assertj.core.api.Assertions;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.LinkedList;
+public class TestBulkDeleteCommand extends HadoopTestBase {
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+  @BeforeClass
+  public static void setup() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+            "testFsShellBulkDelete")));
+    lfs.delete(testRootDir, true);
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+  }
 
-public class TestBulkDeleteCommand {
-    private static Configuration conf;
+  @Test
+  public void testDefaults() throws IOException {
+    LinkedList<String> options = new LinkedList<>();
+    BulkDeleteCommand bulkDeleteCommand = new BulkDeleteCommand(conf);
+    bulkDeleteCommand.processOptions(options);
+    assertTrue(bulkDeleteCommand.childArgs.isEmpty());

Review Comment:
   done



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -1,45 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.fs.shell;
 
+import java.io.*;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.HadoopTestBase;
+import org.assertj.core.api.Assertions;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.LinkedList;
+public class TestBulkDeleteCommand extends HadoopTestBase {
+  private static Configuration conf;
+  private static FsShell shell;
+  private static LocalFileSystem lfs;
+  private static Path testRootDir;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+  @BeforeClass
+  public static void setup() throws IOException {
+    conf = new Configuration();
+    shell = new FsShell(conf);
+    lfs = FileSystem.getLocal(conf);
+    testRootDir = lfs.makeQualified(new Path(GenericTestUtils.getTempPath(
+            "testFsShellBulkDelete")));
+    lfs.delete(testRootDir, true);
+    lfs.mkdirs(testRootDir);
+    lfs.setWorkingDirectory(testRootDir);
+  }
 
-public class TestBulkDeleteCommand {
-    private static Configuration conf;
+  @Test
+  public void testDefaults() throws IOException {
+    LinkedList<String> options = new LinkedList<>();
+    BulkDeleteCommand bulkDeleteCommand = new BulkDeleteCommand(conf);
+    bulkDeleteCommand.processOptions(options);
+    assertTrue(bulkDeleteCommand.childArgs.isEmpty());
+  }
 
-    @BeforeClass
-    public static void setup() throws IOException {
-        conf = new Configuration();
-    }
+  @Test
+  public void testArguments() throws IOException, URISyntaxException {
+    BulkDeleteCommand bulkDeleteCommand = new BulkDeleteCommand(conf);
+    LinkedList<String> arguments = new LinkedList<>();
+    String arg1 = "file:///file/name/1";
+    String arg2 = "file:///file/name/1/2";
+    arguments.add(arg1);
+    arguments.add(arg2);
+    LinkedList<PathData> pathData = bulkDeleteCommand.expandArguments(arguments);
+    Assertions.assertThat(pathData.size()).
+            describedAs("Only one root path must be present").isEqualTo(1);
+    Assertions.assertThat(pathData.get(0).path.toUri().getPath()).
+            describedAs("Base path of the command should match").isEqualTo(new URI(arg1).getPath());
+    Assertions.assertThat(bulkDeleteCommand.childArgs.size()).

Review Comment:
   done





> Implement bulk delete command as hadoop fs command operation 
> -------------------------------------------------------------
>
>                 Key: HADOOP-19254
>                 URL: https://issues.apache.org/jira/browse/HADOOP-19254
>             Project: Hadoop Common
>          Issue Type: Improvement
>          Components: fs
>    Affects Versions: 3.4.1
>            Reporter: Mukund Thakur
>            Assignee: Harshit Gupta
>            Priority: Major
>              Labels: pull-request-available
>
> {code}
> hadoop fs -bulkdelete <base-url> <file> 
> {code}
> Key uses
> * QE: Testing from python and other scripting languages
> * cluster maintenance: actual bulk deletion operations from the store
> one thought there: we MUST qualify paths with / elements: if a passed-in path 
> ends in /, it means "delete a marker", not "delete a dir". And if it doesn't 
> have one, then it's an object. This makes it possible to be used to delete 
> surplus markers or where there is a file above another file... cloudstore 
> listobjects finds this



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to