[ 
https://issues.apache.org/jira/browse/HADOOP-19254?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17906221#comment-17906221
 ] 

ASF GitHub Bot commented on HADOOP-19254:
-----------------------------------------

mukund-thakur commented on code in PR #7197:
URL: https://github.com/apache/hadoop/pull/7197#discussion_r1887697967


##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, 
URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for(int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);

Review Comment:
   there are utilities for creating files and directories in ContractTestUtils. 



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, 
URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for(int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            lfs.create(p);
+            listOfPaths.add(p.toUri().toString());
+        }
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add(baseDir.toUri().toString());
+        finalCommandList.addAll(listOfPaths);
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isEqualTo(false);
+
+    }
+
+    @Test
+    public void testLocalFileDeletionWithFileName() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        Path fileWithDeletePaths = new Path(testRootDir, 
"fileWithDeletePaths");
+        FSDataOutputStream fsDataOutputStream = 
lfs.create(fileWithDeletePaths, true);
+        BufferedWriter br = new BufferedWriter(new 
OutputStreamWriter(fsDataOutputStream));
+        for(int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            lfs.create(p);
+            br.write(p.toUri().toString());
+            br.newLine();
+        }
+        br.flush(); // flush the file to write the contents
+        br.close(); // close the writer
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add("-readFromFile");
+        finalCommandList.add(fileWithDeletePaths.toUri().toString());
+        finalCommandList.add(baseDir.toUri().toString());
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isEqualTo(false);

Review Comment:
   add "should have been deleted under the <given path>"



##########
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/BulkDeleteCommand.java:
##########
@@ -88,56 +116,78 @@ protected LinkedList<PathData> 
expandArguments(LinkedList<String> args) throws I
         return pathData;
     }
 
+    /**
+     * Deletes the objects using the bulk delete api
+     * @param bulkDelete Bulkdelete object exposing the API
+     * @param paths list of paths to be deleted in the base path
+     * @throws IOException on error in execution of the delete command
+     */
     void deleteInBatches(BulkDelete bulkDelete, List<Path> paths) throws 
IOException {
         Batch<Path> batches = new Batch<>(paths, pageSize);
         while(batches.hasNext()) {
-            bulkDelete.bulkDelete(batches.next());
+            List<Map.Entry<Path, String>> result = 
bulkDelete.bulkDelete(batches.next());

Review Comment:
    bulkDelete.bulkDelete() throws IllegalArgumentException as well. 



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, 
URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test

Review Comment:
   I am wondering if we can add some tests for wrong arguments? 



##########
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/BulkDeleteCommand.java:
##########
@@ -88,56 +116,78 @@ protected LinkedList<PathData> 
expandArguments(LinkedList<String> args) throws I
         return pathData;
     }
 
+    /**
+     * Deletes the objects using the bulk delete api
+     * @param bulkDelete Bulkdelete object exposing the API
+     * @param paths list of paths to be deleted in the base path
+     * @throws IOException on error in execution of the delete command
+     */
     void deleteInBatches(BulkDelete bulkDelete, List<Path> paths) throws 
IOException {
         Batch<Path> batches = new Batch<>(paths, pageSize);
         while(batches.hasNext()) {
-            bulkDelete.bulkDelete(batches.next());
+            List<Map.Entry<Path, String>> result = 
bulkDelete.bulkDelete(batches.next());
+            LOG.debug(result.toString());

Review Comment:
   put a message in debug log. 



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, 
URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for(int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            lfs.create(p);
+            listOfPaths.add(p.toUri().toString());
+        }
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add(baseDir.toUri().toString());
+        finalCommandList.addAll(listOfPaths);
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isEqualTo(false);

Review Comment:
   add "should have been deleted under the <given path>"



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, 
URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for(int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            lfs.create(p);
+            listOfPaths.add(p.toUri().toString());

Review Comment:
   maybe use a list to first verify that the files were created properly, and then 
use the same list for the deletion. 
   same for the test below. 





> Implement bulk delete command as hadoop fs command operation 
> -------------------------------------------------------------
>
>                 Key: HADOOP-19254
>                 URL: https://issues.apache.org/jira/browse/HADOOP-19254
>             Project: Hadoop Common
>          Issue Type: Improvement
>          Components: fs
>    Affects Versions: 3.4.1
>            Reporter: Mukund Thakur
>            Assignee: Harshit Gupta
>            Priority: Major
>              Labels: pull-request-available
>
> {code}
> hadoop fs -bulkdelete <base-url> <file> 
> {code}
> Key uses
> * QE: Testing from python and other scripting languages
> * cluster maintenance: actual bulk deletion operations from the store
> one thought there: we MUST qualify paths with / elements: if a passed-in path 
> ends in /, it means "delete a marker", not "delete a dir". and if it doesn't 
> have one then it's an object. This makes it possible to be used to delete 
> surplus markers or where there is a file above another file... cloudstore 
> listobjects finds this



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to