[ https://issues.apache.org/jira/browse/HADOOP-19254?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17906309#comment-17906309 ]

ASF GitHub Bot commented on HADOOP-19254:
-----------------------------------------

HarshitGupta11 commented on code in PR #7197:
URL: https://github.com/apache/hadoop/pull/7197#discussion_r1888059467


##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for (int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            // close the stream returned by create() so the file isn't left open
+            lfs.create(p).close();
+            listOfPaths.add(p.toUri().toString());
+        }
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add(baseDir.toUri().toString());
+        finalCommandList.addAll(listOfPaths);
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isFalse();
+    }
+
+    @Test
+    public void testLocalFileDeletionWithFileName() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        Path fileWithDeletePaths = new Path(testRootDir, "fileWithDeletePaths");
+        FSDataOutputStream fsDataOutputStream = lfs.create(fileWithDeletePaths, true);
+        BufferedWriter br = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream));
+        for (int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            // close the stream returned by create() so the file isn't left open
+            lfs.create(p).close();
+            br.write(p.toUri().toString());
+            br.newLine();
+        }
+        br.close(); // close() flushes buffered content and closes the writer
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add("-readFromFile");
+        finalCommandList.add(fileWithDeletePaths.toUri().toString());
+        finalCommandList.add(baseDir.toUri().toString());
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isFalse();

Review Comment:
   Done



##########
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestBulkDeleteCommand.java:
##########
@@ -64,4 +78,52 @@ public void testArguments() throws IOException, URISyntaxException {
         Assertions.assertThat(bulkDeleteCommand.childArgs.get(0)).
                 describedAs("Children arguments must match").isEqualTo(arg2);
     }
+
+    @Test
+    public void testLocalFileDeletion() throws IOException {
+        String deletionDir = "toDelete";
+        String baseFileName = "file_";
+        Path baseDir = new Path(testRootDir, deletionDir);
+        List<String> listOfPaths = new ArrayList<>();
+        for (int i = 0; i < 100; i++) {
+            Path p = new Path(baseDir, baseFileName + i);
+            // close the stream returned by create() so the file isn't left open
+            lfs.create(p).close();
+            listOfPaths.add(p.toUri().toString());
+        }
+        List<String> finalCommandList = new ArrayList<>();
+        finalCommandList.add("-bulkDelete");
+        finalCommandList.add(baseDir.toUri().toString());
+        finalCommandList.addAll(listOfPaths);
+        shell.run(finalCommandList.toArray(new String[0]));
+        Assertions.assertThat(lfs.listFiles(baseDir, false).hasNext())
+                .as("All the files should have been deleted").isFalse();

Review Comment:
   Done
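
For context (editor's note, not part of the patch): the shell command under test layers over the FileSystem bulk delete API added in HADOOP-18679. A minimal sketch of driving that API directly, assuming the BulkDelete/createBulkDelete interfaces from that change:

{code}
// Sketch only: deletes the given paths under a base path via the bulk
// delete API from HADOOP-18679; error handling kept to a minimum.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BulkDelete;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class BulkDeleteSketch {
    public static void main(String[] args) throws Exception {
        Path base = new Path(args[0]); // base path, as in the shell command
        FileSystem fs = base.getFileSystem(new Configuration());
        List<Path> toDelete = new ArrayList<>();
        for (int i = 1; i < args.length; i++) {
            toDelete.add(new Path(args[i]));
        }
        try (BulkDelete bulk = fs.createBulkDelete(base)) {
            // pageSize() caps how many paths a single bulkDelete() call may
            // take; it is 1 for stores without a native bulk delete.
            int pageSize = bulk.pageSize();
            for (int i = 0; i < toDelete.size(); i += pageSize) {
                List<Path> page = toDelete.subList(i,
                        Math.min(i + pageSize, toDelete.size()));
                // returns (path, error) pairs for paths that failed to delete
                List<Map.Entry<Path, String>> failed = bulk.bulkDelete(page);
                failed.forEach(e -> System.err.println(
                        "failed to delete " + e.getKey() + ": " + e.getValue()));
            }
        }
    }
}
{code}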





> Implement bulk delete command as hadoop fs command operation 
> -------------------------------------------------------------
>
>                 Key: HADOOP-19254
>                 URL: https://issues.apache.org/jira/browse/HADOOP-19254
>             Project: Hadoop Common
>          Issue Type: Improvement
>          Components: fs
>    Affects Versions: 3.4.1
>            Reporter: Mukund Thakur
>            Assignee: Harshit Gupta
>            Priority: Major
>              Labels: pull-request-available
>
> {code}
> hadoop fs -bulkdelete <base-url> <file> 
> {code}
> Key uses
> * QE: Testing from Python and other scripting languages
> * cluster maintenance: actual bulk deletion operations from the store
> One thought there: we MUST qualify paths by their trailing / elements: if a 
> passed-in path ends in /, it means "delete a marker", not "delete a dir"; if 
> it doesn't end in /, it refers to an object. This makes it possible to delete 
> surplus markers, or to handle the case where a file sits above another file, 
> which cloudstore listobjects finds.
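
To make the trailing-slash rule above concrete, a hypothetical check (illustrative only; no such helper exists in the patch):

{code}
// Hypothetical illustration of the trailing-slash semantics described above.
static boolean isMarkerDelete(java.net.URI path) {
    // "s3a://bucket/dir/" -> true  : delete the directory marker object
    // "s3a://bucket/dir"  -> false : delete the plain object "dir"
    return path.getPath().endsWith("/");
}
{code}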



