deniskuzZ commented on code in PR #3531:
URL: https://github.com/apache/hive/pull/3531#discussion_r949881145


##########
ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java:
##########
@@ -1499,29 +1499,34 @@ private static ValidTxnList getValidTxnList(Configuration conf) {
   public static Map<Path, HdfsDirSnapshot> getHdfsDirSnapshotsForCleaner(final FileSystem fs, final Path path)
           throws IOException {
     Map<Path, HdfsDirSnapshot> dirToSnapshots = new HashMap<>();
-    Deque<RemoteIterator<FileStatus>> stack = new ArrayDeque<>();
-    stack.push(fs.listStatusIterator(path));
-    while (!stack.isEmpty()) {
-      RemoteIterator<FileStatus> itr = stack.pop();
-      while (itr.hasNext()) {
-        FileStatus fStatus = itr.next();
-        Path fPath = fStatus.getPath();
-        if (acidHiddenFileFilter.accept(fPath)) {
-          if (baseFileFilter.accept(fPath) ||
-                  deltaFileFilter.accept(fPath) ||
-                  deleteEventDeltaDirFilter.accept(fPath)) {
-            addToSnapshoot(dirToSnapshots, fPath);
-          } else {
-            if (fStatus.isDirectory()) {
-              stack.push(fs.listStatusIterator(fPath));
+    try {
+      Deque<RemoteIterator<FileStatus>> stack = new ArrayDeque<>();
+      stack.push(fs.listStatusIterator(path));
+      while (!stack.isEmpty()) {
+        RemoteIterator<FileStatus> itr = stack.pop();
+        while (itr.hasNext()) {
+          FileStatus fStatus = itr.next();
+          Path fPath = fStatus.getPath();
+          if (acidHiddenFileFilter.accept(fPath)) {
+            if (baseFileFilter.accept(fPath) ||
+                    deltaFileFilter.accept(fPath) ||
+                    deleteEventDeltaDirFilter.accept(fPath)) {
+              addToSnapshoot(dirToSnapshots, fPath);
             } else {
-              // Found an original file
-              HdfsDirSnapshot hdfsDirSnapshot = addToSnapshoot(dirToSnapshots, fPath.getParent());
-              hdfsDirSnapshot.addFile(fStatus);
+              if (fStatus.isDirectory()) {
+                stack.push(fs.listStatusIterator(fPath));
+              } else {
+                // Found an original file
+                HdfsDirSnapshot hdfsDirSnapshot = addToSnapshoot(dirToSnapshots, fPath.getParent());
+                hdfsDirSnapshot.addFile(fStatus);
+              }
             }
           }
         }
       }
+    } catch (FileNotFoundException fne) {
+      //ignore

Review Comment:
   Please add a comment noting that the current FS API doesn't provide a way to supply a PathFilter to ignore the staging dirs, which is why we need to catch this exception.
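   For illustration, the comment at the catch site could read roughly like the sketch below. This is not the PR's code: the class and method names are invented for the example, and only the Hadoop FileSystem/RemoteIterator calls are real.

       import java.io.FileNotFoundException;
       import java.io.IOException;
       import java.util.ArrayList;
       import java.util.List;

       import org.apache.hadoop.fs.FileStatus;
       import org.apache.hadoop.fs.FileSystem;
       import org.apache.hadoop.fs.Path;
       import org.apache.hadoop.fs.RemoteIterator;

       public class ListingSketch {
         // Lists the direct children of a path, tolerating directories that
         // disappear while the listing is in progress.
         static List<FileStatus> listTolerantly(FileSystem fs, Path path) throws IOException {
           List<FileStatus> result = new ArrayList<>();
           try {
             RemoteIterator<FileStatus> itr = fs.listStatusIterator(path);
             while (itr.hasNext()) {
               result.add(itr.next());
             }
           } catch (FileNotFoundException fnfe) {
             // The current FS API (listStatusIterator) does not let us supply a
             // PathFilter, so staging/temporary directories cannot be excluded up
             // front. If such a directory is deleted concurrently, the iterator
             // throws FileNotFoundException; it is safe to ignore it and return
             // whatever has been collected so far.
           }
           return result;
         }
       }

   The exact wording is up to you; the point is to record why swallowing FileNotFoundException here is intentional.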



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: gitbox-unsubscr...@hive.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

