abstractdog commented on code in PR #5054:
URL: https://github.com/apache/hive/pull/5054#discussion_r1494124553


##########
llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LocalDirCleaner.java:
##########
@@ -0,0 +1,114 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.llap.daemon.impl;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.FileTime;
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.service.AbstractService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * LocalDirCleaner is an LLAP daemon service to clean up old local files. Under normal circumstances,
+ * intermediate/local files are cleaned up (typically at the end of the DAG), but daemons crash sometimes,
+ * and the attached local disk might end up being the same when a new daemon starts (this applies to
+ * on-prem as well as cloud scenarios).
+ */
+public class LocalDirCleaner extends AbstractService {
+  private static final Logger LOG = LoggerFactory.getLogger(LocalDirCleaner.class);
+
+  private String[] localDirs;
+
+  private long cleanupIntervalSec;
+  private long fileModifyTimeThresholdSec;
+
+  ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
+
+  public LocalDirCleaner(String[] localDirs, Configuration conf) {
+    super("LocalDirCleaner");
+    this.localDirs = localDirs;
+    this.cleanupIntervalSec = getInterval(conf);
+    this.fileModifyTimeThresholdSec = getFileModifyTimeThreshold(conf);
+    LOG.info("Initialized local dir cleaner: interval: {}s, threshold: {}s", 
cleanupIntervalSec,
+        fileModifyTimeThresholdSec);
+  }
+
+  @Override
+  public void serviceStart() throws IOException {
+    scheduler.scheduleAtFixedRate(this::cleanup, 0, cleanupIntervalSec, TimeUnit.SECONDS);
+  }
+
+  @Override
+  public void serviceStop() throws IOException {
+    // we can shut down this service now and ignore leftovers, because under normal circumstances,
+    // files from the local dirs are cleaned up (so LocalDirCleaner is a best-effort utility)
+    scheduler.shutdownNow();
+  }
+
+  private long getFileModifyTimeThreshold(Configuration conf) {
+    return HiveConf.getTimeVar(conf, HiveConf.ConfVars.LLAP_LOCAL_DIR_CLEANER_FILE_MODIFY_TIME_THRESHOLD,
+        TimeUnit.SECONDS);
+  }
+
+  private long getInterval(Configuration conf) {
+    return HiveConf.getTimeVar(conf, HiveConf.ConfVars.LLAP_LOCAL_DIR_CLEANER_CLEANUP_INTERVAL, TimeUnit.SECONDS);
+  }
+
+  private void cleanup() {
+    Instant deleteBefore = Instant.now().minus(fileModifyTimeThresholdSec, ChronoUnit.SECONDS);
+
+    for (String localDir : localDirs) {
+      Path pathLocalDir = Paths.get(localDir);
+      cleanupPath(deleteBefore, pathLocalDir);
+    }
+  }
+
+  private void cleanupPath(Instant deleteBefore, Path pathLocalDir) {
+    LOG.info("Cleaning up files older than {} from {}", deleteBefore, 
pathLocalDir);
+
+    try (Stream<Path> files = Files.walk(pathLocalDir)) {

Review Comment:
   we have a root folder like:
   ```
   /apps/llap/work/
   ```
   
   and we'll have dangling files like:
   ```
   /apps/llap/work/usercache/hive/appcache/application_1707917402901_0001/3/output/attempt_1707917402901_0001_3_05_000002_0_10414:
   total 16
   drwxr-xr-x  2 hive hive   44 Feb 14 13:41 .
   drwxr-xr-x 63 hive hive 4096 Feb 14 13:41 ..
   -rw-r--r--  1 hive hive  425 Feb 14 13:41 file.out
   -rw-r--r--  1 hive hive   32 Feb 14 13:41 file.out.index
   
   /apps/llap/work/usercache/hive/appcache/application_1707917402901_0001/3/output/attempt_1707917402901_0001_3_05_000005_0_10416:
   total 16
   drwxr-xr-x  2 hive hive   44 Feb 14 13:41 .
   drwxr-xr-x 63 hive hive 4096 Feb 14 13:41 ..
   -rw-r--r--  1 hive hive  383 Feb 14 13:41 file.out
   -rw-r--r--  1 hive hive   32 Feb 14 13:41 file.out.index
   
   ```
   
   so the point is to find all dangling intermediate files, which might be a few layers below the root work dir. Files.walk() seemed to be the way to find them all, and I'm a bit concerned about how complicated it would be to achieve the same with Files.list(); a minimal sketch of the walk-based approach follows below.
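   
   For illustration, here is a minimal, self-contained sketch of the walk-based cleanup (the root dir, class name, and one-day threshold below are hypothetical placeholders, not the PR's actual values); with Files.list() the same traversal would need explicit recursion into every subdirectory:
   ```java
   import java.io.IOException;
   import java.nio.file.Files;
   import java.nio.file.Path;
   import java.nio.file.Paths;
   import java.nio.file.attribute.FileTime;
   import java.time.Instant;
   import java.time.temporal.ChronoUnit;
   import java.util.stream.Stream;
   
   public class WalkSketch {
       public static void main(String[] args) throws IOException {
           Path root = Paths.get("/apps/llap/work");                              // hypothetical root work dir
           Instant deleteBefore = Instant.now().minus(86400, ChronoUnit.SECONDS); // hypothetical 1-day threshold
   
           // Files.walk traverses the whole tree, so attempt dirs several
           // levels below the root are reached without manual recursion.
           try (Stream<Path> files = Files.walk(root)) {
               files.filter(Files::isRegularFile)
                    .filter(p -> isOlderThan(p, deleteBefore))
                    .forEach(System.out::println); // the real cleaner would delete here
           }
       }
   
       private static boolean isOlderThan(Path p, Instant cutoff) {
           try {
               FileTime mtime = Files.getLastModifiedTime(p);
               return mtime.toInstant().isBefore(cutoff);
           } catch (IOException e) {
               return false; // if we cannot stat the file, leave it alone
           }
       }
   }
   ```
   One thing to keep in mind with this pattern: Files.walk emits parents before children, so if directories (and not only regular files) were ever deleted, the stream would need to be sorted in reverse order first.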


