This is an automated email from the ASF dual-hosted git repository.
ggal pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-livy.git
The following commit(s) were added to refs/heads/master by this push:
new 64521ce8 [LIVY-763][SERVER] Fix File Descriptor leak from DirectoryStream (#397)
64521ce8 is described below
commit 64521ce806c8263cd243e221982b026edd9cdf28
Author: RajshekharMuchandi <[email protected]>
AuthorDate: Tue Apr 11 17:42:40 2023 +0530
[LIVY-763][SERVER] Fix File Descriptor leak from DirectoryStream (#397)
## What changes were proposed in this pull request?
Took separate handle for directory stream. Closed directory stream handle
in finally block. Code changes done in InteractiveSession.scala.
JIRA: https://issues.apache.org/jira/browse/LIVY-763
## How was this patch tested?
Ran lsof | grep python command to see the Directory FDs for
{SPARK_HOME}/python/lib. Tested by creating interactive session for pyspark and
verified FDs are not created.
---
.../server/interactive/InteractiveSession.scala | 22 ++++++++++++++--------
1 file changed, 14 insertions(+), 8 deletions(-)
diff --git a/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala b/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
index 61305242..6a1dba0f 100644
--- a/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
+++ b/server/src/main/scala/org/apache/livy/server/interactive/InteractiveSession.scala
@@ -20,7 +20,7 @@ package org.apache.livy.server.interactive
import java.io.{File, InputStream}
import java.net.URI
import java.nio.ByteBuffer
-import java.nio.file.{Files, Paths}
+import java.nio.file.{DirectoryStream, Files, Paths}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicLong
@@ -259,21 +259,27 @@ object InteractiveSession extends Logging {
sys.env.get("SPARK_HOME") .map { case sparkHome =>
        val pyLibPath = Seq(sparkHome, "python", "lib").mkString(File.separator)
val pyArchivesFile = new File(pyLibPath, "pyspark.zip")
- val py4jFile = Try {
- Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
- .iterator()
+ var py4jZip: DirectoryStream[java.nio.file.Path] = null;
+ var py4jFile: File = null;
+ try {
+        py4jZip = Files.newDirectoryStream(Paths.get(pyLibPath), "py4j-*-src.zip")
+ py4jFile = py4jZip.iterator()
.next()
.toFile
- }.toOption
-
+ }
+ finally {
+ if (py4jZip != null) {
+ py4jZip.close()
+ }
+ }
if (!pyArchivesFile.exists()) {
warn("pyspark.zip not found; cannot start pyspark interpreter.")
Seq.empty
- } else if (py4jFile.isEmpty || !py4jFile.get.exists()) {
+ } else if (!py4jFile.exists()) {
warn("py4j-*-src.zip not found; cannot start pyspark interpreter.")
Seq.empty
} else {
- Seq(pyArchivesFile.getAbsolutePath, py4jFile.get.getAbsolutePath)
+ Seq(pyArchivesFile.getAbsolutePath, py4jFile.getAbsolutePath)
}
}.getOrElse(Seq())
}