This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 1de59bf6e [KYUUBI #3635] Delete files on exit in test suites with Utils.createTempDir
1de59bf6e is described below
commit 1de59bf6ec81857862cb1150ca14fb6b68a6be75
Author: Bowen Liang <[email protected]>
AuthorDate: Mon Oct 17 10:00:49 2022 +0800
[KYUUBI #3635] Delete files on exit in test suites with Utils.createTempDir
### _Why are the changes needed?_
To close #3635.
- Change test suites to use `Utils.createTempDir` of the kyuubi-common module
to create temp dirs with delete-on-exit, preventing disk usage leaks.
- Change the signature of `Utils.createTempDir` by moving the `prefix` param
from second place to first, so it can be used without repeating the param name
itself (see the sketch below). The name `prefix` is also closer to Java's style
in `Files.createTempDirectory`.
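A minimal sketch of the new call pattern, matching the signature in the
`Utils.scala` hunk below (the `warehouseRoot` value is illustrative only):

```scala
import org.apache.kyuubi.Utils

// Prefix-only call: `root` defaults to java.io.tmpdir, and the returned
// java.nio.file.Path has deleteOnExit() registered on its File.
val metastore = Utils.createTempDir("hms")

// Explicit root: pass it as the second (or named) parameter.
val warehouseRoot = System.getProperty("java.io.tmpdir") // illustrative only
val warehouse = Utils.createTempDir(prefix = "spark-warehouse", root = warehouseRoot)
```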
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including
negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run
test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests)
locally before making a pull request
Closes #3636 from bowenliang123/3635-tempdir.
Closes #3635
5f84a16f [Bowen Liang] nit
b82a149f [Bowen Liang] rename `namePrefix` param to `prefix` of
`Utils.createTempDir` method, and move it to first place
76d33143 [Bowen Liang] delete files on exit in test suites with
Utils.createTempDir
Authored-by: Bowen Liang <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
extensions/spark/kyuubi-spark-authz/pom.xml | 7 +++++++
.../apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala | 5 +++--
.../authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala | 5 ++---
.../scala/org/apache/kyuubi/engine/flink/WithFlinkSQLEngine.scala | 6 ++----
.../apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala | 4 ++--
.../main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala | 2 +-
.../engine/hive/operation/HiveCatalogDatabaseOperationSuite.scala | 2 +-
.../apache/kyuubi/engine/hive/operation/HiveOperationSuite.scala | 2 +-
.../main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala | 2 +-
.../operation/KyuubiOperationHiveEnginePerConnectionSuite.scala | 2 +-
.../it/hive/operation/KyuubiOperationHiveEnginePerUserSuite.scala | 2 +-
kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala | 6 +++---
.../src/test/scala/org/apache/kyuubi/KerberizedTestHelper.scala | 5 ++---
.../src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala | 4 ++--
.../org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala | 6 +++---
15 files changed, 32 insertions(+), 28 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/pom.xml
b/extensions/spark/kyuubi-spark-authz/pom.xml
index cf61281d4..45513a83b 100644
--- a/extensions/spark/kyuubi-spark-authz/pom.xml
+++ b/extensions/spark/kyuubi-spark-authz/pom.xml
@@ -253,6 +253,13 @@
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.kyuubi</groupId>
+ <artifactId>kyuubi-common_${scala.binary.version}</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_${scala.binary.version}</artifactId>
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index eb2b77959..46e722ba4 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -21,6 +21,7 @@ import java.nio.file.Files
import org.apache.spark.sql.{DataFrame, SparkSession, SparkSessionExtensions}
+import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
trait SparkSessionProvider {
@@ -36,7 +37,7 @@ trait SparkSessionProvider {
protected lazy val spark: SparkSession = {
val metastore = {
- val path = Files.createTempDirectory("hms")
+ val path = Utils.createTempDir("hms")
Files.delete(path)
path
}
@@ -47,7 +48,7 @@ trait SparkSessionProvider {
.config("spark.sql.catalogImplementation", catalogImpl)
.config(
"spark.sql.warehouse.dir",
- Files.createTempDirectory("spark-warehouse").toString)
+ Utils.createTempDir("spark-warehouse").toString)
.config("spark.sql.extensions", sqlExtensions)
.withExtensions(extension)
.getOrCreate()
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
index fe2902ada..ed92e59f4 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
@@ -17,8 +17,7 @@
package org.apache.kyuubi.plugin.spark.authz.ranger
// scalastyle:off
-import java.nio.file.Files
-
+import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
/**
@@ -45,7 +44,7 @@ class IcebergCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite
spark.conf.set(s"spark.sql.catalog.$catalogV2.type", "hadoop")
spark.conf.set(
s"spark.sql.catalog.$catalogV2.warehouse",
- Files.createTempDirectory("iceberg-hadoop").toString)
+ Utils.createTempDir("iceberg-hadoop").toString)
super.beforeAll()
diff --git
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/WithFlinkSQLEngine.scala
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/WithFlinkSQLEngine.scala
index 67a9618b6..fbfb8df29 100644
---
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/WithFlinkSQLEngine.scala
+++
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/WithFlinkSQLEngine.scala
@@ -17,8 +17,6 @@
package org.apache.kyuubi.engine.flink
-import java.nio.file.Files
-
import scala.collection.JavaConverters._
import org.apache.flink.client.cli.{CustomCommandLine, DefaultCLI}
@@ -26,7 +24,7 @@ import org.apache.flink.configuration.{Configuration,
RestOptions}
import org.apache.flink.runtime.minicluster.{MiniCluster,
MiniClusterConfiguration}
import org.apache.flink.table.client.gateway.context.DefaultContext
-import org.apache.kyuubi.KyuubiFunSuite
+import org.apache.kyuubi.{KyuubiFunSuite, Utils}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.flink.util.TestUserClassLoaderJar
@@ -70,7 +68,7 @@ trait WithFlinkSQLEngine extends KyuubiFunSuite {
kyuubiConf.set(k, v)
}
val udfJar = TestUserClassLoaderJar.createJarFile(
- Files.createTempDirectory("test-jar").toFile,
+ Utils.createTempDir("test-jar").toFile,
"test-classloader-udf.jar",
GENERATED_UDF_CLASS,
GENERATED_UDF_CODE)
diff --git
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
index 660c9b0e7..a7bc2428e 100644
---
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
+++
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
@@ -17,7 +17,6 @@
package org.apache.kyuubi.engine.flink.operation
-import java.nio.file.Files
import java.sql.DatabaseMetaData
import java.util.UUID
@@ -29,6 +28,7 @@ import org.apache.hive.service.rpc.thrift._
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.time.SpanSugar._
+import org.apache.kyuubi.Utils
import org.apache.kyuubi.config.KyuubiConf._
import org.apache.kyuubi.engine.flink.FlinkEngineUtils._
import org.apache.kyuubi.engine.flink.WithFlinkSQLEngine
@@ -999,7 +999,7 @@ class FlinkOperationSuite extends WithFlinkSQLEngine with
HiveJDBCTestHelper {
test("execute statement - add/remove/show jar") {
val jarName = s"newly-added-${UUID.randomUUID()}.jar"
val newJar = TestUserClassLoaderJar.createJarFile(
- Files.createTempDirectory("add-jar-test").toFile,
+ Utils.createTempDir("add-jar-test").toFile,
jarName,
GENERATED_UDF_CLASS,
GENERATED_UDF_CODE).toPath
diff --git
a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
index 73132d5fc..346f9c44b 100644
---
a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
+++
b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/HiveSQLEngine.scala
@@ -92,7 +92,7 @@ object HiveSQLEngine extends Logging {
hiveConf.setBoolean("datanucleus.schema.autoCreateAll", true)
hiveConf.set(
"hive.metastore.warehouse.dir",
- Utils.createTempDir(namePrefix = "kyuubi_hive_warehouse").toString)
+ Utils.createTempDir("kyuubi_hive_warehouse").toString)
hiveConf.set("hive.metastore.fastpath", "true")
}
diff --git
a/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveCatalogDatabaseOperationSuite.scala
b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveCatalogDatabaseOperationSuite.scala
index aa95c4df6..956ba620a 100644
---
a/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveCatalogDatabaseOperationSuite.scala
+++
b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveCatalogDatabaseOperationSuite.scala
@@ -27,7 +27,7 @@ import org.apache.kyuubi.operation.HiveJDBCTestHelper
class HiveCatalogDatabaseOperationSuite extends HiveJDBCTestHelper {
override def beforeAll(): Unit = {
- val metastore = Utils.createTempDir(namePrefix = getClass.getSimpleName)
+ val metastore = Utils.createTempDir(getClass.getSimpleName)
metastore.toFile.delete()
val args = Array(
"--conf",
diff --git
a/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationSuite.scala
b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationSuite.scala
index 3595b35c2..04d58a880 100644
---
a/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationSuite.scala
+++
b/externals/kyuubi-hive-sql-engine/src/test/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationSuite.scala
@@ -26,7 +26,7 @@ import org.apache.kyuubi.jdbc.hive.KyuubiStatement
class HiveOperationSuite extends HiveEngineTests {
override def beforeAll(): Unit = {
- val metastore = Utils.createTempDir(namePrefix = getClass.getSimpleName)
+ val metastore = Utils.createTempDir(getClass.getSimpleName)
metastore.toFile.delete()
val args = Array(
"--conf",
diff --git
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
index 3462c67ce..ff96ef4f9 100644
---
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
+++
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
@@ -143,7 +143,7 @@ object SparkSQLEngine extends Logging {
_sparkConf = new SparkConf()
_kyuubiConf = KyuubiConf()
val rootDir =
_sparkConf.getOption("spark.repl.classdir").getOrElse(getLocalDir(_sparkConf))
- val outputDir = Utils.createTempDir(root = rootDir, namePrefix = "repl")
+ val outputDir = Utils.createTempDir(prefix = "repl", root = rootDir)
_sparkConf.setIfMissing("spark.sql.execution.topKSortFallbackThreshold",
"10000")
_sparkConf.setIfMissing("spark.sql.legacy.castComplexTypesToString.enabled",
"true")
_sparkConf.setIfMissing("spark.master", "local")
diff --git
a/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerConnectionSuite.scala
b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerConnectionSuite.scala
index 08ce79e42..f3f267818 100644
---
a/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerConnectionSuite.scala
+++
b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerConnectionSuite.scala
@@ -29,7 +29,7 @@ class KyuubiOperationHiveEnginePerConnectionSuite extends
WithKyuubiServer with
val kyuubiHome: String =
Utils.getCodeSourceLocation(getClass).split("integration-tests").head
override protected val conf: KyuubiConf = {
- val metastore = Utils.createTempDir(namePrefix = getClass.getSimpleName)
+ val metastore = Utils.createTempDir(getClass.getSimpleName)
metastore.toFile.delete()
val currentUser = Utils.currentUser
KyuubiConf()
diff --git
a/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerUserSuite.scala
b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerUserSuite.scala
index de4b9f5cb..8f5efdcd4 100644
---
a/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerUserSuite.scala
+++
b/integration-tests/kyuubi-hive-it/src/test/scala/org/apache/kyuubi/it/hive/operation/KyuubiOperationHiveEnginePerUserSuite.scala
@@ -28,7 +28,7 @@ class KyuubiOperationHiveEnginePerUserSuite extends
WithKyuubiServer with HiveEn
val kyuubiHome: String =
Utils.getCodeSourceLocation(getClass).split("integration-tests").head
override protected val conf: KyuubiConf = {
- val metastore = Utils.createTempDir(namePrefix = getClass.getSimpleName)
+ val metastore = Utils.createTempDir(getClass.getSimpleName)
metastore.toFile.delete()
KyuubiConf()
.set(s"$KYUUBI_ENGINE_ENV_PREFIX.$KYUUBI_HOME", kyuubiHome)
diff --git a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
index b79038b2f..19b84d7d6 100644
--- a/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
+++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/Utils.scala
@@ -136,9 +136,9 @@ object Utils extends Logging {
* automatically deleted when the VM shuts down.
*/
def createTempDir(
- root: String = System.getProperty("java.io.tmpdir"),
- namePrefix: String = "kyuubi"): Path = {
- val dir = createDirectory(root, namePrefix)
+ prefix: String = "kyuubi",
+ root: String = System.getProperty("java.io.tmpdir")): Path = {
+ val dir = createDirectory(root, prefix)
dir.toFile.deleteOnExit()
dir
}
diff --git
a/kyuubi-common/src/test/scala/org/apache/kyuubi/KerberizedTestHelper.scala
b/kyuubi-common/src/test/scala/org/apache/kyuubi/KerberizedTestHelper.scala
index 6bca3244a..d877f1d52 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/KerberizedTestHelper.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/KerberizedTestHelper.scala
@@ -34,9 +34,8 @@ import org.ietf.jgss.{GSSContext, GSSException, GSSManager,
GSSName}
import org.scalatest.time.SpanSugar._
trait KerberizedTestHelper extends KyuubiFunSuite {
- val baseDir: File = Utils.createTempDir(
- Utils.getCodeSourceLocation(getClass),
- "kyuubi-kdc").toFile
+ val baseDir: File =
+ Utils.createTempDir("kyuubi-kdc", Utils.getCodeSourceLocation(getClass)).toFile
val kdcConf = MiniKdc.createConf()
val hostName = "localhost"
kdcConf.setProperty(MiniKdc.INSTANCE, this.getClass.getSimpleName)
diff --git
a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
index 65c457601..ed7855399 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/ProcBuilder.scala
@@ -125,10 +125,10 @@ trait ProcBuilder {
if (Files.isDirectory(working)) {
working
} else {
- Utils.createTempDir(rootAbs, proxyUser)
+ Utils.createTempDir(prefix = proxyUser, root = rootAbs)
}
}.getOrElse {
- Utils.createTempDir(namePrefix = proxyUser)
+ Utils.createTempDir(proxyUser)
}
}
diff --git
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index a4f33dde1..5ae03f545 100644
---
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -151,7 +151,7 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper
with MockitoSugar {
test(s"sub process log should be overwritten") {
def atomicTest(): Unit = {
val pool = Executors.newFixedThreadPool(3)
- val fakeWorkDir = Files.createTempDirectory("fake")
+ val fakeWorkDir = Utils.createTempDir("fake")
val dir = fakeWorkDir.toFile
try {
assert(dir.list().length == 0)
@@ -197,7 +197,7 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper
with MockitoSugar {
}
test("overwrite log file should cleanup before write") {
- val fakeWorkDir = Files.createTempDirectory("fake")
+ val fakeWorkDir = Utils.createTempDir("fake")
val conf = KyuubiConf()
conf.set(ENGINE_LOG_TIMEOUT, Duration.ofDays(1).toMillis)
val builder1 = new FakeSparkProcessBuilder(conf) {
@@ -220,7 +220,7 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper
with MockitoSugar {
}
test("main resource jar should not check when is not a local file") {
- val workDir = Files.createTempDirectory("resource")
+ val workDir = Utils.createTempDir("resource")
val jarPath = Paths.get(workDir.toString, "test.jar")
val hdfsPath = s"hdfs://$jarPath"