yaooqinn commented on a change in pull request #35048:
URL: https://github.com/apache/spark/pull/35048#discussion_r776132629
##########
File path:
sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
##########
@@ -486,4 +487,83 @@ class SparkSessionBuilderSuite extends SparkFunSuite with
BeforeAndAfterEach wit
}
assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
}
+
+ test("SPARK-37727: Show ignored configurations in debug level logs") {
+ // Create one existing SparkSession to check following logs.
+ SparkSession.builder().master("local").getOrCreate()
+
+ val logAppender = new LogAppender
+ logAppender.setThreshold(Level.DEBUG)
+ withLogAppender(logAppender, level = Some(Level.DEBUG)) {
+ SparkSession.builder()
+ .master("local")
+ .config("spark.sql.warehouse.dir", "2")
+ .config("spark.abc", "abcb")
+ .config("spark.abcd", "abcb4")
+ .getOrCreate()
+ }
+
+ Seq(
+ "Ignored static SQL configurations",
+ "spark.sql.warehouse.dir=2",
+ "Configurations that might not take effect",
+ "spark.abcd=abcb4",
+ "spark.abc=abcb").foreach { msg =>
+
assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
+ }
+ }
+
+ test("SPARK-37727: Hide the same configuration already explicitly set in
logs") {
+ // Create one existing SparkSession to check following logs.
+ SparkSession.builder().master("local").config("spark.abc",
"abc").getOrCreate()
+
+ val logAppender = new LogAppender
+ logAppender.setThreshold(Level.DEBUG)
+ withLogAppender(logAppender, level = Some(Level.DEBUG)) {
+ // Ignore logs because it's already set.
+ SparkSession.builder().master("local").config("spark.abc",
"abc").getOrCreate()
+ // Show logs for only configuration newly set.
+ SparkSession.builder().master("local").config("spark.abc.new",
"abc").getOrCreate()
+      // Ignore logs because it's already set above.
+ SparkSession.builder().master("local").config("spark.abc.new",
"abc").getOrCreate()
+ }
+
+ Seq(
+ "Using an existing Spark session; only runtime SQL configurations will
take effect",
+ "Configurations that might not take effect",
+ "spark.abc.new=abc").foreach { msg =>
+
assert(logAppender.loggingEvents.exists(_.getMessage.getFormattedMessage.contains(msg)))
+ }
+
+ assert(!logAppender.loggingEvents.exists(
+ _.getMessage.getFormattedMessage.contains("spark.abc=abc")))
+ }
+
+ test("SPARK-37727: Hide runtime SQL configurations in logs") {
+ // Create one existing SparkSession to check following logs.
+ SparkSession.builder().master("local").getOrCreate()
+
+ val logAppender = new LogAppender
+ logAppender.setThreshold(Level.DEBUG)
+ withLogAppender(logAppender, level = Some(Level.DEBUG)) {
+ // Ignore logs for runtime SQL configurations
+ SparkSession.builder().master("local").config("spark.sql.ansi.enabled",
"true").getOrCreate()
+ // Show logs for Spark core configuration
+ SparkSession.builder().master("local").config("spark.buffer.size",
"1234").getOrCreate()
+ // Show logs for custom runtime options
+ SparkSession.builder().master("local").config("spark.sql.source.abc",
"abc").getOrCreate()
Review comment:
```suggestion
SparkSession.builder().config("spark.sql.source.abc",
"abc").getOrCreate()
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]