This is an automated email from the ASF dual-hosted git repository.

changchen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 6c6a56a7f5 [GLUTEN-11550][UT] Fix GlutenLogQuerySuite
6c6a56a7f5 is described below

commit 6c6a56a7f51f8122a361963d80b8945c3725f51c
Author: Kapil Kumar Singh <[email protected]>
AuthorDate: Fri Feb 6 10:25:43 2026 +0530

    [GLUTEN-11550][UT] Fix GlutenLogQuerySuite
---
 .../spark40/src/test/resources/log4j2.properties   | 13 +++++++
 .../gluten/utils/velox/VeloxTestSettings.scala     |  4 ++-
 .../org/apache/spark/sql/GlutenLogQuerySuite.scala | 40 +++++++++++++++++++++-
 .../spark41/src/test/resources/log4j2.properties   | 13 +++++++
 .../gluten/utils/velox/VeloxTestSettings.scala     |  4 ++-
 .../org/apache/spark/sql/GlutenLogQuerySuite.scala | 40 +++++++++++++++++++++-
 6 files changed, 110 insertions(+), 4 deletions(-)

diff --git a/gluten-ut/spark40/src/test/resources/log4j2.properties 
b/gluten-ut/spark40/src/test/resources/log4j2.properties
index fb1cadec5f..6760c31fac 100644
--- a/gluten-ut/spark40/src/test/resources/log4j2.properties
+++ b/gluten-ut/spark40/src/test/resources/log4j2.properties
@@ -37,3 +37,16 @@ appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: 
%m%n%ex
 
 appender.file.filter.threshold.type = ThresholdFilter
 appender.file.filter.threshold.level = info
+
+# For GlutenLogQuerySuite
+appender.structured.type = File
+appender.structured.name = structured
+appender.structured.fileName = target/LogQuerySuite.log
+appender.structured.layout.type = JsonTemplateLayout
+appender.structured.layout.eventTemplateUri = classpath:org/apache/spark/SparkLayout.json
+
+# Custom loggers
+logger.structured.name = org.apache.spark.sql.GlutenLogQuerySuite
+logger.structured.level = trace
+logger.structured.appenderRefs = structured
+logger.structured.appenderRef.structured.ref = structured
diff --git 
a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
 
b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index ad7796621a..202705b6d1 100644
--- 
a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ 
b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -822,7 +822,9 @@ class VeloxTestSettings extends BackendTestSettings {
   enableSuite[GlutenICUCollationsMapSuite]
   enableSuite[GlutenInlineTableParsingImprovementsSuite]
   enableSuite[GlutenJoinHintSuite]
-  // TODO: 4.x enableSuite[GlutenLogQuerySuite]  // 2 failures
+  enableSuite[GlutenLogQuerySuite]
+    // Overridden
+    .exclude("Query Spark logs with exception using SQL")
   enableSuite[GlutenPercentileQuerySuite]
   enableSuite[GlutenRandomDataGeneratorSuite]
   enableSuite[GlutenRowJsonSuite]
diff --git 
a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
 
b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
index 634da8516f..ce92e5b35b 100644
--- 
a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
+++ 
b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
@@ -16,4 +16,42 @@
  */
 package org.apache.spark.sql
 
-class GlutenLogQuerySuite extends LogQuerySuite with GlutenSQLTestsTrait {}
+import org.apache.spark.internal.{LogKeys, MDC}
+import org.apache.spark.util.LogUtils.SPARK_LOG_SCHEMA
+
+import java.util.Locale
+
+class GlutenLogQuerySuite extends LogQuerySuite with GlutenSQLTestsTrait {
+  private def createTempView(viewName: String): Unit = {
+    spark.read
+      .schema(SPARK_LOG_SCHEMA)
+      .json(logFile.getCanonicalPath)
+      .createOrReplaceTempView(viewName)
+  }
+
+  testGluten("Query Spark logs with exception using SQL") {
+    val msg = log"Task ${MDC(LogKeys.TASK_ID, "2")} failed."
+    val exception = new RuntimeException("OOM")
+    logError(msg, exception)
+
+    withTempView("logs") {
+      createTempView("logs")
+      val expectedMDC = Map(LogKeys.TASK_ID.name.toLowerCase(Locale.ROOT) -> 
"2")
+      checkAnswer(
+        spark.sql(
+          "SELECT level, msg, context, exception.class, exception.msg FROM 
logs " +
+            s"WHERE msg = '${msg.message}'"),
+        Row("ERROR", msg.message, expectedMDC, "java.lang.RuntimeException", 
"OOM") :: Nil
+      )
+
+      val stacktrace =
+        spark.sql(s"SELECT exception.stacktrace FROM logs WHERE msg = 
'${msg.message}'").collect()
+      assert(stacktrace.length == 1)
+      val topStacktraceArray = stacktrace.head.getSeq[Row](0).head
+      assert(topStacktraceArray.getString(0) == this.getClass.getName)
+      assert(topStacktraceArray.getString(1) != "")
+      assert(topStacktraceArray.getString(2) == this.getClass.getSimpleName + 
".scala")
+      assert(topStacktraceArray.getString(3) != "")
+    }
+  }
+}
diff --git a/gluten-ut/spark41/src/test/resources/log4j2.properties 
b/gluten-ut/spark41/src/test/resources/log4j2.properties
index fb1cadec5f..6760c31fac 100644
--- a/gluten-ut/spark41/src/test/resources/log4j2.properties
+++ b/gluten-ut/spark41/src/test/resources/log4j2.properties
@@ -37,3 +37,16 @@ appender.file.layout.pattern = %d{HH:mm:ss.SSS} %t %p %c{1}: 
%m%n%ex
 
 appender.file.filter.threshold.type = ThresholdFilter
 appender.file.filter.threshold.level = info
+
+# For GlutenLogQuerySuite
+appender.structured.type = File
+appender.structured.name = structured
+appender.structured.fileName = target/LogQuerySuite.log
+appender.structured.layout.type = JsonTemplateLayout
+appender.structured.layout.eventTemplateUri = classpath:org/apache/spark/SparkLayout.json
+
+# Custom loggers
+logger.structured.name = org.apache.spark.sql.GlutenLogQuerySuite
+logger.structured.level = trace
+logger.structured.appenderRefs = structured
+logger.structured.appenderRef.structured.ref = structured
diff --git 
a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
 
b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index dd10657ed7..a74142c95d 100644
--- 
a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ 
b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -787,7 +787,9 @@ class VeloxTestSettings extends BackendTestSettings {
   enableSuite[GlutenICUCollationsMapSuite]
   enableSuite[GlutenInlineTableParsingImprovementsSuite]
   enableSuite[GlutenJoinHintSuite]
-  // TODO: 4.x enableSuite[GlutenLogQuerySuite]  // 2 failures
+  enableSuite[GlutenLogQuerySuite]
+    // Overridden
+    .exclude("Query Spark logs with exception using SQL")
   enableSuite[GlutenPercentileQuerySuite]
   enableSuite[GlutenRandomDataGeneratorSuite]
   enableSuite[GlutenRowJsonSuite]
diff --git 
a/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
 
b/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
index 634da8516f..cac8bd874a 100644
--- 
a/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
+++ 
b/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/GlutenLogQuerySuite.scala
@@ -16,4 +16,42 @@
  */
 package org.apache.spark.sql
 
-class GlutenLogQuerySuite extends LogQuerySuite with GlutenSQLTestsTrait {}
+import org.apache.spark.internal.LogKeys
+import org.apache.spark.util.LogUtils.SPARK_LOG_SCHEMA
+
+import java.util.Locale
+
+class GlutenLogQuerySuite extends LogQuerySuite with GlutenSQLTestsTrait {
+  private def createTempView(viewName: String): Unit = {
+    spark.read
+      .schema(SPARK_LOG_SCHEMA)
+      .json(logFile.getCanonicalPath)
+      .createOrReplaceTempView(viewName)
+  }
+
+  testGluten("Query Spark logs with exception using SQL") {
+    val msg = log"Task ${MDC(LogKeys.TASK_ID, "2")} failed."
+    val exception = new RuntimeException("OOM")
+    logError(msg, exception)
+
+    withTempView("logs") {
+      createTempView("logs")
+      val expectedMDC = Map(LogKeys.TASK_ID.name.toLowerCase(Locale.ROOT) -> 
"2")
+      checkAnswer(
+        spark.sql(
+          "SELECT level, msg, context, exception.class, exception.msg FROM 
logs " +
+            s"WHERE msg = '${msg.message}'"),
+        Row("ERROR", msg.message, expectedMDC, "java.lang.RuntimeException", 
"OOM") :: Nil
+      )
+
+      val stacktrace =
+        spark.sql(s"SELECT exception.stacktrace FROM logs WHERE msg = 
'${msg.message}'").collect()
+      assert(stacktrace.length == 1)
+      val topStacktraceArray = stacktrace.head.getSeq[Row](0).head
+      assert(topStacktraceArray.getString(0) == this.getClass.getName)
+      assert(topStacktraceArray.getString(1) != "")
+      assert(topStacktraceArray.getString(2) == this.getClass.getSimpleName + 
".scala")
+      assert(topStacktraceArray.getString(3) != "")
+    }
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to