This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new fe317dc  [SPARK-27243][SQL] RuleExecutor.dumpTimeSpent should not throw exception when empty
fe317dc is described below

commit fe317dc74e5fa1509ae9d735485f66724f7292e5
Author: Marco Gaido <marcogaid...@gmail.com>
AuthorDate: Sat Mar 23 09:49:20 2019 +0900

    [SPARK-27243][SQL] RuleExecutor.dumpTimeSpent should not throw exception when empty

    ## What changes were proposed in this pull request?

    `RuleExecutor.dumpTimeSpent` currently throws an exception when invoked before any rule has run
    or immediately after `RuleExecutor.reset`. This PR makes it return an empty summary instead,
    which is the expected output.

    ## How was this patch tested?

    Added a unit test.

    Closes #24180 from mgaido91/SPARK-27243.

    Authored-by: Marco Gaido <marcogaid...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala  | 6 +++++-
 .../org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala   | 6 ++++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala
index e4d5fa9..7a86433 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/QueryExecutionMetering.scala
@@ -64,7 +64,11 @@ case class QueryExecutionMetering() {
   /** Dump statistics about time spent running specific rules. */
   def dumpTimeSpent(): String = {
     val map = timeMap.asMap().asScala
-    val maxLengthRuleNames = map.keys.map(_.toString.length).max
+    val maxLengthRuleNames = if (map.isEmpty) {
+      0
+    } else {
+      map.keys.map(_.toString.length).max
+    }

     val colRuleName = "Rule".padTo(maxLengthRuleNames, " ").mkString
     val colRunTime = "Effective Time / Total Time".padTo(len = 47, " ").mkString
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
index ab5d722..8dbe198 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
@@ -91,4 +91,10 @@ class RuleExecutorSuite extends SparkFunSuite {
     }.getMessage
     assert(message.contains("the structural integrity of the plan is broken"))
   }
+
+  test("SPARK-27243: dumpTimeSpent when no rule has run") {
+    RuleExecutor.resetMetrics()
+    // This should not throw an exception
+    RuleExecutor.dumpTimeSpent()
+  }
 }
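For context on the failure mode: Scala's `max` throws `java.lang.UnsupportedOperationException` on an
empty collection, which is why `dumpTimeSpent` failed before any rule timing was recorded. The sketch
below is a standalone illustration of the guard applied in this commit, not Spark code; the object name
and sample rule names are hypothetical.

    // Standalone sketch (hypothetical names) of the empty-collection guard.
    object DumpTimeSpentSketch {
      def maxRuleNameLength(ruleTimings: Map[String, Long]): Int =
        if (ruleTimings.isEmpty) {
          0                                    // fixed path: empty summary instead of an exception
        } else {
          ruleTimings.keys.map(_.length).max   // original path: .max is safe only when timings exist
        }

      def main(args: Array[String]): Unit = {
        println(maxRuleNameLength(Map.empty))  // 0 -- no exception when no rule has run yet
        println(maxRuleNameLength(Map("ConstantFolding" -> 7L, "PushDownPredicate" -> 42L)))  // 17
      }
    }

With a width of 0, `"Rule".padTo(maxLengthRuleNames, " ").mkString` simply returns "Rule" (padTo is a
no-op when the target length does not exceed the current length), so the header line still prints and
the summary body is just empty.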