This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 986b0f769b8 [SPARK-39117][SQL][TESTS] Do not include number of functions in sql-expression-schema.md 986b0f769b8 is described below commit 986b0f769b8ffa8a033d0f182217e83faa38fb4a Author: Wenchen Fan <wenc...@databricks.com> AuthorDate: Fri May 6 20:43:36 2022 +0300 [SPARK-39117][SQL][TESTS] Do not include number of functions in sql-expression-schema.md ### What changes were proposed in this pull request? `sql-expression-schema.md` is a golden file for tracking purposes: whenever we change a function or add a new function, this file must be updated. However, the function count recorded in this file is not very useful, and keeping it in the golden file causes merge conflicts whenever two people add functions at the same time. This PR prints the summary information during the test run instead of putting it in the golden file. ### Why are the changes needed? Increase development velocity. ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? N/A Closes #36472 from cloud-fan/small. 
Authored-by: Wenchen Fan <wenc...@databricks.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../sql-functions/sql-expression-schema.md | 4 -- .../apache/spark/sql/ExpressionsSchemaSuite.scala | 48 +++++----------------- 2 files changed, 11 insertions(+), 41 deletions(-) diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md index accf9ea4577..0115578e909 100644 --- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md +++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md @@ -1,8 +1,4 @@ <!-- Automatically generated by ExpressionsSchemaSuite --> -## Summary - - Number of queries: 390 - - Number of expressions that missing example: 12 - - Expressions missing examples: bigint,binary,boolean,date,decimal,double,float,int,smallint,string,timestamp,tinyint ## Schema of Built-in Functions | Class name | Function name or alias | Query example | Output schema | | ---------- | ---------------------- | ------------- | ------------- | diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala index f8071e6cda1..d6ef90ce0b7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala @@ -133,10 +133,6 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession { val header = Seq( s"<!-- Automatically generated by ${getClass.getSimpleName} -->", - "## Summary", - s" - Number of queries: ${outputs.size}", - s" - Number of expressions that missing example: ${missingExamples.size}", - s" - Expressions missing examples: ${missingExamples.mkString(",")}", "## Schema of Built-in Functions", "| Class name | Function name or alias | Query example | Output schema |", "| ---------- | ---------------------- | ------------- | 
------------- |" @@ -149,11 +145,20 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession { assert(parent.mkdirs(), "Could not create directory: " + parent) } stringToFile(resultFile, goldenOutput) + // scalastyle:off println + println( + s""" + |## Summary + | - Number of queries: ${outputs.size} + | - Number of expressions that missing example: ${missingExamples.size} + | - Expressions missing examples: ${missingExamples.mkString(",")} + |""".stripMargin) + // scalastyle:on println } val outputSize = outputs.size val headerSize = header.size - val (expectedMissingExamples, expectedOutputs) = { + val expectedOutputs = { val expectedGoldenOutput = fileToString(resultFile) val lines = expectedGoldenOutput.split("\n") val expectedSize = lines.size @@ -162,8 +167,7 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession { s"Expected $expectedSize blocks in result file but got " + s"${outputSize + headerSize}. Try regenerating the result files.") - val numberOfQueries = lines(2).split(":")(1).trim.toInt - val expectedOutputs = Seq.tabulate(outputSize) { i => + Seq.tabulate(outputSize) { i => val segments = lines(i + headerSize).split('|') QueryOutput( className = segments(1).trim, @@ -171,28 +175,6 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession { sql = segments(3).trim, schema = segments(4).trim) } - - assert(numberOfQueries == expectedOutputs.size, - s"expected outputs size: ${expectedOutputs.size} not same as numberOfQueries: " + - s"$numberOfQueries record in result file. 
Try regenerating the result files.") - - val numberOfMissingExamples = lines(3).split(":")(1).trim.toInt - val expectedMissingExamples = { - val missingExamples = lines(4).split(":")(1).trim - // Splitting on a empty string would return [""] - if (missingExamples.nonEmpty) { - missingExamples.split(",") - } else { - Array.empty[String] - } - } - - assert(numberOfMissingExamples == expectedMissingExamples.size, - s"expected missing examples size: ${expectedMissingExamples.size} not same as " + - s"numberOfMissingExamples: $numberOfMissingExamples " + - "record in result file. Try regenerating the result files.") - - (expectedMissingExamples, expectedOutputs) } // Compare results. @@ -203,13 +185,5 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession { assert(expected.sql == output.sql, "SQL query did not match") assert(expected.schema == output.schema, s"Schema did not match for query ${expected.sql}") } - - // Compare expressions missing examples - assert(expectedMissingExamples.length == missingExamples.size, - "The number of missing examples not equals the number of expected missing examples.") - - missingExamples.zip(expectedMissingExamples).foreach { case (output, expected) => - assert(expected == output, "Missing example expression not match") - } } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org