This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new fe7bdce8d12 
[SPARK-43820][SPARK-43822][SPARK-43823][SPARK-43826][SPARK-43827] Assign names 
to the error class _LEGACY_ERROR_TEMP_241[1-7]
fe7bdce8d12 is described below

commit fe7bdce8d121e2733e82706177d34f0342db0cbe
Author: Jiaan Geng <[email protected]>
AuthorDate: Sun May 28 13:50:59 2023 +0300

    [SPARK-43820][SPARK-43822][SPARK-43823][SPARK-43826][SPARK-43827] Assign 
names to the error class _LEGACY_ERROR_TEMP_241[1-7]
    
    ### What changes were proposed in this pull request?
    The PR aims to assign names to the error classes _LEGACY_ERROR_TEMP_241[1-7].
    
    ### Why are the changes needed?
    Improve the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    Existing test cases.
    
    Closes #41339 from beliefer/2411-2417.
    
    Authored-by: Jiaan Geng <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 core/src/main/resources/error/error-classes.json   | 45 +++++++++-----------
 .../sql/catalyst/analysis/CheckAnalysis.scala      | 32 ++++++++-------
 .../sql/catalyst/analysis/AnalysisErrorSuite.scala | 20 ++++++---
 .../sql-tests/analyzer-results/percentiles.sql.out | 48 +++++++++++-----------
 .../sql-tests/results/percentiles.sql.out          | 48 +++++++++++-----------
 5 files changed, 99 insertions(+), 94 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 10a483396e6..3a11001ad9d 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -617,6 +617,11 @@
       "Not found an encoder of the type <typeName> to Spark SQL internal 
representation. Consider to change the input type to one of supported at 
'<docroot>/sql-ref-datatypes.html'."
     ]
   },
+  "EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE" : {
+    "message" : [
+      "The event time <eventName> has the invalid type <eventType>, but 
expected \"TIMESTAMP\"."
+    ]
+  },
   "FAILED_EXECUTE_UDF" : {
     "message" : [
       "Failed to execute user defined function (<functionName>: (<signature>) 
=> <result>)."
@@ -1371,6 +1376,11 @@
     ],
     "sqlState" : "42903"
   },
+  "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC" : {
+    "message" : [
+      "Cannot specify ORDER BY or a window frame for <aggFunc>."
+    ]
+  },
   "INVALID_WRITE_DISTRIBUTION" : {
     "message" : [
       "The requested write distribution is invalid."
@@ -1393,6 +1403,11 @@
       }
     }
   },
+  "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE" : {
+    "message" : [
+      "The join condition <joinCondition> has the invalid type 
<conditionType>, expected \"BOOLEAN\"."
+    ]
+  },
   "LOCATION_ALREADY_EXISTS" : {
     "message" : [
       "Cannot name the managed table as <identifier>, as its associated 
location <location> already exists. Please pick a different table name, or 
remove the existing location first."
@@ -1785,6 +1800,11 @@
     ],
     "sqlState" : "22023"
   },
+  "SEED_EXPRESSION_IS_UNFOLDABLE" : {
+    "message" : [
+      "The seed expression <seedExpr> of the expression <exprWithSeed> must be 
foldable."
+    ]
+  },
   "SORT_BY_WITHOUT_BUCKETING" : {
     "message" : [
       "sortBy must be used together with bucketBy."
@@ -5441,31 +5461,6 @@
       "failed to evaluate expression <sqlExpr>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2411" : {
-    "message" : [
-      "Cannot specify order by or frame for '<aggFunc>'."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2413" : {
-    "message" : [
-      "Input argument to <argName> must be a constant."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2414" : {
-    "message" : [
-      "Event time must be defined on a window or a timestamp, but <evName> is 
of type <evType>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2416" : {
-    "message" : [
-      "join condition '<join>' of type <type> is not a boolean."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2417" : {
-    "message" : [
-      "join condition '<condition>' of type <dataType> is not a boolean."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2418" : {
     "message" : [
       "Input argument tolerance must be a constant."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index 43f12fabf70..cafabb22d10 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -329,8 +329,8 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
                 if w.windowSpec.orderSpec.nonEmpty || 
w.windowSpec.frameSpecification !=
                     SpecifiedWindowFrame(RowFrame, UnboundedPreceding, 
UnboundedFollowing) =>
                 agg.failAnalysis(
-                  errorClass = "_LEGACY_ERROR_TEMP_2411",
-                  messageParameters = Map("aggFunc" -> 
agg.aggregateFunction.prettyName))
+                  errorClass = "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
+                  messageParameters = Map("aggFunc" -> 
toSQLExpr(agg.aggregateFunction)))
               case _: AggregateExpression | _: FrameLessOffsetWindowFunction |
                   _: AggregateWindowFunction => // OK
               case other =>
@@ -344,8 +344,10 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
 
           case e: ExpressionWithRandomSeed if !e.seedExpression.foldable =>
             e.failAnalysis(
-              errorClass = "_LEGACY_ERROR_TEMP_2413",
-              messageParameters = Map("argName" -> e.prettyName))
+              errorClass = "SEED_EXPRESSION_IS_UNFOLDABLE",
+              messageParameters = Map(
+                "seedExpr" -> toSQLExpr(e.seedExpression),
+                "exprWithSeed" -> toSQLExpr(e)))
 
           case p: Parameter =>
             p.failAnalysis(
@@ -363,10 +365,10 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
               case _: TimestampType =>
               case _ =>
                 etw.failAnalysis(
-                  errorClass = "_LEGACY_ERROR_TEMP_2414",
+                  errorClass = "EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE",
                   messageParameters = Map(
-                    "evName" -> etw.eventTime.name,
-                    "evType" -> etw.eventTime.dataType.catalogString))
+                    "eventName" -> toSQLId(etw.eventTime.name),
+                    "eventType" -> toSQLType(etw.eventTime.dataType)))
             }
           case f: Filter if f.condition.dataType != BooleanType =>
             f.failAnalysis(
@@ -378,18 +380,18 @@ trait CheckAnalysis extends PredicateHelper with 
LookupCatalog with QueryErrorsB
 
           case j @ Join(_, _, _, Some(condition), _) if condition.dataType != 
BooleanType =>
             j.failAnalysis(
-              errorClass = "_LEGACY_ERROR_TEMP_2416",
+              errorClass = "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE",
               messageParameters = Map(
-                "join" -> condition.sql,
-                "type" -> condition.dataType.catalogString))
+                "joinCondition" -> toSQLExpr(condition),
+                "conditionType" -> toSQLType(condition.dataType)))
 
           case j @ AsOfJoin(_, _, _, Some(condition), _, _, _)
               if condition.dataType != BooleanType =>
-            j.failAnalysis(
-              errorClass = "_LEGACY_ERROR_TEMP_2417",
-              messageParameters = Map(
-                "condition" -> condition.sql,
-                "dataType" -> condition.dataType.catalogString))
+            throw SparkException.internalError(
+              msg = s"join condition '${toSQLExpr(condition)}' " +
+                s"of type ${toSQLType(condition.dataType)} is not a boolean.",
+              context = j.origin.getQueryContext,
+              summary = j.origin.context.summary)
 
           case j @ AsOfJoin(_, _, _, _, _, _, Some(toleranceAssertion)) =>
             if (!toleranceAssertion.foldable) {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index 1dcecebd5dc..198c44f69ae 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -361,10 +361,11 @@ class AnalysisErrorSuite extends AnalysisTest {
     errorClass = "DATATYPE_MISMATCH.FILTER_NOT_BOOLEAN",
     messageParameters = Map("sqlExpr" -> "\"1\"", "filter" -> "\"1\"", "type" 
-> "\"INT\""))
 
-  errorTest(
+  errorClassTest(
     "non-boolean join conditions",
     testRelation.join(testRelation, condition = Some(Literal(1))),
-    "condition" :: "'1'" :: "not a boolean" :: 
Literal(1).dataType.simpleString :: Nil)
+    errorClass = "JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE",
+    messageParameters = Map("joinCondition" -> "\"1\"", "conditionType" -> 
"\"INT\""))
 
   errorClassTest(
     "missing group by",
@@ -964,10 +965,17 @@ class AnalysisErrorSuite extends AnalysisTest {
   }
 
   test("SPARK-33909: Check rand functions seed is legal at analyzer side") {
-    Seq(Rand("a".attr), Randn("a".attr)).foreach { r =>
-      val plan = Project(Seq(r.as("r")), testRelation)
-      assertAnalysisError(plan,
-        s"Input argument to ${r.prettyName} must be a constant." :: Nil)
+    Seq((Rand("a".attr), "\"rand(a)\""),
+      (Randn("a".attr), "\"randn(a)\"")).foreach {
+      case (r, expectedArg) =>
+        val plan = Project(Seq(r.as("r")), testRelation)
+        assertAnalysisErrorClass(plan,
+          expectedErrorClass = "SEED_EXPRESSION_IS_UNFOLDABLE",
+          expectedMessageParameters = Map(
+            "seedExpr" -> "\"a\"",
+            "exprWithSeed" -> expectedArg),
+          caseSensitive = false
+        )
     }
     Seq(
       Rand(1.0) -> ("\"rand(1.0)\"", "\"1.0\"", "\"DOUBLE\""),
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
index f4c05039b7f..9c945687830 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/percentiles.sql.out
@@ -206,9 +206,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -232,9 +232,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -257,9 +257,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -283,9 +283,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -309,9 +309,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -334,9 +334,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -416,9 +416,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -443,9 +443,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -469,9 +469,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -496,9 +496,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -523,9 +523,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -549,9 +549,9 @@ ORDER BY salary
 -- !query analysis
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out 
b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
index 38319875c71..1e307d9a061 100644
--- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
@@ -179,9 +179,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -207,9 +207,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -234,9 +234,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -262,9 +262,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -290,9 +290,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -317,9 +317,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -409,9 +409,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -438,9 +438,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -466,9 +466,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -495,9 +495,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_cont"
+    "aggFunc" : "\"percentile_cont(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -524,9 +524,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "percentile_disc"
+    "aggFunc" : "\"percentile_disc(salary, 0.25)\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -552,9 +552,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2411",
+  "errorClass" : "INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC",
   "messageParameters" : {
-    "aggFunc" : "median"
+    "aggFunc" : "\"median(salary)\""
   },
   "queryContext" : [ {
     "objectType" : "",


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to