This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 2103bb1df6b [SPARK-42249][SQL] Refining html link for documentation in error messages
2103bb1df6b is described below

commit 2103bb1df6bca3920d50920665c0aee1fdf0996b
Author: itholic <haejoon....@databricks.com>
AuthorDate: Wed Feb 8 06:28:17 2023 +0500

    [SPARK-42249][SQL] Refining html link for documentation in error messages
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to refine the HTML documentation links in error messages by introducing `SPARK_DOC_ROOT` in `core/src/main/scala/org/apache/spark/package.scala`, a global constant that holds the documentation root URL: `https://spark.apache.org/docs/latest`.
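    
    As a minimal, hypothetical illustration of the pattern (not Spark source code; in the actual change the substitution goes through the error-class message parameters shown in the diff below), message templates reference a `<docroot>` placeholder and call sites pass the shared constant instead of hard-coding the URL:
    
    ```scala
    // Standalone sketch only: resolve a <docroot> placeholder against one shared root URL.
    object DocRootExample {
      val SPARK_DOC_ROOT = "https://spark.apache.org/docs/latest"
    
      def main(args: Array[String]): Unit = {
        val template = "Consider one of the types supported at '<docroot>/sql-ref-datatypes.html'."
        println(template.replace("<docroot>", SPARK_DOC_ROOT))
      }
    }
    ```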
    
    ### Why are the changes needed?
    
    To improve error class readability and to make sure the right documentation root directory is used consistently across the code base.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Updated all existing tests related to these changes.
    
    Basically, run `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
    
    Closes #39820 from itholic/refine_html.
    
    Authored-by: itholic <haejoon....@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit 5225a3224f8339f13b1d917dc4ba7c63dd713552)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 build/spark-build-info                             |  1 +
 core/src/main/resources/error/error-classes.json   | 10 +++----
 core/src/main/scala/org/apache/spark/package.scala |  7 +++--
 .../spark/sql/catalyst/JavaTypeInference.scala     |  3 +-
 .../spark/sql/catalyst/ScalaReflection.scala       |  3 +-
 .../catalyst/util/DateTimeFormatterHelper.scala    |  6 ++--
 .../spark/sql/errors/QueryCompilationErrors.scala  |  9 +++---
 .../spark/sql/errors/QueryExecutionErrors.scala    | 19 ++++++++----
 .../analysis/ExpressionTypeCheckingSuite.scala     | 20 ++++++++-----
 .../encoders/EncoderErrorMessageSuite.scala        | 22 ++++++++++----
 .../catalyst/encoders/ExpressionEncoderSuite.scala |  6 ++--
 .../expressions/CallMethodViaReflectionSuite.scala |  8 +++--
 .../catalyst/expressions/ComplexTypeSuite.scala    |  8 +++--
 .../expressions/GeneratorExpressionSuite.scala     |  8 +++--
 .../expressions/StringExpressionsSuite.scala       | 17 +++++++----
 .../scala/org/apache/spark/sql/RuntimeConfig.scala |  3 +-
 .../resources/sql-tests/results/ansi/date.sql.out  |  4 +++
 .../results/ansi/datetime-parsing-invalid.sql.out  |  1 +
 .../results/ansi/string-functions.sql.out          |  4 +++
 .../sql-tests/results/ansi/timestamp.sql.out       |  6 ++++
 .../results/ansi/try_datetime_functions.sql.out    |  1 +
 .../results/ceil-floor-with-scale-param.sql.out    |  2 ++
 .../sql-tests/results/csv-functions.sql.out        |  1 +
 .../test/resources/sql-tests/results/date.sql.out  |  4 +++
 .../results/datetime-formatting-invalid.sql.out    | 22 ++++++++++++++
 .../sql-tests/results/datetime-legacy.sql.out      |  1 +
 .../results/datetime-parsing-invalid.sql.out       |  1 +
 .../sql-tests/results/json-functions.sql.out       |  4 +++
 .../results/sql-compatibility-functions.sql.out    |  1 +
 .../sql-tests/results/string-functions.sql.out     |  4 +++
 .../results/table-valued-functions.sql.out         |  2 ++
 .../sql-tests/results/timestamp-ntz.sql.out        |  1 +
 .../resources/sql-tests/results/timestamp.sql.out  |  6 ++++
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  4 +++
 .../results/timestampNTZ/timestamp.sql.out         |  4 +++
 .../results/try_datetime_functions.sql.out         |  1 +
 .../native/stringCastAndExpressions.sql.out        |  3 ++
 .../resources/sql-tests/results/udaf/udaf.sql.out  |  1 +
 .../sql-tests/results/udf/udf-udaf.sql.out         |  1 +
 .../apache/spark/sql/DataFrameFunctionsSuite.scala | 35 ++++++++++++++--------
 .../org/apache/spark/sql/DateFunctionsSuite.scala  |  5 ++--
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala |  5 ++--
 .../apache/spark/sql/StringFunctionsSuite.scala    |  5 ++--
 .../test/scala/org/apache/spark/sql/UDFSuite.scala |  8 +++--
 .../sql/errors/QueryCompilationErrorsSuite.scala   |  4 ++-
 .../spark/sql/hive/execution/HiveUDAFSuite.scala   |  4 ++-
 46 files changed, 220 insertions(+), 75 deletions(-)

diff --git a/build/spark-build-info b/build/spark-build-info
index 26157e8cf8c..4a4ff9169b3 100755
--- a/build/spark-build-info
+++ b/build/spark-build-info
@@ -33,6 +33,7 @@ echo_build_properties() {
   echo branch=$(git rev-parse --abbrev-ref HEAD)
   echo date=$(date -u +%Y-%m-%dT%H:%M:%SZ)
   echo url=$(git config --get remote.origin.url |  sed 's|https://\(.*\)@\(.*\)|https://\2|')
+  echo docroot=https://spark.apache.org/docs/latest
 }
 
 echo_build_properties $2 > "$SPARK_BUILD_INFO"
diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index 2b4fc4abe68..efa27e825ea 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -514,7 +514,7 @@
   },
   "ENCODER_NOT_FOUND" : {
     "message" : [
-      "Not found an encoder of the type <typeName> to Spark SQL internal representation. Consider to change the input type to one of supported at https://spark.apache.org/docs/latest/sql-ref-datatypes.html."
+      "Not found an encoder of the type <typeName> to Spark SQL internal representation. Consider to change the input type to one of supported at '<docroot>/sql-ref-datatypes.html'."
     ]
   },
   "FAILED_EXECUTE_UDF" : {
@@ -659,7 +659,7 @@
       "DATETIME_PATTERN_RECOGNITION" : {
         "message" : [
           "Spark >= 3.0:",
-          "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html."
+          "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
         ]
       },
       "PARSE_DATETIME_BY_NEW_PARSER" : {
@@ -1849,7 +1849,7 @@
     "subClass" : {
       "WITHOUT_SUGGESTION" : {
         "message" : [
-          "Please, refer to 'https://spark.apache.org/docs/latest/sql-ref-functions.html' for a fix."
+          "Please, refer to '<docroot>/sql-ref-functions.html' for a fix."
         ]
       },
       "WITH_SUGGESTION" : {
@@ -3506,7 +3506,7 @@
   "_LEGACY_ERROR_TEMP_1326" : {
     "message" : [
       "Cannot modify the value of a Spark config: <key>.",
-      "See also 'https://spark.apache.org/docs/latest/sql-migration-guide.html#ddl-statements'."
+      "See also '<docroot>/sql-migration-guide.html#ddl-statements'."
     ]
   },
   "_LEGACY_ERROR_TEMP_1327" : {
@@ -4221,7 +4221,7 @@
   },
   "_LEGACY_ERROR_TEMP_2130" : {
     "message" : [
-      "Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html."
+      "Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from '<docroot>/sql-ref-datetime-pattern.html'."
     ]
   },
   "_LEGACY_ERROR_TEMP_2131" : {
diff --git a/core/src/main/scala/org/apache/spark/package.scala 
b/core/src/main/scala/org/apache/spark/package.scala
index 5d0639e92c3..92cab14294f 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -54,7 +54,8 @@ package object spark {
         spark_revision: String,
         spark_build_user: String,
         spark_repo_url: String,
-        spark_build_date: String) = {
+        spark_build_date: String,
+        spark_doc_root: String) = {
 
       val resourceStream = Thread.currentThread().getContextClassLoader.
         getResourceAsStream("spark-version-info.properties")
@@ -72,7 +73,8 @@ package object spark {
           props.getProperty("revision", unknownProp),
           props.getProperty("user", unknownProp),
           props.getProperty("url", unknownProp),
-          props.getProperty("date", unknownProp)
+          props.getProperty("date", unknownProp),
+          props.getProperty("docroot", unknownProp)
         )
       } catch {
         case e: Exception =>
@@ -97,5 +99,6 @@ package object spark {
   val SPARK_BUILD_USER = SparkBuildInfo.spark_build_user
   val SPARK_REPO_URL = SparkBuildInfo.spark_repo_url
   val SPARK_BUILD_DATE = SparkBuildInfo.spark_build_date
+  val SPARK_DOC_ROOT = SparkBuildInfo.spark_doc_root
 }
 
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
index 105bed38704..36b98737a20 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/JavaTypeInference.scala
@@ -24,6 +24,7 @@ import javax.annotation.Nonnull
 import scala.annotation.tailrec
 import scala.reflect.ClassTag
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.sql.catalyst.encoders.AgnosticEncoder
 import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.{ArrayEncoder, 
BinaryEncoder, BoxedBooleanEncoder, BoxedByteEncoder, BoxedDoubleEncoder, 
BoxedFloatEncoder, BoxedIntEncoder, BoxedLongEncoder, BoxedShortEncoder, 
DayTimeIntervalEncoder, DEFAULT_JAVA_DECIMAL_ENCODER, EncoderField, 
IterableEncoder, JavaBeanEncoder, JavaBigIntEncoder, JavaEnumEncoder, 
LocalDateTimeEncoder, MapEncoder, PrimitiveBooleanEncoder, 
PrimitiveByteEncoder, PrimitiveDoubleEncoder, PrimitiveFloatEncoder, P [...]
 import org.apache.spark.sql.errors.QueryExecutionErrors
@@ -138,7 +139,7 @@ object JavaTypeInference {
       JavaBeanEncoder(ClassTag(c), fields)
 
     case _ =>
-      throw QueryExecutionErrors.cannotFindEncoderForTypeError(t.toString)
+      throw QueryExecutionErrors.cannotFindEncoderForTypeError(t.toString, SPARK_DOC_ROOT)
   }
 
   def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index 4680a2aec2b..2e03f32a58d 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -27,6 +27,7 @@ import scala.util.{Failure, Success}
 
 import org.apache.commons.lang3.reflect.ConstructorUtils
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.{expressions => exprs}
 import org.apache.spark.sql.catalyst.DeserializerBuildHelper._
@@ -954,7 +955,7 @@ object ScalaReflection extends ScalaReflection {
         }
         ProductEncoder(ClassTag(getClassFromType(t)), params)
       case _ =>
-        throw QueryExecutionErrors.cannotFindEncoderForTypeError(tpe.toString)
+        throw QueryExecutionErrors.cannotFindEncoderForTypeError(tpe.toString, SPARK_DOC_ROOT)
     }
   }
 }
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
index 96812cd65c1..fe14d74488c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
@@ -25,6 +25,7 @@ import java.util.{Date, Locale}
 
 import com.google.common.cache.CacheBuilder
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.sql.catalyst.util.DateTimeFormatterHelper._
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.internal.SQLConf
@@ -184,12 +185,13 @@ trait DateTimeFormatterHelper {
       } catch {
         case _: Throwable => throw e
       }
-      throw QueryExecutionErrors.failToRecognizePatternAfterUpgradeError(pattern, e)
+      throw QueryExecutionErrors.failToRecognizePatternAfterUpgradeError(
+        pattern, e, SPARK_DOC_ROOT)
   }
 
   protected def checkInvalidPattern(pattern: String): PartialFunction[Throwable, Nothing] = {
     case e: IllegalArgumentException =>
-      throw QueryExecutionErrors.failToRecognizePatternError(pattern, e)
+      throw QueryExecutionErrors.failToRecognizePatternError(pattern, e, SPARK_DOC_ROOT)
   }
 }
 
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 10649e1474a..634e4ac094d 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -21,7 +21,7 @@ import scala.collection.mutable
 
 import org.apache.hadoop.fs.Path
 
-import org.apache.spark.{SparkException, SparkThrowable, SparkThrowableHelper}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkThrowable, SparkThrowableHelper}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, 
TableIdentifier}
 import 
org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, 
FunctionAlreadyExistsException, NamespaceAlreadyExistsException, 
NoSuchFunctionException, NoSuchNamespaceException, NoSuchPartitionException, 
NoSuchTableException, ResolvedTable, Star, TableAlreadyExistsException, 
UnresolvedRegex}
@@ -677,7 +677,8 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
         messageParameters = Map(
           "functionName" -> toSQLId(name),
           "expectedNum" -> expectedNumberOfParameters,
-          "actualNum" -> actualNumber.toString))
+          "actualNum" -> actualNumber.toString,
+          "docroot" -> SPARK_DOC_ROOT))
     } else {
       new AnalysisException(
         errorClass = "WRONG_NUM_ARGS.WITH_SUGGESTION",
@@ -2986,10 +2987,10 @@ private[sql] object QueryCompilationErrors extends 
QueryErrorsBase {
       messageParameters = Map("key" -> key))
   }
 
-  def cannotModifyValueOfSparkConfigError(key: String): Throwable = {
+  def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1326",
-      messageParameters = Map("key" -> key))
+      messageParameters = Map("key" -> key, "docroot" -> docroot))
   }
 
   def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index c8c0ad67c45..7eca9c3cd10 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1375,19 +1375,24 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
       e)
   }
 
-  def failToRecognizePatternAfterUpgradeError(pattern: String, e: Throwable): Throwable = {
+  def failToRecognizePatternAfterUpgradeError(
+      pattern: String, e: Throwable, docroot: String): Throwable = {
     new SparkUpgradeException(
       errorClass = "INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION",
       messageParameters = Map(
         "pattern" -> toSQLValue(pattern, StringType),
-        "config" -> toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key)),
+        "config" -> toSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key),
+        "docroot" -> docroot),
       e)
   }
 
-  def failToRecognizePatternError(pattern: String, e: Throwable): SparkRuntimeException = {
+  def failToRecognizePatternError(
+      pattern: String, e: Throwable, docroot: String): SparkRuntimeException = {
     new SparkRuntimeException(
       errorClass = "_LEGACY_ERROR_TEMP_2130",
-      messageParameters = Map("pattern" -> toSQLValue(pattern, StringType)),
+      messageParameters = Map(
+        "pattern" -> toSQLValue(pattern, StringType),
+        "docroot" -> docroot),
       cause = e)
   }
 
@@ -1476,11 +1481,13 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
         "walkedTypePath" -> walkedTypePath.toString()))
   }
 
-  def cannotFindEncoderForTypeError(typeName: String): SparkUnsupportedOperationException = {
+  def cannotFindEncoderForTypeError(
+      typeName: String, docroot: String): SparkUnsupportedOperationException = {
     new SparkUnsupportedOperationException(
       errorClass = "ENCODER_NOT_FOUND",
       messageParameters = Map(
-        "typeName" -> typeName))
+        "typeName" -> typeName,
+        "docroot" -> docroot))
   }
 
   def attributesForTypeUnsupportedError(schema: Schema): SparkUnsupportedOperationException = {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index eb5bc36c707..665204cd0c5 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
-import org.apache.spark.{SparkException, SparkFunSuite}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkFunSuite}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -473,7 +473,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
       parameters = Map(
         "functionName" -> toSQLId(coalesce.prettyName),
         "expectedNum" -> "> 0",
-        "actualNum" -> "0"))
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT))
 
     val murmur3Hash = new Murmur3Hash(Nil)
     checkError(
@@ -484,7 +485,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
       parameters = Map(
         "functionName" -> toSQLId(murmur3Hash.prettyName),
         "expectedNum" -> "> 0",
-        "actualNum" -> "0"))
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT))
 
     val xxHash64 = new XxHash64(Nil)
     checkError(
@@ -495,7 +497,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
       parameters = Map(
         "functionName" -> toSQLId(xxHash64.prettyName),
         "expectedNum" -> "> 0",
-        "actualNum" -> "0"))
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT))
 
     checkError(
       exception = intercept[AnalysisException] {
@@ -530,7 +533,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
       parameters = Map(
         "functionName" -> "`named_struct`",
         "expectedNum" -> "2n (n > 0)",
-        "actualNum" -> "3")
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = analysisException(CreateNamedStruct(Seq(1, "a", "b", 2.0))),
@@ -562,7 +566,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
       parameters = Map(
         "functionName" -> "`map`",
         "expectedNum" -> "2n (n > 0)",
-        "actualNum" -> "3")
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = analysisException(CreateMap(Seq(Literal(1),
@@ -690,7 +695,8 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite 
with SQLHelper with Quer
         messageParameters = Map(
           "functionName" -> toSQLId(expr1.prettyName),
           "expectedNum" -> "> 1",
-          "actualNum" -> "1")
+          "actualNum" -> "1",
+          "docroot" -> SPARK_DOC_ROOT)
       )
 
       val expr2 = operator(Seq($"intField", $"stringField"))
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
index 501dfa58305..b77cc4cf4d9 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderErrorMessageSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.encoders
 
 import scala.reflect.ClassTag
 
-import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite, 
SparkUnsupportedOperationException}
 import org.apache.spark.sql.Encoders
 
 class NonEncodable(i: Int)
@@ -56,35 +56,45 @@ class EncoderErrorMessageSuite extends SparkFunSuite {
       exception = intercept[
         
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable1]),
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> 
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+      parameters = Map(
+        "typeName" -> "org.apache.spark.sql.catalyst.encoders.NonEncodable",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
       exception = intercept[
         
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable2]),
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> 
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+      parameters = Map(
+        "typeName" -> "org.apache.spark.sql.catalyst.encoders.NonEncodable",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
       exception = intercept[
         
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable3]),
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> 
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+      parameters = Map(
+        "typeName" -> "org.apache.spark.sql.catalyst.encoders.NonEncodable",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
       exception = intercept[
         
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable4]),
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> 
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+      parameters = Map(
+        "typeName" -> "org.apache.spark.sql.catalyst.encoders.NonEncodable",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
       exception = intercept[
         
SparkUnsupportedOperationException](ExpressionEncoder[ComplexNonEncodable5]),
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> 
"org.apache.spark.sql.catalyst.encoders.NonEncodable")
+      parameters = Map(
+        "typeName" -> "org.apache.spark.sql.catalyst.encoders.NonEncodable",
+        "docroot" -> SPARK_DOC_ROOT)
     )
   }
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
index c6546105231..79417c4ca1f 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala
@@ -24,7 +24,7 @@ import java.util.Arrays
 import scala.collection.mutable.ArrayBuffer
 import scala.reflect.runtime.universe.TypeTag
 
-import org.apache.spark.{SparkArithmeticException, SparkRuntimeException, 
SparkUnsupportedOperationException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkArithmeticException, 
SparkRuntimeException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{Encoder, Encoders}
 import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, OptionalData, 
PrimitiveData, ScroogeLikeExample}
 import org.apache.spark.sql.catalyst.analysis.AnalysisTest
@@ -490,7 +490,9 @@ class ExpressionEncoderSuite extends 
CodegenInterpretedPlanTest with AnalysisTes
     checkError(
       exception = exception,
       errorClass = "ENCODER_NOT_FOUND",
-      parameters = Map("typeName" -> "Any")
+      parameters = Map(
+        "typeName" -> "Any",
+        "docroot" -> SPARK_DOC_ROOT)
     )
   }
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
index 4f5ca2843b1..e5b3d0e7f0a 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import java.sql.Timestamp
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.expressions.Cast.toSQLType
@@ -106,7 +106,8 @@ class CallMethodViaReflectionSuite extends SparkFunSuite 
with ExpressionEvalHelp
       parameters = Map(
         "functionName" -> "`reflect`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = intercept[AnalysisException] {
@@ -116,7 +117,8 @@ class CallMethodViaReflectionSuite extends SparkFunSuite 
with ExpressionEvalHelp
       parameters = Map(
         "functionName" -> "`reflect`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     assert(CallMethodViaReflection(
       Seq(Literal(staticClassName), Literal(1))).checkInputDataTypes() ==
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala
index 8dbca473cfb..8818fe645ad 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.{SparkFunSuite, SparkRuntimeException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite, SparkRuntimeException}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, 
UnresolvedExtractValue}
@@ -326,7 +326,8 @@ class ComplexTypeSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`map`",
         "expectedNum" -> "2n (n > 0)",
-        "actualNum" -> "3")
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     // The given keys of function map should all be the same type
@@ -444,7 +445,8 @@ class ComplexTypeSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`named_struct`",
         "expectedNum" -> "2n (n > 0)",
-        "actualNum" -> "3")
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT)
     )
   }
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratorExpressionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratorExpressionSuite.scala
index fab1086dac2..03ab04f2348 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratorExpressionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratorExpressionSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
@@ -86,7 +86,8 @@ class GeneratorExpressionSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`stack`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = intercept[AnalysisException] {
@@ -96,7 +97,8 @@ class GeneratorExpressionSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`stack`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     assert(Stack(Seq(Literal(1), Literal(1), 
Literal(1.0))).checkInputDataTypes().isSuccess)
     assert(Stack(Seq(Literal(2), Literal(1), 
Literal(1.0))).checkInputDataTypes() ==
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index cc65a298272..11ec7f6babc 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import java.math.{BigDecimal => JavaBigDecimal}
 
-import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkFunSuite, 
SparkIllegalArgumentException}
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import 
org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, 
InvalidFormat}
@@ -158,7 +158,8 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`elt`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = intercept[AnalysisException] {
@@ -168,7 +169,8 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`elt`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     assert(Elt(Seq(Literal(1), Literal("A"))).checkInputDataTypes().isSuccess)
     assert(Elt(Seq(Literal(1), Literal(2))).checkInputDataTypes() ==
@@ -1781,7 +1783,8 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`parse_url`",
         "expectedNum" -> "[2, 3]",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     checkError(
       exception = intercept[AnalysisException] {
@@ -1792,7 +1795,8 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`parse_url`",
         "expectedNum" -> "[2, 3]",
-        "actualNum" -> "4")
+        "actualNum" -> "4",
+        "docroot" -> SPARK_DOC_ROOT)
     )
     assert(ParseUrl(Seq(Literal("1"), Literal(2))).checkInputDataTypes() == 
DataTypeMismatch(
       errorSubClass = "UNEXPECTED_INPUT_TYPE",
@@ -1912,7 +1916,8 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
       parameters = Map(
         "functionName" -> "`elt`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "1")
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     // first input to function etl should have IntegerType
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index a3237cf1b6b..532cc7e08e1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.annotation.Stable
 import org.apache.spark.internal.config.{ConfigEntry, OptionalConfigEntry}
 import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -162,7 +163,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new 
SQLConf) {
     }
     if (sqlConf.setCommandRejectsSparkCoreConfs &&
         ConfigEntry.findEntry(key) != null && !SQLConf.containsConfigKey(key)) {
-      throw QueryCompilationErrors.cannotModifyValueOfSparkConfigError(key)
+      throw QueryCompilationErrors.cannotModifyValueOfSparkConfigError(key, SPARK_DOC_ROOT)
     }
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index a621d0f431e..83e9e9ba82b 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -149,6 +149,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "1",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "0",
     "functionName" : "`curdate`"
   },
@@ -920,6 +921,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -936,6 +938,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -952,6 +955,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index ea67b974a5f..ed1a38fd8cd 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -83,6 +83,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'yyyyyyy'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index e7911dc918f..9406a1e203b 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -10,6 +10,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "> 0",
     "functionName" : "`concat_ws`"
   }
@@ -27,6 +28,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "> 0",
     "functionName" : "`format_string`"
   }
@@ -812,6 +814,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`decode`"
   },
@@ -836,6 +839,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "1",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`decode`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 8852deace94..3afae8ab91a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -888,6 +888,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
@@ -904,6 +905,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
@@ -920,6 +922,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -936,6 +939,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -952,6 +956,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -968,6 +973,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out
index af026751cc2..75a6f15bd36 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out
@@ -50,6 +50,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
index ee8c3fda35a..d55e665a2a1 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
@@ -144,6 +144,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "3",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`ceil`"
   },
@@ -302,6 +303,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "3",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`floor`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index 00e7da4c4e7..38fcc982b98 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -135,6 +135,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "[2, 3]",
     "functionName" : "`from_csv`"
   },
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index 03932e120a8..da2c6e7c5d9 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -135,6 +135,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "1",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "0",
     "functionName" : "`curdate`"
   },
@@ -926,6 +927,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -942,6 +944,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -958,6 +961,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
index 7455a42a4f4..3a69857e5e5 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
@@ -10,6 +10,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'GGGGG'"
   }
 }
@@ -26,6 +27,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'yyyyyyy'"
   }
 }
@@ -60,6 +62,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'MMMMM'"
   }
 }
@@ -76,6 +79,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'LLLLL'"
   }
 }
@@ -92,6 +96,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'EEEEE'"
   }
 }
@@ -108,6 +113,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'FF'"
   }
 }
@@ -124,6 +130,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'ddd'"
   }
 }
@@ -140,6 +147,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'DDDD'"
   }
 }
@@ -156,6 +164,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'HHH'"
   }
 }
@@ -172,6 +181,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'hhh'"
   }
 }
@@ -188,6 +198,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'kkk'"
   }
 }
@@ -204,6 +215,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'KKK'"
   }
 }
@@ -220,6 +232,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'mmm'"
   }
 }
@@ -236,6 +249,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'sss'"
   }
 }
@@ -252,6 +266,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'SSSSSSSSSS'"
   }
 }
@@ -268,6 +283,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'aa'"
   }
 }
@@ -293,6 +309,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'zzzzz'"
   }
 }
@@ -318,6 +335,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'ZZZZZZ'"
   }
 }
@@ -388,6 +406,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'Y'"
   }
 }
@@ -404,6 +423,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'w'"
   }
 }
@@ -420,6 +440,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'W'"
   }
 }
@@ -436,6 +457,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'u'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index e35c682943b..cc7e385585a 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -135,6 +135,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "1",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "0",
     "functionName" : "`curdate`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index 634bb61777f..be77a0b4a92 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -75,6 +75,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'yyyyyyy'"
   }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 0111ec0159d..5bf4c4e44ed 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -115,6 +115,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "[1, 2]",
     "functionName" : "`to_json`"
   },
@@ -241,6 +242,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "[2, 3]",
     "functionName" : "`from_json`"
   },
@@ -588,6 +590,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "1",
     "functionName" : "`json_array_length`"
   },
@@ -676,6 +679,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "1",
     "functionName" : "`json_object_keys`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
index 6638497278d..1d3257fdaae 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
@@ -98,6 +98,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "2",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "1",
     "functionName" : "`string`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index 85ca3158704..79c938d5954 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -10,6 +10,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "> 0",
     "functionName" : "`concat_ws`"
   }
@@ -27,6 +28,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "> 0",
     "functionName" : "`format_string`"
   }
@@ -744,6 +746,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "0",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`decode`"
   },
@@ -768,6 +771,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "1",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "2",
     "functionName" : "`decode`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
index 0fa334aa19d..e381c798465 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
@@ -84,6 +84,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "5",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "[1, 2, 3, 4]",
     "functionName" : "`range`"
   },
@@ -330,6 +331,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "2",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "1",
     "functionName" : "`explode`"
   },
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
index 46011348839..81fa1f1dc33 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
@@ -50,6 +50,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "7",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "expectedNum" : "6",
     "functionName" : "`make_timestamp_ntz`"
   },
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index fc93f006ec1..9047b64e01d 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -884,6 +884,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
@@ -900,6 +901,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
@@ -916,6 +918,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -932,6 +935,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -948,6 +952,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest",
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
@@ -964,6 +969,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd/MMMMM/yyyy'"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 51238172fbb..53d86dfd518 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -901,6 +901,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
@@ -915,6 +916,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
@@ -929,6 +931,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -945,6 +948,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index 484075e8d77..0c5f1aeb78b 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -882,6 +882,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'yyyy-MM-dd GGGGG'"
   }
 }
@@ -896,6 +897,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
@@ -910,6 +912,7 @@ org.apache.spark.SparkRuntimeException
 {
   "errorClass" : "_LEGACY_ERROR_TEMP_2130",
   "messageParameters" : {
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
@@ -926,6 +929,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEE'"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out
index af026751cc2..75a6f15bd36 100644
--- a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out
@@ -50,6 +50,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'dd MM yyyy EEEEEE'"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index 98cebface35..3a367f6b3f3 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -186,6 +186,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'aa'"
   }
 }
@@ -210,6 +211,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'aa'"
   }
 }
@@ -234,6 +236,7 @@ org.apache.spark.SparkUpgradeException
   "sqlState" : "42K0B",
   "messageParameters" : {
     "config" : "\"spark.sql.legacy.timeParserPolicy\"",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "pattern" : "'aa'"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
index 51eabfba75e..2f072293979 100644
--- a/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udaf/udaf.sql.out
@@ -36,6 +36,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "2",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "expectedNum" : "1",
     "functionName" : "`spark_catalog`.`default`.`mydoubleavg`"
   },
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
index 674215779ad..228a31ba257 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
@@ -36,6 +36,7 @@ org.apache.spark.sql.AnalysisException
   "sqlState" : "42605",
   "messageParameters" : {
     "actualNum" : "2",
+    "docroot" : "https://spark.apache.org/docs/latest";,
     "expectedNum" : "1",
     "functionName" : "`spark_catalog`.`default`.`mydoubleavg`"
   },
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 14def67ba40..6ed8299976c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -24,7 +24,7 @@ import java.sql.{Date, Timestamp}
 
 import scala.util.Random
 
-import org.apache.spark.{SparkException, SparkRuntimeException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkRuntimeException}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, UnresolvedAttribute}
 import org.apache.spark.sql.catalyst.expressions.{Alias, ArraysZip, AttributeReference, Expression, NamedExpression, UnaryExpression}
@@ -4947,7 +4947,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> toSQLId("zip_with"),
         "expectedNum" -> "3",
-        "actualNum" -> "4"),
+        "actualNum" -> "4",
+        "docroot" -> SPARK_DOC_ROOT),
       context = ExpectedContext(
         fragment = "zip_with(a1, a2, (acc, x) -> x, (acc, x) -> x)",
         start = 0,
@@ -5046,7 +5047,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`coalesce`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5058,7 +5060,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`coalesce`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5070,7 +5073,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`hash`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5082,7 +5086,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`hash`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5094,7 +5099,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`xxhash64`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5106,7 +5112,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`xxhash64`",
         "expectedNum" -> "> 0",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5118,7 +5125,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5130,7 +5138,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5142,7 +5151,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`least`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
 
     checkError(
@@ -5154,7 +5164,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`least`",
         "expectedNum" -> "> 1",
-        "actualNum" -> "0")
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT)
     )
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index e9ebb82d18b..eb1a0d191af 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -23,7 +23,7 @@ import java.time.{Instant, LocalDateTime, ZoneId}
 import java.util.{Locale, TimeZone}
 import java.util.concurrent.TimeUnit
 
-import org.apache.spark.{SparkConf, SparkException, SparkUpgradeException}
+import org.apache.spark.{SPARK_DOC_ROOT, SparkConf, SparkException, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{CEST, LA}
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.functions._
@@ -58,7 +58,8 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> "`curdate`",
         "expectedNum" -> "0",
-        "actualNum" -> "1"
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT
       ),
       context = ExpectedContext("", "", 7, 16, "CURDATE(1)")
     )
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 06a5b799279..f0566619e74 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -28,7 +28,7 @@ import scala.collection.mutable
 
 import org.apache.commons.io.FileUtils
 
-import org.apache.spark.{AccumulatorSuite, SparkException}
+import org.apache.spark.{AccumulatorSuite, SPARK_DOC_ROOT, SparkException}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
 import org.apache.spark.sql.catalyst.expressions.{GenericRow, Hex}
 import org.apache.spark.sql.catalyst.expressions.Cast._
@@ -2646,7 +2646,8 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       parameters = Map(
         "functionName" -> toSQLId("nvl"),
         "expectedNum" -> "2",
-        "actualNum" -> "3"
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT
       ),
       context = ExpectedContext(
         start = 7,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
index 3b91bdc9683..3c4ad7c1dca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql
 
-import org.apache.spark.SparkRuntimeException
+import org.apache.spark.{SPARK_DOC_ROOT, SparkRuntimeException}
 import org.apache.spark.sql.catalyst.expressions.Cast._
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
@@ -584,7 +584,8 @@ class StringFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> toSQLId("sentences"),
         "expectedNum" -> "[1, 2, 3]",
-        "actualNum" -> "0"
+        "actualNum" -> "0",
+        "docroot" -> SPARK_DOC_ROOT
       ),
       context = ExpectedContext(
         fragment = "sentences()",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index 711e7e7b265..734fcebc80e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -24,7 +24,7 @@ import java.time.format.DateTimeFormatter
 
 import scala.collection.mutable.{ArrayBuffer, WrappedArray}
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SPARK_DOC_ROOT, SparkException}
 import org.apache.spark.sql.api.java._
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, OuterScopes}
@@ -109,7 +109,8 @@ class UDFSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> toSQLId("substr"),
         "expectedNum" -> "[2, 3]",
-        "actualNum" -> "4"
+        "actualNum" -> "4",
+        "docroot" -> SPARK_DOC_ROOT
       ),
       context = ExpectedContext(
         fragment = "substr('abcd', 2, 3, 4)",
@@ -129,7 +130,8 @@ class UDFSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "functionName" -> toSQLId("foo"),
         "expectedNum" -> "1",
-        "actualNum" -> "3"
+        "actualNum" -> "3",
+        "docroot" -> SPARK_DOC_ROOT
       ),
       context = ExpectedContext(
         fragment = "foo(2, 3, 4)",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 47c4a86ccc5..35e4dc360bf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.errors
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.sql.{AnalysisException, ClassData, IntegratedUDFTestUtils, QueryTest, Row}
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
 import org.apache.spark.sql.catalyst.parser.ParseException
@@ -677,7 +678,8 @@ class QueryCompilationErrorsSuite
       parameters = Map(
         "functionName" -> "`cast`",
         "expectedNum" -> "0",
-        "actualNum" -> "1"),
+        "actualNum" -> "1",
+        "docroot" -> SPARK_DOC_ROOT),
       context = ExpectedContext("", "", 7, 13, "CAST(1)")
     )
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
index e8da68c002c..dd6fb1ebb1e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo
 import test.org.apache.spark.sql.MyDoubleAvg
 
+import org.apache.spark.SPARK_DOC_ROOT
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.expressions.Cast._
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
@@ -179,7 +180,8 @@ class HiveUDAFSuite extends QueryTest
         parameters = Map(
           "functionName" -> toSQLId("longProductSum"),
           "expectedNum" -> "2",
-          "actualNum" -> "1"),
+          "actualNum" -> "1",
+          "docroot" -> SPARK_DOC_ROOT),
         context = ExpectedContext(
           fragment = "longProductSum(100)",
           start = 7,
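
All of the test updates above follow the same shape: the expected `docroot` message parameter is supplied from the shared `SPARK_DOC_ROOT` constant rather than a hard-coded URL, which is why every golden result file also gains a `"docroot" : "https://spark.apache.org/docs/latest"` entry. A minimal sketch of that assertion pattern inside a `QueryTest` suite is shown below; the error class name and the SQL query are illustrative assumptions for this sketch and are not taken from the patch itself:

    import org.apache.spark.SPARK_DOC_ROOT  // shared constant for the documentation root URL
    import org.apache.spark.sql.AnalysisException

    checkError(
      exception = intercept[AnalysisException] {
        // illustrative query: explode expects exactly one argument, two are passed
        sql("SELECT explode(array(1, 2), 0)")
      },
      // assumed error class name for the sqlState 42605 failures shown in the golden files
      errorClass = "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
      parameters = Map(
        "functionName" -> "`explode`",
        "expectedNum" -> "1",
        "actualNum" -> "2",
        "docroot" -> SPARK_DOC_ROOT))  // resolves to https://spark.apache.org/docs/latest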


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org