This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3ef529be73e0 [SPARK-49014][BUILD][DOCS][FOLLOWUP] Synchronize the Avro version in `SparkBuild.scala`/docs/comments to 1.12.0
3ef529be73e0 is described below

commit 3ef529be73e0232f912887da5a54fdd3851983bc
Author: yangjie01 <[email protected]>
AuthorDate: Fri Jan 17 11:32:49 2025 -0800

    [SPARK-49014][BUILD][DOCS][FOLLOWUP] Synchronize the Avro version in `SparkBuild.scala`/docs/comments to 1.12.0
    
    ### What changes were proposed in this pull request?
    https://github.com/apache/spark/pull/47498 upgraded Avro to version 1.12.0. This follow-up synchronizes the Avro version referenced in `SparkBuild.scala`, the docs, and code comments to 1.12.0.
    
    ### Why are the changes needed?
    The Avro version within the project should be consistent.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #49552 from LuciferYang/SPARK-49014-FOLLOWUP.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 docs/sql-data-sources-avro.md                                         | 4 ++--
 project/SparkBuild.scala                                              | 2 +-
 sql/core/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala   | 4 ++--
 .../test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/sql-data-sources-avro.md b/docs/sql-data-sources-avro.md
index c06e1fd46d2d..7c1b71636071 100644
--- a/docs/sql-data-sources-avro.md
+++ b/docs/sql-data-sources-avro.md
@@ -477,7 +477,7 @@ Submission Guide for more details.
 [adm]: submitting-applications.html#advanced-dependency-management
 
 ## Supported types for Avro -> Spark SQL conversion
-Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.3/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.3/specification/#complex-types) under records of Avro.
+Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.12.0/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.12.0/specification/#complex-types) under records of Avro.
 <table>
  <thead><tr><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
   <tr>
@@ -541,7 +541,7 @@ In addition to the types listed above, it supports reading `union` types. The fo
 3. `union(something, null)`, where something is any supported Avro type. This will be mapped to the same Spark SQL type as that of something, with nullable set to true.
 All other union types are considered complex. They will be mapped to StructType where field names are member0, member1, etc., in accordance with members of the union. This is consistent with the behavior when converting between Avro and Parquet.
 
-It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.3/specification/#logical-types):
+It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.12.0/specification/#logical-types):
 
 <table>
  <thead><tr><th><b>Avro logical type</b></th><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
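
As context for the union handling described in the hunk above, the mapping can be observed with a small read. A minimal sketch, assuming a hypothetical Avro file whose `value` field is a union of int and string (the path, field name, and printed schema are illustrative, not part of this commit):

// Sketch only: reading an Avro file with a complex union field. Per the docs
// above, Spark maps such a union to a StructType with fields member0, member1, ...
val df = spark.read.format("avro").load("/tmp/union_example.avro")
df.printSchema()
// Illustrative output shape:
// root
//  |-- value: struct (nullable = true)
//  |    |-- member0: integer (nullable = true)
//  |    |-- member1: string (nullable = true)
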
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d84c0f17d2b2..0fbb9e2861d6 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1061,7 +1061,7 @@ object DependencyOverrides {
   lazy val settings = Seq(
     dependencyOverrides += "com.google.guava" % "guava" % guavaVersion,
     dependencyOverrides += "jline" % "jline" % "2.14.6",
-    dependencyOverrides += "org.apache.avro" % "avro" % "1.11.3")
+    dependencyOverrides += "org.apache.avro" % "avro" % "1.12.0")
 }
 
 /**
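
For readers unfamiliar with the setting touched in the hunk above: sbt's `dependencyOverrides` pins the version chosen at dependency-resolution time without declaring a direct dependency. A minimal standalone sketch (an illustrative build.sbt, not Spark's actual build definition):

// Illustrative build.sbt snippet: if any library pulls in an older Avro
// transitively, this override forces 1.12.0 during dependency resolution.
ThisBuild / dependencyOverrides += "org.apache.avro" % "avro" % "1.12.0"
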
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala b/sql/core/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
index e0c6ad3ee69d..d571b3ed6050 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
@@ -82,14 +82,14 @@ private[sql] class AvroOptions(
 
   /**
    * Top level record name in write result, which is required in Avro spec.
-   * See https://avro.apache.org/docs/1.11.3/specification/#schema-record .
+   * See https://avro.apache.org/docs/1.12.0/specification/#schema-record .
    * Default value is "topLevelRecord"
    */
   val recordName: String = parameters.getOrElse(RECORD_NAME, "topLevelRecord")
 
   /**
    * Record namespace in write result. Default value is "".
-   * See Avro spec for details: https://avro.apache.org/docs/1.11.3/specification/#schema-record .
+   * See Avro spec for details: https://avro.apache.org/docs/1.12.0/specification/#schema-record .
    */
   val recordNamespace: String = parameters.getOrElse(RECORD_NAMESPACE, "")
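
A short usage sketch for the two write options documented above; the DataFrame, namespace value, and output path are placeholders:

// Sketch, assuming an existing DataFrame `df`: recordName and recordNamespace
// set the top-level record name and namespace of the Avro schema Spark writes.
df.write
  .format("avro")
  .option("recordName", "topLevelRecord")
  .option("recordNamespace", "com.example")
  .save("/tmp/avro_out")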
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
index 27dc80fbfc17..355fece722b3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
@@ -814,7 +814,7 @@ class HiveClientSuite(version: String) extends HiveVersionSuite(version) {
   test("Decimal support of Avro Hive serde") {
     val tableName = "tab1"
     // TODO: add the other logical types. For details, see the link:
-    // https://avro.apache.org/docs/1.11.3/specification/#logical-types
+    // https://avro.apache.org/docs/1.12.0/specification/#logical-types
     val avroSchema =
     """{
       |  "name": "test_record",


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
