This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 02fa3faaec6a [SPARK-53953][BUILD][3.5] Bump Avro 1.11.5
02fa3faaec6a is described below

commit 02fa3faaec6adac2f7503553d66dc7adc2973eff
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Oct 20 10:37:05 2025 -0700

    [SPARK-53953][BUILD][3.5] Bump Avro 1.11.5
    
    ### What changes were proposed in this pull request?
    
    As the title says, bump Avro to the latest patched version (which contains security fixes) for branch-3.5.
    
    ### Why are the changes needed?
    
    Release Notes are available at https://github.com/apache/avro/pull/3518
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass GHA.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52663 from pan3793/SPARK-53953.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala | 4 ++--
 dev/deps/spark-deps-hadoop-3-hive-2.3                               | 6 +++---
 docs/sql-data-sources-avro.md                                       | 4 ++--
 pom.xml                                                             | 2 +-
 project/SparkBuild.scala                                            | 2 +-
 .../scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala    | 2 +-
 6 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
index 5fd39393335d..ff3c07203f74 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroOptions.scala
@@ -81,14 +81,14 @@ private[sql] class AvroOptions(
 
   /**
    * Top level record name in write result, which is required in Avro spec.
-   * See https://avro.apache.org/docs/1.11.4/specification/#schema-record .
+   * See https://avro.apache.org/docs/1.11.5/specification/#schema-record .
    * Default value is "topLevelRecord"
    */
   val recordName: String = parameters.getOrElse(RECORD_NAME, "topLevelRecord")
 
   /**
    * Record namespace in write result. Default value is "".
-   * See Avro spec for details: https://avro.apache.org/docs/1.11.4/specification/#schema-record .
+   * See Avro spec for details: https://avro.apache.org/docs/1.11.5/specification/#schema-record .
    */
   val recordNamespace: String = parameters.getOrElse(RECORD_NAMESPACE, "")
 
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 b/dev/deps/spark-deps-hadoop-3-hive-2.3
index f110a1988fbf..c7aa3eea703a 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -21,9 +21,9 @@ arrow-memory-core/12.0.1//arrow-memory-core-12.0.1.jar
 arrow-memory-netty/12.0.1//arrow-memory-netty-12.0.1.jar
 arrow-vector/12.0.1//arrow-vector-12.0.1.jar
 audience-annotations/0.5.0//audience-annotations-0.5.0.jar
-avro-ipc/1.11.4//avro-ipc-1.11.4.jar
-avro-mapred/1.11.4//avro-mapred-1.11.4.jar
-avro/1.11.4//avro-1.11.4.jar
+avro-ipc/1.11.5//avro-ipc-1.11.5.jar
+avro-mapred/1.11.5//avro-mapred-1.11.5.jar
+avro/1.11.5//avro-1.11.5.jar
 aws-java-sdk-bundle/1.12.262//aws-java-sdk-bundle-1.12.262.jar
 azure-data-lake-store-sdk/2.3.9//azure-data-lake-store-sdk-2.3.9.jar
 azure-keyvault-core/1.0.0//azure-keyvault-core-1.0.0.jar
diff --git a/docs/sql-data-sources-avro.md b/docs/sql-data-sources-avro.md
index a23c438af6d4..271819d36388 100644
--- a/docs/sql-data-sources-avro.md
+++ b/docs/sql-data-sources-avro.md
@@ -417,7 +417,7 @@ applications. Read the [Advanced Dependency Management](https://spark.apache
 Submission Guide for more details.
 
 ## Supported types for Avro -> Spark SQL conversion
-Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.4/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.4/specification/#complex-types) under records of Avro.
+Currently Spark supports reading all [primitive types](https://avro.apache.org/docs/1.11.5/specification/#primitive-types) and [complex types](https://avro.apache.org/docs/1.11.5/specification/#complex-types) under records of Avro.
 <table>
  <thead><tr><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
   <tr>
@@ -481,7 +481,7 @@ In addition to the types listed above, it supports reading `union` types. The fo
 3. `union(something, null)`, where something is any supported Avro type. This will be mapped to the same Spark SQL type as that of something, with nullable set to true.
 All other union types are considered complex. They will be mapped to StructType where field names are member0, member1, etc., in accordance with members of the union. This is consistent with the behavior when converting between Avro and Parquet.
 
-It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.4/specification/#logical-types):
+It also supports reading the following Avro [logical types](https://avro.apache.org/docs/1.11.5/specification/#logical-types):
 
 <table>
  <thead><tr><th><b>Avro logical type</b></th><th><b>Avro type</b></th><th><b>Spark SQL type</b></th></tr></thead>
diff --git a/pom.xml b/pom.xml
index 2cad20594c71..50124447081f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -158,7 +158,7 @@
     -->
     <codahale.metrics.version>4.2.19</codahale.metrics.version>
     <!-- Should be consistent with SparkBuild.scala and docs -->
-    <avro.version>1.11.4</avro.version>
+    <avro.version>1.11.5</avro.version>
     <aws.kinesis.client.version>1.12.0</aws.kinesis.client.version>
     <!-- Should be consistent with Kinesis client dependency -->
     <aws.java.sdk.version>1.11.655</aws.java.sdk.version>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 737d5383abd8..c5254658db5e 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -1103,7 +1103,7 @@ object DependencyOverrides {
     dependencyOverrides += "com.google.guava" % "guava" % guavaVersion,
     dependencyOverrides += "xerces" % "xercesImpl" % "2.12.2",
     dependencyOverrides += "jline" % "jline" % "2.14.6",
-    dependencyOverrides += "org.apache.avro" % "avro" % "1.11.4",
+    dependencyOverrides += "org.apache.avro" % "avro" % "1.11.5",
    dependencyOverrides += "org.apache.commons" % "commons-compress" % "1.23.0")
 }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
index d35cc79c6a32..f6c7c2673069 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
@@ -908,7 +908,7 @@ class HiveClientSuite(version: String, allVersions: Seq[String])
   test("Decimal support of Avro Hive serde") {
     val tableName = "tab1"
     // TODO: add the other logical types. For details, see the link:
-    // https://avro.apache.org/docs/1.11.4/specification/#logical-types
+    // https://avro.apache.org/docs/1.11.5/specification/#logical-types
     val avroSchema =
     """{
       |  "name": "test_record",
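
For downstream builds that want to stay aligned with this bump, a minimal build.sbt sketch (illustrative only, not part of this commit; it assumes an sbt-based project and simply mirrors the dependencyOverrides change in project/SparkBuild.scala above) could pin the same Avro artifacts:

    // Illustrative sketch for a downstream sbt build, not part of this commit.
    // Force transitive Avro artifacts to the version branch-3.5 now pins,
    // mirroring the dependencyOverrides entry in project/SparkBuild.scala.
    dependencyOverrides ++= Seq(
      "org.apache.avro" % "avro"        % "1.11.5",
      "org.apache.avro" % "avro-ipc"    % "1.11.5",
      "org.apache.avro" % "avro-mapred" % "1.11.5"
    )

Note that sbt's dependencyOverrides only changes the resolved version of artifacts already present in the dependency graph; it does not add new dependencies, so the override is a no-op for modules that never pull in Avro.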


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
