This is an automated email from the ASF dual-hosted git repository.

bhavanisudha pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 553da4fae6b9 docs: Adding docs for release 1.1.1 (#17650)
553da4fae6b9 is described below

commit 553da4fae6b9141378c26fb4ea68bd2d728ae415
Author: Sivabalan Narayanan <[email protected]>
AuthorDate: Wed Dec 24 10:20:44 2025 -0800

    docs: Adding docs for release 1.1.1 (#17650)
    
    * Adding docs for release 1.1.1
    
    * Minor fixes
    
    * addressing feedback
    
    * Fixing roadmap page for 1.1.1
---
 ...5-11-25-apache-hudi-release-1-1-announcement.md | 10 ++--
 website/docs/cli.md                                | 10 ++--
 website/docs/concurrency_control.md                |  2 +-
 website/docs/flink-quick-start-guide.md            |  4 +-
 website/docs/quick-start-guide.md                  |  6 +-
 website/docusaurus.config.js                       | 10 ++--
 website/releases/download.md                       | 62 ++++++++++----------
 .../{release-1.1.0.md => release-1.1.1.md}         | 30 +++++-----
 website/sidebarsReleases.js                        |  2 +-
 website/src/pages/roadmap.md                       |  2 +-
 .../azure_hoodie.md                                |  0
 .../basic_configurations.md                        |  0
 .../{version-1.1.0 => version-1.1.1}/bos_hoodie.md |  0
 .../catalog_polaris.md                             |  0
 .../{version-1.1.0 => version-1.1.1}/cleaning.md   |  0
 .../{version-1.1.0 => version-1.1.1}/cli.md        | 10 ++--
 .../{version-1.1.0 => version-1.1.1}/cloud.md      |  0
 .../{version-1.1.0 => version-1.1.1}/clustering.md |  0
 .../{version-1.1.0 => version-1.1.1}/compaction.md |  0
 .../{version-1.1.0 => version-1.1.1}/comparison.md |  2 -
 .../{version-1.1.0 => version-1.1.1}/concepts.md   |  0
 .../concurrency_control.md                         |  4 +-
 .../configurations.md                              |  0
 .../{version-1.1.0 => version-1.1.1}/cos_hoodie.md |  0
 .../{version-1.1.0 => version-1.1.1}/deployment.md |  0
 .../disaster_recovery.md                           |  0
 .../docker_demo.md                                 |  0
 .../{version-1.1.0 => version-1.1.1}/encryption.md |  0
 .../file_sizing.md                                 |  0
 .../flink-quick-start-guide.md                     |  4 +-
 .../flink_tuning.md                                |  0
 .../gcp_bigquery.md                                |  0
 .../{version-1.1.0 => version-1.1.1}/gcs_hoodie.md |  0
 .../hoodie_streaming_ingestion.md                  |  0
 .../{version-1.1.0 => version-1.1.1}/hudi_stack.md |  0
 .../ibm_cos_hoodie.md                              |  0
 .../{version-1.1.0 => version-1.1.1}/indexes.md    |  0
 .../ingestion_flink.md                             |  0
 .../ingestion_kafka_connect.md                     |  0
 .../{version-1.1.0 => version-1.1.1}/jfs_hoodie.md |  0
 .../key_generation.md                              |  0
 .../{version-1.1.0 => version-1.1.1}/ks3_hoodie.md |  0
 .../{version-1.1.0 => version-1.1.1}/markers.md    |  0
 .../{version-1.1.0 => version-1.1.1}/metadata.md   |  2 +
 .../metadata_indexing.md                           |  3 +-
 .../{version-1.1.0 => version-1.1.1}/metrics.md    |  0
 .../migration_guide.md                             |  0
 .../{version-1.1.0 => version-1.1.1}/notebooks.md  |  0
 .../{version-1.1.0 => version-1.1.1}/oci_hoodie.md |  0
 .../{version-1.1.0 => version-1.1.1}/oss_hoodie.md |  0
 .../{version-1.1.0 => version-1.1.1}/overview.mdx  |  0
 .../performance.md                                 |  0
 .../platform_services_post_commit_callback.md      |  0
 .../precommit_validator.md                         |  0
 .../{version-1.1.0 => version-1.1.1}/procedures.md |  0
 .../python-rust-quick-start-guide.md               |  0
 .../querying_data.md                               |  0
 .../quick-start-guide.md                           |  6 +-
 .../reading_tables_batch_reads.md                  |  0
 .../reading_tables_streaming_reads.md              |  0
 .../record_merger.md                               |  0
 .../{version-1.1.0 => version-1.1.1}/rollbacks.md  |  0
 .../{version-1.1.0 => version-1.1.1}/s3_hoodie.md  |  0
 .../schema_evolution.md                            |  0
 .../snapshot_exporter.md                           |  0
 .../{version-1.1.0 => version-1.1.1}/sql_ddl.md    |  0
 .../{version-1.1.0 => version-1.1.1}/sql_dml.md    |  0
 .../sql_queries.md                                 | 66 +++++++++++++++++++++-
 .../storage_layouts.md                             |  0
 .../{version-1.1.0 => version-1.1.1}/structure.md  |  0
 .../syncing_aws_glue_data_catalog.md               |  0
 .../syncing_datahub.md                             |  0
 .../syncing_metastore.md                           |  0
 .../syncing_xtable.md                              |  0
 .../table_types.md                                 |  0
 .../{version-1.1.0 => version-1.1.1}/timeline.md   |  0
 .../troubleshooting.md                             |  0
 .../tuning-guide.md                                |  0
 .../{version-1.1.0 => version-1.1.1}/use_cases.md  |  0
 .../write_operations.md                            |  0
 .../writing_data.md                                |  0
 .../writing_tables_streaming_writes.md             |  0
 ...0-sidebars.json => version-1.1.1-sidebars.json} |  0
 website/versions.json                              |  2 +-
 84 files changed, 148 insertions(+), 89 deletions(-)

diff --git a/website/blog/2025-11-25-apache-hudi-release-1-1-announcement.md 
b/website/blog/2025-11-25-apache-hudi-release-1-1-announcement.md
index 5c41f1fcd6d6..84b8477c6604 100644
--- a/website/blog/2025-11-25-apache-hudi-release-1-1-announcement.md
+++ b/website/blog/2025-11-25-apache-hudi-release-1-1-announcement.md
@@ -11,7 +11,7 @@ tags:
   - performance
 ---
 
-The Hudi community is excited to announce the [release of Hudi 
1.1](https://hudi.apache.org/releases/release-1.1.0), a major milestone that 
sets the stage for the next generation of data lakehouse capabilities. This 
release represents months of focused engineering on foundational improvements, 
engine-specific optimizations, and key architectural enhancements, laying the 
foundation for ambitious features coming in future releases.
+The Hudi community is excited to announce the [release of Hudi 
1.1](https://hudi.apache.org/releases/release-1.1.1), a major milestone that 
sets the stage for the next generation of data lakehouse capabilities. This 
release represents months of focused engineering on foundational improvements, 
engine-specific optimizations, and key architectural enhancements, laying the 
foundation for ambitious features coming in future releases.
 
 Hudi continues to evolve rapidly, with contributions from a vibrant community 
of developers and users. The 1.1 release brings over 700 commits addressing 
performance bottlenecks, expanding engine support, and introducing new 
capabilities that make Hudi tables more reliable, faster, and easier to 
operate. Let’s dive into the highlights.
 
@@ -153,7 +153,7 @@ The default behavior is adaptive: if no ordering field 
(`hoodie.table.ordering.f
 
 ### Custom Mergers—The Flexible Approach
 
-For complex merging logic—such as field-level reconciliation, aggregating 
counters, or preserving audit fields—the `HoodieRecordMerger` interface 
provides a modern, engine-native alternative to payload classes. You need to 
set the merge mode to `CUSTOM` and provide your own implementation of 
`HoodieRecordMerger`. By using the new API, you can achieve consistent merging 
across all code paths: precombine, updating writes, compaction, and snapshot 
reads—you are strongly encouraged to migrat [...]
+For complex merging logic—such as field-level reconciliation, aggregating 
counters, or preserving audit fields—the `HoodieRecordMerger` interface 
provides a modern, engine-native alternative to payload classes. You need to 
set the merge mode to `CUSTOM` and provide your own implementation of 
`HoodieRecordMerger`. By using the new API, you can achieve consistent merging 
across all code paths: precombine, updating writes, compaction, and snapshot 
reads—you are strongly encouraged to migrat [...]
 
 ## Apache Spark Integration Improvements
 
@@ -161,7 +161,7 @@ Spark remains one of the most popular engines for working 
with Hudi tables, and
 
 ### Spark 4.0 Support
 
-Spark 4.0 brought significant performance gains for ML/AI workloads, smarter 
query optimization with automatic join strategy switching, dynamic partition 
skew mitigation, and enhanced streaming capabilities. Hudi 1.1 adds Spark 4.0 
support to unlock these improvements for working with Hudi tables. To get 
started, use the new `hudi-spark4.0-bundle_2.13:1.1.0` artifact in your 
dependency list.
+Spark 4.0 brought significant performance gains for ML/AI workloads, smarter 
query optimization with automatic join strategy switching, dynamic partition 
skew mitigation, and enhanced streaming capabilities. Hudi 1.1 adds Spark 4.0 
support to unlock these improvements for working with Hudi tables. To get 
started, use the new `hudi-spark4.0-bundle_2.13:1.1.1` artifact in your 
dependency list.
 
 ### Metadata Table Streaming Writes
 
@@ -211,7 +211,7 @@ Flink is a popular choice for real-time data pipelines, and 
Hudi 1.1 brings subs
 
 ### Flink 2.0 Support
 
-Hudi 1.1 brings support for Flink 2.0, the first major Flink release in nine 
years. Flink 2.0 introduced disaggregated state storage (ForSt) that decouples 
state from compute for unlimited scalability, asynchronous state execution for 
improved resource utilization, adaptive broadcast join for efficient query 
processing, and materialized tables for simplified stream-batch unification. 
Use the new `hudi-flink2.0-bundle:1.1.0` artifact to get started.
+Hudi 1.1 brings support for Flink 2.0, the first major Flink release in nine 
years. Flink 2.0 introduced disaggregated state storage (ForSt) that decouples 
state from compute for unlimited scalability, asynchronous state execution for 
improved resource utilization, adaptive broadcast join for efficient query 
processing, and materialized tables for simplified stream-batch unification. 
Use the new `hudi-flink2.0-bundle:1.1.1` artifact to get started.
 
 ### Engine-Native Record Support
 
@@ -235,4 +235,4 @@ Hudi 1.1 introduces [native integration with 
Polaris](https://hudi.apache.org/do
 
 The future of Hudi is incredibly exciting, and we're building it together with 
a vibrant, global community of contributors. Building on the strong foundation 
of 1.1, we're actively developing transformative AI/ML-focused capabilities for 
Hudi 1.2 and beyond—unstructured data types and column groups for efficient 
storage of embeddings and documents, Lance, Vortex, blob-optimized Parquet 
support, and vector search capabilities for lakehouse tables. This is just the 
beginning—we're reimagin [...]
 
-Join us in building the future. Check out the [1.1 release 
notes](https://hudi.apache.org/releases/release-1.1.0) to get started, join our 
[Slack space](https://hudi.apache.org/slack/), follow us on 
[LinkedIn](https://www.linkedin.com/company/apache-hudi) and [X 
(twitter)](http://x.com/apachehudi), and subscribe (send an empty email) to the 
[mailing list](mailto:[email protected])—let's build the next generation of 
Hudi together.
+Join us in building the future. Check out the [1.1 release 
notes](https://hudi.apache.org/releases/release-1.1.1) to get started, join our 
[Slack space](https://hudi.apache.org/slack/), follow us on 
[LinkedIn](https://www.linkedin.com/company/apache-hudi) and [X 
(twitter)](http://x.com/apachehudi), and subscribe (send an empty email) to the 
[mailing list](mailto:[email protected])—let's build the next generation of 
Hudi together.
diff --git a/website/docs/cli.md b/website/docs/cli.md
index 36d44f8584b5..a29ffdfc2637 100644
--- a/website/docs/cli.md
+++ b/website/docs/cli.md
@@ -85,7 +85,7 @@ Once these are set, you are good to launch hudi-cli and 
access S3 dataset.
 Apache Flink, Presto and many other frameworks, including Hudi. If you want to 
run the Hudi CLI on a Dataproc node 
 which has not been launched with Hudi support enabled, you can use the steps 
below:  
 
-These steps use Hudi version 1.1.0. If you want to use a different version you 
will have to edit the below commands 
+These steps use Hudi version 1.1.1. If you want to use a different version you 
will have to edit the below commands 
 appropriately:  
 1. Once you've started the Dataproc cluster, you can ssh into it as follows:
 ```
@@ -94,22 +94,22 @@ $ gcloud compute ssh --zone "YOUR_ZONE" 
"HOSTNAME_OF_MASTER_NODE"  --project "YO
 
 2. Download the Hudi CLI bundle
 ```
-wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-cli-bundle_2.12/1.1.0/hudi-cli-bundle_2.12-1.1.0.jar
  
+wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-cli-bundle_2.12/1.1.1/hudi-cli-bundle_2.12-1.1.1.jar
  
 ```
 
 3. Download the Hudi Spark bundle
 ```
-wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.0/hudi-spark3.5-bundle_2.12-1.1.0.jar
+wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.1/hudi-spark3.5-bundle_2.12-1.1.1.jar
 ```     
 
 4. Download the shell script that launches Hudi CLI bundle
 ```
-wget 
https://raw.githubusercontent.com/apache/hudi/release-1.1.0/packaging/hudi-cli-bundle/hudi-cli-with-bundle.sh
+wget 
https://raw.githubusercontent.com/apache/hudi/release-1.1.1/packaging/hudi-cli-bundle/hudi-cli-with-bundle.sh
 ```    
 
 5. Launch Hudi CLI bundle with appropriate environment variables as follows:
 ``` 
-CLIENT_JAR=$DATAPROC_DIR/lib/gcs-connector.jar 
CLI_BUNDLE_JAR=hudi-cli-bundle_2.12-1.1.0.jar 
SPARK_BUNDLE_JAR=hudi-spark3.5-bundle_2.12-1.1.0.jar ./hudi-cli-with-bundle.sh  
+CLIENT_JAR=$DATAPROC_DIR/lib/gcs-connector.jar 
CLI_BUNDLE_JAR=hudi-cli-bundle_2.12-1.1.1.jar 
SPARK_BUNDLE_JAR=hudi-spark3.5-bundle_2.12-1.1.1.jar ./hudi-cli-with-bundle.sh  
 ```
 
 6. hudi->connect --path gs://path_to_some_table  
diff --git a/website/docs/concurrency_control.md 
b/website/docs/concurrency_control.md
index d75759fdbedb..d6231aca79b7 100644
--- a/website/docs/concurrency_control.md
+++ b/website/docs/concurrency_control.md
@@ -291,7 +291,7 @@ A Hudi Streamer job can then be triggered as follows:
 
 ```java
 [hoodie]$ spark-submit \
-  --jars 
"packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle_2.12-1.1.0.jar,packaging/hudi-spark-bundle/target/hudi-spark3.5-bundle_2.12-1.1.0.jar"
 \
+  --jars 
"packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle_2.12-1.1.1.jar,packaging/hudi-spark-bundle/target/hudi-spark3.5-bundle_2.12-1.1.1.jar"
 \
   --class org.apache.hudi.utilities.streamer.HoodieStreamer `ls 
packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle-*.jar` \
   --props 
file://${PWD}/hudi-utilities/src/test/resources/streamer-config/kafka-source.properties
 \
   --schemaprovider-class 
org.apache.hudi.utilities.schema.SchemaRegistryProvider \
diff --git a/website/docs/flink-quick-start-guide.md 
b/website/docs/flink-quick-start-guide.md
index 00e238ec665a..2a04dc85abb2 100644
--- a/website/docs/flink-quick-start-guide.md
+++ b/website/docs/flink-quick-start-guide.md
@@ -64,7 +64,7 @@ Now start the SQL CLI:
 ```bash
 # For Flink versions: 1.17-1.20, 2.0
 export FLINK_VERSION=1.20 
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-flink${FLINK_VERSION}-bundle/${HUDI_VERSION}/hudi-flink${FLINK_VERSION}-bundle-${HUDI_VERSION}.jar
 -P /tmp/
 ./bin/sql-client.sh embedded -j 
/tmp/hudi-flink${FLINK_VERSION}-bundle-${HUDI_VERSION}.jar shell
 ```
@@ -81,7 +81,7 @@ Please add the desired dependency to your project:
 <properties>
     <flink.version>1.20.0</flink.version>
     <flink.binary.version>1.20</flink.binary.version>
-    <hudi.version>1.1.0</hudi.version>
+    <hudi.version>1.1.1</hudi.version>
 </properties>
 <dependency>
     <groupId>org.apache.hudi</groupId>
diff --git a/website/docs/quick-start-guide.md 
b/website/docs/quick-start-guide.md
index a9a293d9db64..8c05a9af0452 100644
--- a/website/docs/quick-start-guide.md
+++ b/website/docs/quick-start-guide.md
@@ -47,7 +47,7 @@ From the extracted directory run spark-shell with Hudi:
 ```shell
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
@@ -68,7 +68,7 @@ From the extracted directory run pyspark with Hudi:
 export PYSPARK_PYTHON=$(which python3)
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
@@ -89,7 +89,7 @@ From the extracted directory run Spark SQL with Hudi:
 ```shell
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js
index f9567fc7a7cc..82c2118b256a 100644
--- a/website/docusaurus.config.js
+++ b/website/docusaurus.config.js
@@ -146,11 +146,11 @@ module.exports = {
           },
           {
             from: ["/docs/releases", "/docs/next/releases"],
-            to: "/releases/release-1.1.0",
+            to: "/releases/release-1.1.1",
           },
           {
             from: ["/releases"],
-            to: "/releases/release-1.1.0",
+            to: "/releases/release-1.1.1",
           },
         ],
       },
@@ -323,7 +323,7 @@ module.exports = {
             },
             {
               label: "Releases",
-              to: "/releases/release-1.1.0",
+              to: "/releases/release-1.1.1",
             },
             {
               label: "Download",
@@ -508,8 +508,8 @@ module.exports = {
               path: "next",
               banner: "unreleased",
             },
-            "1.1.0": {
-              label: "1.1.0",
+            "1.1.1": {
+              label: "1.1.1",
               path: "",
             },
           },
diff --git a/website/releases/download.md b/website/releases/download.md
index e84ac539b951..7878a361d733 100644
--- a/website/releases/download.md
+++ b/website/releases/download.md
@@ -5,30 +5,27 @@ keywords: [ hudi, download ]
 toc: true
 ---
 
-## Release 1.1.0
+## Release 1.1.1
 
-* Source Release : [Apache Hudi 1.1.0 Source 
Release](https://downloads.apache.org/hudi/1.1.0/hudi-1.1.0.src.tgz) 
([asc](https://downloads.apache.org/hudi/1.1.0/hudi-1.1.0.src.tgz.asc), 
[sha512](https://downloads.apache.org/hudi/1.1.0/hudi-1.1.0.src.tgz.sha512))
-* Release Note : ([Release Note for Apache Hudi 
1.1.0](/releases/release-1.1.0))
+* Source Release : [Apache Hudi 1.1.1 Source 
Release](https://downloads.apache.org/hudi/1.1.1/hudi-1.1.1.src.tgz) 
([asc](https://downloads.apache.org/hudi/1.1.1/hudi-1.1.1.src.tgz.asc), 
[sha512](https://downloads.apache.org/hudi/1.1.1/hudi-1.1.1.src.tgz.sha512))
+* Release Note : ([Release Note for Apache Hudi 
1.1.1](/releases/release-1.1.1))
 
-<!--
-TODO: keep this commented out until 1.1.1 is released
-TODO: replace repository.apache.org link with maven.org links when updating 
this for 1.1.1
 * Maven Artifacts:
 
   <details>
   <summary><strong>Spark Bundles</strong></summary>
   * **Spark 4.0**
-    * 
[hudi-spark4.0-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark4.0-bundle_2.13/1.1.0/hudi-spark4.0-bundle_2.13-1.1.0.jar)
+    * 
[hudi-spark4.0-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark4.0-bundle_2.13/1.1.1/hudi-spark4.0-bundle_2.13-1.1.1.jar)
   
   * **Spark 3.5**
-    * 
[hudi-spark3.5-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.5-bundle_2.13/1.1.0/hudi-spark3.5-bundle_2.13-1.1.0.jar)
-    * 
[hudi-spark3.5-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.0/hudi-spark3.5-bundle_2.12-1.1.0.jar)
+    * 
[hudi-spark3.5-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.5-bundle_2.13/1.1.1/hudi-spark3.5-bundle_2.13-1.1.1.jar)
+    * 
[hudi-spark3.5-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.1/hudi-spark3.5-bundle_2.12-1.1.1.jar)
 
   * **Spark 3.4**
-    * 
[hudi-spark3.4-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.4-bundle_2.12/1.1.0/hudi-spark3.4-bundle_2.12-1.1.0.jar)
+    * 
[hudi-spark3.4-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.4-bundle_2.12/1.1.1/hudi-spark3.4-bundle_2.12-1.1.1.jar)
 
   * **Spark 3.3**
-    * 
[hudi-spark3.3-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.3-bundle_2.12/1.1.0/hudi-spark3.3-bundle_2.12-1.1.0.jar)
+    * 
[hudi-spark3.3-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-spark3.3-bundle_2.12/1.1.1/hudi-spark3.3-bundle_2.12-1.1.1.jar)
 
   </details>
 
@@ -36,13 +33,13 @@ TODO: replace repository.apache.org link with maven.org 
links when updating this
   <summary><strong>Flink Bundles</strong></summary>
 
   * **Flink 2.x**
-    * 
[hudi-flink2.0-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink2.0-bundle/1.1.0/hudi-flink2.0-bundle-1.1.0.jar)
+    * 
[hudi-flink2.0-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink2.0-bundle/1.1.1/hudi-flink2.0-bundle-1.1.1.jar)
 
   * **Flink 1.x**
-    * 
[hudi-flink1.20-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.20-bundle/1.1.0/hudi-flink1.20-bundle-1.1.0.jar)
-    * 
[hudi-flink1.19-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.19-bundle/1.1.0/hudi-flink1.19-bundle-1.1.0.jar)
-    * 
[hudi-flink1.18-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.18-bundle/1.1.0/hudi-flink1.18-bundle-1.1.0.jar)
-    * 
[hudi-flink1.17-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.17-bundle/1.1.0/hudi-flink1.17-bundle-1.1.0.jar)
+    * 
[hudi-flink1.20-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.20-bundle/1.1.1/hudi-flink1.20-bundle-1.1.1.jar)
+    * 
[hudi-flink1.19-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.19-bundle/1.1.1/hudi-flink1.19-bundle-1.1.1.jar)
+    * 
[hudi-flink1.18-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.18-bundle/1.1.1/hudi-flink1.18-bundle-1.1.1.jar)
+    * 
[hudi-flink1.17-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-flink1.17-bundle/1.1.1/hudi-flink1.17-bundle-1.1.1.jar)
 
   </details>
 
@@ -50,13 +47,13 @@ TODO: replace repository.apache.org link with maven.org 
links when updating this
   <summary><strong>Query Engines</strong></summary>
 
   * **Presto**
-    * 
[hudi-presto-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-presto-bundle/1.1.0/hudi-presto-bundle-1.1.0.jar)
+    * 
[hudi-presto-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-presto-bundle/1.1.1/hudi-presto-bundle-1.1.1.jar)
 
   * **Trino**
-    * 
[hudi-trino-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-trino-bundle/1.1.0/hudi-trino-bundle-1.1.0.jar)
+    * 
[hudi-trino-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-trino-bundle/1.1.1/hudi-trino-bundle-1.1.1.jar)
 
   * **Hive**
-    * 
[hudi-hadoop-mr-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-hadoop-mr-bundle/1.1.0/hudi-hadoop-mr-bundle-1.1.0.jar)
+    * 
[hudi-hadoop-mr-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-hadoop-mr-bundle/1.1.1/hudi-hadoop-mr-bundle-1.1.1.jar)
 
   </details>
 
@@ -64,14 +61,14 @@ TODO: replace repository.apache.org link with maven.org 
links when updating this
   <summary><strong>Utilities & Tools</strong></summary>
 
   * **Hudi Utilities**
-    * 
[hudi-utilities-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-bundle_2.13/1.1.0/hudi-utilities-bundle_2.13-1.1.0.jar)
-    * 
[hudi-utilities-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-bundle_2.12/1.1.0/hudi-utilities-bundle_2.12-1.1.0.jar)
-    * 
[hudi-utilities-slim-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-slim-bundle_2.13/1.1.0/hudi-utilities-slim-bundle_2.13-1.1.0.jar)
-    * 
[hudi-utilities-slim-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-slim-bundle_2.12/1.1.0/hudi-utilities-slim-bundle_2.12-1.1.0.jar)
+    * 
[hudi-utilities-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-bundle_2.13/1.1.1/hudi-utilities-bundle_2.13-1.1.1.jar)
+    * 
[hudi-utilities-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-bundle_2.12/1.1.1/hudi-utilities-bundle_2.12-1.1.1.jar)
+    * 
[hudi-utilities-slim-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-slim-bundle_2.13/1.1.1/hudi-utilities-slim-bundle_2.13-1.1.1.jar)
+    * 
[hudi-utilities-slim-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-utilities-slim-bundle_2.12/1.1.1/hudi-utilities-slim-bundle_2.12-1.1.1.jar)
 
   * **Hudi CLI**
-    * 
[hudi-cli-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-cli-bundle_2.13/1.1.0/hudi-cli-bundle_2.13-1.1.0.jar)
-    * 
[hudi-cli-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-cli-bundle_2.12/1.1.0/hudi-cli-bundle_2.12-1.1.0.jar)
+    * 
[hudi-cli-bundle_2.13](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-cli-bundle_2.13/1.1.1/hudi-cli-bundle_2.13-1.1.1.jar)
+    * 
[hudi-cli-bundle_2.12](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-cli-bundle_2.12/1.1.1/hudi-cli-bundle_2.12-1.1.1.jar)
 
   </details>
 
@@ -79,26 +76,25 @@ TODO: replace repository.apache.org link with maven.org 
links when updating this
   <summary><strong>Platform Integrations</strong></summary>
 
   * **AWS**
-    * 
[hudi-aws-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-aws-bundle/1.1.0/hudi-aws-bundle-1.1.0.jar)
+    * 
[hudi-aws-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-aws-bundle/1.1.1/hudi-aws-bundle-1.1.1.jar)
 
   * **Google Cloud**
-    * 
[hudi-gcp-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-gcp-bundle/1.1.0/hudi-gcp-bundle-1.1.0.jar)
+    * 
[hudi-gcp-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-gcp-bundle/1.1.1/hudi-gcp-bundle-1.1.1.jar)
 
   * **Data Catalogs**
-    * 
[hudi-hive-sync-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-hive-sync-bundle/1.1.0/hudi-hive-sync-bundle-1.1.0.jar)
-    * 
[hudi-datahub-sync-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-datahub-sync-bundle/1.1.0/hudi-datahub-sync-bundle-1.1.0.jar)
+    * 
[hudi-hive-sync-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-hive-sync-bundle/1.1.1/hudi-hive-sync-bundle-1.1.1.jar)
+    * 
[hudi-datahub-sync-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-datahub-sync-bundle/1.1.1/hudi-datahub-sync-bundle-1.1.1.jar)
   
   * **Kafka Connect**
-    * 
[hudi-kafka-connect-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-kafka-connect-bundle/1.1.0/hudi-kafka-connect-bundle-1.1.0.jar)
+    * 
[hudi-kafka-connect-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-kafka-connect-bundle/1.1.1/hudi-kafka-connect-bundle-1.1.1.jar)
 
   * **Timeline Server**
-    * 
[hudi-timeline-server-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-timeline-server-bundle/1.1.0/hudi-timeline-server-bundle-1.1.0.jar)
+    * 
[hudi-timeline-server-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-timeline-server-bundle/1.1.1/hudi-timeline-server-bundle-1.1.1.jar)
 
   * **Metaserver**
-    * 
[hudi-metaserver-server-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-metaserver-server-bundle/1.1.0/hudi-metaserver-server-bundle-1.1.0.jar)
+    * 
[hudi-metaserver-server-bundle](https://repository.apache.org/content/repositories/releases/org/apache/hudi/hudi-metaserver-server-bundle/1.1.1/hudi-metaserver-server-bundle-1.1.1.jar)
 
   </details>
--->
 
 ## Release 1.0.2
 
diff --git a/website/releases/release-1.1.0.md 
b/website/releases/release-1.1.1.md
similarity index 94%
rename from website/releases/release-1.1.0.md
rename to website/releases/release-1.1.1.md
index 469e258b8d03..d2cf4472a75d 100644
--- a/website/releases/release-1.1.0.md
+++ b/website/releases/release-1.1.1.md
@@ -1,12 +1,12 @@
 ---
-title: "Release 1.1.0"
+title: "Release 1.1.1"
 layout: releases
 toc: true
 ---
 
-## [Release 1.1.0](https://github.com/apache/hudi/releases/tag/release-1.1.0)
+## [Release 1.1.1](https://github.com/apache/hudi/releases/tag/release-1.1.1)
 
-Apache Hudi 1.1.0 is a major release that brings significant performance 
improvements, new features, and important changes to the platform. This release 
focuses on enhanced table format support, improved indexing capabilities, 
expanded engine support, and modernized record merging APIs.
+Apache Hudi 1.1.1 is a major release that brings significant performance 
improvements, new features, and important changes to the platform. This release 
focuses on enhanced table format support, improved indexing capabilities, 
expanded engine support, and modernized record merging APIs.
 
 ## Highlights
 
@@ -26,28 +26,28 @@ Apache Hudi 1.1.0 is a major release that brings 
significant performance improve
 
 #### Pluggable Table Format Support
 
-Hudi 1.1.0 introduces a new [Pluggable Table 
Format](/docs/hudi_stack#pluggable-table-format) framework that enables native 
integration of multiple table formats within the system. This foundation 
includes a base interface for pluggable table formats, designed to simplify 
extension and allow seamless interoperability across different storage 
backends. The Metadata Table (MDT) integration has been enhanced to support 
pluggability, ensuring modularity and unified metadata management across [...]
+Hudi 1.1.0 introduced a new [Pluggable Table 
Format](/docs/hudi_stack#pluggable-table-format) framework that enables native 
integration of multiple table formats within the system. This foundation 
includes a base interface for pluggable table formats, designed to simplify 
extension and allow seamless interoperability across different storage 
backends. The Metadata Table (MDT) integration has been enhanced to support 
pluggability, ensuring modularity and unified metadata management across [...]
 
 This release brings native Hudi integration through the new framework, 
allowing users to leverage Hudi's advanced capabilities directly while 
maintaining consistent semantics and performance. The configuration 
`hoodie.table.format` is set to `native` by default, which works as the Hudi 
table format. **No configuration changes are required** for existing and new 
Hudi tables. As additional table formats are supported in future releases, 
users will be able to set this configuration to work  [...]
 
 #### Table Version 9 with Index Versioning
 
-Hudi 1.1.0 introduces table version 9 with support for index versioning. 
Indexes in the Metadata Table (column stats, secondary index, expression index, 
etc) now have version tracking. In 1.1.0, these indexes use V2 layouts with 
enhanced capabilities including comprehensive logical data type support. Tables 
migrated from older versions will retain V1 index layouts, while new tables 
created with 1.1.0 use V2. Both versions remain backward compatible, and no 
action is required when upgradi [...]
+Hudi 1.1.0 introduced table version 9 with support for index versioning. 
Indexes in the Metadata Table (column stats, secondary index, expression index, 
etc) now have version tracking. In 1.1.1, these indexes use V2 layouts with 
enhanced capabilities including comprehensive logical data type support. Tables 
migrated from older versions will retain V1 index layouts, while new tables 
created with 1.1.1 use V2. Both versions remain backward compatible, and no 
action is required when upgradi [...]
 
 ### Indexing
 
 #### Partitioned Record Index
 
-In addition to the global record index introduced in 0.14.0, Hudi 1.1.0 adds a 
partitioned variant that guarantees uniqueness for partition path and record 
key pairs. This index speeds up lookups in very large partitioned datasets. For 
more details, see [record index](/docs/indexes#record-index).
+In addition to the global record index introduced in 0.14.0, Hudi 1.1.0 added a 
partitioned variant that guarantees uniqueness for partition path and record 
key pairs. This index speeds up lookups in very large partitioned datasets. For 
more details, see [record index](/docs/indexes#record-index).
 
-Prior to 1.1.0, only global record index was available, configured as:
+Prior to 1.1.0, only global record index was available, configured as:
 
 ```properties
 hoodie.metadata.record.index.enable=true
 hoodie.index.type=RECORD_INDEX
 ```
 
-From 1.1.0 onwards, both global and partitioned variants are available:
+From 1.1.0 onwards, both global and partitioned variants are available:
 
 For partitioned record index:
 
@@ -119,7 +119,7 @@ Support for efficient streaming reads of HoodieDataBlocks 
(currently for AvroDat
 
 #### ORC Support in FileGroupReader
 
-Enhanced support for multiple base file formats (ORC and Parquet) in 
HoodieFileGroupReader. The 1.1.0 release introduced SparkColumnarFileReader 
trait and MultipleColumnarFileFormatReader to uniformly handle ORC and Parquet 
records for both Merge-on-Read (MOR) and Copy-on-Write (COW) tables.
+Enhanced support for multiple base file formats (ORC and Parquet) in 
HoodieFileGroupReader. The 1.1.0 release introduced SparkColumnarFileReader 
trait and MultipleColumnarFileFormatReader to uniformly handle ORC and Parquet 
records for both Merge-on-Read (MOR) and Copy-on-Write (COW) tables.
 
 #### Hive Schema Evolution Support
 
@@ -233,7 +233,7 @@ A regression affecting Complex Key Generator with a single 
record key field has
 
 **Who Is Affected**: Tables using Complex Key Generator 
(`ComplexAvroKeyGenerator` or `ComplexKeyGenerator`) with a **single** record 
key field.
 
-**Default Behavior in 1.1.0**: Reverts to the correct encoding format 
(`field_name:field_value`) matching 0.14.0 and earlier.
+**Default Behavior in 1.1.1**: Reverts to the correct encoding format 
(`field_name:field_value`) matching 0.14.0 and earlier.
 
 **Migration Path**:
 
@@ -272,7 +272,7 @@ This change applies to table services as well.
 
 ### INSERT INTO Behavior Change
 
-The default behavior of Spark SQL's `INSERT INTO` command has changed. 
Previously, it used "upsert" operation for tables with ordering field(s), which 
caused deduplication. From 1.1.0, `INSERT INTO` now performs "insert" operation 
by default, ingesting records as-is without deduplication.
+The default behavior of Spark SQL's `INSERT INTO` command has changed. 
Previously, it used "upsert" operation for tables with ordering field(s), which 
caused deduplication. From 1.1.0, `INSERT INTO` now performs "insert" operation 
by default, ingesting records as-is without deduplication.
 
 **Example**:
 
@@ -281,8 +281,8 @@ Commit1:
   Partition1, recordKey1, val1, orderingValue1
   Partition1, recordKey1, val2, orderingValue2
 
-Pre-1.1.0: Returns one record (based on ordering field)
-From 1.1.0: Returns both records
+Pre-1.1.0: Returns one record (based on ordering field)
+From 1.1.0: Returns both records
 ```
 
 **To Restore Previous Behavior**: Set `hoodie.spark.sql.insert.into.operation 
= upsert`
@@ -366,7 +366,7 @@ Bucket index now supports only UPSERT operations and cannot 
be used with append
 
 ### End of Life for Hudi Versions Prior to 0.14.0
 
-As of this release, Hudi versions prior to 0.14.0 have reached end of life. 
Users on these older versions should plan to upgrade to 1.1.0 or later to 
receive ongoing support, bug fixes, and new features. The Hudi community will 
focus support efforts on versions 0.14.0 and later.
+As of this release, Hudi versions prior to 0.14.0 have reached end of life. 
Users on these older versions should plan to upgrade to 1.1.1 or later to 
receive ongoing support, bug fixes, and new features. The Hudi community will 
focus support efforts on versions 0.14.0 and later.
 
 For more details, see the [community 
discussion](https://github.com/apache/hudi/discussions/13847).
 
@@ -374,4 +374,4 @@ For more details, see the [community 
discussion](https://github.com/apache/hudi/
 
 ## Contributors
 
-Hudi 1.1.0 is the result of contributions from the entire Hudi community. We 
thank all contributors who made this release possible.
+Hudi 1.1.1 is the result of contributions from the entire Hudi community. We 
thank all contributors who made this release possible.
diff --git a/website/sidebarsReleases.js b/website/sidebarsReleases.js
index d8dbf14f8aad..f2d1177f38e4 100644
--- a/website/sidebarsReleases.js
+++ b/website/sidebarsReleases.js
@@ -12,7 +12,7 @@
 module.exports = {
   releases: [
     'download',
-    'release-1.1.0',
+    'release-1.1.1',
     'release-1.0.2',
     'release-1.0.1',
     'release-1.0.0',
diff --git a/website/src/pages/roadmap.md b/website/src/pages/roadmap.md
index 09ea94411823..1f8364326c35 100644
--- a/website/src/pages/roadmap.md
+++ b/website/src/pages/roadmap.md
@@ -11,7 +11,7 @@ down by areas on our [stack](/docs/hudi_stack).
 
 ## Recent Release(s)
 
-[1.1.0](/releases/release-1.1.0) (Nov 2025)
+[1.1.1](/releases/release-1.1.1) (Dec 2025)
 
 ## Future Releases
 
diff --git a/website/versioned_docs/version-1.1.0/azure_hoodie.md 
b/website/versioned_docs/version-1.1.1/azure_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/azure_hoodie.md
rename to website/versioned_docs/version-1.1.1/azure_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/basic_configurations.md 
b/website/versioned_docs/version-1.1.1/basic_configurations.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/basic_configurations.md
rename to website/versioned_docs/version-1.1.1/basic_configurations.md
diff --git a/website/versioned_docs/version-1.1.0/bos_hoodie.md 
b/website/versioned_docs/version-1.1.1/bos_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/bos_hoodie.md
rename to website/versioned_docs/version-1.1.1/bos_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/catalog_polaris.md 
b/website/versioned_docs/version-1.1.1/catalog_polaris.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/catalog_polaris.md
rename to website/versioned_docs/version-1.1.1/catalog_polaris.md
diff --git a/website/versioned_docs/version-1.1.0/cleaning.md 
b/website/versioned_docs/version-1.1.1/cleaning.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/cleaning.md
rename to website/versioned_docs/version-1.1.1/cleaning.md
diff --git a/website/versioned_docs/version-1.1.0/cli.md 
b/website/versioned_docs/version-1.1.1/cli.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/cli.md
rename to website/versioned_docs/version-1.1.1/cli.md
index 36d44f8584b5..a29ffdfc2637 100644
--- a/website/versioned_docs/version-1.1.0/cli.md
+++ b/website/versioned_docs/version-1.1.1/cli.md
@@ -85,7 +85,7 @@ Once these are set, you are good to launch hudi-cli and 
access S3 dataset.
 Apache Flink, Presto and many other frameworks, including Hudi. If you want to 
run the Hudi CLI on a Dataproc node 
 which has not been launched with Hudi support enabled, you can use the steps 
below:  
 
-These steps use Hudi version 1.1.0. If you want to use a different version you 
will have to edit the below commands 
+These steps use Hudi version 1.1.1. If you want to use a different version you 
will have to edit the below commands 
 appropriately:  
 1. Once you've started the Dataproc cluster, you can ssh into it as follows:
 ```
@@ -94,22 +94,22 @@ $ gcloud compute ssh --zone "YOUR_ZONE" 
"HOSTNAME_OF_MASTER_NODE"  --project "YO
 
 2. Download the Hudi CLI bundle
 ```
-wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-cli-bundle_2.12/1.1.0/hudi-cli-bundle_2.12-1.1.0.jar
  
+wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-cli-bundle_2.12/1.1.1/hudi-cli-bundle_2.12-1.1.1.jar
  
 ```
 
 3. Download the Hudi Spark bundle
 ```
-wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.0/hudi-spark3.5-bundle_2.12-1.1.0.jar
+wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-spark3.5-bundle_2.12/1.1.1/hudi-spark3.5-bundle_2.12-1.1.1.jar
 ```     
 
 4. Download the shell script that launches Hudi CLI bundle
 ```
-wget 
https://raw.githubusercontent.com/apache/hudi/release-1.1.0/packaging/hudi-cli-bundle/hudi-cli-with-bundle.sh
+wget 
https://raw.githubusercontent.com/apache/hudi/release-1.1.1/packaging/hudi-cli-bundle/hudi-cli-with-bundle.sh
 ```    
 
 5. Launch Hudi CLI bundle with appropriate environment variables as follows:
 ``` 
-CLIENT_JAR=$DATAPROC_DIR/lib/gcs-connector.jar 
CLI_BUNDLE_JAR=hudi-cli-bundle_2.12-1.1.0.jar 
SPARK_BUNDLE_JAR=hudi-spark3.5-bundle_2.12-1.1.0.jar ./hudi-cli-with-bundle.sh  
+CLIENT_JAR=$DATAPROC_DIR/lib/gcs-connector.jar 
CLI_BUNDLE_JAR=hudi-cli-bundle_2.12-1.1.1.jar 
SPARK_BUNDLE_JAR=hudi-spark3.5-bundle_2.12-1.1.1.jar ./hudi-cli-with-bundle.sh  
 ```
 
 6. hudi->connect --path gs://path_to_some_table  
diff --git a/website/versioned_docs/version-1.1.0/cloud.md 
b/website/versioned_docs/version-1.1.1/cloud.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/cloud.md
rename to website/versioned_docs/version-1.1.1/cloud.md
diff --git a/website/versioned_docs/version-1.1.0/clustering.md 
b/website/versioned_docs/version-1.1.1/clustering.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/clustering.md
rename to website/versioned_docs/version-1.1.1/clustering.md
diff --git a/website/versioned_docs/version-1.1.0/compaction.md 
b/website/versioned_docs/version-1.1.1/compaction.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/compaction.md
rename to website/versioned_docs/version-1.1.1/compaction.md
diff --git a/website/versioned_docs/version-1.1.0/comparison.md 
b/website/versioned_docs/version-1.1.1/comparison.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/comparison.md
rename to website/versioned_docs/version-1.1.1/comparison.md
index 30ededd13a83..3d2ef46a77e6 100644
--- a/website/versioned_docs/version-1.1.0/comparison.md
+++ b/website/versioned_docs/version-1.1.1/comparison.md
@@ -14,7 +14,6 @@ and bring out the different tradeoffs these systems have 
accepted in their desig
 class support for `upserts`. A key differentiator is that Kudu also attempts 
to serve as a datastore for OLTP workloads, something that Hudi does not aspire 
to be.
 Consequently, Kudu does not support incremental pulling (as of early 2017), 
something Hudi does to enable incremental processing use cases.
 
-
 Kudu diverges from a distributed file system abstraction and HDFS altogether, 
with its own set of storage servers talking to each  other via RAFT.
 Hudi, on the other hand, is designed to work with an underlying Hadoop 
compatible filesystem (HDFS,S3 or Ceph) and does not have its own fleet of 
storage servers,
 instead relying on Apache Spark to do the heavy-lifting. Thus, Hudi can be 
scaled easily, just like other Spark jobs, while Kudu would require hardware
@@ -22,7 +21,6 @@ instead relying on Apache Spark to do the heavy-lifting. 
Thus, Hudi can be scale
 But, if we were to go with results shared by 
[CERN](https://db-blog.web.cern.ch/blog/zbigniew-baranowski/2017-01-performance-comparison-different-file-formats-and-storage-engines)
 ,
 we expect Hudi to positioned at something that ingests parquet with superior 
performance.
 
-
 ## Hive Transactions
 
 [Hive 
Transactions/ACID](https://cwiki.apache.org/confluence/display/Hive/Hive+Transactions)
 is another similar effort, which tries to implement storage like
diff --git a/website/versioned_docs/version-1.1.0/concepts.md 
b/website/versioned_docs/version-1.1.1/concepts.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/concepts.md
rename to website/versioned_docs/version-1.1.1/concepts.md
diff --git a/website/versioned_docs/version-1.1.0/concurrency_control.md 
b/website/versioned_docs/version-1.1.1/concurrency_control.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/concurrency_control.md
rename to website/versioned_docs/version-1.1.1/concurrency_control.md
index 15d4a02b4bd4..d6231aca79b7 100644
--- a/website/versioned_docs/version-1.1.0/concurrency_control.md
+++ b/website/versioned_docs/version-1.1.1/concurrency_control.md
@@ -260,7 +260,7 @@ Multi writing using OCC allows multiple writers to 
concurrently write and atomic
 
 To improve the concurrency control, the [0.13.0 
release](https://hudi.apache.org/releases/release-0.13.0#early-conflict-detection-for-multi-writer)
 introduced a new feature, early conflict detection in OCC, to detect the 
conflict during the data writing phase and abort the writing early on once a 
conflict is detected, using Hudi's marker mechanism. Hudi can now stop a 
conflicting writer much earlier because of the early conflict detection and 
release computing resources necessary to clus [...]
 
-By default, this feature is turned off. To try this out, a user needs to set 
`hoodie.write.concurrency.early.conflict.detection.enable` to true, when using 
OCC for concurrency control (Refer 
[configs](https://hudi.apache.org/docs/next/configurations#Write-Configurations-advanced-configs)
 page for all relevant configs).
+By default, this feature is turned off. To try this out, a user needs to set 
`hoodie.write.concurrency.early.conflict.detection.enable` to true, when using 
OCC for concurrency control (Refer 
[configs](configurations.md#Write-Configurations-advanced-configs) page for all 
relevant configs).
 :::note
 Early conflict Detection in OCC is an **EXPERIMENTAL** feature
 :::
@@ -291,7 +291,7 @@ A Hudi Streamer job can then be triggered as follows:
 
 ```java
 [hoodie]$ spark-submit \
-  --jars 
"packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle_2.12-1.1.0.jar,packaging/hudi-spark-bundle/target/hudi-spark3.5-bundle_2.12-1.1.0.jar"
 \
+  --jars 
"packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle_2.12-1.1.1.jar,packaging/hudi-spark-bundle/target/hudi-spark3.5-bundle_2.12-1.1.1.jar"
 \
   --class org.apache.hudi.utilities.streamer.HoodieStreamer `ls 
packaging/hudi-utilities-slim-bundle/target/hudi-utilities-slim-bundle-*.jar` \
   --props 
file://${PWD}/hudi-utilities/src/test/resources/streamer-config/kafka-source.properties
 \
   --schemaprovider-class 
org.apache.hudi.utilities.schema.SchemaRegistryProvider \
diff --git a/website/versioned_docs/version-1.1.0/configurations.md 
b/website/versioned_docs/version-1.1.1/configurations.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/configurations.md
rename to website/versioned_docs/version-1.1.1/configurations.md
diff --git a/website/versioned_docs/version-1.1.0/cos_hoodie.md 
b/website/versioned_docs/version-1.1.1/cos_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/cos_hoodie.md
rename to website/versioned_docs/version-1.1.1/cos_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/deployment.md 
b/website/versioned_docs/version-1.1.1/deployment.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/deployment.md
rename to website/versioned_docs/version-1.1.1/deployment.md
diff --git a/website/versioned_docs/version-1.1.0/disaster_recovery.md 
b/website/versioned_docs/version-1.1.1/disaster_recovery.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/disaster_recovery.md
rename to website/versioned_docs/version-1.1.1/disaster_recovery.md
diff --git a/website/versioned_docs/version-1.1.0/docker_demo.md 
b/website/versioned_docs/version-1.1.1/docker_demo.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/docker_demo.md
rename to website/versioned_docs/version-1.1.1/docker_demo.md
diff --git a/website/versioned_docs/version-1.1.0/encryption.md 
b/website/versioned_docs/version-1.1.1/encryption.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/encryption.md
rename to website/versioned_docs/version-1.1.1/encryption.md
diff --git a/website/versioned_docs/version-1.1.0/file_sizing.md 
b/website/versioned_docs/version-1.1.1/file_sizing.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/file_sizing.md
rename to website/versioned_docs/version-1.1.1/file_sizing.md
diff --git a/website/versioned_docs/version-1.1.0/flink-quick-start-guide.md 
b/website/versioned_docs/version-1.1.1/flink-quick-start-guide.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/flink-quick-start-guide.md
rename to website/versioned_docs/version-1.1.1/flink-quick-start-guide.md
index 00e238ec665a..2a04dc85abb2 100644
--- a/website/versioned_docs/version-1.1.0/flink-quick-start-guide.md
+++ b/website/versioned_docs/version-1.1.1/flink-quick-start-guide.md
@@ -64,7 +64,7 @@ Now start the SQL CLI:
 ```bash
 # For Flink versions: 1.17-1.20, 2.0
 export FLINK_VERSION=1.20 
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 wget 
https://repo1.maven.org/maven2/org/apache/hudi/hudi-flink${FLINK_VERSION}-bundle/${HUDI_VERSION}/hudi-flink${FLINK_VERSION}-bundle-${HUDI_VERSION}.jar
 -P /tmp/
 ./bin/sql-client.sh embedded -j 
/tmp/hudi-flink${FLINK_VERSION}-bundle-${HUDI_VERSION}.jar shell
 ```
@@ -81,7 +81,7 @@ Please add the desired dependency to your project:
 <properties>
     <flink.version>1.20.0</flink.version>
     <flink.binary.version>1.20</flink.binary.version>
-    <hudi.version>1.1.0</hudi.version>
+    <hudi.version>1.1.1</hudi.version>
 </properties>
 <dependency>
     <groupId>org.apache.hudi</groupId>
diff --git a/website/versioned_docs/version-1.1.0/flink_tuning.md 
b/website/versioned_docs/version-1.1.1/flink_tuning.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/flink_tuning.md
rename to website/versioned_docs/version-1.1.1/flink_tuning.md
diff --git a/website/versioned_docs/version-1.1.0/gcp_bigquery.md 
b/website/versioned_docs/version-1.1.1/gcp_bigquery.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/gcp_bigquery.md
rename to website/versioned_docs/version-1.1.1/gcp_bigquery.md
diff --git a/website/versioned_docs/version-1.1.0/gcs_hoodie.md 
b/website/versioned_docs/version-1.1.1/gcs_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/gcs_hoodie.md
rename to website/versioned_docs/version-1.1.1/gcs_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/hoodie_streaming_ingestion.md 
b/website/versioned_docs/version-1.1.1/hoodie_streaming_ingestion.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/hoodie_streaming_ingestion.md
rename to website/versioned_docs/version-1.1.1/hoodie_streaming_ingestion.md
diff --git a/website/versioned_docs/version-1.1.0/hudi_stack.md 
b/website/versioned_docs/version-1.1.1/hudi_stack.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/hudi_stack.md
rename to website/versioned_docs/version-1.1.1/hudi_stack.md
diff --git a/website/versioned_docs/version-1.1.0/ibm_cos_hoodie.md 
b/website/versioned_docs/version-1.1.1/ibm_cos_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/ibm_cos_hoodie.md
rename to website/versioned_docs/version-1.1.1/ibm_cos_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/indexes.md 
b/website/versioned_docs/version-1.1.1/indexes.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/indexes.md
rename to website/versioned_docs/version-1.1.1/indexes.md
diff --git a/website/versioned_docs/version-1.1.0/ingestion_flink.md 
b/website/versioned_docs/version-1.1.1/ingestion_flink.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/ingestion_flink.md
rename to website/versioned_docs/version-1.1.1/ingestion_flink.md
diff --git a/website/versioned_docs/version-1.1.0/ingestion_kafka_connect.md 
b/website/versioned_docs/version-1.1.1/ingestion_kafka_connect.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/ingestion_kafka_connect.md
rename to website/versioned_docs/version-1.1.1/ingestion_kafka_connect.md
diff --git a/website/versioned_docs/version-1.1.0/jfs_hoodie.md 
b/website/versioned_docs/version-1.1.1/jfs_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/jfs_hoodie.md
rename to website/versioned_docs/version-1.1.1/jfs_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/key_generation.md 
b/website/versioned_docs/version-1.1.1/key_generation.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/key_generation.md
rename to website/versioned_docs/version-1.1.1/key_generation.md
diff --git a/website/versioned_docs/version-1.1.0/ks3_hoodie.md 
b/website/versioned_docs/version-1.1.1/ks3_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/ks3_hoodie.md
rename to website/versioned_docs/version-1.1.1/ks3_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/markers.md 
b/website/versioned_docs/version-1.1.1/markers.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/markers.md
rename to website/versioned_docs/version-1.1.1/markers.md
diff --git a/website/versioned_docs/version-1.1.0/metadata.md 
b/website/versioned_docs/version-1.1.1/metadata.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/metadata.md
rename to website/versioned_docs/version-1.1.1/metadata.md
index d50d3999e163..c5a01b42dfa9 100644
--- a/website/versioned_docs/version-1.1.0/metadata.md
+++ b/website/versioned_docs/version-1.1.1/metadata.md
@@ -92,6 +92,7 @@ cleaned up, before re-enabling the metadata table again.
 ## Leveraging metadata during queries
 
 ### files index
+
 Metadata based listing using *files_index* can be leveraged on the read side 
by setting appropriate configs/session properties
 from different engines as shown below:
 
@@ -104,6 +105,7 @@ from different engines as shown below:
 | Athena                                           | 
[hudi.metadata-listing-enabled](https://docs.aws.amazon.com/athena/latest/ug/querying-hudi.html)
 | When this table property is set to `TRUE` enables the Hudi metadata table 
and the related file listing functionality          |
 
 ### column_stats index and data skipping
+
 Enabling metadata table and column stats index is a prerequisite to enabling 
data skipping capabilities. Following are the 
 corresponding configs across Spark and Flink readers.
 
diff --git a/website/versioned_docs/version-1.1.0/metadata_indexing.md 
b/website/versioned_docs/version-1.1.1/metadata_indexing.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/metadata_indexing.md
rename to website/versioned_docs/version-1.1.1/metadata_indexing.md
index 66c8d02bbde5..7056a1e02671 100644
--- a/website/versioned_docs/version-1.1.0/metadata_indexing.md
+++ b/website/versioned_docs/version-1.1.1/metadata_indexing.md
@@ -313,6 +313,7 @@ Some of these limitations will be removed in the upcoming 
releases. Please
 follow [this GitHub issue](https://github.com/apache/hudi/issues/14870) for 
developments on this feature.
 
 ## Related Resources
+
 <h3>Videos</h3>
 
-* [Advantages of Metadata Indexing and Asynchronous Indexing in Hudi Hands on 
Lab](https://www.youtube.com/watch?v=TSphQCsY4pY)
+- [Advantages of Metadata Indexing and Asynchronous Indexing in Hudi Hands on 
Lab](https://www.youtube.com/watch?v=TSphQCsY4pY)
diff --git a/website/versioned_docs/version-1.1.0/metrics.md 
b/website/versioned_docs/version-1.1.1/metrics.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/metrics.md
rename to website/versioned_docs/version-1.1.1/metrics.md
diff --git a/website/versioned_docs/version-1.1.0/migration_guide.md 
b/website/versioned_docs/version-1.1.1/migration_guide.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/migration_guide.md
rename to website/versioned_docs/version-1.1.1/migration_guide.md
diff --git a/website/versioned_docs/version-1.1.0/notebooks.md 
b/website/versioned_docs/version-1.1.1/notebooks.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/notebooks.md
rename to website/versioned_docs/version-1.1.1/notebooks.md
diff --git a/website/versioned_docs/version-1.1.0/oci_hoodie.md 
b/website/versioned_docs/version-1.1.1/oci_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/oci_hoodie.md
rename to website/versioned_docs/version-1.1.1/oci_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/oss_hoodie.md 
b/website/versioned_docs/version-1.1.1/oss_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/oss_hoodie.md
rename to website/versioned_docs/version-1.1.1/oss_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/overview.mdx 
b/website/versioned_docs/version-1.1.1/overview.mdx
similarity index 100%
rename from website/versioned_docs/version-1.1.0/overview.mdx
rename to website/versioned_docs/version-1.1.1/overview.mdx
diff --git a/website/versioned_docs/version-1.1.0/performance.md 
b/website/versioned_docs/version-1.1.1/performance.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/performance.md
rename to website/versioned_docs/version-1.1.1/performance.md
diff --git 
a/website/versioned_docs/version-1.1.0/platform_services_post_commit_callback.md
 
b/website/versioned_docs/version-1.1.1/platform_services_post_commit_callback.md
similarity index 100%
rename from 
website/versioned_docs/version-1.1.0/platform_services_post_commit_callback.md
rename to 
website/versioned_docs/version-1.1.1/platform_services_post_commit_callback.md
diff --git a/website/versioned_docs/version-1.1.0/precommit_validator.md 
b/website/versioned_docs/version-1.1.1/precommit_validator.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/precommit_validator.md
rename to website/versioned_docs/version-1.1.1/precommit_validator.md
diff --git a/website/versioned_docs/version-1.1.0/procedures.md 
b/website/versioned_docs/version-1.1.1/procedures.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/procedures.md
rename to website/versioned_docs/version-1.1.1/procedures.md
diff --git 
a/website/versioned_docs/version-1.1.0/python-rust-quick-start-guide.md 
b/website/versioned_docs/version-1.1.1/python-rust-quick-start-guide.md
similarity index 100%
rename from 
website/versioned_docs/version-1.1.0/python-rust-quick-start-guide.md
rename to website/versioned_docs/version-1.1.1/python-rust-quick-start-guide.md
diff --git a/website/versioned_docs/version-1.1.0/querying_data.md 
b/website/versioned_docs/version-1.1.1/querying_data.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/querying_data.md
rename to website/versioned_docs/version-1.1.1/querying_data.md
diff --git a/website/versioned_docs/version-1.1.0/quick-start-guide.md 
b/website/versioned_docs/version-1.1.1/quick-start-guide.md
similarity index 99%
rename from website/versioned_docs/version-1.1.0/quick-start-guide.md
rename to website/versioned_docs/version-1.1.1/quick-start-guide.md
index a9a293d9db64..8c05a9af0452 100644
--- a/website/versioned_docs/version-1.1.0/quick-start-guide.md
+++ b/website/versioned_docs/version-1.1.1/quick-start-guide.md
@@ -47,7 +47,7 @@ From the extracted directory run spark-shell with Hudi:
 ```shell
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
@@ -68,7 +68,7 @@ From the extracted directory run pyspark with Hudi:
 export PYSPARK_PYTHON=$(which python3)
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
@@ -89,7 +89,7 @@ From the extracted directory run Spark SQL with Hudi:
 ```shell
 # For Spark versions: 3.3 - 4.0
 export SPARK_VERSION=3.5
-export HUDI_VERSION=1.1.0
+export HUDI_VERSION=1.1.1
 # For Scala versions: 2.12/2.13
 export SCALA_VERSION=2.13
 
diff --git a/website/versioned_docs/version-1.1.0/reading_tables_batch_reads.md 
b/website/versioned_docs/version-1.1.1/reading_tables_batch_reads.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/reading_tables_batch_reads.md
rename to website/versioned_docs/version-1.1.1/reading_tables_batch_reads.md
diff --git 
a/website/versioned_docs/version-1.1.0/reading_tables_streaming_reads.md 
b/website/versioned_docs/version-1.1.1/reading_tables_streaming_reads.md
similarity index 100%
rename from 
website/versioned_docs/version-1.1.0/reading_tables_streaming_reads.md
rename to website/versioned_docs/version-1.1.1/reading_tables_streaming_reads.md
diff --git a/website/versioned_docs/version-1.1.0/record_merger.md 
b/website/versioned_docs/version-1.1.1/record_merger.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/record_merger.md
rename to website/versioned_docs/version-1.1.1/record_merger.md
diff --git a/website/versioned_docs/version-1.1.0/rollbacks.md 
b/website/versioned_docs/version-1.1.1/rollbacks.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/rollbacks.md
rename to website/versioned_docs/version-1.1.1/rollbacks.md
diff --git a/website/versioned_docs/version-1.1.0/s3_hoodie.md 
b/website/versioned_docs/version-1.1.1/s3_hoodie.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/s3_hoodie.md
rename to website/versioned_docs/version-1.1.1/s3_hoodie.md
diff --git a/website/versioned_docs/version-1.1.0/schema_evolution.md 
b/website/versioned_docs/version-1.1.1/schema_evolution.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/schema_evolution.md
rename to website/versioned_docs/version-1.1.1/schema_evolution.md
diff --git a/website/versioned_docs/version-1.1.0/snapshot_exporter.md 
b/website/versioned_docs/version-1.1.1/snapshot_exporter.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/snapshot_exporter.md
rename to website/versioned_docs/version-1.1.1/snapshot_exporter.md
diff --git a/website/versioned_docs/version-1.1.0/sql_ddl.md 
b/website/versioned_docs/version-1.1.1/sql_ddl.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/sql_ddl.md
rename to website/versioned_docs/version-1.1.1/sql_ddl.md
diff --git a/website/versioned_docs/version-1.1.0/sql_dml.md 
b/website/versioned_docs/version-1.1.1/sql_dml.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/sql_dml.md
rename to website/versioned_docs/version-1.1.1/sql_dml.md
diff --git a/website/versioned_docs/version-1.1.0/sql_queries.md 
b/website/versioned_docs/version-1.1.1/sql_queries.md
similarity index 92%
rename from website/versioned_docs/version-1.1.0/sql_queries.md
rename to website/versioned_docs/version-1.1.1/sql_queries.md
index be08fd6bc735..d7eeb9cdb1c4 100644
--- a/website/versioned_docs/version-1.1.0/sql_queries.md
+++ b/website/versioned_docs/version-1.1.1/sql_queries.md
@@ -492,6 +492,69 @@ WITH (
 | `mode`             | `false`  | `dfs`     | Specify as `hms` to keep the 
table metadata with Hive metastore                                              
          |
 | `table.external`   | `false`  | `false`   | Whether to create external 
tables, only valid under `hms` mode                                             
            |
 
+### Query Metadata Columns
+Flink SQL now supports querying the virtual metadata columns from Hudi tables. 
These special columns provide access to 
+internal Hudi metadata such as commit time, record key, and partition path. 
The following virtual metadata columns are supported:
+
+| Metadata Column Name     | Description                                       
                             |
+|--------------------------|--------------------------------------------------------------------------------|
+| `_hoodie_commit_time`    | The commit time when the record was committed     
                           |
+| `_hoodie_commit_seqno`   | The commit sequence number of the record          
                           |
+| `_hoodie_record_key`     | The record key of the record                      
                            |
+| `_hoodie_partition_path` | The partition path of the record                  
                        |
+| `_hoodie_file_name`      | The file name where the record is stored          
                             |
+| `_hoodie_operation`      | The changelog operation of the record, enabled by 
'changelog.enabled' = 'true' |
+
+Before selecting these columns in your SQL queries, you have to define them in 
the DDL through the [virtual metadata 
+column](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/sql/create/#columns)
 syntax of Flink SQL.
+
+Example usage:
+
+```sql
+CREATE TABLE hudi_table(
+    _hoodie_commit_time STRING METADATA VIRTUAL,
+    _hoodie_record_key STRING METADATA VIRTUAL,
+    ts BIGINT,
+    uuid VARCHAR(40) PRIMARY KEY NOT ENFORCED,
+    rider VARCHAR(20),
+    driver VARCHAR(20),
+    fare DOUBLE,
+    city VARCHAR(20)
+)
+PARTITIONED BY (`city`)
+WITH (
+  'connector' = 'hudi',
+  'path' = 'file:///tmp/hudi_table',
+  'table.type' = 'MERGE_ON_READ'
+);
+
+-- Insert some records into the table
+INSERT INTO hudi_table
+VALUES
+(1695159649087,'334e26e9-8355-45cc-97c6-c31daf0df330','rider-A','driver-K',19.10,'san_francisco'),
+(1695091554788,'e96c4396-3fad-413a-a942-4cb36106d721','rider-C','driver-M',27.70
 ,'san_francisco'),
+(1695046462179,'9909a8b1-2d15-4d3d-8ec9-efc48c536a00','rider-D','driver-L',33.90
 ,'san_francisco'),
+(1695332066204,'1dced545-862b-4ceb-8b43-d2a568f6616b','rider-E','driver-O',93.50,'san_francisco'),
+(1695516137016,'e3cf430c-889d-4015-bc98-59bdce1e530c','rider-F','driver-P',34.15,'sao_paulo'),
+(1695376420876,'7a84095f-737f-40bc-b62f-6b69664712d2','rider-G','driver-Q',43.40
 ,'sao_paulo'),
+(1695173887231,'3eeb61f7-c2b0-4636-99bd-5d7a5a1d2c04','rider-I','driver-S',41.06
 ,'chennai'),
+(1695115999911,'c8abbe79-8d89-47ea-b4ce-4d224bae5bfa','rider-J','driver-T',17.85,'chennai');
+
+-- Query a Hudi table with virtual metadata columns
+SELECT
+    _hoodie_commit_time,
+    _hoodie_record_key,
+    uuid,
+    rider,
+    fare
+FROM hudi_table;
+```
+
+:::note
+Virtual metadata columns are read-only, which means you can simply ignore them 
in an INSERT statement and only provide 
+values for the regular data columns.
+:::
+
 ## Hive
 
 [Hive](https://hive.apache.org/) has support for snapshot and incremental 
queries (with limitations) on Hudi tables.
@@ -552,14 +615,13 @@ Please check the below table for query types supported 
and installation instruct
 | > = 0.272             | No action needed. Hudi 0.10.1 version is a compile 
time dependency. | File listing optimizations. Improved query performance. |
 | > = 0.275             | No action needed. Hudi 0.11.0 version is a compile 
time dependency. | All of the above. Native Hudi connector that is on par with 
Hive connector. |
 
-
 :::note
 Incremental queries and point in time queries are not supported either through 
the Hive connector or Hudi
 connector. However, it is in our roadmap, and you can track the development
 under [this GitHub issue](https://github.com/apache/hudi/issues/14992).
 :::
 
-To use the Hudi connector, please configure hudi catalog in ` 
/presto-server-0.2xxx/etc/catalog/hudi.properties` as follows:
+To use the Hudi connector, please configure hudi catalog in 
`/presto-server-0.2xxx/etc/catalog/hudi.properties` as follows:
 
 ```properties
 connector.name=hudi
diff --git a/website/versioned_docs/version-1.1.0/storage_layouts.md 
b/website/versioned_docs/version-1.1.1/storage_layouts.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/storage_layouts.md
rename to website/versioned_docs/version-1.1.1/storage_layouts.md
diff --git a/website/versioned_docs/version-1.1.0/structure.md 
b/website/versioned_docs/version-1.1.1/structure.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/structure.md
rename to website/versioned_docs/version-1.1.1/structure.md
diff --git 
a/website/versioned_docs/version-1.1.0/syncing_aws_glue_data_catalog.md 
b/website/versioned_docs/version-1.1.1/syncing_aws_glue_data_catalog.md
similarity index 100%
rename from 
website/versioned_docs/version-1.1.0/syncing_aws_glue_data_catalog.md
rename to website/versioned_docs/version-1.1.1/syncing_aws_glue_data_catalog.md
diff --git a/website/versioned_docs/version-1.1.0/syncing_datahub.md 
b/website/versioned_docs/version-1.1.1/syncing_datahub.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/syncing_datahub.md
rename to website/versioned_docs/version-1.1.1/syncing_datahub.md
diff --git a/website/versioned_docs/version-1.1.0/syncing_metastore.md 
b/website/versioned_docs/version-1.1.1/syncing_metastore.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/syncing_metastore.md
rename to website/versioned_docs/version-1.1.1/syncing_metastore.md
diff --git a/website/versioned_docs/version-1.1.0/syncing_xtable.md 
b/website/versioned_docs/version-1.1.1/syncing_xtable.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/syncing_xtable.md
rename to website/versioned_docs/version-1.1.1/syncing_xtable.md
diff --git a/website/versioned_docs/version-1.1.0/table_types.md 
b/website/versioned_docs/version-1.1.1/table_types.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/table_types.md
rename to website/versioned_docs/version-1.1.1/table_types.md
diff --git a/website/versioned_docs/version-1.1.0/timeline.md 
b/website/versioned_docs/version-1.1.1/timeline.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/timeline.md
rename to website/versioned_docs/version-1.1.1/timeline.md
diff --git a/website/versioned_docs/version-1.1.0/troubleshooting.md 
b/website/versioned_docs/version-1.1.1/troubleshooting.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/troubleshooting.md
rename to website/versioned_docs/version-1.1.1/troubleshooting.md
diff --git a/website/versioned_docs/version-1.1.0/tuning-guide.md 
b/website/versioned_docs/version-1.1.1/tuning-guide.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/tuning-guide.md
rename to website/versioned_docs/version-1.1.1/tuning-guide.md
diff --git a/website/versioned_docs/version-1.1.0/use_cases.md 
b/website/versioned_docs/version-1.1.1/use_cases.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/use_cases.md
rename to website/versioned_docs/version-1.1.1/use_cases.md
diff --git a/website/versioned_docs/version-1.1.0/write_operations.md 
b/website/versioned_docs/version-1.1.1/write_operations.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/write_operations.md
rename to website/versioned_docs/version-1.1.1/write_operations.md
diff --git a/website/versioned_docs/version-1.1.0/writing_data.md 
b/website/versioned_docs/version-1.1.1/writing_data.md
similarity index 100%
rename from website/versioned_docs/version-1.1.0/writing_data.md
rename to website/versioned_docs/version-1.1.1/writing_data.md
diff --git 
a/website/versioned_docs/version-1.1.0/writing_tables_streaming_writes.md 
b/website/versioned_docs/version-1.1.1/writing_tables_streaming_writes.md
similarity index 100%
rename from 
website/versioned_docs/version-1.1.0/writing_tables_streaming_writes.md
rename to 
website/versioned_docs/version-1.1.1/writing_tables_streaming_writes.md
diff --git a/website/versioned_sidebars/version-1.1.0-sidebars.json 
b/website/versioned_sidebars/version-1.1.1-sidebars.json
similarity index 100%
rename from website/versioned_sidebars/version-1.1.0-sidebars.json
rename to website/versioned_sidebars/version-1.1.1-sidebars.json
diff --git a/website/versions.json b/website/versions.json
index 798415f69da9..478802d4e957 100644
--- a/website/versions.json
+++ b/website/versions.json
@@ -1,5 +1,5 @@
 [
-  "1.1.0",
+  "1.1.1",
   "1.0.2",
   "1.0.1",
   "1.0.0",

Reply via email to