This is an automated email from the ASF dual-hosted git repository.

bhavanisudha pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 1fcaec2  [HUDI-923] Update site to reflect graduation (#1656)
1fcaec2 is described below

commit 1fcaec26ea41a9a1a93a924a71ce39b703a2b637
Author: vinoth chandar <[email protected]>
AuthorDate: Sun May 24 01:23:14 2020 -0700

    [HUDI-923] Update site to reflect graduation (#1656)
    
    - Removed references to "Incubating" in all pages, logos, text etc..
    - Update PMC/Committers information correctly. 
    - Removed the incubator disclaimer on site footer
    - Fixed github links to point to apache/hudi
---
 docs/_config.yml                                   |  18 +++++-----
 docs/_docs/0.5.0/1_1_quick_start_guide.cn.md       |   6 ++--
 docs/_docs/0.5.0/1_1_quick_start_guide.md          |   6 ++--
 docs/_docs/0.5.1/1_1_quick_start_guide.cn.md       |   6 ++--
 docs/_docs/0.5.1/1_1_quick_start_guide.md          |  12 +++----
 docs/_docs/0.5.2/1_1_quick_start_guide.cn.md       |   6 ++--
 docs/_docs/0.5.2/1_1_quick_start_guide.md          |  12 +++----
 docs/_docs/1_1_quick_start_guide.cn.md             |   6 ++--
 docs/_docs/1_1_quick_start_guide.md                |  20 +++++------
 docs/_includes/footer.html                         |   5 ---
 docs/_includes/masthead.html                       |   2 +-
 docs/_pages/community.cn.md                        |   2 +-
 docs/_pages/community.md                           |  29 +++++++---------
 docs/_pages/contributing.cn.md                     |   4 +--
 docs/_pages/contributing.md                        |   4 +--
 docs/_pages/index.md                               |   2 +-
 docs/_pages/releases.cn.md                         |  28 +++++++--------
 docs/_pages/releases.md                            |  38 ++++++++++-----------
 .../2020-04-27-apache-hudi-apache-zepplin.md       |   2 +-
 docs/assets/images/hudi.png                        | Bin 3463 -> 6203 bytes
 20 files changed, 100 insertions(+), 108 deletions(-)

diff --git a/docs/_config.yml b/docs/_config.yml
index df8658d..ec03d44 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -29,11 +29,11 @@ previous_docs:
 
 # Site Settings
 locale                   : "en-US"
-title                    : # "Apache Hudi (Incubating)"
+title                    : # "Apache Hudi"
 title_separator          : "-"
-subtitle                 : "Incubating" # *version
-description              : "Apache Hudi ingests & manages storage of large 
analytical datasets over DFS (HDFS or cloud stores)."
-url                      : https://hudi.apache.org # the base hostname & 
protocol for your site e.g. "https://github.com/apache/incubator-hudi.git";
+subtitle                 : "" # *version
+description              : "Apache Hudi brings upserts, deletes and stream 
processing to data lakes built on HDFS or cloud storage."
+url                      : https://hudi.apache.org # the base hostname & 
protocol for your site e.g. "https://github.com/apache/hudi.git";
 repository               : "apache/incubator-hudi"
 teaser                   : "/assets/images/500x300.png" # path of fallback 
teaser image, e.g. "/assets/images/500x300.png"
 logo                     : "/assets/images/hudi.png" # path of logo image to 
display in the masthead, e.g. "/assets/images/88x88.png"
@@ -60,7 +60,7 @@ author:
       url: 
"https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE";
     - label: "Fork on GitHub"
       icon: "fa fa-github"
-      url: "https://github.com/apache/incubator-hudi";
+      url: "https://github.com/apache/hudi";
     - label: "Report Issues"
       icon: "fa fa-navicon"
       url: "https://issues.apache.org/jira/projects/HUDI/summary";
@@ -86,7 +86,7 @@ cn_author:
       url: 
"https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE";
     - label: "Fork on GitHub"
       icon: "fa fa-github"
-      url: "https://github.com/apache/incubator-hudi";
+      url: "https://github.com/apache/hudi";
     - label: "Report Issues"
       icon: "fa fa-navicon"
       url: "https://issues.apache.org/jira/projects/HUDI/summary";
@@ -113,7 +113,7 @@ cn_author:
       url: 
"https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE";
     - label: "Fork on GitHub"
       icon: "fa fa-github"
-      url: "https://github.com/apache/incubator-hudi";
+      url: "https://github.com/apache/hudi";
     - label: "Report Issues"
       icon: "fa fa-navicon"
       url: "https://issues.apache.org/jira/projects/HUDI/summary";
@@ -139,7 +139,7 @@ cn_author:
       url: 
"https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE";
     - label: "Fork on GitHub"
       icon: "fa fa-github"
-      url: "https://github.com/apache/incubator-hudi";
+      url: "https://github.com/apache/hudi";
     - label: "Report Issues"
       icon: "fa fa-navicon"
       url: "https://issues.apache.org/jira/projects/HUDI/summary";
@@ -165,7 +165,7 @@ cn_author:
       url: 
"https://join.slack.com/t/apache-hudi/shared_invite/enQtODYyNDAxNzc5MTg2LTE5OTBlYmVhYjM0N2ZhOTJjOWM4YzBmMWU2MjZjMGE4NDc5ZDFiOGQ2N2VkYTVkNzU3ZDQ4OTI1NmFmYWQ0NzE";
     - label: "Fork on GitHub"
       icon: "fa fa-github"
-      url: "https://github.com/apache/incubator-hudi";
+      url: "https://github.com/apache/hudi";
     - label: "Report Issues"
       icon: "fa fa-navicon"
       url: "https://issues.apache.org/jira/projects/HUDI/summary";
diff --git a/docs/_docs/0.5.0/1_1_quick_start_guide.cn.md 
b/docs/_docs/0.5.0/1_1_quick_start_guide.cn.md
index fba3032..8cef74b 100644
--- a/docs/_docs/0.5.0/1_1_quick_start_guide.cn.md
+++ b/docs/_docs/0.5.0/1_1_quick_start_guide.cn.md
@@ -33,8 +33,8 @@ val basePath = "file:///tmp/hudi_cow_table"
 val dataGen = new DataGenerator
 ```
 
-[数据生成器](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
-可以基于[行程样本模式](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+[数据生成器](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
+可以基于[行程样本模式](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 生成插入和更新的样本。
 
 ## 插入数据 {#inserts}
@@ -151,7 +151,7 @@ spark.sql("select `_hoodie_commit_time`, fare, begin_lon, 
begin_lat, ts from  hu
 
 ## 从这开始下一步?
 
-您也可以通过[自己构建hudi](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source)来快速开始,
+您也可以通过[自己构建hudi](https://github.com/apache/hudi#building-apache-hudi-from-source)来快速开始,
 并在spark-shell命令中使用`--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle-*.*.*-SNAPSHOT.jar`,
 而不是`--packages org.apache.hudi:hudi-spark-bundle:0.5.0-incubating`
 
diff --git a/docs/_docs/0.5.0/1_1_quick_start_guide.md 
b/docs/_docs/0.5.0/1_1_quick_start_guide.md
index c938f8e..b1b5464 100644
--- a/docs/_docs/0.5.0/1_1_quick_start_guide.md
+++ b/docs/_docs/0.5.0/1_1_quick_start_guide.md
@@ -36,8 +36,8 @@ val basePath = "file:///tmp/hudi_cow_table"
 val dataGen = new DataGenerator
 ```
 
-The 
[DataGenerator](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
-can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+The 
[DataGenerator](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
+can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 {: .notice--info}
 
 
@@ -163,7 +163,7 @@ spark.sql("select `_hoodie_commit_time`, fare, begin_lon, 
begin_lat, ts from  hu
 
 ## Where to go from here?
 
-You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source),
 
+You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/hudi#building-apache-hudi-from-source), 
 and using `--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle-*.*.*-SNAPSHOT.jar`
 in the spark-shell command above
 instead of `--packages org.apache.hudi:hudi-spark-bundle:0.5.0-incubating`
 
diff --git a/docs/_docs/0.5.1/1_1_quick_start_guide.cn.md 
b/docs/_docs/0.5.1/1_1_quick_start_guide.cn.md
index 0080ff6..3f7bb96 100644
--- a/docs/_docs/0.5.1/1_1_quick_start_guide.cn.md
+++ b/docs/_docs/0.5.1/1_1_quick_start_guide.cn.md
@@ -33,8 +33,8 @@ val basePath = "file:///tmp/hudi_cow_table"
 val dataGen = new DataGenerator
 ```
 
-[数据生成器](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
-可以基于[行程样本模式](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+[数据生成器](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
+可以基于[行程样本模式](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 生成插入和更新的样本。
 
 ## 插入数据 {#inserts}
@@ -151,7 +151,7 @@ spark.sql("select `_hoodie_commit_time`, fare, begin_lon, 
begin_lat, ts from  hu
 
 ## 从这开始下一步?
 
-您也可以通过[自己构建hudi](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source)来快速开始,
+您也可以通过[自己构建hudi](https://github.com/apache/hudi#building-apache-hudi-from-source)来快速开始,
 并在spark-shell命令中使用`--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle-*.*.*-SNAPSHOT.jar`,
 而不是`--packages org.apache.hudi:hudi-spark-bundle:0.5.0-incubating`
 
diff --git a/docs/_docs/0.5.1/1_1_quick_start_guide.md 
b/docs/_docs/0.5.1/1_1_quick_start_guide.md
index 7d549cd..63f1670 100644
--- a/docs/_docs/0.5.1/1_1_quick_start_guide.md
+++ b/docs/_docs/0.5.1/1_1_quick_start_guide.md
@@ -47,8 +47,8 @@ val basePath = "file:///tmp/hudi_trips_cow"
 val dataGen = new DataGenerator
 ```
 
-The 
[DataGenerator](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
-can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+The 
[DataGenerator](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
+can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 {: .notice--info}
 
 
@@ -71,8 +71,8 @@ df.write.format("org.apache.hudi").
 
 `mode(Overwrite)` overwrites and recreates the table if it already exists.
 You can check the data generated under 
`/tmp/hudi_trips_cow/<region>/<country>/<city>/`. We provided a record key 
-(`uuid` in 
[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
-[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
+(`uuid` in 
[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
+[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
 [Modeling data stored in 
Hudi](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=113709185#FAQ-HowdoImodelthedatastoredinHudi)
 and for info on ways to ingest data into Hudi, refer to [Writing Hudi 
Tables](/docs/0.5.1-writing_data.html).
 Here we are using the default write operation : `upsert`. If you have a 
workload without updates, you can also issue 
@@ -207,9 +207,9 @@ Note: Only `Append` mode is supported for delete operation.
 
 ## Where to go from here?
 
-You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source),
 
+You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/hudi#building-apache-hudi-from-source), 
 and using `--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle_2.11-*.*.*-SNAPSHOT.jar`
 in the spark-shell command above
-instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.1-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/incubator-hudi#build-with-scala-212)
+instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.1-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/hudi#build-with-scala-212)
 for more info.
 
 Also, we used Spark here to show case the capabilities of Hudi. However, Hudi 
can support multiple table types/query types and 
diff --git a/docs/_docs/0.5.2/1_1_quick_start_guide.cn.md 
b/docs/_docs/0.5.2/1_1_quick_start_guide.cn.md
index c56774b..d9d6eec 100644
--- a/docs/_docs/0.5.2/1_1_quick_start_guide.cn.md
+++ b/docs/_docs/0.5.2/1_1_quick_start_guide.cn.md
@@ -33,8 +33,8 @@ val basePath = "file:///tmp/hudi_cow_table"
 val dataGen = new DataGenerator
 ```
 
-[数据生成器](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
-可以基于[行程样本模式](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+[数据生成器](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
+可以基于[行程样本模式](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 生成插入和更新的样本。
 
 ## 插入数据 {#inserts}
@@ -151,7 +151,7 @@ spark.sql("select `_hoodie_commit_time`, fare, begin_lon, 
begin_lat, ts from  hu
 
 ## 从这开始下一步?
 
-您也可以通过[自己构建hudi](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source)来快速开始,
+您也可以通过[自己构建hudi](https://github.com/apache/hudi#building-apache-hudi-from-source)来快速开始,
 并在spark-shell命令中使用`--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle-*.*.*-SNAPSHOT.jar`,
 而不是`--packages org.apache.hudi:hudi-spark-bundle:0.5.2-incubating`
 
diff --git a/docs/_docs/0.5.2/1_1_quick_start_guide.md 
b/docs/_docs/0.5.2/1_1_quick_start_guide.md
index ab4e37c..e8e9261 100644
--- a/docs/_docs/0.5.2/1_1_quick_start_guide.md
+++ b/docs/_docs/0.5.2/1_1_quick_start_guide.md
@@ -47,8 +47,8 @@ val basePath = "file:///tmp/hudi_trips_cow"
 val dataGen = new DataGenerator
 ```
 
-The 
[DataGenerator](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
-can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+The 
[DataGenerator](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
+can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 {: .notice--info}
 
 
@@ -71,8 +71,8 @@ df.write.format("hudi").
 
 `mode(Overwrite)` overwrites and recreates the table if it already exists.
 You can check the data generated under 
`/tmp/hudi_trips_cow/<region>/<country>/<city>/`. We provided a record key 
-(`uuid` in 
[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
-[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
+(`uuid` in 
[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
+[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
 [Modeling data stored in 
Hudi](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=113709185#FAQ-HowdoImodelthedatastoredinHudi)
 and for info on ways to ingest data into Hudi, refer to [Writing Hudi 
Tables](/docs/0.5.2-writing_data.html).
 Here we are using the default write operation : `upsert`. If you have a 
workload without updates, you can also issue 
@@ -207,9 +207,9 @@ Note: Only `Append` mode is supported for delete operation.
 
 ## Where to go from here?
 
-You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source),
 
+You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/hudi#building-apache-hudi-from-source), 
 and using `--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle_2.11-*.*.*-SNAPSHOT.jar`
 in the spark-shell command above
-instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.2-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/incubator-hudi#build-with-scala-212)
+instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.2-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/hudi#build-with-scala-212)
 for more info.
 
 Also, we used Spark here to show case the capabilities of Hudi. However, Hudi 
can support multiple table types/query types and 
diff --git a/docs/_docs/1_1_quick_start_guide.cn.md 
b/docs/_docs/1_1_quick_start_guide.cn.md
index 9137f91..728ce98 100644
--- a/docs/_docs/1_1_quick_start_guide.cn.md
+++ b/docs/_docs/1_1_quick_start_guide.cn.md
@@ -32,8 +32,8 @@ val basePath = "file:///tmp/hudi_cow_table"
 val dataGen = new DataGenerator
 ```
 
-[数据生成器](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
-可以基于[行程样本模式](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+[数据生成器](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
+可以基于[行程样本模式](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 生成插入和更新的样本。
 
 ## 插入数据 {#inserts}
@@ -152,7 +152,7 @@ spark.sql("select `_hoodie_commit_time`, fare, begin_lon, 
begin_lat, ts from  hu
 
 ## 从这开始下一步?
 
-您也可以通过[自己构建hudi](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source)来快速开始,
+您也可以通过[自己构建hudi](https://github.com/apache/hudi#building-apache-hudi-from-source)来快速开始,
 并在spark-shell命令中使用`--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle-*.*.*-SNAPSHOT.jar`,
 而不是`--packages org.apache.hudi:hudi-spark-bundle:0.5.0-incubating`
 
diff --git a/docs/_docs/1_1_quick_start_guide.md 
b/docs/_docs/1_1_quick_start_guide.md
index 62939cb..d2eaa57 100644
--- a/docs/_docs/1_1_quick_start_guide.md
+++ b/docs/_docs/1_1_quick_start_guide.md
@@ -49,8 +49,8 @@ val basePath = "file:///tmp/hudi_trips_cow"
 val dataGen = new DataGenerator
 ```
 
-The 
[DataGenerator](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
-can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+The 
[DataGenerator](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
+can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 {: .notice--info}
 
 
@@ -74,8 +74,8 @@ df.write.format("hudi").
 
 `mode(Overwrite)` overwrites and recreates the table if it already exists.
 You can check the data generated under 
`/tmp/hudi_trips_cow/<region>/<country>/<city>/`. We provided a record key 
-(`uuid` in 
[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/country/city`) and combine logic (`ts` in 
-[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
+(`uuid` in 
[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/country/city`) and combine logic (`ts` in 
+[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
 [Modeling data stored in 
Hudi](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=113709185#FAQ-HowdoImodelthedatastoredinHudi)
 and for info on ways to ingest data into Hudi, refer to [Writing Hudi 
Tables](/docs/writing_data.html).
 Here we are using the default write operation : `upsert`. If you have a 
workload without updates, you can also issue 
@@ -247,8 +247,8 @@ basePath = "file:///tmp/hudi_trips_cow"
 dataGen = sc._jvm.org.apache.hudi.QuickstartUtils.DataGenerator()
 ```
 
-The 
[DataGenerator](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
-can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
+The 
[DataGenerator](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L50)
 
+can generate sample inserts and updates based on the the sample trip schema 
[here](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L57)
 {: .notice--info}
 
 
@@ -280,8 +280,8 @@ df.write.format("hudi"). \
 
 `mode(Overwrite)` overwrites and recreates the table if it already exists.
 You can check the data generated under 
`/tmp/hudi_trips_cow/<region>/<country>/<city>/`. We provided a record key 
-(`uuid` in 
[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
-[schema](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
+(`uuid` in 
[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58)),
 partition field (`region/county/city`) and combine logic (`ts` in 
+[schema](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/java/org/apache/hudi/QuickstartUtils.java#L58))
 to ensure trip records are unique within each partition. For more info, refer 
to 
 [Modeling data stored in 
Hudi](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=113709185#FAQ-HowdoImodelthedatastoredinHudi)
 and for info on ways to ingest data into Hudi, refer to [Writing Hudi 
Tables](/docs/writing_data.html).
 Here we are using the default write operation : `upsert`. If you have a 
workload without updates, you can also issue 
@@ -438,9 +438,9 @@ spark.sql("select uuid, partitionPath from 
hudi_trips_snapshot").count()
 
 ## Where to go from here?
 
-You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/incubator-hudi#building-apache-hudi-from-source),
 
+You can also do the quickstart by [building hudi 
yourself](https://github.com/apache/hudi#building-apache-hudi-from-source), 
 and using `--jars <path to 
hudi_code>/packaging/hudi-spark-bundle/target/hudi-spark-bundle_2.11-*.*.*-SNAPSHOT.jar`
 in the spark-shell command above
-instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.1-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/incubator-hudi#build-with-scala-212)
+instead of `--packages 
org.apache.hudi:hudi-spark-bundle_2.11:0.5.1-incubating`. Hudi also supports 
scala 2.12. Refer [build with scala 
2.12](https://github.com/apache/hudi#build-with-scala-212)
 for more info.
 
 Also, we used Spark here to show case the capabilities of Hudi. However, Hudi 
can support multiple table types/query types and 
diff --git a/docs/_includes/footer.html b/docs/_includes/footer.html
index e70b77c..2a4926d 100644
--- a/docs/_includes/footer.html
+++ b/docs/_includes/footer.html
@@ -23,11 +23,6 @@
     <p>
       Copyright &copy; <span id="copyright-year">2019</span> <a 
href="https://apache.org";>The Apache Software Foundation</a>, Licensed under 
the <a href="https://www.apache.org/licenses/LICENSE-2.0";> Apache License, 
Version 2.0</a>.
       Hudi, Apache and the Apache feather logo are trademarks of The Apache 
Software Foundation. <a href="/docs/privacy">Privacy Policy</a>
-      <br>
-      Apache Hudi is an effort undergoing incubation at The Apache Software 
Foundation (ASF), sponsored by the <a 
href="http://incubator.apache.org/";>Apache Incubator</a>.
-      Incubation is required of all newly accepted projects until a further 
review indicates that the infrastructure, communications, and decision making 
process have
-      stabilized in a manner consistent with other successful ASF projects. 
While incubation status is not necessarily a
-      reflection of the completeness or stability of the code, it does 
indicate that the project has yet to be fully endorsed by the ASF.
     </p>
   </div>
 </div>
\ No newline at end of file
diff --git a/docs/_includes/masthead.html b/docs/_includes/masthead.html
index 45ee655..7c58b24 100644
--- a/docs/_includes/masthead.html
+++ b/docs/_includes/masthead.html
@@ -42,6 +42,6 @@
   </div>
 </div>
 <!--
-<p class="notice--warning" style="margin: 0 !important; text-align: center 
!important;"><strong>Note:</strong> This site is work in progress, if you 
notice any issues, please <a target="_blank" 
href="https://github.com/apache/incubator-hudi/issues";>Report on Issue</a>.
+<p class="notice--warning" style="margin: 0 !important; text-align: center 
!important;"><strong>Note:</strong> This site is work in progress, if you 
notice any issues, please <a target="_blank" 
href="https://github.com/apache/hudi/issues";>Report on Issue</a>.
   Click <a href="/"> here</a> back to old site.</p>
 -->
\ No newline at end of file
diff --git a/docs/_pages/community.cn.md b/docs/_pages/community.cn.md
index 97caa23..6a424e1 100644
--- a/docs/_pages/community.cn.md
+++ b/docs/_pages/community.cn.md
@@ -14,7 +14,7 @@ There are several ways to get in touch with the Hudi 
community.
 |-------|--------|
 | For any general questions, user support, development discussions | Dev 
Mailing list ([Subscribe](mailto:[email protected]), 
[Unsubscribe](mailto:[email protected]), 
[Archives](https://lists.apache.org/[email protected])). Empty 
email works for subscribe/unsubscribe. Please use 
[gists](https://gist.github.com) to share code/stacktraces on the email. |
 | For reporting bugs or issues or discover known issues | Please use [ASF Hudi 
JIRA](https://issues.apache.org/jira/projects/HUDI/summary). See 
[#here](#accounts) for access |
-| For quick pings & 1-1 chats | Join our [slack 
group](https://join.slack.com/t/apache-hudi/signup). In case your mail domain 
is not there in pre-approved list for joining slack group, please check out the 
[github issue](https://github.com/apache/incubator-hudi/issues/143) |
+| For quick pings & 1-1 chats | Join our [slack 
group](https://join.slack.com/t/apache-hudi/signup). In case your mail domain 
is not there in pre-approved list for joining slack group, please check out the 
[github issue](https://github.com/apache/hudi/issues/143) |
 | For proposing large features, changes | Start a RFC. Instructions 
[here](https://cwiki.apache.org/confluence/display/HUDI/RFC+Process).
  See [#here](#accounts) for wiki access |
 | For stream of commits, pull requests etc | Commits Mailing list 
([Subscribe](mailto:[email protected]), 
[Unsubscribe](mailto:[email protected]), 
[Archives](https://lists.apache.org/[email protected])) |
diff --git a/docs/_pages/community.md b/docs/_pages/community.md
index 16ab033..b43d644 100644
--- a/docs/_pages/community.md
+++ b/docs/_pages/community.md
@@ -13,7 +13,7 @@ There are several ways to get in touch with the Hudi 
community.
 |-------|--------|
 | For any general questions, user support, development discussions | Dev 
Mailing list ([Subscribe](mailto:[email protected]), 
[Unsubscribe](mailto:[email protected]), 
[Archives](https://lists.apache.org/[email protected])). Empty 
email works for subscribe/unsubscribe. Please use 
[gists](https://gist.github.com) to share code/stacktraces on the email. |
 | For reporting bugs or issues or discover known issues | Please use [ASF Hudi 
JIRA](https://issues.apache.org/jira/projects/HUDI/summary). See 
[#here](#accounts) for access |
-| For quick pings & 1-1 chats | Join our [slack 
group](https://join.slack.com/t/apache-hudi/signup). In case your mail domain 
is not there in pre-approved list for joining slack group, please check out the 
[github issue](https://github.com/apache/incubator-hudi/issues/143) |
+| For quick pings & 1-1 chats | Join our [slack 
group](https://join.slack.com/t/apache-hudi/signup). In case your mail domain 
is not there in pre-approved list for joining slack group, please check out the 
[github issue](https://github.com/apache/hudi/issues/143) |
 | For proposing large features, changes | Start a RFC. Instructions 
[here](https://cwiki.apache.org/confluence/display/HUDI/RFC+Process).
  See [#here](#accounts) for wiki access |
 | For stream of commits, pull requests etc | Commits Mailing list 
([Subscribe](mailto:[email protected]), 
[Unsubscribe](mailto:[email protected]), 
[Archives](https://lists.apache.org/[email protected])) |
@@ -48,27 +48,24 @@ Committers are chosen by a majority vote of the Apache Hudi 
[PMC](https://www.ap
  - Great citizenship in helping with all peripheral (but very critical) work 
like site maintenance, wiki/jira cleanups and so on.
  - Proven commitment to the project by way of upholding all agreed upon 
processes, conventions and principles of the community.
 
-### The Committers
+### The Team
 
 | Image                                                        | Name          
                                               | Role            | Apache ID    
|
 | ------------------------------------------------------------ | 
------------------------------------------------------------ | --------------- 
| ------------ |
-| <img src="https://avatars.githubusercontent.com/alunarbeach"; 
style="max-width: 100px" alt="alunarbeach" align="middle" /> | [Anbu 
Cheeralan](https://github.com/alunarbeach)             | PPMC, Committer | 
anchee       |
-| <img src="https://avatars.githubusercontent.com/bhasudha"; style="max-width: 
100px" alt="bhasudha" align="middle" /> | [Bhavani 
Sudha](https://github.com/bhasudha)                 | PPMC, Committer | 
bhavanisudha |
-| <img src="https://avatars.githubusercontent.com/bvaradar"; style="max-width: 
100px" alt="bvaradar" align="middle" /> | [Balaji 
Varadarajan](https://github.com/bvaradar)            | PPMC, Committer | 
vbalaji      |
-| <img src="https://avatars.githubusercontent.com/kishoreg"; style="max-width: 
100px" alt="kishoreg" align="middle" /> | [Kishore 
Gopalakrishna](https://github.com/kishoreg)         | PPMC, Committer | 
kishoreg     |
-| <img src="https://avatars.githubusercontent.com/lresende"; style="max-width: 
100px" alt="lresende" align="middle" /> | [Luciano 
Resende](https://github.com/lresende)               | PPMC, Committer | 
lresende     |
-| <img src="https://avatars.githubusercontent.com/n3nash"; style="max-width: 
100px" alt="n3nash" align="middle" /> | [Nishith 
Agarwal](https://github.com/n3nash)                 | PPMC, Committer | 
nagarwal     |
-| <img src="https://avatars.githubusercontent.com/prasannarajaperumal"; 
style="max-width: 100px" alt="prasannarajaperumal" align="middle" /> | 
[Prasanna Rajaperumal](https://github.com/prasannarajaperumal) | PPMC, 
Committer | prasanna     |
-| <img src="https://avatars.githubusercontent.com/leesf"; style="max-width: 
100px" alt="leesf" align="middle" /> | [Shaofeng Li](https://github.com/leesf)  
                    | PPMC, Committer       | leesf        |
+| <img src="https://avatars.githubusercontent.com/alunarbeach"; 
style="max-width: 100px" alt="alunarbeach" align="middle" /> | [Anbu 
Cheeralan](https://github.com/alunarbeach)             | PMC, Committer | 
anchee       |
+| <img src="https://avatars.githubusercontent.com/bhasudha"; style="max-width: 
100px" alt="bhasudha" align="middle" /> | [Bhavani 
Sudha](https://github.com/bhasudha)                 | PMC, Committer | 
bhavanisudha |
+| <img src="https://avatars.githubusercontent.com/bvaradar"; style="max-width: 
100px" alt="bvaradar" align="middle" /> | [Balaji 
Varadarajan](https://github.com/bvaradar)            | PMC, Committer | vbalaji 
     |
+| <img src="https://avatars.githubusercontent.com/lresende"; style="max-width: 
100px" alt="lresende" align="middle" /> | [Luciano 
Resende](https://github.com/lresende)               | PMC, Committer | lresende 
    |
+| <img src="https://avatars.githubusercontent.com/n3nash"; style="max-width: 
100px" alt="n3nash" align="middle" /> | [Nishith 
Agarwal](https://github.com/n3nash)                 | PMC, Committer | nagarwal 
    |
+| <img src="https://avatars.githubusercontent.com/prasannarajaperumal"; 
style="max-width: 100px" alt="prasannarajaperumal" align="middle" /> | 
[Prasanna Rajaperumal](https://github.com/prasannarajaperumal) | PMC, Committer 
| prasanna     |
+| <img src="https://avatars.githubusercontent.com/leesf"; style="max-width: 
100px" alt="leesf" align="middle" /> | [Shaofeng Li](https://github.com/leesf)  
                    | PMC, Committer       | leesf        |
 | <img src="https://avatars.githubusercontent.com/nsivabalan"; 
style="max-width: 100px" alt="nsivabalan" align="middle" /> | [Sivabalan 
Narayanan](https://github.com/nsivabalan)         | Committer | sivabalan      |
-| <img src="https://avatars.githubusercontent.com/smarthi"; style="max-width: 
100px" alt="smarthi" align="middle" /> | [Suneel 
Marthi](https://github.com/smarthi)                  | PPMC, Committer | 
smarthi      |
-| <img src="https://avatars.githubusercontent.com/tweise"; style="max-width: 
100px" alt="tweise" align="middle" /> | [Thomas 
Weise](https://github.com/tweise)                    | PPMC, Committer | thw    
      |
-| <img src="https://avatars.githubusercontent.com/vinothchandar"; 
style="max-width: 100px" alt="vinothchandar" align="middle" /> | [vinoth 
chandar](https://github.com/vinothchandar)           | PPMC, Committer | vinoth 
      |
-| <img src="https://avatars.githubusercontent.com/yanghua"; style="max-width: 
100px" alt="yanghua" /> | [vinoyang](https://github.com/yanghua)                
       | PPMC, Committer       | vinoyang     |
-| <img src="https://avatars.githubusercontent.com/zqureshi"; alt="zqureshi" 
style="max-width: 100px;" align="middle" /> | [Zeeshan 
Qureshi](https://github.com/zqureshi)               | PPMC, Committer | 
zqureshi     |
+| <img src="https://avatars.githubusercontent.com/smarthi"; style="max-width: 
100px" alt="smarthi" align="middle" /> | [Suneel 
Marthi](https://github.com/smarthi)                  | PMC, Committer | smarthi 
     |
+| <img src="https://avatars.githubusercontent.com/tweise"; style="max-width: 
100px" alt="tweise" align="middle" /> | [Thomas 
Weise](https://github.com/tweise)                    | PMC, Committer | thw     
     |
+| <img src="https://avatars.githubusercontent.com/vinothchandar"; 
style="max-width: 100px" alt="vinothchandar" align="middle" /> | [Vinoth 
Chandar](https://github.com/vinothchandar)           | PMC, Committer | vinoth  
     |
+| <img src="https://avatars.githubusercontent.com/yanghua"; style="max-width: 
100px" alt="yanghua" /> | [vinoyang](https://github.com/yanghua)                
       | PMC, Committer       | vinoyang     |
 | <img src="https://avatars.githubusercontent.com/lamber-ken"; alt="lamber-ken" 
style="max-width: 100px;" align="middle" /> | 
[lamber-ken](https://github.com/lamber-ken)               | Committer | 
lamberken     |
 
-
 ## Code Contributions
 
 Useful resources for contributing can be found under the "Quick Links" left 
menu.
diff --git a/docs/_pages/contributing.cn.md b/docs/_pages/contributing.cn.md
index ffcdec0..ca4b011 100644
--- a/docs/_pages/contributing.cn.md
+++ b/docs/_pages/contributing.cn.md
@@ -26,7 +26,7 @@ To contribute code, you need
 To contribute, you would need to do the following
  
  - Fork the Hudi code on Github & then clone your own fork locally. Once 
cloned, we recommend building as per instructions on 
[quickstart](/docs/quick-start-guide.html)
- - [Recommended] We have embraced the code style largely based on [google 
format](https://google.github.io/styleguide/javaguide.html). Please setup your 
IDE with style files from 
[here](https://github.com/apache/incubator-hudi/tree/master/style).
+ - [Recommended] We have embraced the code style largely based on [google 
format](https://google.github.io/styleguide/javaguide.html). Please setup your 
IDE with style files from 
[here](https://github.com/apache/hudi/tree/master/style).
 These instructions have been tested on IntelliJ. 
  - [Recommended] Set up the [Save Action 
Plugin](https://plugins.jetbrains.com/plugin/7642-save-actions) to auto format 
& organize imports on save. The Maven Compilation life-cycle will fail if there 
are checkstyle violations.
  - [Optional] If needed, add spark jars to the classpath of your module in 
Intellij by following the steps from 
[here](https://stackoverflow.com/questions/1051640/correct-way-to-add-external-jars-lib-jar-to-an-intellij-idea-project).
 
@@ -108,7 +108,7 @@ and more importantly also try to improve the process along 
the way as well.
    - For involved changes, it's best to also run the entire integration test 
suite using `mvn clean install`
    - For website changes, please build the site locally & test navigation, 
formatting & links thoroughly
    - If your code change changes some aspect of documentation (e.g new config, 
default value change), 
-     please ensure there is another PR to [update the 
docs](https://github.com/apache/incubator-hudi/tree/asf-site/README.md) as well.
+     please ensure there is another PR to [update the 
docs](https://github.com/apache/hudi/tree/asf-site/README.md) as well.
  - Sending a Pull Request
    - Format commit and the pull request title like `[HUDI-XXX] Fixes bug in 
Spark Datasource`, 
      where you replace `HUDI-XXX` with the appropriate JIRA issue. 
diff --git a/docs/_pages/contributing.md b/docs/_pages/contributing.md
index d0aaa32..eca5ee8 100644
--- a/docs/_pages/contributing.md
+++ b/docs/_pages/contributing.md
@@ -25,7 +25,7 @@ To contribute code, you need
 To contribute, you would need to do the following
  
  - Fork the Hudi code on Github & then clone your own fork locally. Once 
cloned, we recommend building as per instructions on 
[quickstart](/docs/quick-start-guide.html)
- - [Recommended] We have embraced the code style largely based on [google 
format](https://google.github.io/styleguide/javaguide.html). Please setup your 
IDE with style files from 
[here](https://github.com/apache/incubator-hudi/tree/master/style).
+ - [Recommended] We have embraced the code style largely based on [google 
format](https://google.github.io/styleguide/javaguide.html). Please setup your 
IDE with style files from 
[here](https://github.com/apache/hudi/tree/master/style).
 These instructions have been tested on IntelliJ. 
  - [Recommended] Set up the [Save Action 
Plugin](https://plugins.jetbrains.com/plugin/7642-save-actions) to auto format 
& organize imports on save. The Maven Compilation life-cycle will fail if there 
are checkstyle violations.
  - [Optional] If needed, add spark jars to the classpath of your module in 
Intellij by following the steps from 
[here](https://stackoverflow.com/questions/1051640/correct-way-to-add-external-jars-lib-jar-to-an-intellij-idea-project).
 
@@ -107,7 +107,7 @@ and more importantly also try to improve the process along 
the way as well.
    - For involved changes, it's best to also run the entire integration test 
suite using `mvn clean install`
    - For website changes, please build the site locally & test navigation, 
formatting & links thoroughly
    - If your code change changes some aspect of documentation (e.g new config, 
default value change), 
-     please ensure there is another PR to [update the 
docs](https://github.com/apache/incubator-hudi/tree/asf-site/README.md) as well.
+     please ensure there is another PR to [update the 
docs](https://github.com/apache/hudi/tree/asf-site/README.md) as well.
  - Sending a Pull Request
    - Format commit and the pull request title like `[HUDI-XXX] Fixes bug in 
Spark Datasource`, 
      where you replace `HUDI-XXX` with the appropriate JIRA issue. 
diff --git a/docs/_pages/index.md b/docs/_pages/index.md
index 802ac8b..b0e75a5 100644
--- a/docs/_pages/index.md
+++ b/docs/_pages/index.md
@@ -4,7 +4,7 @@ permalink: /
 title: Welcome to Apache Hudi !
 excerpt: >
   Apache Hudi ingests & manages storage of large analytical datasets over DFS 
(hdfs or cloud stores).<br />
-  <small><a 
href="https://github.com/apache/incubator-hudi/releases/tag/release-0.5.2-incubating";
 target="_blank">Latest release 0.5.2-incubating</a></small>
+  <small><a 
href="https://github.com/apache/hudi/releases/tag/release-0.5.2-incubating"; 
target="_blank">Latest release 0.5.2-incubating</a></small>
 power_items:
   - img_path: /assets/images/powers/aws.jpg
   - img_path: /assets/images/powers/emis.jpg
diff --git a/docs/_pages/releases.cn.md b/docs/_pages/releases.cn.md
index 56a5466..b3c7d14 100644
--- a/docs/_pages/releases.cn.md
+++ b/docs/_pages/releases.cn.md
@@ -7,11 +7,11 @@ last_modified_at: 2019-12-30T15:59:57-04:00
 language: cn
 ---
 
-## [Release 
0.5.2-incubating](https://github.com/apache/incubator-hudi/releases/tag/release-0.5.2-incubating)
 ([docs](/docs/0.5.2-quick-start-guide.html))
+## [Release 
0.5.2-incubating](https://github.com/apache/hudi/releases/tag/release-0.5.2-incubating)
 ([docs](/docs/0.5.2-quick-start-guide.html))
 
 ### Download Information
- * Source Release : [Apache Hudi(incubating) 0.5.2-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.sha512))
- * Apache Hudi (incubating) jars corresponding to this release is available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
+ * Source Release : [Apache Hudi 0.5.2-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.sha512))
+ * Apache Hudi jars corresponding to this release are available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
 
 ### Migration Guide for this release
  * Write Client restructuring has moved classes around 
([HUDI-554](https://issues.apache.org/jira/browse/HUDI-554)). Package `client` 
now has all the various client classes, that do the transaction management. 
`func` renamed to `execution` and some helpers moved to `client/utils`. All 
compaction code under `io` now under `table/compact`. Rollback code under 
`table/rollback` and in general all code for individual operations under 
`table`. This change only affects the apps/projects dependi [...]
@@ -19,7 +19,7 @@ language: cn
 ### Release Highlights
  * Support for overwriting the payload implementation in `hoodie.properties` 
via specifying the `hoodie.compaction.payload.class` config option. Previously, 
once the payload class is set once in `hoodie.properties`, it cannot be 
changed. In some cases, if a code refactor is done and the jar updated, one may 
need to pass the new payload class name.
  * `TimestampBasedKeyGenerator` supports for `CharSequence`  types. Previously 
`TimestampBasedKeyGenerator` only supports `Double`, `Long`, `Float` and 
`String` 4 data types for the partition key. Now, after data type extending, 
`CharSequence` has been supported in `TimestampBasedKeyGenerator`.
- * Hudi now supports incremental pulling from defined partitions via the 
`hoodie.datasource.read.incr.path.glob` [config 
option](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala#L111).
 For some use case that users only need to pull the incremental part of certain 
partitions, it can run faster by only loading relevant parquet files.
+ * Hudi now supports incremental pulling from defined partitions via the 
`hoodie.datasource.read.incr.path.glob` [config 
option](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala#L111).
 For use cases where users only need to pull the incremental part of certain 
partitions, it can run faster by only loading relevant parquet files.
  * With 0.5.2, hudi allows partition path to be updated with `GLOBAL_BLOOM` 
index. Previously, when a record is to be updated with a new partition path, 
and when set to `GLOBAL_BLOOM` as index, hudi ignores the new partition path 
and update the record in the original partition path. Now, hudi allows records 
to be inserted into their new partition paths and delete the records in the old 
partition paths. A configuration (e.g. 
`hoodie.index.bloom.update.partition.path=true`) can be added to [...]
  * A `JdbcbasedSchemaProvider` schema provider has been provided to get 
metadata through JDBC. For the use case that users want to synchronize data 
from MySQL, and at the same time, want to get the schema from the database, 
it's very helpful.
  * Simplify `HoodieBloomIndex` without the need for 2GB limit handling. Prior 
to spark 2.4.0, each spark partition has a limit of 2GB. In Hudi 0.5.1, after 
we upgraded to spark 2.4.4, we don't have the limitation anymore. Hence 
removing the safe parallelism constraint we had in `HoodieBloomIndex`.
@@ -33,11 +33,11 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
 ### Raw Release Notes
   The raw release notes are available 
[here](https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12322822&version=12346606)
 
-## [Release 
0.5.1-incubating](https://github.com/apache/incubator-hudi/releases/tag/release-0.5.1-incubating)
 ([docs](/docs/0.5.1-quick-start-guide.html))
+## [Release 
0.5.1-incubating](https://github.com/apache/hudi/releases/tag/release-0.5.1-incubating)
 ([docs](/docs/0.5.1-quick-start-guide.html))
 
 ### Download Information
- * Source Release : [Apache Hudi(incubating) 0.5.1-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.sha512))
- * Apache Hudi (incubating) jars corresponding to this release is available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
+ * Source Release : [Apache Hudi 0.5.1-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.sha512))
+ * Apache Hudi jars corresponding to this release are available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
 
 ### Migration Guide for this release
  * In 0.5.1, the community restructured the package of key generators. The key 
generator related classes have been moved from `org.apache.hudi` to 
`org.apache.hudi.keygen`. 
@@ -51,12 +51,12 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
  * HoodieCombinedInputFormat to scale huge hive queries running on Hoodie 
tables
 
 ### Migration Guide for this release
- This is the first Apache release for Hudi (incubating). Prior to this 
release, Hudi Jars were published using "com.uber.hoodie" maven co-ordinates. 
We have a [migration 
guide](https://cwiki.apache.org/confluence/display/HUDI/Migration+Guide+From+com.uber.hoodie+to+org.apache.hudi)
+ This is the first Apache release for Hudi. Prior to this release, Hudi Jars 
were published using "com.uber.hoodie" maven co-ordinates. We have a [migration 
guide](https://cwiki.apache.org/confluence/display/HUDI/Migration+Guide+From+com.uber.hoodie+to+org.apache.hudi)
 
 ### Raw Release Notes
  The raw release notes are available 
[here](https://jira.apache.org/jira/secure/ReleaseNote.jspa?projectId=12322822&version=12346087)
 
-## [Release 
0.4.7](https://github.com/apache/incubator-hudi/releases/tag/hoodie-0.4.7)
+## [Release 0.4.7](https://github.com/apache/hudi/releases/tag/hoodie-0.4.7)
 
 ### Release Highlights
 
@@ -68,8 +68,8 @@ temp_query --sql "select Instant, NumInserts, NumWrites from 
satishkotha_debug w
 
 ### PR LIST
 
-- Skip Meta folder when looking for partitions. 
[#698](https://github.com/apache/incubator-hudi/pull/698)
-- HUDI-134 - Disable inline compaction for Hoodie Demo. 
[#696](https://github.com/apache/incubator-hudi/pull/696)
-- Default implementation for HBase index qps allocator. 
[#685](https://github.com/apache/incubator-hudi/pull/685)
-- Handle duplicate record keys across partitions. 
[#687](https://github.com/apache/incubator-hudi/pull/687)
-- Fix up offsets not available on leader exception. 
[#650](https://github.com/apache/incubator-hudi/pull/650)
+- Skip Meta folder when looking for partitions. 
[#698](https://github.com/apache/hudi/pull/698)
+- HUDI-134 - Disable inline compaction for Hoodie Demo. 
[#696](https://github.com/apache/hudi/pull/696)
+- Default implementation for HBase index qps allocator. 
[#685](https://github.com/apache/hudi/pull/685)
+- Handle duplicate record keys across partitions. 
[#687](https://github.com/apache/hudi/pull/687)
+- Fix up offsets not available on leader exception. 
[#650](https://github.com/apache/hudi/pull/650)
diff --git a/docs/_pages/releases.md b/docs/_pages/releases.md
index edaaf44..a2ddef7 100644
--- a/docs/_pages/releases.md
+++ b/docs/_pages/releases.md
@@ -6,11 +6,11 @@ toc: true
 last_modified_at: 2019-12-30T15:59:57-04:00
 ---
 
-## [Release 
0.5.2-incubating](https://github.com/apache/incubator-hudi/releases/tag/release-0.5.2-incubating)
 ([docs](/docs/0.5.2-quick-start-guide.html))
+## [Release 
0.5.2-incubating](https://github.com/apache/hudi/releases/tag/release-0.5.2-incubating)
 ([docs](/docs/0.5.2-quick-start-guide.html))
 
 ### Download Information
- * Source Release : [Apache Hudi(incubating) 0.5.2-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.sha512))
- * Apache Hudi (incubating) jars corresponding to this release is available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
+ * Source Release : [Apache Hudi 0.5.2-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.2-incubating/hudi-0.5.2-incubating.src.tgz.sha512))
+ * Apache Hudi jars corresponding to this release are available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
 
 ### Migration Guide for this release
  * Write Client restructuring has moved classes around 
([HUDI-554](https://issues.apache.org/jira/browse/HUDI-554)). Package `client` 
now has all the various client classes, that do the transaction management. 
`func` renamed to `execution` and some helpers moved to `client/utils`. All 
compaction code under `io` now under `table/compact`. Rollback code under 
`table/rollback` and in general all code for individual operations under 
`table`. This change only affects the apps/projects dependi [...]
@@ -18,7 +18,7 @@ last_modified_at: 2019-12-30T15:59:57-04:00
 ### Release Highlights
  * Support for overwriting the payload implementation in `hoodie.properties` 
via specifying the `hoodie.compaction.payload.class` config option. Previously, 
once the payload class is set once in `hoodie.properties`, it cannot be 
changed. In some cases, if a code refactor is done and the jar updated, one may 
need to pass the new payload class name.
  * `TimestampBasedKeyGenerator` supports for `CharSequence`  types. Previously 
`TimestampBasedKeyGenerator` only supports `Double`, `Long`, `Float` and 
`String` 4 data types for the partition key. Now, after data type extending, 
`CharSequence` has been supported in `TimestampBasedKeyGenerator`.
- * Hudi now supports incremental pulling from defined partitions via the 
`hoodie.datasource.read.incr.path.glob` [config 
option](https://github.com/apache/incubator-hudi/blob/master/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala#L111).
 For some use case that users only need to pull the incremental part of certain 
partitions, it can run faster by only loading relevant parquet files.
+ * Hudi now supports incremental pulling from defined partitions via the 
`hoodie.datasource.read.incr.path.glob` [config 
option](https://github.com/apache/hudi/blob/master/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala#L111).
 For use cases where users only need to pull the incremental part of certain 
partitions, it can run faster by only loading relevant parquet files.
  * With 0.5.2, hudi allows partition path to be updated with `GLOBAL_BLOOM` 
index. Previously, when a record is to be updated with a new partition path, 
and when set to `GLOBAL_BLOOM` as index, hudi ignores the new partition path 
and update the record in the original partition path. Now, hudi allows records 
to be inserted into their new partition paths and delete the records in the old 
partition paths. A configuration (e.g. 
`hoodie.index.bloom.update.partition.path=true`) can be added to [...]
  * A `JdbcbasedSchemaProvider` schema provider has been provided to get 
metadata through JDBC. For the use case that users want to synchronize data 
from MySQL, and at the same time, want to get the schema from the database, 
it's very helpful.
  * Simplify `HoodieBloomIndex` without the need for 2GB limit handling. Prior 
to spark 2.4.0, each spark partition has a limit of 2GB. In Hudi 0.5.1, after 
we upgraded to spark 2.4.4, we don't have the limitation anymore. Hence 
removing the safe parallelism constraint we had in `HoodieBloomIndex`.
@@ -32,11 +32,11 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
 ### Raw Release Notes
   The raw release notes are available 
[here](https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12322822&version=12346606)
 
-## [Release 
0.5.1-incubating](https://github.com/apache/incubator-hudi/releases/tag/release-0.5.1-incubating)
 ([docs](/docs/0.5.1-quick-start-guide.html))
+## [Release 
0.5.1-incubating](https://github.com/apache/hudi/releases/tag/release-0.5.1-incubating)
 ([docs](/docs/0.5.1-quick-start-guide.html))
 
 ### Download Information
- * Source Release : [Apache Hudi(incubating) 0.5.1-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.sha512))
- * Apache Hudi (incubating) jars corresponding to this release is available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
+ * Source Release : [Apache Hudi 0.5.1-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.1-incubating/hudi-0.5.1-incubating.src.tgz.sha512))
+ * Apache Hudi jars corresponding to this release are available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
 
 ### Migration Guide for this release
  * In 0.5.1, the community restructured the package of key generators. The key 
generator related classes have been moved from `org.apache.hudi` to 
`org.apache.hudi.keygen`. 
@@ -48,7 +48,7 @@ temp_query --sql "select Instant, NumInserts, NumWrites from 
satishkotha_debug w
    - Upgrade from Parquet 1.8.1 to Parquet 1.10.1
    - Upgrade from Kafka 0.8.2.1 to Kafka 2.0.0 as a result of updating 
spark-streaming-kafka artifact from 0.8_2.11/2.12 to 0.10_2.11/2.12.
  * **IMPORTANT** This version requires your runtime spark version to be 
upgraded to 2.4+.
- * Hudi now supports both Scala 2.11 and Scala 2.12, please refer to [Build 
with Scala 2.12](https://github.com/apache/incubator-hudi#build-with-scala-212) 
to build with Scala 2.12.
+ * Hudi now supports both Scala 2.11 and Scala 2.12, please refer to [Build 
with Scala 2.12](https://github.com/apache/hudi#build-with-scala-212) to build 
with Scala 2.12.
    Also, the packages hudi-spark, hudi-utilities, hudi-spark-bundle and 
hudi-utilities-bundle are changed correspondingly to 
hudi-spark_{scala_version}, hudi-spark_{scala_version}, 
hudi-utilities_{scala_version}, hudi-spark-bundle_{scala_version} and 
hudi-utilities-bundle_{scala_version}.
    Note that scala_version here is one of (2.11, 2.12).
  * With 0.5.1, we added functionality to stop using renames for Hudi timeline 
metadata operations. This feature is automatically enabled for newly created 
Hudi tables. For existing tables, this feature is turned off by default. Please 
read this [section](https://hudi.apache.org/docs/deployment.html#upgrading), 
before enabling this feature for existing hudi tables.
@@ -59,7 +59,7 @@ temp_query --sql "select Instant, NumInserts, NumWrites from 
satishkotha_debug w
  * When using spark-shell to give a quick peek at Hudi, please provide 
`--packages org.apache.spark:spark-avro_2.11:2.4.4`, more details would refer 
to [latest quickstart docs](https://hudi.apache.org/docs/quick-start-guide.html)
  * Key generator moved to separate package under org.apache.hudi.keygen. If 
you are using overridden key generator classes (configuration 
("hoodie.datasource.write.keygenerator.class")) that comes with hudi package, 
please ensure the fully qualified class name is changed accordingly.
 * Hive Sync tool will register RO tables for MOR with a _ro suffix, so query 
with _ro suffix. You would use `--skip-ro-suffix` in sync config 
to retain the old naming without the _ro suffix.
- * With 0.5.1, hudi-hadoop-mr-bundle which is used by query engines such as 
presto and hive includes shaded avro package to support hudi real time queries 
through these engines. Hudi supports pluggable logic for merging of records. 
Users provide their own implementation of 
[HoodieRecordPayload](https://github.com/apache/incubator-hudi/blob/master/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordPayload.java).
+ * With 0.5.1, hudi-hadoop-mr-bundle which is used by query engines such as 
presto and hive includes shaded avro package to support hudi real time queries 
through these engines. Hudi supports pluggable logic for merging of records. 
Users provide their own implementation of 
[HoodieRecordPayload](https://github.com/apache/hudi/blob/master/hudi-common/src/main/java/org/apache/hudi/common/model/HoodieRecordPayload.java).
    If you are using this feature, you need to relocate the avro dependencies 
in your custom record payload class to be consistent with internal hudi 
shading. You need to add the following relocation when shading the package 
containing the record payload implementation.
 
    ```xml
@@ -76,11 +76,11 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
 ### Raw Release Notes
  The raw release notes are available 
[here](https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12322822&version=12346183)
 
-## [Release 
0.5.0-incubating](https://github.com/apache/incubator-hudi/releases/tag/release-0.5.0-incubating)
 ([docs](/docs/0.5.0-quick-start-guide.html))
+## [Release 
0.5.0-incubating](https://github.com/apache/hudi/releases/tag/release-0.5.0-incubating)
 ([docs](/docs/0.5.0-quick-start-guide.html))
 
 ### Download Information
- * Source Release : [Apache Hudi(incubating) 0.5.0-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz.sha512))
- * Apache Hudi (incubating) jars corresponding to this release is available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
+ * Source Release : [Apache Hudi 0.5.0-incubating Source 
Release](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz)
 
([asc](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz.asc),
 
[sha512](https://downloads.apache.org/incubator/hudi/0.5.0-incubating/hudi-0.5.0-incubating.src.tgz.sha512))
+ * Apache Hudi jars corresponding to this release are available 
[here](https://repository.apache.org/#nexus-search;quick~hudi)
 
 ### Release Highlights
  * Package and format renaming from com.uber.hoodie to org.apache.hudi (See 
migration guide section below)
@@ -91,12 +91,12 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
  * HoodieCombinedInputFormat to scale huge hive queries running on Hoodie 
tables
 
 ### Migration Guide for this release
- This is the first Apache release for Hudi (incubating). Prior to this 
release, Hudi Jars were published using "com.uber.hoodie" maven co-ordinates. 
We have a [migration 
guide](https://cwiki.apache.org/confluence/display/HUDI/Migration+Guide+From+com.uber.hoodie+to+org.apache.hudi)
+ This is the first Apache release for Hudi. Prior to this release, Hudi Jars 
were published using "com.uber.hoodie" maven co-ordinates. We have a [migration 
guide](https://cwiki.apache.org/confluence/display/HUDI/Migration+Guide+From+com.uber.hoodie+to+org.apache.hudi)
 
 ### Raw Release Notes
  The raw release notes are available 
[here](https://jira.apache.org/jira/secure/ReleaseNote.jspa?projectId=12322822&version=12346087)
 
-## [Release 
0.4.7](https://github.com/apache/incubator-hudi/releases/tag/hoodie-0.4.7)
+## [Release 0.4.7](https://github.com/apache/hudi/releases/tag/hoodie-0.4.7)
 
 ### Release Highlights
 
@@ -108,8 +108,8 @@ temp_query --sql "select Instant, NumInserts, NumWrites 
from satishkotha_debug w
 
 ### PR LIST
 
-- Skip Meta folder when looking for partitions. 
[#698](https://github.com/apache/incubator-hudi/pull/698)
-- HUDI-134 - Disable inline compaction for Hoodie Demo. 
[#696](https://github.com/apache/incubator-hudi/pull/696)
-- Default implementation for HBase index qps allocator. 
[#685](https://github.com/apache/incubator-hudi/pull/685)
-- Handle duplicate record keys across partitions. 
[#687](https://github.com/apache/incubator-hudi/pull/687)
-- Fix up offsets not available on leader exception. 
[#650](https://github.com/apache/incubator-hudi/pull/650)
+- Skip Meta folder when looking for partitions. 
[#698](https://github.com/apache/hudi/pull/698)
+- HUDI-134 - Disable inline compaction for Hoodie Demo. 
[#696](https://github.com/apache/hudi/pull/696)
+- Default implementation for HBase index qps allocator. 
[#685](https://github.com/apache/hudi/pull/685)
+- Handle duplicate record keys across partitions. 
[#687](https://github.com/apache/hudi/pull/687)
+- Fix up offsets not available on leader exception. 
[#650](https://github.com/apache/hudi/pull/650)
diff --git a/docs/_posts/2020-04-27-apache-hudi-apache-zepplin.md 
b/docs/_posts/2020-04-27-apache-hudi-apache-zepplin.md
index 32eabc9..7ef7dbb 100644
--- a/docs/_posts/2020-04-27-apache-hudi-apache-zepplin.md
+++ b/docs/_posts/2020-04-27-apache-hudi-apache-zepplin.md
@@ -1,5 +1,5 @@
 ---
-title: "Apache Hudi (Incubating) Support on Apache Zeppelin"
+title: "Apache Hudi Support on Apache Zeppelin"
 excerpt: "Integrating HUDI's real-time and read-optimized query capabilities 
into Apache Zeppelin’s notebook"
 author: leesf
 category: blog
diff --git a/docs/assets/images/hudi.png b/docs/assets/images/hudi.png
index ef39d33..350f9eb 100644
Binary files a/docs/assets/images/hudi.png and b/docs/assets/images/hudi.png 
differ

Reply via email to