This is an automated email from the ASF dual-hosted git repository.

jark pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/fluss.git


The following commit(s) were added to refs/heads/main by this push:
     new 074d7563f [hotfix][docs] Fix some documentation errors
074d7563f is described below

commit 074d7563f3f414e574de55fd5c1361135b337496
Author: Jark Wu <[email protected]>
AuthorDate: Fri Feb 13 18:39:20 2026 +0800

    [hotfix][docs] Fix some documentation errors
---
 website/docs/quickstart/lakehouse.md               |  2 +-
 .../docs/table-design/merge-engines/aggregation.md |  3 +-
 website/docs/table-design/table-types/pk-table.md  | 40 ++++++++++++----------
 3 files changed, 25 insertions(+), 20 deletions(-)

diff --git a/website/docs/quickstart/lakehouse.md 
b/website/docs/quickstart/lakehouse.md
index eb2cde8fe..31dc5661f 100644
--- a/website/docs/quickstart/lakehouse.md
+++ b/website/docs/quickstart/lakehouse.md
@@ -40,7 +40,7 @@ mkdir -p lib opt
 # Flink connectors
 curl -fL -o lib/flink-faker-0.5.3.jar 
https://github.com/knaufk/flink-faker/releases/download/v0.5.3/flink-faker-0.5.3.jar
 curl -fL -o "lib/fluss-flink-1.20-$FLUSS_VERSION$.jar" 
"$FLUSS_MAVEN_REPO_URL$/org/apache/fluss/fluss-flink-1.20/$FLUSS_VERSION$/fluss-flink-1.20-$FLUSS_VERSION$.jar"
-curl -fL -o "lib/paimon-flink-1.20-$PAIMON_VERSION$.jar" 
"$FLUSS_MAVEN_REPO_URL$/org/apache/paimon/paimon-flink-1.20/$PAIMON_VERSION$/paimon-flink-1.20-$PAIMON_VERSION$.jar"
+curl -fL -o "lib/paimon-flink-1.20-$PAIMON_VERSION$.jar" 
"https://repo1.maven.org/maven2/org/apache/paimon/paimon-flink-1.20/$PAIMON_VERSION$/paimon-flink-1.20-$PAIMON_VERSION$.jar"
 
 # Fluss lake plugin
 curl -fL -o "lib/fluss-lake-paimon-$FLUSS_VERSION$.jar" 
"$FLUSS_MAVEN_REPO_URL$/org/apache/fluss/fluss-lake-paimon/$FLUSS_VERSION$/fluss-lake-paimon-$FLUSS_VERSION$.jar"
diff --git a/website/docs/table-design/merge-engines/aggregation.md 
b/website/docs/table-design/merge-engines/aggregation.md
index fe3b303b6..4375404af 100644
--- a/website/docs/table-design/merge-engines/aggregation.md
+++ b/website/docs/table-design/merge-engines/aggregation.md
@@ -115,7 +115,8 @@ CREATE TABLE product_stats (
 -- Insert data - these will be aggregated
 INSERT INTO product_stats VALUES
     (1, 23.0, 15, TIMESTAMP '2024-01-01 10:00:00'),
-    (1, 30.2, 20, TIMESTAMP '2024-01-01 11:00:00');  -- Same primary key - 
triggers aggregation
+    -- Same primary key - triggers aggregation
+    (1, 30.2, 20, TIMESTAMP '2024-01-01 11:00:00');
 ```
 
 ### Querying Results
diff --git a/website/docs/table-design/table-types/pk-table.md 
b/website/docs/table-design/table-types/pk-table.md
index 5791452d2..e71a443a0 100644
--- a/website/docs/table-design/table-types/pk-table.md
+++ b/website/docs/table-design/table-types/pk-table.md
@@ -171,36 +171,40 @@ However, this property cannot be modified after the table 
has been created.
 
 For example, create a table named `uid_mapping` with 2 buckets and insert five 
rows of data as follows:
 
-```sql
+```sql title="Flink SQL"
 CREATE TABLE uid_mapping (
   user_id STRING,
   uid BIGINT,
   PRIMARY KEY (user_id) NOT ENFORCED
 ) WITH (
-  'table.auto-increment.fields' = 'uid_int64',
+  'auto-increment.fields' = 'uid',
   'bucket.num' = '2'
 );
+```
 
-INSERT INTO uid_mapping VALUES ('user1');
-INSERT INTO uid_mapping VALUES ('user2');
-INSERT INTO uid_mapping VALUES ('user3');
-INSERT INTO uid_mapping VALUES ('user4');
-INSERT INTO uid_mapping VALUES ('user5');
+```sql title="Flink SQL"
+INSERT INTO uid_mapping (user_id) VALUES
+  ('user1'), ('user2'), ('user3'), ('user4'), ('user5');
 ```
 
 The auto-incremented IDs in the table `uid_mapping` do not increase 
monotonically, because each of the two table buckets caches its own range of 
auto-increment IDs — [1, 100000] and [100001, 200000], respectively.
 
-```sql
-SELECT * FROM uid_mapping;
-+---------+---------+
-| user_id |   uid   |
-+---------+---------+
-| user1   |             1  |
-| user2   | 100001  |
-| user3   |     2  |
-| user4   |     3  |
-| user5   | 100002  |
-+---------+---------+
+```sql title="Flink SQL"
+SELECT * FROM uid_mapping LIMIT 10;
+```
+
+The result may look like this:
+
+```
++---------+--------+
+| user_id |    uid |
++---------+--------+
+|   user1 | 100001 |
+|   user2 | 100002 |
+|   user4 | 100003 |
+|   user3 |      1 |
+|   user5 |      2 |
++---------+--------+
 ```
 
 ### Limits

Reply via email to