This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 97bfb68211 [doc] remove spaces after backslash which will cause bash command execution fail (#5780)
97bfb68211 is described below

commit 97bfb682110739a9a1b0186c80c969b834cb91f9
Author: Shawn Huang <hx0...@gmail.com>
AuthorDate: Fri Jun 20 10:14:50 2025 +0800

    [doc] remove spaces after backslash which will cause bash command execution fail (#5780)
---
 docs/content/flink/consumer-id.md                |  4 ++--
 docs/content/learn-paimon/understand-files.md    |  2 +-
 docs/content/maintenance/dedicated-compaction.md | 12 ++++++------
 docs/content/maintenance/filesystems.md          |  6 +++---
 docs/content/maintenance/manage-branches.md      |  6 +++---
 docs/content/maintenance/manage-snapshots.md     |  6 +++---
 docs/content/maintenance/manage-tags.md          |  6 +++---
 7 files changed, 21 insertions(+), 21 deletions(-)

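Background for the fix: in bash, a backslash only continues a command onto the next line when it is the very last character on the line. A backslash followed by a trailing space escapes the space instead, so the remainder of the multi-line command is executed as separate commands. A minimal sketch of the difference (the echo command and the error output are illustrative only, not part of the patch):

```shell
# Works: the backslash is the final character, so the line is continued.
echo foo \
  bar
# prints: foo bar

# Broken: note the space after the backslash ("\ "). The backslash escapes
# that space, the command ends at this line, and the next line runs on its own.
echo foo \ 
  bar
# prints: foo  (with a trailing escaped space)
# then bash tries to run "bar" as its own command, e.g. "bar: command not found"
```
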
diff --git a/docs/content/flink/consumer-id.md b/docs/content/flink/consumer-id.md
index 4452784e89..fd10d9e813 100644
--- a/docs/content/flink/consumer-id.md
+++ b/docs/content/flink/consumer-id.md
@@ -107,7 +107,7 @@ CALL sys.reset_consumer(
     /path/to/paimon-flink-action-{{< version >}}.jar \
     reset-consumer \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --consumer_id <consumer-id> \
     [--next_snapshot <next-snapshot-id>] \
@@ -146,7 +146,7 @@ CALL sys.clear_consumers(
     /path/to/paimon-flink-action-{{< version >}}.jar \
     clear_consumers \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     [--including_consumers <including-consumers>] \
     [--excluding_consumers <excluding-consumers>] \
diff --git a/docs/content/learn-paimon/understand-files.md b/docs/content/learn-paimon/understand-files.md
index fea6d30a04..1b85293472 100644
--- a/docs/content/learn-paimon/understand-files.md
+++ b/docs/content/learn-paimon/understand-files.md
@@ -267,7 +267,7 @@ CALL sys.compact(
     /path/to/paimon-flink-action-{{< version >}}.jar \
     compact \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     [--partition <partition-name>] \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]] \
diff --git a/docs/content/maintenance/dedicated-compaction.md b/docs/content/maintenance/dedicated-compaction.md
index 5e610f5e71..f1af09f0ac 100644
--- a/docs/content/maintenance/dedicated-compaction.md
+++ b/docs/content/maintenance/dedicated-compaction.md
@@ -104,7 +104,7 @@ Run the following command to submit a compaction job for the table.
     /path/to/paimon-flink-action-{{< version >}}.jar \
     compact \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     [--partition <partition-name>] \
     [--compact_strategy <minor / full>] \
@@ -191,7 +191,7 @@ CALL sys.compact_database(
     /path/to/paimon-flink-action-{{< version >}}.jar \
     compact_database \
     --warehouse <warehouse-path> \
-    --including_databases <database-name|name-regular-expr> \ 
+    --including_databases <database-name|name-regular-expr> \
     [--including_tables <paimon-table-name|name-regular-expr>] \
     [--excluding_tables <paimon-table-name|name-regular-expr>] \
     [--mode <compact-mode>] \
@@ -296,7 +296,7 @@ CALL sys.compact(`table` => 'default.T', order_strategy => 'zorder', order_by =>
     /path/to/paimon-flink-action-{{< version >}}.jar \
     compact \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --order_strategy <orderType> \
     --order_by <col1,col2,...> \
@@ -352,9 +352,9 @@ CALL sys.compact(`table` => 'default.T', partition_idle_time => '1 d')
     /path/to/paimon-flink-action-{{< version >}}.jar \
     compact \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
-    --partition_idle_time <partition-idle-time> \ 
+    --partition_idle_time <partition-idle-time> \
     [--partition <partition-name>] \
     [--compact_strategy <minor / full>] \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]] \
@@ -413,7 +413,7 @@ CALL sys.compact_database(
     compact_database \
     --warehouse <warehouse-path> \
     --including_databases <database-name|name-regular-expr> \
-    --partition_idle_time <partition-idle-time> \ 
+    --partition_idle_time <partition-idle-time> \
     [--including_tables <paimon-table-name|name-regular-expr>] \
     [--excluding_tables <paimon-table-name|name-regular-expr>] \
     [--mode <compact-mode>] \
diff --git a/docs/content/maintenance/filesystems.md b/docs/content/maintenance/filesystems.md
index 4a0f5e6f38..960aea6ec6 100644
--- a/docs/content/maintenance/filesystems.md
+++ b/docs/content/maintenance/filesystems.md
@@ -213,7 +213,7 @@ If you have already configured oss access through Spark (Via Hadoop FileSystem),
 Place `paimon-oss-{{< version >}}.jar` together with `paimon-spark-{{< version >}}.jar` under Spark's jars directory, and start like
 
 ```shell
-spark-sql \ 
+spark-sql \
   --conf spark.sql.catalog.paimon=org.apache.paimon.spark.SparkCatalog \
   --conf spark.sql.catalog.paimon.warehouse=oss://<bucket>/<path> \
   --conf spark.sql.catalog.paimon.fs.oss.endpoint=oss-cn-hangzhou.aliyuncs.com \
@@ -315,7 +315,7 @@ If you have already configured s3 access through Spark (Via Hadoop FileSystem),
 Place `paimon-s3-{{< version >}}.jar` together with `paimon-spark-{{< version >}}.jar` under Spark's jars directory, and start like
 
 ```shell
-spark-sql \ 
+spark-sql \
   --conf spark.sql.catalog.paimon=org.apache.paimon.spark.SparkCatalog \
   --conf spark.sql.catalog.paimon.warehouse=s3://<bucket>/<path> \
   --conf spark.sql.catalog.paimon.s3.endpoint=your-endpoint-hostname \
@@ -525,7 +525,7 @@ If you have already configured obs access through Spark (Via Hadoop FileSystem),
 Place `paimon-obs-{{< version >}}.jar` together with `paimon-spark-{{< version >}}.jar` under Spark's jars directory, and start like
 
 ```shell
-spark-sql \ 
+spark-sql \
   --conf spark.sql.catalog.paimon=org.apache.paimon.spark.SparkCatalog \
   --conf spark.sql.catalog.paimon.warehouse=obs://<bucket>/<path> \
   --conf spark.sql.catalog.paimon.fs.obs.endpoint=obs-endpoint-hostname \
diff --git a/docs/content/maintenance/manage-branches.md b/docs/content/maintenance/manage-branches.md
index b6815ef343..21060b83a0 100644
--- a/docs/content/maintenance/manage-branches.md
+++ b/docs/content/maintenance/manage-branches.md
@@ -60,7 +60,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     create_branch \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --branch_name <branch-name> \
     [--tag_name <tag-name>] \
@@ -111,7 +111,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     delete_branch \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --branch_name <branch-name> \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]]
@@ -197,7 +197,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     fast_forward \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --branch_name <branch-name> \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]]
diff --git a/docs/content/maintenance/manage-snapshots.md b/docs/content/maintenance/manage-snapshots.md
index 2d4299520e..d21b029c10 100644
--- a/docs/content/maintenance/manage-snapshots.md
+++ b/docs/content/maintenance/manage-snapshots.md
@@ -253,7 +253,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     rollback_to \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --version <snapshot-id> \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]]
@@ -323,7 +323,7 @@ CALL sys.remove_orphan_files(`table` => 'my_db.*', [older_than => '2023-10-31 12
     /path/to/paimon-flink-action-{{< version >}}.jar \
     remove_orphan_files \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     [--older_than <timestamp>] \
     [--dry_run <false/true>] \
@@ -338,7 +338,7 @@ To avoid deleting files that are newly added by other writing jobs, this action
     /path/to/paimon-flink-action-{{< version >}}.jar \
     remove_orphan_files \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table T \
     --older_than '2023-10-31 12:00:00'
 ```
diff --git a/docs/content/maintenance/manage-tags.md b/docs/content/maintenance/manage-tags.md
index 5a5ad765d5..2127c1bf23 100644
--- a/docs/content/maintenance/manage-tags.md
+++ b/docs/content/maintenance/manage-tags.md
@@ -117,7 +117,7 @@ If `snapshot_id` unset, snapshot_id defaults to the latest.
     /path/to/paimon-flink-action-{{< version >}}.jar \
     create_tag \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --tag_name <tag-name> \
     [--snapshot <snapshot_id>] \
@@ -191,7 +191,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     delete_tag \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --tag_name <tag-name> \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]]
@@ -252,7 +252,7 @@ Run the following command:
     /path/to/paimon-flink-action-{{< version >}}.jar \
     rollback_to \
     --warehouse <warehouse-path> \
-    --database <database-name> \ 
+    --database <database-name> \
     --table <table-name> \
     --version <tag-name> \
     [--catalog_conf <paimon-catalog-conf> [--catalog_conf <paimon-catalog-conf> ...]]
