This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new 183f9b8f66 [doc] Add more examples of insert overwrite partition (#5366)
183f9b8f66 is described below
commit 183f9b8f66f9384c1071f4642bdde573e9fb9447
Author: Zouxxyy <[email protected]>
AuthorDate: Fri Mar 28 13:54:52 2025 +0800
[doc] Add more examples of insert overwrite partition (#5366)
---
docs/content/spark/sql-write.md | 15 ++++++++++
.../spark/sql/InsertOverwriteTableTestBase.scala | 33 ++++++++++++++++++++++
2 files changed, 48 insertions(+)
diff --git a/docs/content/spark/sql-write.md b/docs/content/spark/sql-write.md
index c3afcd3754..1f4407ef89 100644
--- a/docs/content/spark/sql-write.md
+++ b/docs/content/spark/sql-write.md
@@ -85,6 +85,8 @@ INSERT INTO my_table VALUES (1, 'p1'), (2, 'p2');
-- Static overwrite (Overwrite the whole table)
INSERT OVERWRITE my_table VALUES (3, 'p1');
+-- or
+INSERT OVERWRITE my_table PARTITION (pt) VALUES (3, 'p1');
SELECT * FROM my_table;
/*
@@ -95,6 +97,19 @@ SELECT * FROM my_table;
+---+---+
*/
+-- Static overwrite with specified partitions (Only overwrite pt='p1')
+INSERT OVERWRITE my_table PARTITION (pt='p1') VALUES (3);
+
+SELECT * FROM my_table;
+/*
++---+---+
+| id| pt|
++---+---+
+| 2| p2|
+| 3| p1|
++---+---+
+*/
+
-- Dynamic overwrite (Only overwrite pt='p1')
SET spark.sql.sources.partitionOverwriteMode=dynamic;
INSERT OVERWRITE my_table VALUES (3, 'p1');
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTestBase.scala
index 20e32942c3..b7e0b35dd1 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTestBase.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/InsertOverwriteTableTestBase.scala
@@ -575,4 +575,37 @@ abstract class InsertOverwriteTableTestBase extends PaimonSparkTestBase {
}
}
}
+
+ test("Paimon Insert: dynamic insert overwrite partition") {
+ withTable("my_table") {
+ sql("CREATE TABLE my_table (id INT, pt STRING) PARTITIONED BY (pt)")
+
+ for (mode <- Seq("static", "dynamic")) {
+ withSparkSQLConf("spark.sql.sources.partitionOverwriteMode" -> mode) {
+ sql("INSERT OVERWRITE my_table VALUES (1, 'p1'), (2, 'p2')")
+ // INSERT OVERWRITE table
+ sql("INSERT OVERWRITE my_table VALUES (3, 'p1')")
+ if (mode == "dynamic") {
+ checkAnswer(sql("SELECT * FROM my_table ORDER BY id"), Seq(Row(2, "p2"), Row(3, "p1")))
+ } else {
+ checkAnswer(sql("SELECT * FROM my_table ORDER BY id"), Row(3, "p1"))
+ }
+
+ sql("INSERT OVERWRITE my_table VALUES (1, 'p1'), (2, 'p2')")
+ // INSERT OVERWRITE table PARTITION (pt)
+ sql("INSERT OVERWRITE my_table PARTITION (pt) VALUES (3, 'p1')")
+ if (mode == "dynamic") {
+ checkAnswer(sql("SELECT * FROM my_table ORDER BY id"), Seq(Row(2, "p2"), Row(3, "p1")))
+ } else {
+ checkAnswer(sql("SELECT * FROM my_table ORDER BY id"), Row(3, "p1"))
+ }
+
+ sql("INSERT OVERWRITE my_table VALUES (1, 'p1'), (2, 'p2')")
+ // INSERT OVERWRITE table PARTITION (pt='p1')
+ sql("INSERT OVERWRITE my_table PARTITION (pt='p1') VALUES (3)")
+ checkAnswer(sql("SELECT * FROM my_table ORDER BY id"), Seq(Row(2, "p2"), Row(3, "p1")))
+ }
+ }
+ }
+ }
}