This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 5b07c9433 [test][doc] Test and document Hive Time Travel (#1608)
5b07c9433 is described below

commit 5b07c94335f32e7dcf629b24f68bbbb89a4133af
Author: Jingsong Lee <[email protected]>
AuthorDate: Thu Jul 20 15:31:09 2023 +0800

    [test][doc] Test and document Hive Time Travel (#1608)
---
 docs/content/engines/hive.md                       | 13 +++++++++
 docs/content/how-to/querying-tables.md             | 16 +++++++++++
 .../org/apache/paimon/hive/utils/HiveUtils.java    |  4 +--
 .../paimon/hive/PaimonStorageHandlerITCase.java    | 31 ++++++++++++++++++++++
 4 files changed, 62 insertions(+), 2 deletions(-)

diff --git a/docs/content/engines/hive.md b/docs/content/engines/hive.md
index fbef3b3df..5c878be19 100644
--- a/docs/content/engines/hive.md
+++ b/docs/content/engines/hive.md
@@ -173,6 +173,19 @@ OK
 2      Store
 3      Paimon
 */
+
+-- time travel
+
+SET paimon.scan.snapshot-id=1;
+SELECT a, b FROM test_table ORDER BY a;
+/*
+OK
+1      Table
+2      Store
+3      Paimon
+*/
+SET paimon.scan.snapshot-id=null;
+
 ```
 
 ## Hive SQL: create new Paimon Tables
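As a companion sketch to the snapshot-id example added above (same `test_table` as in the hive.md docs), time travel can also be driven by timestamp via `paimon.scan.timestamp-millis`, the property documented in the querying-tables.md change below; the timestamp value here is only a placeholder.

```sql
-- Hedged sketch: time travel by timestamp instead of snapshot id.
-- 1679486589444 is a placeholder epoch-millis value; pick one that falls
-- within the table's snapshot history.
SET paimon.scan.timestamp-millis=1679486589444;
SELECT a, b FROM test_table ORDER BY a;
-- Clear the session property so later queries read the latest snapshot.
SET paimon.scan.timestamp-millis=null;
```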
diff --git a/docs/content/how-to/querying-tables.md b/docs/content/how-to/querying-tables.md
index bb6a014ba..a211cab38 100644
--- a/docs/content/how-to/querying-tables.md
+++ b/docs/content/how-to/querying-tables.md
@@ -116,6 +116,14 @@ SELECT * FROM t;
 
 {{< /tab >}}
 
+{{< tab "Hive" >}}
+```sql
+SET paimon.scan.timestamp-millis=1679486589444;
+SELECT * FROM t;
+SET paimon.scan.timestamp-millis=null;
+```
+{{< /tab >}}
+
 {{< /tabs >}}
 
 ### Batch Incremental
@@ -166,6 +174,14 @@ spark.read()
 
 {{< /tab >}}
 
+{{< tab "Hive" >}}
+```sql
+SET paimon.incremental-between='12,20';
+SELECT * FROM t;
+SET paimon.incremental-between=null;
+```
+{{< /tab >}}
+
 {{< /tabs >}}
 
 In Batch SQL, the `DELETE` records are not allowed to be returned, so records of `-D` will be dropped.
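Assembled from the settings shown in this change, a minimal end-to-end Hive session for a batch incremental read might look like the sketch below; the table name `t` and the snapshot range `'12,20'` are placeholders taken from the example above.

```sql
-- Hedged sketch: batch incremental read between two snapshots in Hive.
-- Only rows changed between snapshots 12 and 20 are returned; -D (DELETE)
-- records are dropped in batch mode, as noted above.
SET paimon.incremental-between='12,20';
SELECT * FROM t;
-- Reset so subsequent queries read the latest full snapshot again.
SET paimon.incremental-between=null;
```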
diff --git a/paimon-hive/paimon-hive-connector-common/src/main/java/org/apache/paimon/hive/utils/HiveUtils.java b/paimon-hive/paimon-hive-connector-common/src/main/java/org/apache/paimon/hive/utils/HiveUtils.java
index ec0b78deb..3ede93564 100644
--- a/paimon-hive/paimon-hive-connector-common/src/main/java/org/apache/paimon/hive/utils/HiveUtils.java
+++ b/paimon-hive/paimon-hive-connector-common/src/main/java/org/apache/paimon/hive/utils/HiveUtils.java
@@ -77,8 +77,8 @@ public class HiveUtils {
         if (hiveConf != null) {
             for (Map.Entry<String, String> entry : hiveConf) {
                 String name = entry.getKey();
-                if (name.startsWith(PAIMON_PREFIX)) {
-                    String value = hiveConf.get(name);
+                String value = entry.getValue();
+                if (name.startsWith(PAIMON_PREFIX) && !"NULL".equalsIgnoreCase(value)) {
                     name = name.substring(PAIMON_PREFIX.length());
                     configMap.put(name, value);
                 }
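For readers skimming the hunk above: the change now also skips `paimon.*` session entries whose value is the literal string "NULL"/"null", which appears to be what the `SET paimon.xxx=null` reset used in the docs leaves behind in the session (hence the case-insensitive check). Below is a minimal standalone sketch of that filtering, assuming a plain `Map<String, String>` in place of `HiveConf` and a `PAIMON_PREFIX` constant assumed to be `"paimon."`.

```java
import java.util.HashMap;
import java.util.Map;

public class PaimonSessionOptions {

    // Assumed to mirror HiveUtils#PAIMON_PREFIX.
    private static final String PAIMON_PREFIX = "paimon.";

    /**
     * Copies paimon.* entries into a plain option map, stripping the prefix and
     * skipping values equal to "NULL" (case-insensitive), i.e. entries that were
     * reset with `SET paimon.xxx=null`.
     */
    static Map<String, String> extractPaimonOptions(Map<String, String> hiveConf) {
        Map<String, String> configMap = new HashMap<>();
        for (Map.Entry<String, String> entry : hiveConf.entrySet()) {
            String name = entry.getKey();
            String value = entry.getValue();
            if (name.startsWith(PAIMON_PREFIX) && !"NULL".equalsIgnoreCase(value)) {
                configMap.put(name.substring(PAIMON_PREFIX.length()), value);
            }
        }
        return configMap;
    }

    public static void main(String[] args) {
        Map<String, String> session = new HashMap<>();
        session.put("paimon.scan.snapshot-id", "1");          // kept as scan.snapshot-id=1
        session.put("paimon.scan.timestamp-millis", "null");  // reset via SET ...=null, skipped
        session.put("hive.exec.parallel", "true");            // no paimon. prefix, ignored
        System.out.println(extractPaimonOptions(session));    // {scan.snapshot-id=1}
    }
}
```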
diff --git a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/PaimonStorageHandlerITCase.java b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/PaimonStorageHandlerITCase.java
index 9b9f60c00..5e3b69586 100644
--- a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/PaimonStorageHandlerITCase.java
+++ b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/PaimonStorageHandlerITCase.java
@@ -32,6 +32,7 @@ import org.apache.paimon.hive.objectinspector.PaimonObjectInspectorFactory;
 import org.apache.paimon.hive.runner.PaimonEmbeddedHiveRunner;
 import org.apache.paimon.options.CatalogOptions;
 import org.apache.paimon.options.Options;
+import org.apache.paimon.table.FileStoreTable;
 import org.apache.paimon.table.Table;
 import org.apache.paimon.table.sink.StreamTableCommit;
 import org.apache.paimon.table.sink.StreamTableWrite;
@@ -222,6 +223,36 @@ public class PaimonStorageHandlerITCase {
                         "2\t40\t40\t800",
                         "3\t50\t50\t400");
         Assert.assertEquals(expected, actual);
+
+        long snapshotId = ((FileStoreTable) table).snapshotManager().latestSnapshot().id();
+
+        // write new data
+        data =
+                Collections.singletonList(
+                        GenericRow.of(1, 10L, BinaryString.fromString("Hi Time Travel"), 10000L));
+        writeData(table, data);
+
+        // validate new data
+        actual = hiveShell.executeQuery("SELECT * FROM " + externalTable + " ORDER BY b");
+        expected =
+                Arrays.asList(
+                        "1\t10\tHi Time Travel\t10000",
+                        "1\t20\tHello\t200",
+                        "2\t40\tNULL\t400",
+                        "3\t50\tStore\t200");
+        Assert.assertEquals(expected, actual);
+
+        // test time travel
+        hiveShell.execute("SET paimon.scan.snapshot-id=" + snapshotId);
+        actual = hiveShell.executeQuery("SELECT * FROM " + externalTable + " ORDER BY b");
+        expected =
+                Arrays.asList(
+                        "1\t10\tHi Again\t1000",
+                        "1\t20\tHello\t200",
+                        "2\t40\tNULL\t400",
+                        "3\t50\tStore\t200");
+        Assert.assertEquals(expected, actual);
+        hiveShell.execute("SET paimon.scan.snapshot-id=null");
     }
 
     @Test
