This is an automated email from the ASF dual-hosted git repository.

zouxxyy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 816a76f2e1 [spark] Allow format table and spark table options to recognize each other
816a76f2e1 is described below

commit 816a76f2e1c642baff3aa7cb7eff89f10f3fd2ab
Author: Zouxxyy <[email protected]>
AuthorDate: Wed Dec 24 11:46:23 2025 +0800

    [spark] Allow format table and spark table options to recognize each other
---
 .../main/java/org/apache/paimon/CoreOptions.java   |  2 +
 .../org/apache/paimon/format/csv/CsvOptions.java   |  4 ++
 .../paimon/spark/catalog/FormatTableCatalog.java   |  9 +++++
 .../paimon/spark/sql/FormatTableTestBase.scala     | 43 ++++++++++++++++++++++
 4 files changed, 58 insertions(+)
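A quick sketch of what this change enables, for context: a CSV format table declared with Spark DataSource-style option keys now behaves the same as one declared with Paimon's csv.* keys, and vice versa. The snippet below is illustrative only and mirrors the test added at the bottom of this diff; it assumes a Spark session whose catalog is a Paimon catalog with format table support, and the table names are made up.

    import org.apache.spark.sql.SparkSession;

    public class FormatTableOptionsExample {
        public static void main(String[] args) {
            SparkSession spark =
                    SparkSession.builder().appName("format-table-options").getOrCreate();

            // Spark DataSource-style keys on a Paimon CSV format table ...
            spark.sql(
                    "CREATE TABLE t_spark_keys (id INT, v STRING) USING CSV "
                            + "TBLPROPERTIES ('sep'=';', 'header'='true', 'compression'='gzip')");

            // ... are now treated the same as the equivalent Paimon keys.
            spark.sql(
                    "CREATE TABLE t_paimon_keys (id INT, v STRING) USING CSV "
                            + "TBLPROPERTIES ('csv.field-delimiter'=';', 'csv.include-header'='true', "
                            + "'format-table.file.compression'='gzip')");

            spark.sql("INSERT INTO t_spark_keys VALUES (0, 'a')");
            spark.sql("SELECT * FROM t_spark_keys").show();
        }
    }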

diff --git a/paimon-api/src/main/java/org/apache/paimon/CoreOptions.java b/paimon-api/src/main/java/org/apache/paimon/CoreOptions.java
index 1060b7deff..6ec0dff011 100644
--- a/paimon-api/src/main/java/org/apache/paimon/CoreOptions.java
+++ b/paimon-api/src/main/java/org/apache/paimon/CoreOptions.java
@@ -2368,6 +2368,8 @@ public class CoreOptions implements Serializable {
             return options.get(FILE_COMPRESSION.key());
         } else if (options.containsKey(FORMAT_TABLE_FILE_COMPRESSION.key())) {
             return options.get(FORMAT_TABLE_FILE_COMPRESSION.key());
+        } else if (options.containsKey("compression")) {
+            return options.get("compression");
         } else {
             String format = formatType();
             switch (format) {
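Note on the CoreOptions hunk above: file compression for a format table now resolves in the order file.compression, then format-table.file.compression, then the Spark-style compression key, and only then the per-format default. A simplified restatement of that lookup order, not the actual method body (the per-format defaults are elided):

    import java.util.Map;

    class CompressionLookupSketch {
        // Mirrors the lookup order after this change; "none" stands in for the
        // format-specific defaults that CoreOptions actually falls back to.
        static String fileCompression(Map<String, String> options) {
            if (options.containsKey("file.compression")) {
                return options.get("file.compression");
            } else if (options.containsKey("format-table.file.compression")) {
                return options.get("format-table.file.compression");
            } else if (options.containsKey("compression")) { // new: Spark DataSource-style key
                return options.get("compression");
            }
            return "none"; // placeholder for the per-format default
        }
    }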
diff --git a/paimon-format/src/main/java/org/apache/paimon/format/csv/CsvOptions.java b/paimon-format/src/main/java/org/apache/paimon/format/csv/CsvOptions.java
index 03d0a382c0..e0ad2d0d91 100644
--- a/paimon-format/src/main/java/org/apache/paimon/format/csv/CsvOptions.java
+++ b/paimon-format/src/main/java/org/apache/paimon/format/csv/CsvOptions.java
@@ -47,24 +47,28 @@ public class CsvOptions {
             ConfigOptions.key("csv.quote-character")
                     .stringType()
                     .defaultValue("\"")
+                    .withFallbackKeys("quote")
                     .withDescription("The quote character for CSV format");
 
     public static final ConfigOption<String> ESCAPE_CHARACTER =
             ConfigOptions.key("csv.escape-character")
                     .stringType()
                     .defaultValue("\\")
+                    .withFallbackKeys("escape")
                     .withDescription("The escape character for CSV format");
 
     public static final ConfigOption<Boolean> INCLUDE_HEADER =
             ConfigOptions.key("csv.include-header")
                     .booleanType()
                     .defaultValue(false)
+                    .withFallbackKeys("header")
                     .withDescription("Whether to include header in CSV files");
 
     public static final ConfigOption<String> NULL_LITERAL =
             ConfigOptions.key("csv.null-literal")
                     .stringType()
                     .defaultValue("")
+                    .withFallbackKeys("nullvalue")
                     .withDescription("The literal for null values in CSV 
format");
 
     public static final ConfigOption<Mode> MODE =
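The withFallbackKeys calls above are what make the Spark-style keys visible on the Paimon side: when the csv.* key is absent, the value of the fallback key (quote, escape, header, nullvalue) is used instead. A minimal sketch of the effect, assuming the same Options type used in FormatTableCatalog below (org.apache.paimon.options.Options) and that its get() consults fallback keys registered on a ConfigOption:

    import org.apache.paimon.format.csv.CsvOptions;
    import org.apache.paimon.options.Options;

    class FallbackKeySketch {
        public static void main(String[] args) {
            Options options = new Options();
            // Spark DataSource-style keys, as a user would write them in TBLPROPERTIES.
            options.set("quote", "%");
            options.set("header", "true");

            // Reads through the Paimon ConfigOptions now pick those values up
            // via the fallback keys registered above.
            String quote = options.get(CsvOptions.QUOTE_CHARACTER); // "%"
            Boolean header = options.get(CsvOptions.INCLUDE_HEADER); // true
            System.out.println(quote + " " + header);
        }
    }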
diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/FormatTableCatalog.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/FormatTableCatalog.java
index e7fcf19fcf..2da1b1896c 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/FormatTableCatalog.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/catalog/FormatTableCatalog.java
@@ -86,6 +86,15 @@ public interface FormatTableCatalog {
         CaseInsensitiveStringMap dsOptions = new CaseInsensitiveStringMap(options.toMap());
         if (formatTable.format() == FormatTable.Format.CSV) {
             options.set("sep", options.get(CsvOptions.FIELD_DELIMITER));
+            options.set("lineSep", options.get(CsvOptions.LINE_DELIMITER));
+            options.set("quote", options.get(CsvOptions.QUOTE_CHARACTER));
+            options.set("header", 
options.get(CsvOptions.INCLUDE_HEADER).toString());
+            options.set("escape", options.get(CsvOptions.ESCAPE_CHARACTER));
+            options.set("nullvalue", options.get(CsvOptions.NULL_LITERAL));
+            options.set("mode", options.get(CsvOptions.MODE).getValue());
+            if (options.contains(CoreOptions.FORMAT_TABLE_FILE_COMPRESSION)) {
+                options.set("compression", 
options.get(CoreOptions.FORMAT_TABLE_FILE_COMPRESSION));
+            }
             dsOptions = new CaseInsensitiveStringMap(options.toMap());
             return new PartitionedCSVTable(
                     ident.name(),
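In the opposite direction, when the format table is read through Spark's built-in CSV source, FormatTableCatalog above copies the resolved Paimon options onto the Spark data source keys before building the table (compression is copied only when format-table.file.compression is set). For reference, a summary of the key correspondence established by this hunk; this map is not code from the patch:

    import java.util.Map;

    class CsvKeyMappingReference {
        // Paimon CSV option key -> Spark CSV data source option key, as set in
        // FormatTableCatalog above.
        static final Map<String, String> PAIMON_TO_SPARK =
                Map.of(
                        "csv.field-delimiter", "sep",
                        "csv.line-delimiter", "lineSep",
                        "csv.quote-character", "quote",
                        "csv.include-header", "header",
                        "csv.escape-character", "escape",
                        "csv.null-literal", "nullvalue",
                        "csv.mode", "mode",
                        "format-table.file.compression", "compression");
    }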
diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
index 935f9176e1..9da753f299 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/FormatTableTestBase.scala
@@ -195,4 +195,47 @@ abstract class FormatTableTestBase extends PaimonHiveTestBase {
       assert(df.queryExecution.executedPlan.toString().contains("BroadcastExchange"))
     }
   }
+
+  test("Format table: format table and spark table props recognize") {
+    val paimonFormatTblProps =
+      """
+        |'csv.field-delimiter'=';',
+        |'csv.line-delimiter'='?',
+        |'csv.quote-character'='%',
+        |'csv.include-header'='true',
+        |'csv.null-literal'='null',
+        |'csv.mode'='permissive',
+        |'format-table.file.compression'='gzip'
+        |""".stripMargin
+
+    val sparkTblProps =
+      """
+        |'sep'=';',
+        |'lineSep'='?',
+        |'quote'='%',
+        |'header'='true',
+        |'nullvalue'='null',
+        |'mode'='permissive',
+        |'compression'='gzip'
+        |""".stripMargin
+
+    val defaultProps = "'k'='v'"
+
+    for (tblProps <- Seq(paimonFormatTblProps, sparkTblProps, defaultProps)) {
+      withTable("t") {
+        sql(s"CREATE TABLE t (id INT, v STRING) USING CSV TBLPROPERTIES 
($tblProps)")
+        sql("INSERT INTO t SELECT /*+ REPARTITION(1) */ id, id + 1 FROM 
range(2)")
+        sql("INSERT INTO t VALUES (2, null)")
+
+        for (impl <- Seq("engine", "paimon")) {
+          withSparkSQLConf("spark.paimon.format-table.implementation" -> impl) {
+            checkAnswer(
+              sql("SELECT * FROM t ORDER BY id"),
+              Seq(Row(0, "1"), Row(1, "2"), Row(2, null))
+            )
+          }
+        }
+      }
+    }
+  }
 }
