This is an automated email from the ASF dual-hosted git repository.

aokolnychyi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/main by this push:
     new d4c2ef8950 Spark 3.5: Support camel case session configs and options (#10310)
d4c2ef8950 is described below

commit d4c2ef89500426eea66106b47e39841ec2383c54
Author: Anton Okolnychyi <[email protected]>
AuthorDate: Fri May 24 08:59:02 2024 -0700

    Spark 3.5: Support camel case session configs and options (#10310)
---
 .../org/apache/iceberg/spark/SparkConfParser.java  | 28 ++++++++++++++++++++++
 .../apache/iceberg/spark/TestSparkWriteConf.java   | 26 ++++++++++++++++++++
 2 files changed, 54 insertions(+)

diff --git a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkConfParser.java b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkConfParser.java
index 09ddc1a6d2..896d77a760 100644
--- a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkConfParser.java
+++ b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkConfParser.java
@@ -270,6 +270,11 @@ class SparkConfParser {
         if (optionValue != null) {
           return conversion.apply(optionValue);
         }
+
+        String sparkOptionValue = options.get(toCamelCase(optionName));
+        if (sparkOptionValue != null) {
+          return conversion.apply(sparkOptionValue);
+        }
       }
 
       if (sessionConfName != null) {
@@ -277,6 +282,11 @@ class SparkConfParser {
         if (sessionConfValue != null) {
           return conversion.apply(sessionConfValue);
         }
+
+        String sparkSessionConfValue = sessionConf.get(toCamelCase(sessionConfName), null);
+        if (sparkSessionConfValue != null) {
+          return conversion.apply(sparkSessionConfValue);
+        }
       }
 
       if (tablePropertyName != null) {
@@ -288,5 +298,23 @@ class SparkConfParser {
 
       return defaultValue;
     }
+
+    private String toCamelCase(String key) {
+      StringBuilder transformedKey = new StringBuilder();
+      boolean capitalizeNext = false;
+
+      for (char character : key.toCharArray()) {
+        if (character == '-') {
+          capitalizeNext = true;
+        } else if (capitalizeNext) {
+          transformedKey.append(Character.toUpperCase(character));
+          capitalizeNext = false;
+        } else {
+          transformedKey.append(character);
+        }
+      }
+
+      return transformedKey.toString();
+    }
   }
 }
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestSparkWriteConf.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestSparkWriteConf.java
index bf2f59f1a4..c2df626978 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestSparkWriteConf.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestSparkWriteConf.java
@@ -88,6 +88,32 @@ public class TestSparkWriteConf extends TestBaseWithCatalog {
     assertThat(parsedValue).isEqualTo("value");
   }
 
+  @TestTemplate
+  public void testCamelCaseSparkSessionConf() {
+    Table table = validationCatalog.loadTable(tableIdent);
+    String confName = "spark.sql.iceberg.some-int-conf";
+    String sparkConfName = "spark.sql.iceberg.someIntConf";
+
+    withSQLConf(
+        ImmutableMap.of(sparkConfName, "1"),
+        () -> {
+          SparkConfParser parser = new SparkConfParser(spark, table, ImmutableMap.of());
+          Integer value = parser.intConf().sessionConf(confName).parseOptional();
+          assertThat(value).isEqualTo(1);
+        });
+  }
+
+  @TestTemplate
+  public void testCamelCaseSparkOption() {
+    Table table = validationCatalog.loadTable(tableIdent);
+    String option = "some-int-option";
+    String sparkOption = "someIntOption";
+    Map<String, String> options = ImmutableMap.of(sparkOption, "1");
+    SparkConfParser parser = new SparkConfParser(spark, table, options);
+    Integer value = parser.intConf().option(option).parseOptional();
+    assertThat(value).isEqualTo(1);
+  }
+
   @TestTemplate
   public void testDurationConf() {
     Table table = validationCatalog.loadTable(tableIdent);

Reply via email to