This is an automated email from the ASF dual-hosted git repository.

jshao pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git


The following commit(s) were added to refs/heads/main by this push:
     new c894157273 [#6719][#6337][#7413]  feat(iceberg): upgrade Iceberg 
version to 1.9 (#7917)
c894157273 is described below

commit c894157273cfe103a408f3bc0e74c108041bac73
Author: FANNG <[email protected]>
AuthorDate: Wed Aug 6 13:59:24 2025 +0800

    [#6719][#6337][#7413]  feat(iceberg): upgrade Iceberg version to 1.9 (#7917)
    
    ### What changes were proposed in this pull request?
    
    * Upgrade Iceberg version from 1.6 to 1.9
    * Add support for Endpoints in the config response for better
    compatibility with Trino.
    
    ### Why are the changes needed?
    
    Fix: https://github.com/apache/gravitino/issues/6719
    https://github.com/apache/gravitino/issues/6337
    https://github.com/apache/gravitino/issues/7413
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Compiled and checked the Iceberg version, and tested whether it works.
---
 .../authorization-ranger/build.gradle.kts          |  2 +-
 .../iceberg/converter/TestBaseConvert.java         |  1 +
 .../iceberg/ops/TestIcebergTableUpdate.java        | 20 +++--
 docs/iceberg-rest-service.md                       |  5 +-
 flink-connector/flink/build.gradle.kts             |  2 +-
 .../test/iceberg/FlinkIcebergRestCatalogIT.java    |  4 +
 gradle/libs.versions.toml                          |  6 +-
 .../iceberg/common/ops/IcebergCatalogWrapper.java  |  4 +
 .../iceberg/service/IcebergRestUtils.java          |  4 -
 .../service/rest/IcebergConfigOperations.java      | 94 +++++++++++++++++++---
 .../integration/test/IcebergRESTServiceIT.java     |  5 +-
 .../integration/test/TestIcebergExtendAPI.java     |  5 +-
 .../TestIcebergCatalogWrapperManagerForREST.java   |  2 +-
 .../iceberg/service/TestIcebergRESTUtils.java      |  4 +-
 .../provider/TestDynamicIcebergConfigProvider.java |  3 -
 .../service/rest/CatalogWrapperForTest.java        |  1 +
 .../service/rest/TestIcebergTableOperations.java   |  3 +-
 .../service/rest/TestIcebergViewOperations.java    |  1 +
 spark-connector/spark-common/build.gradle.kts      |  2 +-
 .../IcebergExtendedDataSourceV2Strategy.java       | 57 +++++++++++--
 .../test/iceberg/SparkIcebergCatalogIT.java        | 10 ++-
 .../iceberg/SparkIcebergCatalogRestBackendIT.java  |  4 +
 spark-connector/v3.3/spark/build.gradle.kts        |  2 +-
 .../SparkIcebergCatalogRestBackendIT33.java        |  3 +
 spark-connector/v3.4/spark/build.gradle.kts        |  2 +-
 .../SparkIcebergCatalogRestBackendIT34.java        |  3 +
 spark-connector/v3.5/spark/build.gradle.kts        |  2 +-
 .../SparkIcebergCatalogRestBackendIT35.java        |  3 +
 28 files changed, 197 insertions(+), 57 deletions(-)

diff --git a/authorizations/authorization-ranger/build.gradle.kts 
b/authorizations/authorization-ranger/build.gradle.kts
index 75af033f44..84aa5916a5 100644
--- a/authorizations/authorization-ranger/build.gradle.kts
+++ b/authorizations/authorization-ranger/build.gradle.kts
@@ -28,7 +28,7 @@ val scalaVersion: String = project.properties["scalaVersion"] 
as? String ?: extr
 val sparkVersion: String = libs.versions.spark35.get()
 val kyuubiVersion: String = libs.versions.kyuubi4paimon.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
-val icebergVersion: String = libs.versions.iceberg4spark.get()
+val icebergVersion: String = libs.versions.iceberg.get()
 val paimonVersion: String = libs.versions.paimon.get()
 
 dependencies {
diff --git 
a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/converter/TestBaseConvert.java
 
b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/converter/TestBaseConvert.java
index 004e73aac1..377e62856e 100644
--- 
a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/converter/TestBaseConvert.java
+++ 
b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/converter/TestBaseConvert.java
@@ -44,6 +44,7 @@ import org.apache.iceberg.SortField;
 import org.apache.iceberg.types.Types;
 
 /** Provide some basic usage methods and test classes for basic fields. */
+@SuppressWarnings("deprecation")
 public class TestBaseConvert {
 
   protected static final String TEST_COMMENT = "test_comment";
diff --git 
a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/ops/TestIcebergTableUpdate.java
 
b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/ops/TestIcebergTableUpdate.java
index 37124dc5f3..2be6a402e2 100644
--- 
a/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/ops/TestIcebergTableUpdate.java
+++ 
b/catalogs/catalog-lakehouse-iceberg/src/test/java/org/apache/gravitino/catalog/lakehouse/iceberg/ops/TestIcebergTableUpdate.java
@@ -45,6 +45,7 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
+@SuppressWarnings("deprecation")
 public class TestIcebergTableUpdate {
   private IcebergCatalogWrapper icebergCatalogWrapper = null;
   private IcebergCatalogWrapperHelper icebergCatalogWrapperHelper = null;
@@ -263,17 +264,14 @@ public class TestIcebergTableUpdate {
         });
 
     // add required column
-    IllegalArgumentException exception =
-        Assertions.assertThrowsExactly(
-            IllegalArgumentException.class,
-            () -> {
-              TableChange addColumn1 =
-                  TableChange.addColumn(
-                      new String[] {"required_column"}, 
Types.IntegerType.get(), false);
-              updateTable(identifier, addColumn1);
-            });
-    Assertions.assertTrue(
-        exception.getMessage().contains("Incompatible change: cannot add 
required column:"));
+    Assertions.assertThrowsExactly(
+        IllegalArgumentException.class,
+        () -> {
+          TableChange addColumn1 =
+              TableChange.addColumn(
+                  new String[] {"required_column"}, Types.IntegerType.get(), 
false);
+          updateTable(identifier, addColumn1);
+        });
   }
 
   @Test
diff --git a/docs/iceberg-rest-service.md b/docs/iceberg-rest-service.md
index aeb683e63a..f11cda5095 100644
--- a/docs/iceberg-rest-service.md
+++ b/docs/iceberg-rest-service.md
@@ -12,9 +12,11 @@ The Apache Gravitino Iceberg REST Server follows the [Apache 
Iceberg REST API sp
 
 ### Capabilities
 
-- Supports the Apache Iceberg REST API defined in Iceberg 1.5, and supports 
all namespace and table interfaces. The following interfaces are not 
implemented yet:
+- Supports the Apache Iceberg REST API defined in Iceberg 1.9, and supports 
all namespace and table interfaces. The following interfaces are not 
implemented yet:
   - multi table transaction
   - pagination
+  - scan planning
+  - load table credentials
 - Works as a catalog proxy, supporting `Hive` and `JDBC` as catalog backend.
 - Supports credential vending for `S3`、`GCS`、`OSS` and `ADLS`.
 - Supports different storages like `S3`, `HDFS`, `OSS`, `GCS`, `ADLS` and 
provides the capability to support other storages.
@@ -482,7 +484,6 @@ connector.name=iceberg
 iceberg.catalog.type=rest
 iceberg.rest-catalog.uri=http://localhost:9001/iceberg/
 fs.hadoop.enabled=true
-iceberg.rest-catalog.view-endpoints-enabled=false
 ```
 
 Please refer to [Trino Iceberg 
document](https://trino.io/docs/current/connector/iceberg.html) for more 
details.
diff --git a/flink-connector/flink/build.gradle.kts 
b/flink-connector/flink/build.gradle.kts
index ea0a2d0e76..9afd51ed03 100644
--- a/flink-connector/flink/build.gradle.kts
+++ b/flink-connector/flink/build.gradle.kts
@@ -30,7 +30,7 @@ var paimonVersion: String = libs.versions.paimon.get()
 val flinkVersion: String = libs.versions.flink.get()
 val flinkMajorVersion: String = flinkVersion.substringBeforeLast(".")
 
-val icebergVersion: String = libs.versions.iceberg.get()
+val icebergVersion: String = libs.versions.iceberg4connector.get()
 
 // The Flink only support scala 2.12, and all scala api will be removed in a 
future version.
 // You can find more detail at the following issues:
diff --git 
a/flink-connector/flink/src/test/java/org/apache/gravitino/flink/connector/integration/test/iceberg/FlinkIcebergRestCatalogIT.java
 
b/flink-connector/flink/src/test/java/org/apache/gravitino/flink/connector/integration/test/iceberg/FlinkIcebergRestCatalogIT.java
index c65a1191e8..da6a230937 100644
--- 
a/flink-connector/flink/src/test/java/org/apache/gravitino/flink/connector/integration/test/iceberg/FlinkIcebergRestCatalogIT.java
+++ 
b/flink-connector/flink/src/test/java/org/apache/gravitino/flink/connector/integration/test/iceberg/FlinkIcebergRestCatalogIT.java
@@ -28,8 +28,12 @@ import 
org.apache.gravitino.flink.connector.iceberg.IcebergPropertiesConstants;
 import org.apache.gravitino.flink.connector.integration.test.utils.TestUtils;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.condition.DisabledIf;
 
 @Tag("gravitino-docker-test")
+// Flink connector use low Iceberg version, couldn't work with Iceberg REST 
server with high Iceberg
+// version in embedded mode.
+@DisabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public class FlinkIcebergRestCatalogIT extends FlinkIcebergCatalogIT {
 
   @Override
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 3cde9cd7f1..d7a7a4cf3e 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -58,8 +58,8 @@ commons-collections3 = "3.2.2"
 commons-configuration1 = "1.6"
 commons-dbcp2 = "2.11.0"
 caffeine = "2.9.3"
-iceberg = '1.6.1' # used for Gravitino Iceberg catalog and Iceberg REST service
-iceberg4spark = "1.4.1" # used for compile spark connector
+iceberg = '1.9.2' # used for Gravitino Iceberg catalog and Iceberg REST service
+iceberg4connector = "1.6.1" # used for compile connectors like Spark, Flink, 
etc
 paimon = '0.8.0'
 spark33 = "3.3.4"
 spark34 = "3.4.3"
@@ -323,4 +323,4 @@ dependencyLicenseReport = {id = 
"com.github.jk1.dependency-license-report", vers
 bom = {id = "org.cyclonedx.bom", version = "1.5.0"}
 errorprone = {id = "net.ltgt.errorprone", version.ref = "error-prone"}
 jcstress = { id = "io.github.reyerizo.gradle.jcstress", version.ref = 
"jcstress" }
-jmh = { id = "me.champeau.jmh", version.ref = "jmh-plugin" }
\ No newline at end of file
+jmh = { id = "me.champeau.jmh", version.ref = "jmh-plugin" }
diff --git 
a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java
 
b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java
index ff925c1cf1..0baf74885c 100644
--- 
a/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java
+++ 
b/iceberg/iceberg-common/src/main/java/org/apache/gravitino/iceberg/common/ops/IcebergCatalogWrapper.java
@@ -233,6 +233,10 @@ public class IcebergCatalogWrapper implements 
AutoCloseable {
     return CatalogHandlers.listViews(getViewCatalog(), namespace);
   }
 
+  public boolean supportsViewOperations() {
+    return catalog instanceof ViewCatalog;
+  }
+
   @Override
   public void close() throws Exception {
     if (catalog instanceof AutoCloseable) {
diff --git 
a/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/IcebergRestUtils.java
 
b/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/IcebergRestUtils.java
index ba747e060a..cecda88457 100644
--- 
a/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/IcebergRestUtils.java
+++ 
b/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/IcebergRestUtils.java
@@ -99,10 +99,6 @@ public class IcebergRestUtils {
 
   public static String getCatalogName(String rawPrefix) {
     String catalogName = normalizePrefix(rawPrefix);
-    Preconditions.checkArgument(
-        !IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG.equals(catalogName),
-        String.format(
-            "%s is conflicted with reserved catalog name, please replace it", 
catalogName));
     if (StringUtils.isBlank(catalogName)) {
       return IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG;
     }
diff --git 
a/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/rest/IcebergConfigOperations.java
 
b/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/rest/IcebergConfigOperations.java
index 96f763794e..12e31cf333 100644
--- 
a/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/rest/IcebergConfigOperations.java
+++ 
b/iceberg/iceberg-rest-server/src/main/java/org/apache/gravitino/iceberg/service/rest/IcebergConfigOperations.java
@@ -20,6 +20,12 @@ package org.apache.gravitino.iceberg.service.rest;
 
 import com.codahale.metrics.annotation.ResponseMetered;
 import com.codahale.metrics.annotation.Timed;
+import com.google.common.collect.ImmutableList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.inject.Inject;
 import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.Consumes;
@@ -31,10 +37,13 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
 import org.apache.gravitino.iceberg.service.CatalogWrapperForREST;
 import org.apache.gravitino.iceberg.service.IcebergCatalogWrapperManager;
 import org.apache.gravitino.iceberg.service.IcebergRestUtils;
 import org.apache.gravitino.metrics.MetricNames;
+import org.apache.iceberg.rest.Endpoint;
 import org.apache.iceberg.rest.responses.ConfigResponse;
 
 @Path("/v1/{prefix:([^/]*/)?}config")
@@ -48,6 +57,34 @@ public class IcebergConfigOperations {
 
   private final IcebergCatalogWrapperManager catalogWrapperManager;
 
+  private static final List<Endpoint> DEFAULT_ENDPOINTS =
+      ImmutableList.<Endpoint>builder()
+          .add(Endpoint.V1_LIST_NAMESPACES)
+          .add(Endpoint.V1_LOAD_NAMESPACE)
+          .add(Endpoint.V1_CREATE_NAMESPACE)
+          .add(Endpoint.V1_UPDATE_NAMESPACE)
+          .add(Endpoint.V1_DELETE_NAMESPACE)
+          .add(Endpoint.V1_LIST_TABLES)
+          .add(Endpoint.V1_LOAD_TABLE)
+          .add(Endpoint.V1_CREATE_TABLE)
+          .add(Endpoint.V1_UPDATE_TABLE)
+          .add(Endpoint.V1_DELETE_TABLE)
+          .add(Endpoint.V1_RENAME_TABLE)
+          .add(Endpoint.V1_REGISTER_TABLE)
+          .add(Endpoint.V1_REPORT_METRICS)
+          .add(Endpoint.V1_COMMIT_TRANSACTION)
+          .build();
+
+  private static final List<Endpoint> DEFAULT_VIEW_ENDPOINTS =
+      ImmutableList.<Endpoint>builder()
+          .add(Endpoint.V1_LIST_VIEWS)
+          .add(Endpoint.V1_LOAD_VIEW)
+          .add(Endpoint.V1_CREATE_VIEW)
+          .add(Endpoint.V1_UPDATE_VIEW)
+          .add(Endpoint.V1_DELETE_VIEW)
+          .add(Endpoint.V1_RENAME_VIEW)
+          .build();
+
   @Inject
   public IcebergConfigOperations(IcebergCatalogWrapperManager 
catalogWrapperManager) {
     this.catalogWrapperManager = catalogWrapperManager;
@@ -58,18 +95,53 @@ public class IcebergConfigOperations {
   @Timed(name = "config." + MetricNames.HTTP_PROCESS_DURATION, absolute = true)
   @ResponseMetered(name = "config", absolute = true)
   public Response getConfig(@DefaultValue("") @QueryParam("warehouse") String 
warehouse) {
-    if (warehouse == null || warehouse.isEmpty()) {
-      ConfigResponse response = ConfigResponse.builder().build();
-      return IcebergRestUtils.ok(response);
+    String catalogName = getCatalogName(warehouse);
+    boolean supportsView = supportsViewOperations(catalogName);
+    ConfigResponse.Builder builder = ConfigResponse.builder();
+    
builder.withDefaults(getDefaultConfig(catalogName)).withEndpoints(getEndpoints(supportsView));
+    if (StringUtils.isNotBlank(warehouse)) {
+      builder.withDefault("prefix", warehouse);
+    }
+    return IcebergRestUtils.ok(builder.build());
+  }
+
+  private List<Endpoint> getEndpoints(boolean supportsViewOperations) {
+    if (!supportsViewOperations) {
+      return DEFAULT_ENDPOINTS;
     }
+    return Stream.concat(DEFAULT_ENDPOINTS.stream(), 
DEFAULT_VIEW_ENDPOINTS.stream())
+        .collect(Collectors.toList());
+  }
+
+  private Map<String, String> getCatalogConfig(String catalogName) {
+    Map<String, String> configs = new HashMap<>();
+    CatalogWrapperForREST catalogWrapper = getCatalogWrapper(catalogName);
+    configs.putAll(catalogWrapper.getCatalogConfigToClient());
+    return configs;
+  }
+
+  private String getCatalogName(String warehouse) {
+    if (StringUtils.isBlank(warehouse)) {
+      return IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG;
+    } else {
+      return warehouse;
+    }
+  }
+
+  private boolean supportsViewOperations(String catalogName) {
+    CatalogWrapperForREST catalogWrapperForREST = 
getCatalogWrapper(catalogName);
+    return catalogWrapperForREST.supportsViewOperations();
+  }
+
+  private CatalogWrapperForREST getCatalogWrapper(String catalogName) {
+    return catalogWrapperManager.getCatalogWrapper(catalogName);
+  }
+
+  protected Map<String, String> getDefaultConfig(String catalogName) {
+    return getCatalogConfig(catalogName);
+  }
 
-    // Get the catalog wrapper which contains the configuration
-    CatalogWrapperForREST catalogWrapper = 
catalogWrapperManager.getCatalogWrapper(warehouse);
-    ConfigResponse response =
-        ConfigResponse.builder()
-            .withDefaults(catalogWrapper.getCatalogConfigToClient())
-            .withDefault("prefix", warehouse)
-            .build();
-    return IcebergRestUtils.ok(response);
+  protected ConfigResponse.Builder getConfigResponseBuilder(boolean 
supportsViewOperations) {
+    return ConfigResponse.builder();
   }
 }
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTServiceIT.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTServiceIT.java
index 5d7fc3e314..b7e95b7937 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTServiceIT.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/IcebergRESTServiceIT.java
@@ -25,7 +25,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import javax.annotation.Nullable;
-import org.apache.iceberg.exceptions.BadRequestException;
+import org.apache.iceberg.exceptions.NamespaceNotEmptyException;
 import org.apache.iceberg.exceptions.ValidationException;
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException;
@@ -207,9 +207,8 @@ public abstract class IcebergRESTServiceIT extends 
IcebergRESTServiceBaseIT {
                 + "(id bigint COMMENT 'unique id',data string) using iceberg",
             namespaceName));
 
-    // seems a bug in Iceberg REST client, should be NamespaceNotEmptyException
     Assertions.assertThrowsExactly(
-        BadRequestException.class, () -> sql("DROP DATABASE " + 
namespaceName));
+        NamespaceNotEmptyException.class, () -> sql("DROP DATABASE " + 
namespaceName));
     sql(String.format("DROP TABLE %s.test", namespaceName));
     sql("DROP DATABASE " + namespaceName);
 
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/TestIcebergExtendAPI.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/TestIcebergExtendAPI.java
index 9d975a6f9c..497257743d 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/TestIcebergExtendAPI.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/integration/test/TestIcebergExtendAPI.java
@@ -29,6 +29,7 @@ import org.apache.gravitino.server.web.JettyServerConfig;
 import org.apache.iceberg.rest.ErrorHandlers;
 import org.apache.iceberg.rest.HTTPClient;
 import org.apache.iceberg.rest.RESTClient;
+import org.apache.iceberg.rest.auth.AuthSession;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeAll;
@@ -44,6 +45,7 @@ import 
org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
 // We couldn't add REST extension package jar in deploy mode, so just test 
embedded mode.
 @EnabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public class TestIcebergExtendAPI {
+
   public static final Logger LOG = 
LoggerFactory.getLogger(TestIcebergExtendAPI.class);
   private IcebergRESTServerManager icebergRESTServerManager;
   private String uri;
@@ -64,7 +66,8 @@ public class TestIcebergExtendAPI {
 
   @Test
   void testExtendAPI() {
-    RESTClient client = HTTPClient.builder(ImmutableMap.of()).uri(uri).build();
+    RESTClient client =
+        
HTTPClient.builder(ImmutableMap.of()).uri(uri).withAuthSession(AuthSession.EMPTY).build();
     HelloResponse helloResponse =
         client.get(
             HelloOperations.HELLO_URI_PATH,
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergCatalogWrapperManagerForREST.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergCatalogWrapperManagerForREST.java
index fad31e816d..60c758cc5f 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergCatalogWrapperManagerForREST.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergCatalogWrapperManagerForREST.java
@@ -55,7 +55,7 @@ public class TestIcebergCatalogWrapperManagerForREST {
   }
 
   @ParameterizedTest
-  @ValueSource(strings = {"hello", "\\\n\t\\\'", "\u0024", "\100", "[_~", 
"default_catalog/"})
+  @ValueSource(strings = {"hello", "\\\n\t\\\'", "\u0024", "\100", "[_~"})
   public void testInvalidGetOps(String rawPrefix) {
     Map<String, String> config = Maps.newHashMap();
     IcebergConfigProvider configProvider = 
IcebergConfigProviderFactory.create(config);
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergRESTUtils.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergRESTUtils.java
index 79a317e52b..050740dbc9 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergRESTUtils.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/TestIcebergRESTUtils.java
@@ -30,6 +30,7 @@ import org.apache.iceberg.types.Types.StringType;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
+@SuppressWarnings("deprecation")
 public class TestIcebergRESTUtils {
 
   @Test
@@ -49,9 +50,6 @@ public class TestIcebergRESTUtils {
     Assertions.assertEquals("catalog", 
IcebergRestUtils.getCatalogName(prefix));
     Assertions.assertEquals(
         IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG, 
IcebergRestUtils.getCatalogName(""));
-    Assertions.assertThrowsExactly(
-        IllegalArgumentException.class,
-        () -> 
IcebergRestUtils.getCatalogName(IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG + 
"/"));
   }
 
   @Test
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/provider/TestDynamicIcebergConfigProvider.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/provider/TestDynamicIcebergConfigProvider.java
index e7e3af2408..8bf9486daf 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/provider/TestDynamicIcebergConfigProvider.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/provider/TestDynamicIcebergConfigProvider.java
@@ -112,9 +112,6 @@ public class TestDynamicIcebergConfigProvider {
         IllegalArgumentException.class, () -> 
provider.getIcebergCatalogConfig(invalidCatalogName));
     Assertions.assertThrowsExactly(
         IllegalArgumentException.class, () -> 
provider.getIcebergCatalogConfig(""));
-    Assertions.assertThrowsExactly(
-        IllegalArgumentException.class,
-        () -> 
provider.getIcebergCatalogConfig(IcebergConstants.ICEBERG_REST_DEFAULT_CATALOG));
   }
 
   @Test
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/CatalogWrapperForTest.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/CatalogWrapperForTest.java
index 423b52d577..1cf41af796 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/CatalogWrapperForTest.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/CatalogWrapperForTest.java
@@ -32,6 +32,7 @@ import org.apache.iceberg.types.Types.StringType;
 import org.testcontainers.shaded.com.google.common.collect.ImmutableMap;
 
 // Used to override registerTable
+@SuppressWarnings("deprecation")
 public class CatalogWrapperForTest extends CatalogWrapperForREST {
   public CatalogWrapperForTest(String catalogName, IcebergConfig 
icebergConfig) {
     super(catalogName, icebergConfig);
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergTableOperations.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergTableOperations.java
index cfa7d71a0d..b076df1bdd 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergTableOperations.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergTableOperations.java
@@ -79,6 +79,7 @@ import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 import org.mockito.Mockito;
 
+@SuppressWarnings("deprecation")
 public class TestIcebergTableOperations extends IcebergNamespaceTestBase {
 
   private static final Schema tableSchema =
@@ -374,7 +375,7 @@ public class TestIcebergTableOperations extends 
IcebergNamespaceTestBase {
   }
 
   private Response doUpdateTable(Namespace ns, String name, TableMetadata 
base) {
-    TableMetadata newMetadata = base.updateSchema(newTableSchema, 
base.lastColumnId());
+    TableMetadata newMetadata = base.updateSchema(newTableSchema);
     List<MetadataUpdate> metadataUpdates = newMetadata.changes();
     List<UpdateRequirement> requirements = 
UpdateRequirements.forUpdateTable(base, metadataUpdates);
     UpdateTableRequest updateTableRequest = new 
UpdateTableRequest(requirements, metadataUpdates);
diff --git 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergViewOperations.java
 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergViewOperations.java
index 85100e4e29..a319638fa9 100644
--- 
a/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergViewOperations.java
+++ 
b/iceberg/iceberg-rest-server/src/test/java/org/apache/gravitino/iceberg/service/rest/TestIcebergViewOperations.java
@@ -71,6 +71,7 @@ import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.MethodSource;
 import org.mockito.Mockito;
 
+@SuppressWarnings("deprecation")
 public class TestIcebergViewOperations extends IcebergNamespaceTestBase {
   private static final Schema viewSchema =
       new Schema(Types.NestedField.of(1, false, "foo_string", 
Types.StringType.get()));
diff --git a/spark-connector/spark-common/build.gradle.kts 
b/spark-connector/spark-common/build.gradle.kts
index 5a61934295..1e8861af32 100644
--- a/spark-connector/spark-common/build.gradle.kts
+++ b/spark-connector/spark-common/build.gradle.kts
@@ -30,7 +30,7 @@ repositories {
 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: 
extra["defaultScalaVersion"].toString()
 val sparkVersion: String = libs.versions.spark33.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
-val icebergVersion: String = libs.versions.iceberg4spark.get()
+val icebergVersion: String = libs.versions.iceberg4connector.get()
 val paimonVersion: String = libs.versions.paimon.get()
 // kyuubi hive connector for Spark 3.3 doesn't support scala 2.13
 val kyuubiVersion: String = libs.versions.kyuubi4spark34.get()
diff --git 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/extensions/IcebergExtendedDataSourceV2Strategy.java
 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/extensions/IcebergExtendedDataSourceV2Strategy.java
index 3bf2631569..e8decfe3fa 100644
--- 
a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/extensions/IcebergExtendedDataSourceV2Strategy.java
+++ 
b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/extensions/IcebergExtendedDataSourceV2Strategy.java
@@ -20,7 +20,9 @@ package 
org.apache.gravitino.spark.connector.iceberg.extensions;
 
 import static 
org.apache.gravitino.spark.connector.utils.ConnectorUtil.toJavaList;
 
+import java.lang.reflect.Method;
 import java.util.Collections;
+import lombok.SneakyThrows;
 import org.apache.gravitino.spark.connector.iceberg.GravitinoIcebergCatalog;
 import org.apache.iceberg.spark.Spark3Util;
 import org.apache.spark.sql.SparkSession;
@@ -212,12 +214,20 @@ public class IcebergExtendedDataSourceV2Strategy extends 
ExtendedDataSourceV2Str
               spark, setWriteDistributionAndOrdering.table().toIndexedSeq())
           .map(
               catalogAndIdentifier -> {
-                SetWriteDistributionAndOrderingExec 
setWriteDistributionAndOrderingExec =
-                    new SetWriteDistributionAndOrderingExec(
-                        catalogAndIdentifier.catalog,
-                        catalogAndIdentifier.identifier,
-                        setWriteDistributionAndOrdering.distributionMode(),
-                        setWriteDistributionAndOrdering.sortOrder());
+                SetWriteDistributionAndOrderingExec 
setWriteDistributionAndOrderingExec = null;
+                // Iceberg 1.6 return DistributionMode, while Iceberg 1.9 
return
+                // Option<DistributionMode>
+                Object distributionMode = 
getDistributionMode(setWriteDistributionAndOrdering);
+                try {
+                  setWriteDistributionAndOrderingExec =
+                      createDistributionAndOrderingExec(
+                          catalogAndIdentifier.catalog,
+                          catalogAndIdentifier.identifier,
+                          distributionMode,
+                          setWriteDistributionAndOrdering.sortOrder());
+                } catch (Exception e) {
+                  throw new RuntimeException(e);
+                }
                 return toSeq(setWriteDistributionAndOrderingExec);
               })
           .get();
@@ -266,4 +276,39 @@ public class IcebergExtendedDataSourceV2Strategy extends 
ExtendedDataSourceV2Str
       }
     }
   }
+
+  private static SetWriteDistributionAndOrderingExec 
createDistributionAndOrderingExec(
+      TableCatalog catalog, Identifier identifier, Object distributionMode, 
Object sortOrder)
+      throws Exception {
+    // 1. get the companion object associated with the Scala case class
+    Class<?> companionClass =
+        Class.forName(SetWriteDistributionAndOrderingExec.class.getName() + 
"$");
+    Object companionInstance = companionClass.getField("MODULE$").get(null);
+
+    // 2. get apply method
+    Class<?>[] paramTypes = getApplyMethodParamTypes(companionClass);
+    Method applyMethod = companionClass.getMethod("apply", paramTypes);
+
+    // 3. invoke apply to create object
+    return (SetWriteDistributionAndOrderingExec)
+        applyMethod.invoke(companionInstance, catalog, identifier, 
distributionMode, sortOrder);
+  }
+
+  private static Class<?>[] getApplyMethodParamTypes(Class<?> companionClass) {
+    // Scala may generate multiple apply methods; use the apply method with 4 
arguments
+    for (Method method : companionClass.getMethods()) {
+      if ("apply".equals(method.getName()) && method.getParameterCount() == 4) 
{
+        return method.getParameterTypes();
+      }
+    }
+    throw new IllegalStateException("Could not find apply method with 4 
arguments");
+  }
+
+  @SneakyThrows
+  private static Object getDistributionMode(
+      SetWriteDistributionAndOrdering setWriteDistributionAndOrdering) {
+    Method distributionModeMethod =
+        
setWriteDistributionAndOrdering.getClass().getMethod("distributionMode");
+    return distributionModeMethod.invoke(setWriteDistributionAndOrdering);
+  }
 }
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
index 78607332ab..b595adaf32 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogIT.java
@@ -832,7 +832,9 @@ public abstract class SparkIcebergCatalogIT extends 
SparkCommonIT {
                     getCatalogName(), fullTableName))
             .collectAsList();
     Assertions.assertEquals(1, callResult.size());
-    Assertions.assertEquals(2, callResult.get(0).getInt(0));
+    int rewrite_delete_files = callResult.get(0).getInt(0);
+    // the number of rewritten delete files is 1 in Iceberg 1.9
+    Assertions.assertTrue(rewrite_delete_files == 1 || rewrite_delete_files == 
2);
     Assertions.assertEquals(1, callResult.get(0).getInt(1));
   }
 
@@ -1111,7 +1113,11 @@ public abstract class SparkIcebergCatalogIT extends 
SparkCommonIT {
     icebergTable.refresh();
     tableInfo = getTableInfo(tableName);
     tableProperties = tableInfo.getTableProperties();
-    Assertions.assertEquals("none", 
tableProperties.get(ICEBERG_WRITE_DISTRIBUTION_MODE));
+    // After https://github.com/apache/iceberg/pull/10774/, distribution mode 
is not changed for
+    // local sort order
+    String distributionMode = 
tableProperties.get(ICEBERG_WRITE_DISTRIBUTION_MODE);
+    Assertions.assertTrue(
+        "none".equalsIgnoreCase(distributionMode) || 
"range".equalsIgnoreCase(distributionMode));
     Assertions.assertEquals(
         "id DESC NULLS LAST", 
tableProperties.get(IcebergPropertiesConstants.ICEBERG_SORT_ORDER));
     sortOrder =
diff --git 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT.java
 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT.java
index 26f59302d2..6962cfbbda 100644
--- 
a/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT.java
+++ 
b/spark-connector/spark-common/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT.java
@@ -22,11 +22,15 @@ import com.google.common.collect.Maps;
 import java.util.Map;
 import org.apache.gravitino.spark.connector.iceberg.IcebergPropertiesConstants;
 import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.condition.DisabledIf;
 
 /**
  * This class use Apache Iceberg RESTCatalog for test, and the real backend 
catalog is HiveCatalog.
  */
 @Tag("gravitino-docker-test")
+// The Spark connector uses a lower Iceberg version and couldn't work with an 
Iceberg REST server built on a higher Iceberg
+// version in embedded mode.
+@DisabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public abstract class SparkIcebergCatalogRestBackendIT extends 
SparkIcebergCatalogIT {
 
   @Override
diff --git a/spark-connector/v3.3/spark/build.gradle.kts 
b/spark-connector/v3.3/spark/build.gradle.kts
index 257020a84c..4b7aed6474 100644
--- a/spark-connector/v3.3/spark/build.gradle.kts
+++ b/spark-connector/v3.3/spark/build.gradle.kts
@@ -30,7 +30,7 @@ repositories {
 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: 
extra["defaultScalaVersion"].toString()
 val sparkVersion: String = libs.versions.spark33.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
-val icebergVersion: String = libs.versions.iceberg4spark.get()
+val icebergVersion: String = libs.versions.iceberg4connector.get()
 val paimonVersion: String = libs.versions.paimon.get()
 val kyuubiVersion: String = libs.versions.kyuubi4spark33.get()
 val scalaJava8CompatVersion: String = libs.versions.scala.java.compat.get()
diff --git 
a/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT33.java
 
b/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT33.java
index db67c0b5f3..e04c1d2351 100644
--- 
a/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT33.java
+++ 
b/spark-connector/v3.3/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT33.java
@@ -19,4 +19,7 @@
 
 package org.apache.gravitino.spark.connector.integration.test.iceberg;
 
+import org.junit.jupiter.api.condition.DisabledIf;
+
+@DisabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public class SparkIcebergCatalogRestBackendIT33 extends 
SparkIcebergCatalogRestBackendIT {}
diff --git a/spark-connector/v3.4/spark/build.gradle.kts 
b/spark-connector/v3.4/spark/build.gradle.kts
index 7789017bd9..160a8f8f2f 100644
--- a/spark-connector/v3.4/spark/build.gradle.kts
+++ b/spark-connector/v3.4/spark/build.gradle.kts
@@ -30,7 +30,7 @@ repositories {
 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: 
extra["defaultScalaVersion"].toString()
 val sparkVersion: String = libs.versions.spark34.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
-val icebergVersion: String = libs.versions.iceberg4spark.get()
+val icebergVersion: String = libs.versions.iceberg4connector.get()
 val paimonVersion: String = libs.versions.paimon.get()
 val kyuubiVersion: String = libs.versions.kyuubi4spark34.get()
 val scalaJava8CompatVersion: String = libs.versions.scala.java.compat.get()
diff --git 
a/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT34.java
 
b/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT34.java
index dbb1458113..ecff644384 100644
--- 
a/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT34.java
+++ 
b/spark-connector/v3.4/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT34.java
@@ -19,4 +19,7 @@
 
 package org.apache.gravitino.spark.connector.integration.test.iceberg;
 
+import org.junit.jupiter.api.condition.DisabledIf;
+
+@DisabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public class SparkIcebergCatalogRestBackendIT34 extends 
SparkIcebergCatalogRestBackendIT {}
diff --git a/spark-connector/v3.5/spark/build.gradle.kts 
b/spark-connector/v3.5/spark/build.gradle.kts
index f6a5417ac2..c00028fdbd 100644
--- a/spark-connector/v3.5/spark/build.gradle.kts
+++ b/spark-connector/v3.5/spark/build.gradle.kts
@@ -30,7 +30,7 @@ repositories {
 val scalaVersion: String = project.properties["scalaVersion"] as? String ?: 
extra["defaultScalaVersion"].toString()
 val sparkVersion: String = libs.versions.spark35.get()
 val sparkMajorVersion: String = sparkVersion.substringBeforeLast(".")
-val icebergVersion: String = libs.versions.iceberg4spark.get()
+val icebergVersion: String = libs.versions.iceberg4connector.get()
 val paimonVersion: String = libs.versions.paimon.get()
 val kyuubiVersion: String = libs.versions.kyuubi4spark35.get()
 val scalaJava8CompatVersion: String = libs.versions.scala.java.compat.get()
diff --git 
a/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT35.java
 
b/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT35.java
index 723f421bb8..35f7b0cd60 100644
--- 
a/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT35.java
+++ 
b/spark-connector/v3.5/spark/src/test/java/org/apache/gravitino/spark/connector/integration/test/iceberg/SparkIcebergCatalogRestBackendIT35.java
@@ -19,4 +19,7 @@
 
 package org.apache.gravitino.spark.connector.integration.test.iceberg;
 
+import org.junit.jupiter.api.condition.DisabledIf;
+
+@DisabledIf("org.apache.gravitino.integration.test.util.ITUtils#isEmbedded")
 public class SparkIcebergCatalogRestBackendIT35 extends 
SparkIcebergCatalogRestBackendIT {}


Reply via email to