This is an automated email from the ASF dual-hosted git repository. yufei pushed a commit to branch release/1.0.x in repository https://gitbox.apache.org/repos/asf/polaris.git
commit bea6513b26c13fb70eceacb064ed0473d367f164 Author: Yun Zou <yunzou.colost...@gmail.com> AuthorDate: Mon Jun 23 14:42:02 2025 -0700 Update spark client to use the shaded iceberg-core in iceberg-spark-runtime to avoid spark compatibility issues (#1908) * add change * add comment * update change * add comment * add change * add tests * add comment * clean up style check * update build * Revert "Reuse shadowJar for spark client bundle jar maven publish (#1857)" This reverts commit 1f7f127536a088911bf940addd1d05c07ff99a68. * Reuse shadowJar for spark client bundle jar maven publish (#1857) * fix spark client * fix test failure and address feedback * fix error * update regression test * update classifier name * address comment * add change * update doc * update build and readme * add back jr * update dependency * add change * update * update tests * remove merge service file * update readme * update readme * update checkstyle * rebase with main * Revert "Reuse shadowJar for spark client bundle jar maven publish (#1857)" This reverts commit 40f4d36c8e9cfa54074b9754b7024e4828b7b7a6. 
* update checkstyle * revert change * address comments * trigger tests --- .../apache/polaris/service/it/env/CatalogApi.java | 2 +- .../polaris/service/it/env/ManagementApi.java | 2 +- plugins/spark/v3.5/integration/build.gradle.kts | 14 +- .../spark/quarkus/it/PolarisManagementClient.java | 102 +++++++++++ .../spark/quarkus/it/SparkCatalogIcebergIT.java | 3 + .../spark/quarkus/it/SparkIntegrationBase.java | 152 ++++++++++++++++- plugins/spark/v3.5/spark/build.gradle.kts | 59 ++----- .../spark/v3.5/spark/checkstyle_suppressions.xml | 32 ++++ .../org/apache/polaris/spark/PolarisCatalog.java | 2 +- .../apache/polaris/spark/PolarisRESTCatalog.java | 4 +- .../apache/polaris/spark/PolarisSparkCatalog.java | 6 +- .../spark/rest/CreateGenericTableRESTRequest.java | 5 +- .../spark/rest/CreateGenericTableRequest.java | 187 +++++++++++++++++++++ .../apache/polaris/spark/rest/GenericTable.java | 186 ++++++++++++++++++++ .../spark/rest/ListGenericTablesRESTResponse.java | 5 +- .../spark/rest/ListGenericTablesResponse.java | 134 +++++++++++++++ .../spark/rest/LoadGenericTableRESTResponse.java | 6 +- .../spark/rest/LoadGenericTableResponse.java | 108 ++++++++++++ .../polaris/spark/utils/PolarisCatalogUtils.java | 2 +- .../polaris/spark/PolarisInMemoryCatalog.java | 2 +- .../polaris/spark/rest/DeserializationTest.java | 29 ++-- 21 files changed, 961 insertions(+), 81 deletions(-) diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java b/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java index 7be67f194..0274d0ea8 100644 --- a/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java +++ b/integration-tests/src/main/java/org/apache/polaris/service/it/env/CatalogApi.java @@ -50,7 +50,7 @@ import org.apache.iceberg.rest.responses.OAuthTokenResponse; * @see PolarisClient#catalogApi(ClientCredentials) */ public class CatalogApi extends RestApi { - CatalogApi(Client client, PolarisApiEndpoints 
endpoints, String authToken, URI uri) { + public CatalogApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { super(client, endpoints, authToken, uri); } diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java b/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java index fb3019c3e..f2adf3014 100644 --- a/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java +++ b/integration-tests/src/main/java/org/apache/polaris/service/it/env/ManagementApi.java @@ -53,7 +53,7 @@ import org.apache.polaris.core.admin.model.UpdateCatalogRequest; * @see PolarisClient#managementApi(ClientCredentials) */ public class ManagementApi extends RestApi { - ManagementApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { + public ManagementApi(Client client, PolarisApiEndpoints endpoints, String authToken, URI uri) { super(client, endpoints, authToken, uri); } diff --git a/plugins/spark/v3.5/integration/build.gradle.kts b/plugins/spark/v3.5/integration/build.gradle.kts index 0a1a8087e..a27361b41 100644 --- a/plugins/spark/v3.5/integration/build.gradle.kts +++ b/plugins/spark/v3.5/integration/build.gradle.kts @@ -45,9 +45,13 @@ dependencies { implementation(project(":polaris-runtime-service")) - testImplementation(project(":polaris-api-management-model")) + testImplementation( + "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" + ) testImplementation(project(":polaris-spark-${sparkMajorVersion}_${scalaVersion}")) + testImplementation(project(":polaris-api-management-model")) + testImplementation("org.apache.spark:spark-sql_${scalaVersion}:${spark35Version}") { // exclude log4j dependencies. 
Explicit dependencies for the log4j libraries are // enforced below to ensure the version compatibility @@ -64,13 +68,7 @@ dependencies { testImplementation("io.delta:delta-spark_${scalaVersion}:3.3.1") testImplementation(platform(libs.jackson.bom)) - testImplementation("com.fasterxml.jackson.core:jackson-annotations") - testImplementation("com.fasterxml.jackson.core:jackson-core") - testImplementation("com.fasterxml.jackson.core:jackson-databind") - - testImplementation( - "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" - ) + testImplementation("com.fasterxml.jackson.jakarta.rs:jackson-jakarta-rs-json-provider") testImplementation(testFixtures(project(":polaris-runtime-service"))) diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java new file mode 100644 index 000000000..cc0f177f7 --- /dev/null +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/PolarisManagementClient.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.polaris.spark.quarkus.it; + +import static java.util.concurrent.TimeUnit.MINUTES; +import static org.apache.polaris.service.it.ext.PolarisServerManagerLoader.polarisServerManager; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.jakarta.rs.json.JacksonJsonProvider; +import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.client.ClientBuilder; +import java.util.Map; +import java.util.Random; +import org.apache.iceberg.rest.HTTPClient; +import org.apache.iceberg.rest.RESTClient; +import org.apache.iceberg.rest.auth.AuthSession; +import org.apache.iceberg.rest.auth.OAuth2Util; +import org.apache.iceberg.rest.responses.OAuthTokenResponse; +import org.apache.polaris.service.it.env.ClientCredentials; +import org.apache.polaris.service.it.env.ManagementApi; +import org.apache.polaris.service.it.env.PolarisApiEndpoints; + +/** + * This class provides a REST client for the Polaris Management service endpoints and its auth-token + * endpoint, which is used in Spark client tests to run commands that Spark SQL can’t issue directly + * (e.g., createCatalog). + */ +public final class PolarisManagementClient implements AutoCloseable { + private final PolarisApiEndpoints endpoints; + private final Client client; + // Use an alphanumeric ID for widest compatibility in HTTP and SQL. + // Use MAX_RADIX for shorter output. 
+ private final String clientId = + Long.toString(Math.abs(new Random().nextLong()), Character.MAX_RADIX); + // initialization an Iceberg rest client for fetch token + private final RESTClient restClient; + + private PolarisManagementClient(PolarisApiEndpoints endpoints) { + this.endpoints = endpoints; + + this.client = + ClientBuilder.newBuilder() + .readTimeout(5, MINUTES) + .connectTimeout(1, MINUTES) + .register(new JacksonJsonProvider(new ObjectMapper())) + .build(); + + this.restClient = HTTPClient.builder(Map.of()).uri(endpoints.catalogApiEndpoint()).build(); + } + + public static PolarisManagementClient managementClient(PolarisApiEndpoints endpoints) { + return new PolarisManagementClient(endpoints); + } + + /** This method should be used by test code to make top-level entity names. */ + public String newEntityName(String hint) { + return polarisServerManager().transformEntityName(hint + "_" + clientId); + } + + public ManagementApi managementApi(String authToken) { + return new ManagementApi(client, endpoints, authToken, endpoints.managementApiEndpoint()); + } + + public ManagementApi managementApi(ClientCredentials credentials) { + return managementApi(obtainToken(credentials)); + } + + /** Requests an access token from the Polaris server for the given {@link ClientCredentials}. 
*/ + public String obtainToken(ClientCredentials credentials) { + OAuthTokenResponse response = + OAuth2Util.fetchToken( + restClient.withAuthSession(AuthSession.EMPTY), + Map.of(), + String.format("%s:%s", credentials.clientId(), credentials.clientSecret()), + "PRINCIPAL_ROLE:ALL", + endpoints.catalogApiEndpoint() + "/v1/oauth/tokens", + Map.of("grant_type", "client_credentials")); + return response.token(); + } + + @Override + public void close() throws Exception { + client.close(); + restClient.close(); + } +} diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java index f3c411df2..d9182e6e8 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkCatalogIcebergIT.java @@ -27,6 +27,9 @@ public class SparkCatalogIcebergIT extends SparkCatalogBaseIT { @Override protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) { return builder + .config( + "spark.sql.extensions", + "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions") .config( String.format("spark.sql.catalog.%s", catalogName), "org.apache.iceberg.spark.SparkCatalog") diff --git a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java index be456716c..8d16c36ad 100644 --- a/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java +++ b/plugins/spark/v3.5/integration/src/intTest/java/org/apache/polaris/spark/quarkus/it/SparkIntegrationBase.java @@ -18,23 +18,133 @@ */ package 
org.apache.polaris.spark.quarkus.it; +import com.adobe.testing.s3mock.testcontainers.S3MockContainer; import com.google.common.collect.ImmutableList; import com.google.errorprone.annotations.FormatMethod; import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.nio.file.Path; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.DirectoryFileFilter; import org.apache.commons.io.filefilter.FalseFileFilter; -import org.apache.polaris.service.it.ext.PolarisSparkIntegrationTestBase; +import org.apache.polaris.core.admin.model.AwsStorageConfigInfo; +import org.apache.polaris.core.admin.model.Catalog; +import org.apache.polaris.core.admin.model.CatalogProperties; +import org.apache.polaris.core.admin.model.PolarisCatalog; +import org.apache.polaris.core.admin.model.StorageConfigInfo; +import org.apache.polaris.service.it.env.ClientCredentials; +import org.apache.polaris.service.it.env.IntegrationTestsHelper; +import org.apache.polaris.service.it.env.ManagementApi; +import org.apache.polaris.service.it.env.PolarisApiEndpoints; +import org.apache.polaris.service.it.ext.PolarisIntegrationTestExtension; +import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SparkSession; +import org.intellij.lang.annotations.Language; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.io.TempDir; +import org.slf4j.LoggerFactory; -public abstract class SparkIntegrationBase extends PolarisSparkIntegrationTestBase { +@ExtendWith(PolarisIntegrationTestExtension.class) +public abstract class SparkIntegrationBase { + protected static final S3MockContainer s3Container = + 
new S3MockContainer("3.11.0").withInitialBuckets("my-bucket,my-old-bucket"); + protected static SparkSession spark; + protected PolarisApiEndpoints endpoints; + protected PolarisManagementClient client; + protected ManagementApi managementApi; + protected String catalogName; + protected String sparkToken; + + protected URI warehouseDir; + + @BeforeAll + public static void setup() throws IOException { + s3Container.start(); + } + + @AfterAll + public static void cleanup() { + s3Container.stop(); + } + + @BeforeEach + public void before( + PolarisApiEndpoints apiEndpoints, ClientCredentials credentials, @TempDir Path tempDir) { + endpoints = apiEndpoints; + client = PolarisManagementClient.managementClient(endpoints); + sparkToken = client.obtainToken(credentials); + managementApi = client.managementApi(credentials); + + warehouseDir = IntegrationTestsHelper.getTemporaryDirectory(tempDir).resolve("spark-warehouse"); + + catalogName = client.newEntityName("spark_catalog"); + + AwsStorageConfigInfo awsConfigModel = + AwsStorageConfigInfo.builder() + .setRoleArn("arn:aws:iam::123456789012:role/my-role") + .setExternalId("externalId") + .setUserArn("userArn") + .setStorageType(StorageConfigInfo.StorageTypeEnum.S3) + .setAllowedLocations(List.of("s3://my-old-bucket/path/to/data")) + .build(); + CatalogProperties props = new CatalogProperties("s3://my-bucket/path/to/data"); + props.putAll( + Map.of( + "table-default.s3.endpoint", + s3Container.getHttpEndpoint(), + "table-default.s3.path-style-access", + "true", + "table-default.s3.access-key-id", + "foo", + "table-default.s3.secret-access-key", + "bar", + "s3.endpoint", + s3Container.getHttpEndpoint(), + "s3.path-style-access", + "true", + "s3.access-key-id", + "foo", + "s3.secret-access-key", + "bar", + "polaris.config.drop-with-purge.enabled", + "true")); + Catalog catalog = + PolarisCatalog.builder() + .setType(Catalog.TypeEnum.INTERNAL) + .setName(catalogName) + .setProperties(props) + 
.setStorageConfigInfo(awsConfigModel) + .build(); + + managementApi.createCatalog(catalog); + + SparkSession.Builder sessionBuilder = + SparkSession.builder() + .master("local[1]") + .config("spark.hadoop.fs.s3.impl", "org.apache.hadoop.fs.s3a.S3AFileSystem") + .config( + "spark.hadoop.fs.s3.aws.credentials.provider", + "org.apache.hadoop.fs.s3.TemporaryAWSCredentialsProvider") + .config("spark.hadoop.fs.s3.access.key", "foo") + .config("spark.hadoop.fs.s3.secret.key", "bar") + .config("spark.ui.showConsoleProgress", false) + .config("spark.ui.enabled", "false"); + spark = withCatalog(sessionBuilder, catalogName).getOrCreate(); + + onSpark("USE " + catalogName); + } - @Override protected SparkSession.Builder withCatalog(SparkSession.Builder builder, String catalogName) { return builder .config( @@ -61,6 +171,38 @@ public abstract class SparkIntegrationBase extends PolarisSparkIntegrationTestBa .config(String.format("spark.sql.catalog.%s.s3.region", catalogName), "us-west-2"); } + @AfterEach + public void after() throws Exception { + cleanupCatalog(catalogName); + try { + SparkSession.clearDefaultSession(); + SparkSession.clearActiveSession(); + spark.close(); + } catch (Exception e) { + LoggerFactory.getLogger(getClass()).error("Unable to close spark session", e); + } + + client.close(); + } + + protected void cleanupCatalog(String catalogName) { + onSpark("USE " + catalogName); + List<Row> namespaces = onSpark("SHOW NAMESPACES").collectAsList(); + for (Row namespace : namespaces) { + List<Row> tables = onSpark("SHOW TABLES IN " + namespace.getString(0)).collectAsList(); + for (Row table : tables) { + onSpark("DROP TABLE " + namespace.getString(0) + "." + table.getString(1)); + } + List<Row> views = onSpark("SHOW VIEWS IN " + namespace.getString(0)).collectAsList(); + for (Row view : views) { + onSpark("DROP VIEW " + namespace.getString(0) + "." 
+ view.getString(1)); + } + onSpark("DROP NAMESPACE " + namespace.getString(0)); + } + + managementApi.deleteCatalog(catalogName); + } + @FormatMethod protected List<Object[]> sql(String query, Object... args) { List<Row> rows = spark.sql(String.format(query, args)).collectAsList(); @@ -110,4 +252,8 @@ public abstract class SparkIntegrationBase extends PolarisSparkIntegrationTestBa protected String generateName(String prefix) { return prefix + "_" + UUID.randomUUID().toString().replaceAll("-", ""); } + + protected static Dataset<Row> onSpark(@Language("SQL") String sql) { + return spark.sql(sql); + } } diff --git a/plugins/spark/v3.5/spark/build.gradle.kts b/plugins/spark/v3.5/spark/build.gradle.kts index a2a54e26b..d13255bf6 100644 --- a/plugins/spark/v3.5/spark/build.gradle.kts +++ b/plugins/spark/v3.5/spark/build.gradle.kts @@ -21,6 +21,14 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar plugins { id("polaris-client") } +checkstyle { + configProperties = + mapOf( + "org.checkstyle.google.suppressionfilter.config" to + project.file("checkstyle_suppressions.xml").absolutePath + ) +} + // get version information val sparkMajorVersion = "3.5" val scalaVersion = getAndUseScalaVersionForProject() @@ -37,32 +45,6 @@ val scalaLibraryVersion = dependencies { // TODO: extract a polaris-rest module as a thin layer for // client to depends on. - implementation(project(":polaris-api-iceberg-service")) { - // exclude the iceberg dependencies, use the ones pulled - // by iceberg-core - exclude("org.apache.iceberg", "*") - // exclude all cloud and quarkus specific dependencies to avoid - // running into problems with signature files. 
- exclude("com.azure", "*") - exclude("software.amazon.awssdk", "*") - exclude("com.google.cloud", "*") - exclude("io.airlift", "*") - exclude("io.smallrye", "*") - exclude("io.smallrye.common", "*") - exclude("io.swagger", "*") - exclude("org.apache.commons", "*") - } - implementation(project(":polaris-api-catalog-service")) { - exclude("org.apache.iceberg", "*") - exclude("com.azure", "*") - exclude("software.amazon.awssdk", "*") - exclude("com.google.cloud", "*") - exclude("io.airlift", "*") - exclude("io.smallrye", "*") - exclude("io.smallrye.common", "*") - exclude("io.swagger", "*") - exclude("org.apache.commons", "*") - } implementation(project(":polaris-core")) { exclude("org.apache.iceberg", "*") exclude("com.azure", "*") @@ -75,15 +57,9 @@ dependencies { exclude("org.apache.commons", "*") } - implementation("org.apache.iceberg:iceberg-core:${icebergVersion}") - implementation( "org.apache.iceberg:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersion}:${icebergVersion}" - ) { - // exclude the iceberg rest dependencies, use the ones pulled - // with iceberg-core dependency - exclude("org.apache.iceberg", "iceberg-core") - } + ) compileOnly("org.scala-lang:scala-library:${scalaLibraryVersion}") compileOnly("org.scala-lang:scala-reflect:${scalaLibraryVersion}") @@ -95,6 +71,9 @@ dependencies { exclude("org.slf4j", "jul-to-slf4j") } + compileOnly(libs.jakarta.annotation.api) + compileOnly(libs.jakarta.validation.api) + testImplementation(platform(libs.junit.bom)) testImplementation("org.junit.jupiter:junit-jupiter") testImplementation(libs.assertj.core) @@ -129,14 +108,10 @@ tasks.register<ShadowJar>("createPolarisSparkJar") { // Optimization: Minimize the JAR (remove unused classes from dependencies) // The iceberg-spark-runtime plugin is always packaged along with our polaris-spark plugin, // therefore excluded from the optimization. 
- minimize { - exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) - exclude(dependency("org.apache.iceberg:iceberg-core*.*")) - exclude(dependency("org.apache.avro:avro*.*")) - } - - relocate("com.fasterxml", "org.apache.polaris.shaded.com.fasterxml.jackson") - relocate("org.apache.avro", "org.apache.polaris.shaded.org.apache.avro") + minimize { exclude(dependency("org.apache.iceberg:iceberg-spark-runtime-*.*")) } } -tasks.withType(Jar::class).named("sourcesJar") { dependsOn("createPolarisSparkJar") } +// ensure the ShadowJar job is run for both `assemble` and `build` task +tasks.named("assemble") { dependsOn("createPolarisSparkJar") } + +tasks.named("build") { dependsOn("createPolarisSparkJar") } diff --git a/plugins/spark/v3.5/spark/checkstyle_suppressions.xml b/plugins/spark/v3.5/spark/checkstyle_suppressions.xml new file mode 100644 index 000000000..1f6f0aad9 --- /dev/null +++ b/plugins/spark/v3.5/spark/checkstyle_suppressions.xml @@ -0,0 +1,32 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+--> +<!DOCTYPE suppressions PUBLIC + "-//Puppy Crawl//DTD Suppressions Configuration 1.2//EN" + "https://checkstyle.org/dtds/suppressions_1_2.dtd"> + +<suppressions> + <!-- + Suppress IllegalImport in all files under plugins/spark/v3.5/spark. + The spark client relies on the shaded libraries from iceberg-spark-runtime, + and therefore uses imports like org.apache.iceberg.shaded.* in the code, + which is intended. + --> + <suppress checks="IllegalImport" files="plugins/spark/v3.5/spark/.*"/> +</suppressions> diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java index 31a6ac189..99802d3cb 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisCatalog.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Map; import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.TableIdentifier; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; public interface PolarisCatalog { List<TableIdentifier> listGenericTables(Namespace ns); diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java index d255c3c57..5dfd1f898 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisRESTCatalog.java @@ -46,9 +46,9 @@ import org.apache.iceberg.util.EnvironmentUtil; import org.apache.iceberg.util.PropertyUtil; import org.apache.polaris.core.rest.PolarisEndpoints; import org.apache.polaris.core.rest.PolarisResourcePaths; -import org.apache.polaris.service.types.CreateGenericTableRequest; -import 
org.apache.polaris.service.types.GenericTable; import org.apache.polaris.spark.rest.CreateGenericTableRESTRequest; +import org.apache.polaris.spark.rest.CreateGenericTableRequest; +import org.apache.polaris.spark.rest.GenericTable; import org.apache.polaris.spark.rest.ListGenericTablesRESTResponse; import org.apache.polaris.spark.rest.LoadGenericTableRESTResponse; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java index e1658312b..fe0c6e180 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/PolarisSparkCatalog.java @@ -22,7 +22,11 @@ import java.util.Map; import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.exceptions.AlreadyExistsException; import org.apache.iceberg.spark.Spark3Util; -import org.apache.polaris.service.types.GenericTable; +// Use the spec class defined at client side under the rest package. +// The spec classes used at client side and server side are different in +// terms of import, where the client side uses the shaded jackson library +// from iceberg-spark-runtime. 
+import org.apache.polaris.spark.rest.GenericTable; import org.apache.polaris.spark.utils.PolarisCatalogUtils; import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java index 6a9c89a5f..493861116 100644 --- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRESTRequest.java @@ -18,11 +18,10 @@ */ package org.apache.polaris.spark.rest; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Map; import org.apache.iceberg.rest.RESTRequest; -import org.apache.polaris.service.types.CreateGenericTableRequest; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty; /** * RESTRequest definition for CreateGenericTable which extends the iceberg RESTRequest. This is diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java new file mode 100644 index 000000000..101695e55 --- /dev/null +++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/rest/CreateGenericTableRequest.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.polaris.spark.rest;

import jakarta.validation.constraints.NotNull;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty;

// TODO: auto generate the class based on spec
/**
 * Request body for creating a generic (non-Iceberg) table through the Polaris REST API. Uses the
 * Jackson annotations shaded into iceberg-spark-runtime so the Spark client serializes with the
 * same Jackson as the Iceberg REST client.
 */
public class CreateGenericTableRequest {

  @NotNull private final String name;
  @NotNull private final String format;
  private final String baseLocation;
  private final String doc;
  private final Map<String, String> properties;

  @JsonCreator
  public CreateGenericTableRequest(
      @JsonProperty(value = "name", required = true) String name,
      @JsonProperty(value = "format", required = true) String format,
      @JsonProperty(value = "base-location") String baseLocation,
      @JsonProperty(value = "doc") String doc,
      @JsonProperty(value = "properties") Map<String, String> properties) {
    this.name = name;
    this.format = format;
    this.baseLocation = baseLocation;
    this.doc = doc;
    // never expose a null map to callers of getProperties()
    this.properties = properties == null ? new HashMap<>() : properties;
  }

  /** Convenience constructor taking only the two required fields. */
  public CreateGenericTableRequest(String name, String format) {
    this(name, format, null, null, null);
  }

  /** Table name; required. */
  @JsonProperty(value = "name", required = true)
  public String getName() {
    return name;
  }

  /** Table format (e.g. "delta"); required. */
  @JsonProperty(value = "format", required = true)
  public String getFormat() {
    return format;
  }

  /** Optional base location of the table. */
  @JsonProperty(value = "base-location")
  public String getBaseLocation() {
    return baseLocation;
  }

  /** Optional human-readable description. */
  @JsonProperty(value = "doc")
  public String getDoc() {
    return doc;
  }

  /** Table properties; never null, possibly empty. */
  @JsonProperty(value = "properties")
  public Map<String, String> getProperties() {
    return properties;
  }

  public static Builder builder() {
    return new Builder();
  }

  public static Builder builder(String name, String format) {
    return new Builder(name, format);
  }

  /** Fluent builder for {@link CreateGenericTableRequest}. */
  public static final class Builder {
    private String name;
    private String format;
    private String baseLocation;
    private String doc;
    private Map<String, String> properties;

    private Builder() {}

    private Builder(String name, String format) {
      this.name = name;
      this.format = format;
    }

    public Builder setName(String name) {
      this.name = name;
      return this;
    }

    public Builder setFormat(String format) {
      this.format = format;
      return this;
    }

    public Builder setBaseLocation(String baseLocation) {
      this.baseLocation = baseLocation;
      return this;
    }

    public Builder setDoc(String doc) {
      this.doc = doc;
      return this;
    }

    public Builder setProperties(Map<String, String> properties) {
      this.properties = properties;
      return this;
    }

    public CreateGenericTableRequest build() {
      return new CreateGenericTableRequest(name, format, baseLocation, doc, properties);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CreateGenericTableRequest that = (CreateGenericTableRequest) o;
    return Objects.equals(name, that.name)
        && Objects.equals(format, that.format)
        && Objects.equals(baseLocation, that.baseLocation)
        && Objects.equals(doc, that.doc)
        && Objects.equals(properties, that.properties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, format, baseLocation, doc, properties);
  }

  @Override
  public String toString() {
    return "class CreateGenericTableRequest {\n"
        + "    name: " + toIndentedString(name) + "\n"
        + "    format: " + toIndentedString(format) + "\n"
        + "    baseLocation: " + toIndentedString(baseLocation) + "\n"
        + "    doc: " + toIndentedString(doc) + "\n"
        + "    properties: " + toIndentedString(properties) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.polaris.spark.rest;

import jakarta.validation.constraints.NotNull;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty;

// TODO: auto generate the class based on spec
/**
 * REST model for a generic (non-Iceberg) table exposed by the Polaris generic-table endpoints.
 * Serialized with the Jackson classes shaded into iceberg-spark-runtime.
 */
public class GenericTable {

  @NotNull private final String name;
  @NotNull private final String format;
  private final String baseLocation;
  private final String doc;
  private final Map<String, String> properties;

  @JsonCreator
  public GenericTable(
      @JsonProperty(value = "name", required = true) String name,
      @JsonProperty(value = "format", required = true) String format,
      @JsonProperty(value = "base-location") String baseLocation,
      @JsonProperty(value = "doc") String doc,
      @JsonProperty(value = "properties") Map<String, String> properties) {
    this.name = name;
    this.format = format;
    this.baseLocation = baseLocation;
    this.doc = doc;
    // never expose a null map to callers of getProperties()
    this.properties = properties == null ? new HashMap<>() : properties;
  }

  /** Convenience constructor taking only the two required fields. */
  public GenericTable(String name, String format) {
    this(name, format, null, null, null);
  }

  /** Table name; required. */
  @JsonProperty(value = "name", required = true)
  public String getName() {
    return name;
  }

  /** Table format (e.g. "delta"); required. */
  @JsonProperty(value = "format", required = true)
  public String getFormat() {
    return format;
  }

  /** Optional base location of the table. */
  @JsonProperty(value = "base-location")
  public String getBaseLocation() {
    return baseLocation;
  }

  /** Optional human-readable description. */
  @JsonProperty(value = "doc")
  public String getDoc() {
    return doc;
  }

  /** Table properties; never null, possibly empty. */
  @JsonProperty(value = "properties")
  public Map<String, String> getProperties() {
    return properties;
  }

  public static Builder builder() {
    return new Builder();
  }

  public static Builder builder(String name, String format) {
    return new Builder(name, format);
  }

  /** Fluent builder for {@link GenericTable}. */
  public static final class Builder {
    private String name;
    private String format;
    private String baseLocation;
    private String doc;
    private Map<String, String> properties;

    private Builder() {}

    private Builder(String name, String format) {
      this.name = name;
      this.format = format;
    }

    public Builder setName(String name) {
      this.name = name;
      return this;
    }

    public Builder setFormat(String format) {
      this.format = format;
      return this;
    }

    public Builder setBaseLocation(String baseLocation) {
      this.baseLocation = baseLocation;
      return this;
    }

    public Builder setDoc(String doc) {
      this.doc = doc;
      return this;
    }

    public Builder setProperties(Map<String, String> properties) {
      this.properties = properties;
      return this;
    }

    public GenericTable build() {
      return new GenericTable(name, format, baseLocation, doc, properties);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    GenericTable that = (GenericTable) o;
    return Objects.equals(name, that.name)
        && Objects.equals(format, that.format)
        && Objects.equals(baseLocation, that.baseLocation)
        && Objects.equals(doc, that.doc)
        && Objects.equals(properties, that.properties);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, format, baseLocation, doc, properties);
  }

  @Override
  public String toString() {
    return "class GenericTable {\n"
        + "    name: " + toIndentedString(name) + "\n"
        + "    format: " + toIndentedString(format) + "\n"
        + "    baseLocation: " + toIndentedString(baseLocation) + "\n"
        + "    doc: " + toIndentedString(doc) + "\n"
        + "    properties: " + toIndentedString(properties) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.polaris.spark.rest;

import jakarta.validation.Valid;
import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty;

// TODO: auto generate the class based on spec
/**
 * REST model for the list-generic-tables response: a page of table identifiers plus an optional
 * continuation token.
 */
public class ListGenericTablesResponse {

  private final String nextPageToken;
  @Valid private final Set<@Valid TableIdentifier> identifiers;

  @JsonCreator
  public ListGenericTablesResponse(
      @JsonProperty(value = "next-page-token") String nextPageToken,
      @JsonProperty(value = "identifiers") Set<@Valid TableIdentifier> identifiers) {
    this.nextPageToken = nextPageToken;
    // preserve server-side ordering and never expose null to callers
    this.identifiers = Objects.requireNonNullElse(identifiers, new LinkedHashSet<>());
  }

  /**
   * Opaque pagination token. Clients send an empty {@code pageToken} query parameter to start a
   * paginated listing; servers that support pagination return a {@code next-page-token} when more
   * results remain, and the client passes that value as {@code pageToken} on the next request. A
   * {@code null} or missing value marks the end of the listing; servers that do not paginate omit
   * it and return all results at once.
   */
  @JsonProperty(value = "next-page-token")
  public String getNextPageToken() {
    return nextPageToken;
  }

  /** Identifiers of the tables in this page; never null, possibly empty. */
  @JsonProperty(value = "identifiers")
  public Set<@Valid TableIdentifier> getIdentifiers() {
    return identifiers;
  }

  public static Builder builder() {
    return new Builder();
  }

  /** Fluent builder for {@link ListGenericTablesResponse}. */
  public static final class Builder {
    private String nextPageToken;
    private Set<@Valid TableIdentifier> identifiers;

    private Builder() {}

    public Builder setNextPageToken(String nextPageToken) {
      this.nextPageToken = nextPageToken;
      return this;
    }

    public Builder setIdentifiers(Set<@Valid TableIdentifier> identifiers) {
      this.identifiers = identifiers;
      return this;
    }

    public ListGenericTablesResponse build() {
      return new ListGenericTablesResponse(nextPageToken, identifiers);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    ListGenericTablesResponse that = (ListGenericTablesResponse) o;
    return Objects.equals(nextPageToken, that.nextPageToken)
        && Objects.equals(identifiers, that.identifiers);
  }

  @Override
  public int hashCode() {
    return Objects.hash(nextPageToken, identifiers);
  }

  @Override
  public String toString() {
    return "class ListGenericTablesResponse {\n"
        + "    nextPageToken: " + toIndentedString(nextPageToken) + "\n"
        + "    identifiers: " + toIndentedString(identifiers) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.polaris.spark.rest;

import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;
import java.util.Objects;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonProperty;

// TODO: auto generate the class based on spec
/** REST model for the load-generic-table response: wraps the loaded {@link GenericTable}. */
public class LoadGenericTableResponse {

  @NotNull @Valid private final GenericTable table;

  @JsonCreator
  public LoadGenericTableResponse(
      @JsonProperty(value = "table", required = true) GenericTable table) {
    this.table = table;
  }

  /** The loaded table; required. */
  @JsonProperty(value = "table", required = true)
  public GenericTable getTable() {
    return table;
  }

  public static Builder builder() {
    return new Builder();
  }

  public static Builder builder(GenericTable table) {
    return new Builder(table);
  }

  /** Fluent builder for {@link LoadGenericTableResponse}. */
  public static final class Builder {
    private GenericTable table;

    private Builder() {}

    private Builder(GenericTable table) {
      this.table = table;
    }

    public Builder setTable(GenericTable table) {
      this.table = table;
      return this;
    }

    public LoadGenericTableResponse build() {
      return new LoadGenericTableResponse(table);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    LoadGenericTableResponse that = (LoadGenericTableResponse) o;
    return Objects.equals(table, that.table);
  }

  @Override
  public int hashCode() {
    return Objects.hash(table);
  }

  @Override
  public String toString() {
    return "class LoadGenericTableResponse {\n"
        + "    table: " + toIndentedString(table) + "\n"
        + "}";
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces (except the first line).
   */
  private String toIndentedString(Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
5c3d59710..c846659df 100644 --- a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java +++ b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/PolarisInMemoryCatalog.java @@ -30,7 +30,7 @@ import org.apache.iceberg.exceptions.AlreadyExistsException; import org.apache.iceberg.exceptions.NoSuchNamespaceException; import org.apache.iceberg.exceptions.NoSuchTableException; import org.apache.iceberg.inmemory.InMemoryCatalog; -import org.apache.polaris.service.types.GenericTable; +import org.apache.polaris.spark.rest.GenericTable; /** InMemory implementation for the Polaris Catalog. This class is mainly used by testing. */ public class PolarisInMemoryCatalog extends InMemoryCatalog implements PolarisCatalog { diff --git a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java index 3ec9ddbdf..d4d4da6ab 100644 --- a/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java +++ b/plugins/spark/v3.5/spark/src/test/java/org/apache/polaris/spark/rest/DeserializationTest.java @@ -20,14 +20,6 @@ package org.apache.polaris.spark.rest; import static org.assertj.core.api.Assertions.assertThat; -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.JsonFactoryBuilder; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.PropertyNamingStrategies; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import java.util.Map; @@ -36,8 +28,14 @@ import java.util.stream.Stream; import org.apache.iceberg.catalog.Namespace; import 
org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.rest.RESTSerializers; -import org.apache.polaris.service.types.CreateGenericTableRequest; -import org.apache.polaris.service.types.GenericTable; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.JsonAutoDetect; +import org.apache.iceberg.shaded.com.fasterxml.jackson.annotation.PropertyAccessor; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonFactory; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonFactoryBuilder; +import org.apache.iceberg.shaded.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.DeserializationFeature; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.iceberg.shaded.com.fasterxml.jackson.databind.PropertyNamingStrategies; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -137,6 +135,17 @@ public class DeserializationTest { } } + @Test + public void testLoadGenericTableRestResponse() throws JsonProcessingException { + LoadGenericTableRESTResponse request = + new LoadGenericTableRESTResponse( + GenericTable.builder().setName("test-table").setFormat("delta").build()); + String json = mapper.writeValueAsString(request); + LoadGenericTableRESTResponse deserializedResponse = + mapper.readValue(json, LoadGenericTableRESTResponse.class); + assertThat(deserializedResponse.getTable().getName()).isEqualTo("test-table"); + } + private static Stream<Arguments> genericTableTestCases() { var doc = "table for testing"; var properties = Maps.newHashMap();