This is an automated email from the ASF dual-hosted git repository.
jshao pushed a commit to branch branch-0.6
in repository https://gitbox.apache.org/repos/asf/gravitino.git
The following commit(s) were added to refs/heads/branch-0.6 by this push:
new d3f26c63a [#4662] improve(IT): Add ranger authorization Hive E2E test
(#4677)
d3f26c63a is described below
commit d3f26c63aa75c19876e5e9cee266996dda5f0eb6
Author: Xun <[email protected]>
AuthorDate: Mon Aug 26 16:57:47 2024 +0800
[#4662] improve(IT): Add ranger authorization Hive E2E test (#4677)
### What changes were proposed in this pull request?
1. Use an isolated class loader to create an authorization Ranger
plugin.
2. Add authorization for Hive E2E tests using Ranger via Gravitino
RESTful access control.
3. Copy the authorization ranger jar to the
`distribution/package/authorizations/ranger/libs`.
4. Add `Apache Ranger` to license.bin
### Why are the changes needed?
#4662
### Does this PR introduce _any_ user-facing change?
N/A
### How was this patch tested?
CI
---
...est.yml => access-control-integration-test.yml} | 52 ++--
.github/workflows/backend-integration-test.yml | 73 +++++-
LICENSE.bin | 5 +-
NOTICE.bin | 3 +
.../authorization-ranger/build.gradle.kts | 25 +-
.../ranger/integration/test/RangerHiveE2EIT.java | 242 +++++++++++++++++++
.../ranger/integration/test/RangerHiveIT.java | 264 +++++++--------------
.../ranger/integration/test/RangerITEnv.java | 216 ++++++++++++++++-
build.gradle.kts | 19 +-
.../jdbc/JdbcCatalogPropertiesMetadata.java | 7 +-
.../kafka/KafkaCatalogPropertiesMetadata.java | 18 +-
.../apache/gravitino/catalog/CatalogManager.java | 42 +++-
.../apache/gravitino/connector/BaseCatalog.java | 70 +++---
.../connector/authorization/TestAuthorization.java | 7 +
gradle/libs.versions.toml | 2 +
.../integration/test/container/MySQLContainer.java | 2 +-
16 files changed, 770 insertions(+), 277 deletions(-)
diff --git a/.github/workflows/backend-integration-test.yml
b/.github/workflows/access-control-integration-test.yml
similarity index 54%
copy from .github/workflows/backend-integration-test.yml
copy to .github/workflows/access-control-integration-test.yml
index 2e2026aa3..9f7d5f2bd 100644
--- a/.github/workflows/backend-integration-test.yml
+++ b/.github/workflows/access-control-integration-test.yml
@@ -1,4 +1,4 @@
-name: Backend Integration Test
+name: Access Control Integration Test
# Controls when the workflow will run
on:
@@ -22,32 +22,16 @@ jobs:
with:
filters: |
source_changes:
- - .github/**
- api/**
- - bin/**
+ - authorizations/**
- catalogs/**
- clients/client-java/**
- clients/client-java-runtime/**
- - clients/filesystem-hadoop3/**
- - clients/filesystem-hadoop3-runtime/**
- common/**
- - conf/**
- core/**
- - dev/**
- - gradle/**
- - integration-test/**
- integration-test-common/**
- - iceberg/**
- - meta/**
- server/**
- server-common/**
- - trino-connector/**
- - web/**
- - docs/open-api/**
- - build.gradle.kts
- - gradle.properties
- - gradlew
- - setting.gradle.kts
outputs:
source_changes: ${{ steps.filter.outputs.source_changes }}
@@ -59,10 +43,16 @@ jobs:
timeout-minutes: 60
strategy:
matrix:
+ # Integration test for AMD64 architecture
architecture: [linux/amd64]
- java-version: [ 8, 11, 17 ]
+ java-version: [ 17 ]
test-mode: [ embedded, deploy ]
- backend: [ mysql, h2]
+ include:
+ - test-mode: 'embedded'
+ backend: 'h2'
+ - test-mode: 'deploy'
+ backend: 'mysql'
+
env:
PLATFORM: ${{ matrix.architecture }}
steps:
@@ -90,27 +80,17 @@ jobs:
run: |
dev/ci/util_free_space.sh
- - name: Backend Integration Test(JDK${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }})
+ - name: Authorization Integration Test (JDK${{ matrix.java-version
}}-${{ matrix.test-mode }}-${{ matrix.backend }})
id: integrationTest
- run: >
- ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }}
-PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }}
-PskipWebITs -PskipDockerTests=false
- -x :web:test -x :clients:client-python:test -x :flink-connector:test
-x :spark-connector:test -x :spark-connector:spark-common:test
- -x :spark-connector:spark-3.3:test -x
:spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
- -x :spark-connector:spark-runtime-3.3:test -x
:spark-connector:spark-runtime-3.4:test -x
:spark-connector:spark-runtime-3.5:test
+ run: |
+ ./gradlew -PskipTests -PtestMode=${{ matrix.test-mode }}
-PjdbcBackend=${{ matrix.backend }} -PjdkVersion=${{ matrix.java-version }}
-PskipDockerTests=false :authorizations:authorization-ranger:test --tests
"org.apache.gravitino.authorization.ranger.integration.test.**"
- name: Upload integrate tests reports
uses: actions/upload-artifact@v3
if: ${{ (failure() && steps.integrationTest.outcome == 'failure') ||
contains(github.event.pull_request.labels.*.name, 'upload log') }}
with:
- name: integrate-test-reports-${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }}
+ name: authorizations-integrate-test-reports-${{ matrix.java-version
}}
path: |
build/reports
- iceberg/iceberg-rest-server/build/*.log
- integration-test/build/*.log
- integration-test/build/*.tar
- integration-test/build/trino-ci-container-log
- distribution/package/logs/*.out
- distribution/package/logs/*.log
- catalogs/**/*.log
- catalogs/**/*.tar
- distribution/**/*.log
+ distribution/package/logs/gravitino-server.out
+ distribution/package/logs/gravitino-server.log
diff --git a/.github/workflows/backend-integration-test.yml
b/.github/workflows/backend-integration-test.yml
index 2e2026aa3..87f6729ca 100644
--- a/.github/workflows/backend-integration-test.yml
+++ b/.github/workflows/backend-integration-test.yml
@@ -90,13 +90,84 @@ jobs:
run: |
dev/ci/util_free_space.sh
- - name: Backend Integration Test(JDK${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }})
+ - name: Backend Integration Test (JDK${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }})
id: integrationTest
run: >
./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }}
-PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }}
-PskipWebITs -PskipDockerTests=false
-x :web:test -x :clients:client-python:test -x :flink-connector:test
-x :spark-connector:test -x :spark-connector:spark-common:test
-x :spark-connector:spark-3.3:test -x
:spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
-x :spark-connector:spark-runtime-3.3:test -x
:spark-connector:spark-runtime-3.4:test -x
:spark-connector:spark-runtime-3.5:test
+ -x :authorizations:authorization-ranger:test
+
+ - name: Upload integrate tests reports
+ uses: actions/upload-artifact@v3
+ if: ${{ (failure() && steps.integrationTest.outcome == 'failure') ||
contains(github.event.pull_request.labels.*.name, 'upload log') }}
+ with:
+ name: integrate-test-reports-${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }}
+ path: |
+ build/reports
+ iceberg/iceberg-rest-server/build/*.log
+ integration-test/build/*.log
+ integration-test/build/*.tar
+ integration-test/build/trino-ci-container-log
+ distribution/package/logs/*.out
+ distribution/package/logs/*.log
+ catalogs/**/*.log
+ catalogs/**/*.tar
+ distribution/**/*.log
+
+ test-on-pr:
+ needs: changes
+ if: (github.event_name == 'pull_request' &&
needs.changes.outputs.source_changes == 'true')
+ runs-on: ubuntu-latest
+ timeout-minutes: 90
+ strategy:
+ matrix:
+ # Integration test for AMD64 architecture
+ architecture: [ linux/amd64 ]
+ java-version: [ 17 ]
+ test-mode: [ embedded, deploy ]
+ include:
+ - test-mode: 'embedded'
+ backend: 'h2'
+ - test-mode: 'deploy'
+ backend: 'mysql'
+
+ env:
+ PLATFORM: ${{ matrix.architecture }}
+ steps:
+ - uses: actions/checkout@v3
+
+ - uses: actions/setup-java@v4
+ with:
+ java-version: ${{ matrix.java-version }}
+ distribution: 'temurin'
+ cache: 'gradle'
+
+ - name: Set up QEMU
+ uses: docker/setup-qemu-action@v2
+
+ - name: Check required command
+ run: |
+ dev/ci/check_commands.sh
+
+ - name: Package Gravitino
+ if: ${{ matrix.test-mode == 'deploy' }}
+ run: |
+ ./gradlew compileDistribution -x test -PjdkVersion=${{
matrix.java-version }}
+
+ - name: Free up disk space
+ run: |
+ dev/ci/util_free_space.sh
+
+ - name: Backend Integration Test (JDK${{ matrix.java-version }}-${{
matrix.test-mode }}-${{ matrix.backend }})
+ id: integrationTest
+ run: >
+ ./gradlew test -PskipTests -PtestMode=${{ matrix.test-mode }}
-PjdkVersion=${{ matrix.java-version }} -PjdbcBackend=${{ matrix.backend }}
-PskipWebITs -PskipDockerTests=false
+ -x :web:test -x :clients:client-python:test -x :flink-connector:test
-x :spark-connector:test -x :spark-connector:spark-common:test
+ -x :spark-connector:spark-3.3:test -x
:spark-connector:spark-3.4:test -x :spark-connector:spark-3.5:test
+ -x :spark-connector:spark-runtime-3.3:test -x
:spark-connector:spark-runtime-3.4:test -x
:spark-connector:spark-runtime-3.5:test
+ -x :authorizations:authorization-ranger:test
- name: Upload integrate tests reports
uses: actions/upload-artifact@v3
diff --git a/LICENSE.bin b/LICENSE.bin
index b23d1bfef..ca9218f77 100644
--- a/LICENSE.bin
+++ b/LICENSE.bin
@@ -310,6 +310,8 @@
Apache Yetus - Audience Annotations
Apache Kerby
Apache Kyuubi
+ Apache Ranger
+ Apache Ranger intg
Jackson JSON processor
DataNucleus
Modernizer Maven Plugin
@@ -364,7 +366,7 @@
This product bundles various third-party components also under the
BSD license
-
+
JSR305
LevelDB JNI
RocksDB JNI
@@ -446,6 +448,7 @@
Jakarta RESTful WS API
Jakarta XML Binding API
JavaServer Pages(TM) API
+ Javax WS RS API
HK2 API Module
HK2 Service Locator
HK2 Utils
diff --git a/NOTICE.bin b/NOTICE.bin
index 5c63b8189..44cb17a5a 100644
--- a/NOTICE.bin
+++ b/NOTICE.bin
@@ -22,6 +22,9 @@ Copyright 2008-2023 The Apache Software Foundation
Apache Zeppelin
Copyright 2016-2023 The Apache Software Foundation
+Apache Ranger
+Copyright 2014-2024 The Apache Software Foundation
+
Apache Hadoop
Copyright 2006 and onwards The Apache Software Foundation.
diff --git a/authorizations/authorization-ranger/build.gradle.kts
b/authorizations/authorization-ranger/build.gradle.kts
index be7303517..9121a414a 100644
--- a/authorizations/authorization-ranger/build.gradle.kts
+++ b/authorizations/authorization-ranger/build.gradle.kts
@@ -51,11 +51,17 @@ dependencies {
exclude("org.apache.ranger", "ranger-plugins-audit")
exclude("org.apache.ranger", "ranger-plugins-cred")
exclude("org.apache.ranger", "ranger-plugins-classloader")
+ exclude("javax.ws.rs")
}
+ implementation(libs.javax.ws.rs.api)
implementation(libs.javax.jaxb.api) {
exclude("*")
}
+ testImplementation(project(":common"))
+ testImplementation(project(":clients:client-java"))
+ testImplementation(project(":server"))
+ testImplementation(project(":catalogs:catalog-common"))
testImplementation(project(":integration-test-common", "testArtifacts"))
testImplementation(libs.junit.jupiter.api)
testImplementation(libs.mockito.core)
@@ -69,10 +75,12 @@ dependencies {
exclude("org.elasticsearch")
exclude("org.elasticsearch.client")
exclude("org.elasticsearch.plugin")
+ exclude("javax.ws.rs")
}
testImplementation(libs.hive2.jdbc) {
exclude("org.slf4j")
}
+ testImplementation(libs.mysql.driver)
}
tasks {
@@ -80,13 +88,20 @@ tasks {
from(configurations.runtimeClasspath)
into("build/libs")
}
-}
-tasks.build {
- dependsOn("runtimeJars", "javadoc")
+ val copyAuthorizationLibs by registering(Copy::class) {
+ dependsOn("jar", "runtimeJars")
+ from("build/libs")
+ into("$rootDir/distribution/package/authorizations/ranger/libs")
+ }
+
+ register("copyLibAndConfig", Copy::class) {
+ dependsOn(copyAuthorizationLibs)
+ }
}
tasks.test {
+ dependsOn(":catalogs:catalog-hive:jar", ":catalogs:catalog-hive:runtimeJars")
val skipUTs = project.hasProperty("skipTests")
if (skipUTs) {
// Only run integration tests
@@ -101,7 +116,3 @@ tasks.test {
dependsOn(tasks.jar)
}
}
-
-tasks.getByName("generateMetadataFileForMavenJavaPublication") {
- dependsOn("runtimeJars")
-}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
new file mode 100644
index 000000000..89ecbc849
--- /dev/null
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
@@ -0,0 +1,242 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger.integration.test;
+
+import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
+import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_ADMIN_URL;
+import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
+import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
+import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
+import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_USERNAME;
+import static
org.apache.gravitino.integration.test.container.RangerContainer.RANGER_SERVER_PORT;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import org.apache.gravitino.Catalog;
+import org.apache.gravitino.Configs;
+import org.apache.gravitino.MetadataObject;
+import org.apache.gravitino.NameIdentifier;
+import org.apache.gravitino.Schema;
+import org.apache.gravitino.auth.AuthConstants;
+import org.apache.gravitino.authorization.Privileges;
+import org.apache.gravitino.authorization.Role;
+import org.apache.gravitino.authorization.SecurableObject;
+import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
+import org.apache.gravitino.catalog.hive.HiveConstants;
+import org.apache.gravitino.client.GravitinoMetalake;
+import org.apache.gravitino.connector.AuthorizationPropertiesMeta;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.apache.gravitino.integration.test.container.RangerContainer;
+import org.apache.gravitino.integration.test.util.AbstractIT;
+import org.apache.gravitino.integration.test.util.GravitinoITUtils;
+import org.apache.gravitino.rel.Column;
+import org.apache.gravitino.rel.Table;
+import org.apache.gravitino.rel.expressions.NamedReference;
+import org.apache.gravitino.rel.expressions.distributions.Distribution;
+import org.apache.gravitino.rel.expressions.distributions.Distributions;
+import org.apache.gravitino.rel.expressions.distributions.Strategy;
+import org.apache.gravitino.rel.expressions.sorts.NullOrdering;
+import org.apache.gravitino.rel.expressions.sorts.SortDirection;
+import org.apache.gravitino.rel.expressions.sorts.SortOrder;
+import org.apache.gravitino.rel.expressions.sorts.SortOrders;
+import org.apache.gravitino.rel.expressions.transforms.Transforms;
+import org.apache.gravitino.rel.types.Types;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Tag("gravitino-docker-test")
+public class RangerHiveE2EIT extends AbstractIT {
+ private static final Logger LOG =
LoggerFactory.getLogger(RangerHiveE2EIT.class);
+
+ private static RangerAuthorizationPlugin rangerAuthPlugin;
+ public static final String metalakeName =
+
GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase();
+ public static final String catalogName =
+ GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase();
+ public static final String schemaName =
+ GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase();
+ public static final String tableName =
+ GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
+
+ public static final String HIVE_COL_NAME1 = "hive_col_name1";
+ public static final String HIVE_COL_NAME2 = "hive_col_name2";
+ public static final String HIVE_COL_NAME3 = "hive_col_name3";
+
+ private static GravitinoMetalake metalake;
+ private static Catalog catalog;
+ private static final String provider = "hive";
+ private static String HIVE_METASTORE_URIS;
+
+ @BeforeAll
+ public static void startIntegrationTest() throws Exception {
+ Map<String, String> configs = Maps.newHashMap();
+ configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
+ configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+ registerCustomConfigs(configs);
+ AbstractIT.startIntegrationTest();
+
+ RangerITEnv.setup();
+ containerSuite.startHiveContainer();
+ HIVE_METASTORE_URIS =
+ String.format(
+ "thrift://%s:%d",
+ containerSuite.getHiveContainer().getContainerIpAddress(),
+ HiveContainer.HIVE_METASTORE_PORT);
+
+ createMetalake();
+ createCatalogAndRangerAuthPlugin();
+ createSchema();
+ createHiveTable();
+ }
+
+ @AfterAll
+ public static void stop() throws IOException {
+ AbstractIT.client = null;
+ }
+
+ @Test
+ void testCreateRole() {
+ String roleName = RangerITEnv.currentFunName();
+ Map<String, String> properties = Maps.newHashMap();
+ properties.put("k1", "v1");
+
+ SecurableObject table1 =
+ SecurableObjects.parse(
+ String.format("%s.%s.%s", catalogName, schemaName, tableName),
+ MetadataObject.Type.TABLE,
+ Lists.newArrayList(Privileges.SelectTable.allow()));
+ Role role = metalake.createRole(roleName, properties,
Lists.newArrayList(table1));
+ RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role);
+ }
+
+ private static void createMetalake() {
+ GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
+ Assertions.assertEquals(0, gravitinoMetalakes.length);
+
+ GravitinoMetalake createdMetalake =
+ client.createMetalake(metalakeName, "comment", Collections.emptyMap());
+ GravitinoMetalake loadMetalake = client.loadMetalake(metalakeName);
+ Assertions.assertEquals(createdMetalake, loadMetalake);
+
+ metalake = loadMetalake;
+ }
+
+ private static void createCatalogAndRangerAuthPlugin() {
+ rangerAuthPlugin =
+ new RangerAuthorizationPlugin(
+ "hive",
+ ImmutableMap.of(
+ AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ String.format(
+ "http://%s:%d",
+
containerSuite.getRangerContainer().getContainerIpAddress(),
+ RangerContainer.RANGER_SERVER_PORT),
+ AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+ RangerContainer.authType,
+ AuthorizationPropertiesMeta.RANGER_USERNAME,
+ RangerContainer.rangerUserName,
+ AuthorizationPropertiesMeta.RANGER_PASSWORD,
+ RangerContainer.rangerPassword,
+ AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME));
+
+ Map<String, String> properties = Maps.newHashMap();
+ properties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS);
+ properties.put(AUTHORIZATION_PROVIDER, "ranger");
+ properties.put(RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME);
+ properties.put(
+ RANGER_ADMIN_URL,
+ String.format(
+ "http://localhost:%s",
+
containerSuite.getRangerContainer().getMappedPort(RANGER_SERVER_PORT)));
+ properties.put(RANGER_AUTH_TYPE, RangerContainer.authType);
+ properties.put(RANGER_USERNAME, RangerContainer.rangerUserName);
+ properties.put(RANGER_PASSWORD, RangerContainer.rangerPassword);
+
+ metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider,
"comment", properties);
+ catalog = metalake.loadCatalog(catalogName);
+ LOG.info("Catalog created: {}", catalog);
+ }
+
+ private static void createSchema() {
+ Map<String, String> properties = Maps.newHashMap();
+ properties.put("key1", "val1");
+ properties.put("key2", "val2");
+ properties.put(
+ "location",
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse/%s.db",
+ containerSuite.getHiveContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT,
+ schemaName.toLowerCase()));
+ String comment = "comment";
+
+ catalog.asSchemas().createSchema(schemaName, comment, properties);
+ Schema loadSchema = catalog.asSchemas().loadSchema(schemaName);
+ Assertions.assertEquals(schemaName.toLowerCase(), loadSchema.name());
+ }
+
+ public static void createHiveTable() {
+ // Create table from Gravitino API
+ Column[] columns = createColumns();
+ NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
+
+ Distribution distribution =
+ Distributions.of(Strategy.EVEN, 10,
NamedReference.field(HIVE_COL_NAME1));
+
+ final SortOrder[] sortOrders =
+ new SortOrder[] {
+ SortOrders.of(
+ NamedReference.field(HIVE_COL_NAME2),
+ SortDirection.DESCENDING,
+ NullOrdering.NULLS_FIRST)
+ };
+
+ Map<String, String> properties = ImmutableMap.of("key1", "val1", "key2",
"val2");
+ Table createdTable =
+ catalog
+ .asTableCatalog()
+ .createTable(
+ nameIdentifier,
+ columns,
+ "table_comment",
+ properties,
+ Transforms.EMPTY_TRANSFORM,
+ distribution,
+ sortOrders);
+ LOG.info("Table created: {}", createdTable);
+ }
+
+ private static Column[] createColumns() {
+ Column col1 = Column.of(HIVE_COL_NAME1, Types.ByteType.get(),
"col_1_comment");
+ Column col2 = Column.of(HIVE_COL_NAME2, Types.DateType.get(),
"col_2_comment");
+ Column col3 = Column.of(HIVE_COL_NAME3, Types.StringType.get(),
"col_3_comment");
+ return new Column[] {col1, col2, col3};
+ }
+}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
index c21a281b4..79f66ef28 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
@@ -19,6 +19,8 @@
package org.apache.gravitino.authorization.ranger.integration.test;
import static org.apache.gravitino.authorization.SecurableObjects.DOT_SPLITTER;
+import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
+import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.verifyRoleInRanger;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
@@ -34,7 +36,6 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import org.apache.gravitino.MetadataObject;
import org.apache.gravitino.MetadataObjects;
import org.apache.gravitino.authorization.Owner;
@@ -57,7 +58,6 @@ import org.apache.gravitino.meta.RoleEntity;
import org.apache.gravitino.meta.UserEntity;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
-import org.apache.ranger.plugin.model.RangerRole;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
@@ -66,7 +66,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Tag("gravitino-docker-test")
-public class RangerHiveIT extends RangerITEnv {
+public class RangerHiveIT {
private static final Logger LOG =
LoggerFactory.getLogger(RangerHiveIT.class);
private static final ContainerSuite containerSuite =
ContainerSuite.getInstance();
@@ -119,13 +119,6 @@ public class RangerHiveIT extends RangerITEnv {
RangerITEnv.RANGER_HIVE_REPO_NAME));
rangerPolicyHelper = new RangerHelper(rangerAuthPlugin, "hive");
- createRangerHdfsRepository(
- containerSuite.getHiveRangerContainer().getContainerIpAddress(), true);
- createRangerHiveRepository(
- containerSuite.getHiveRangerContainer().getContainerIpAddress(), true);
- allowAnyoneAccessHDFS();
- allowAnyoneAccessInformationSchema();
-
// Create hive connection
String url =
String.format(
@@ -182,7 +175,7 @@ public class RangerHiveIT extends RangerITEnv {
public void testOnRoleCreated() {
RoleEntity role = mock3TableRole(currentFunName());
Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role));
- verifyRoleInRanger(role);
+ verifyRoleInRanger(rangerAuthPlugin, role);
}
@Test
@@ -297,9 +290,9 @@ public class RangerHiveIT extends RangerITEnv {
policyItem.setAccesses(
Arrays.asList(
new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT)));
- updateOrCreateRangerPolicy(
+ RangerITEnv.updateOrCreateRangerPolicy(
RangerDefines.SERVICE_TYPE_HIVE,
- RANGER_HIVE_REPO_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
roleName,
policyResourceMap,
Collections.singletonList(policyItem));
@@ -328,7 +321,7 @@ public class RangerHiveIT extends RangerITEnv {
.withAuditInfo(auditInfo)
.withSecurableObjects(Lists.newArrayList(securableObject1))
.build();
- verifyRoleInRanger(verifyRole1);
+ verifyRoleInRanger(rangerAuthPlugin, verifyRole1);
// 2. Multi-call Add a same entity and privilege to the role, because
support idempotent
// operation, so return true
@@ -336,7 +329,7 @@ public class RangerHiveIT extends RangerITEnv {
rangerAuthPlugin.onRoleUpdated(
mockCatalogRole,
RoleChange.addSecurableObject(mockCatalogRole.name(),
securableObject1)));
- verifyRoleInRanger(verifyRole1);
+ verifyRoleInRanger(rangerAuthPlugin, verifyRole1);
// 3. Add the same metadata but have different privileges to the role
SecurableObject securableObject3 =
@@ -376,7 +369,7 @@ public class RangerHiveIT extends RangerITEnv {
.withAuditInfo(auditInfo)
.withSecurableObjects(securableObjects)
.build();
- verifyRoleInRanger(verifyRole);
+ verifyRoleInRanger(rangerAuthPlugin, verifyRole);
}
}
}
@@ -418,7 +411,7 @@ public class RangerHiveIT extends RangerITEnv {
.withAuditInfo(auditInfo)
.withSecurableObjects(Lists.newArrayList(newSecurableObject))
.build();
- verifyRoleInRanger(verifyRole);
+ verifyRoleInRanger(rangerAuthPlugin, verifyRole);
}
@Test
@@ -465,7 +458,7 @@ public class RangerHiveIT extends RangerITEnv {
.withAuditInfo(auditInfo)
.withSecurableObjects(Lists.newArrayList(newSecurableObject))
.build();
- verifyRoleInRanger(verifyRole);
+ verifyRoleInRanger(rangerAuthPlugin, verifyRole);
verifyOwnerInRanger(oldMetadataObject, Lists.newArrayList(userName));
// Delete the role
@@ -496,12 +489,12 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role),
userEntity1));
- verifyRoleInRanger(role, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1));
// multi-call to granted role to the user1
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role),
userEntity1));
- verifyRoleInRanger(role, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1));
// granted a role to the user2
String userName2 = "user2";
@@ -517,7 +510,7 @@ public class RangerHiveIT extends RangerITEnv {
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role),
userEntity2));
// Same to verify user1 and user2
- verifyRoleInRanger(role, Lists.newArrayList(userName1, userName2));
+ verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1,
userName2));
}
@Test
@@ -538,16 +531,16 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role),
userEntity1));
- verifyRoleInRanger(role, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, Lists.newArrayList(userName1));
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role),
userEntity1));
- verifyRoleInRanger(role, null, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null,
Lists.newArrayList(userName1));
// multi-call to revoked role from user1
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role),
userEntity1));
- verifyRoleInRanger(role, null, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null,
Lists.newArrayList(userName1));
}
@Test
@@ -568,12 +561,12 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role),
groupEntity1));
- verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null, null,
Lists.newArrayList(groupName1));
// multi-call to grant a role to the group1 test idempotent operation
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role),
groupEntity1));
- verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null, null,
Lists.newArrayList(groupName1));
// granted a role to the group2
String groupName2 = "group2";
@@ -589,7 +582,8 @@ public class RangerHiveIT extends RangerITEnv {
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role),
groupEntity2));
// Same to verify group1 and group2
- verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1,
groupName2));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role, null, null, Lists.newArrayList(groupName1,
groupName2));
}
@Test
@@ -610,16 +604,16 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role),
groupEntity1));
- verifyRoleInRanger(role, null, null, Lists.newArrayList(groupName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null, null,
Lists.newArrayList(groupName1));
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role),
groupEntity1));
- verifyRoleInRanger(role, null, null, null, Lists.newArrayList(groupName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null, null, null,
Lists.newArrayList(groupName1));
// multi-call to revoke from the group1
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role),
groupEntity1));
- verifyRoleInRanger(role, null, null, null, Lists.newArrayList(groupName1));
+ verifyRoleInRanger(rangerAuthPlugin, role, null, null, null,
Lists.newArrayList(groupName1));
}
private static class MockOwner implements Owner {
@@ -734,7 +728,7 @@ public class RangerHiveIT extends RangerITEnv {
.withSecurableObjects(Lists.newArrayList(securableObject1))
.build();
Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role1));
- verifyRoleInRanger(role1);
+ verifyRoleInRanger(rangerAuthPlugin, role1);
// Create a `SelectTable` privilege role
SecurableObject securableObject2 =
@@ -750,7 +744,7 @@ public class RangerHiveIT extends RangerITEnv {
.withSecurableObjects(Lists.newArrayList(securableObject2))
.build();
Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role2));
- verifyRoleInRanger(role2);
+ verifyRoleInRanger(rangerAuthPlugin, role2);
// Create a `ModifyTable` privilege role
SecurableObject securableObject3 =
@@ -766,7 +760,7 @@ public class RangerHiveIT extends RangerITEnv {
.withSecurableObjects(Lists.newArrayList(securableObject3))
.build();
Assertions.assertTrue(rangerAuthPlugin.onRoleCreated(role3));
- verifyRoleInRanger(role3);
+ verifyRoleInRanger(rangerAuthPlugin, role3);
/** Test grant to user */
// granted role1 to the user1
@@ -784,7 +778,7 @@ public class RangerHiveIT extends RangerITEnv {
// multiple call to grant role1 to the user1 to test idempotent operation
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1),
userEntity1));
- verifyRoleInRanger(role1, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role1, Lists.newArrayList(userName1));
// granted role1 to the user2
String userName2 = "user2";
@@ -798,7 +792,7 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1),
userEntity2));
- verifyRoleInRanger(role1, Lists.newArrayList(userName1, userName2));
+ verifyRoleInRanger(rangerAuthPlugin, role1, Lists.newArrayList(userName1,
userName2));
// granted role1 to the user3
String userName3 = "user3";
@@ -812,22 +806,25 @@ public class RangerHiveIT extends RangerITEnv {
.build();
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role1),
userEntity3));
- verifyRoleInRanger(role1, Lists.newArrayList(userName1, userName2,
userName3));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role1, Lists.newArrayList(userName1, userName2,
userName3));
// Same granted role2 and role3 to the user1 and user2 and user3
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2,
role3), userEntity1));
- verifyRoleInRanger(role2, Lists.newArrayList(userName1));
- verifyRoleInRanger(role3, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role2, Lists.newArrayList(userName1));
+ verifyRoleInRanger(rangerAuthPlugin, role3, Lists.newArrayList(userName1));
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2,
role3), userEntity2));
- verifyRoleInRanger(role2, Lists.newArrayList(userName1, userName2));
- verifyRoleInRanger(role3, Lists.newArrayList(userName1, userName2));
+ verifyRoleInRanger(rangerAuthPlugin, role2, Lists.newArrayList(userName1,
userName2));
+ verifyRoleInRanger(rangerAuthPlugin, role3, Lists.newArrayList(userName1,
userName2));
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role2,
role3), userEntity3));
- verifyRoleInRanger(role2, Lists.newArrayList(userName1, userName2,
userName3));
- verifyRoleInRanger(role3, Lists.newArrayList(userName1, userName2,
userName3));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role2, Lists.newArrayList(userName1, userName2,
userName3));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role3, Lists.newArrayList(userName1, userName2,
userName3));
/** Test grant to group */
// granted role1 to the group1
@@ -843,6 +840,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1),
groupEntity1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -861,6 +859,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1),
groupEntity2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -879,6 +878,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role1),
groupEntity3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -888,11 +888,13 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2,
role3), groupEntity1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
Lists.newArrayList(userName1, userName2, userName3),
null,
Lists.newArrayList(groupName1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -901,11 +903,13 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2,
role3), groupEntity2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
Lists.newArrayList(userName1, userName2, userName3),
null,
Lists.newArrayList(groupName1, groupName2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -913,11 +917,13 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onGrantedRolesToGroup(Lists.newArrayList(role2,
role3), groupEntity3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
Lists.newArrayList(userName1, userName2, userName3),
null,
Lists.newArrayList(groupName1, groupName2, groupName3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
Lists.newArrayList(userName1, userName2, userName3),
null,
@@ -928,6 +934,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1),
userEntity1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
Lists.newArrayList(userName2, userName3),
Lists.newArrayList(userName1),
@@ -937,6 +944,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1),
userEntity2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
Lists.newArrayList(userName3),
Lists.newArrayList(userName1, userName2),
@@ -946,6 +954,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role1),
userEntity3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -955,29 +964,44 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2,
role3), userEntity1));
verifyRoleInRanger(
- role2, Lists.newArrayList(userName2, userName3),
Lists.newArrayList(userName1));
+ rangerAuthPlugin,
+ role2,
+ Lists.newArrayList(userName2, userName3),
+ Lists.newArrayList(userName1));
verifyRoleInRanger(
- role3, Lists.newArrayList(userName2, userName3),
Lists.newArrayList(userName1));
+ rangerAuthPlugin,
+ role3,
+ Lists.newArrayList(userName2, userName3),
+ Lists.newArrayList(userName1));
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2,
role3), userEntity2));
verifyRoleInRanger(
- role2, Lists.newArrayList(userName3), Lists.newArrayList(userName1,
userName2));
+ rangerAuthPlugin,
+ role2,
+ Lists.newArrayList(userName3),
+ Lists.newArrayList(userName1, userName2));
verifyRoleInRanger(
- role3, Lists.newArrayList(userName3), Lists.newArrayList(userName1,
userName2));
+ rangerAuthPlugin,
+ role3,
+ Lists.newArrayList(userName3),
+ Lists.newArrayList(userName1, userName2));
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2,
role3), userEntity3));
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromUser(Lists.newArrayList(role2,
role3), userEntity3));
- verifyRoleInRanger(role2, null, Lists.newArrayList(userName1, userName2,
userName3));
- verifyRoleInRanger(role3, null, Lists.newArrayList(userName1, userName2,
userName3));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role2, null, Lists.newArrayList(userName1,
userName2, userName3));
+ verifyRoleInRanger(
+ rangerAuthPlugin, role3, null, Lists.newArrayList(userName1,
userName2, userName3));
/** Test revoke from group */
// revoke role1 from the group1
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1),
groupEntity1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -988,6 +1012,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1),
groupEntity2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -998,6 +1023,7 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role1),
groupEntity3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role1,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -1008,12 +1034,14 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2,
role3), groupEntity1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
null,
Lists.newArrayList(userName1, userName2, userName3),
Lists.newArrayList(groupName2, groupName3),
Lists.newArrayList(groupName1));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -1023,12 +1051,14 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2,
role3), groupEntity2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
null,
Lists.newArrayList(userName1, userName2, userName3),
Lists.newArrayList(groupName3),
Lists.newArrayList(groupName1, groupName2));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -1038,12 +1068,14 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertTrue(
rangerAuthPlugin.onRevokedRolesFromGroup(Lists.newArrayList(role2,
role3), groupEntity3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role2,
null,
Lists.newArrayList(userName1, userName2, userName3),
null,
Lists.newArrayList(groupName1, groupName2, groupName3));
verifyRoleInRanger(
+ rangerAuthPlugin,
role3,
null,
Lists.newArrayList(userName1, userName2, userName3),
@@ -1068,102 +1100,6 @@ public class RangerHiveIT extends RangerITEnv {
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
}
- private void verifyRoleInRanger(
- Role role,
- List<String> includeUsers,
- List<String> excludeUsers,
- List<String> includeGroups,
- List<String> excludeGroups) {
- // Verify role in RangerRole
- RangerRole rangerRole = null;
- try {
- rangerRole =
- rangerClient.getRole(
- role.name(), rangerAuthPlugin.rangerAdminName,
RANGER_HIVE_REPO_NAME);
- LOG.info("rangerRole: " + rangerRole.toString());
- } catch (RangerServiceException e) {
- throw new RuntimeException(e);
- }
- rangerRole
- .getUsers()
- .forEach(
- user -> {
- if (includeUsers != null && !includeUsers.isEmpty()) {
- Assertions.assertTrue(
- includeUsers.contains(user.getName()),
- "includeUsersInRole: " + includeUsers + ", user: " +
user.getName());
- }
- if (excludeUsers != null && !excludeUsers.isEmpty()) {
- Assertions.assertFalse(
- excludeUsers.contains(user.getName()),
- "excludeUsersInRole: " + excludeUsers.toString() + ",
user: " + user.getName());
- }
- });
- rangerRole
- .getGroups()
- .forEach(
- group -> {
- if (includeGroups != null && !includeGroups.isEmpty()) {
- Assertions.assertTrue(
- includeGroups.contains(group.getName()),
- "includeGroupsInRole: "
- + includeGroups.toString()
- + ", group: "
- + group.getName());
- }
- if (excludeGroups != null && !excludeGroups.isEmpty()) {
- Assertions.assertFalse(
- excludeGroups.contains(group.getName()),
- "excludeGroupsInRole: "
- + excludeGroups.toString()
- + ", group: "
- + group.getName());
- }
- });
-
- // Verify role in RangerPolicy
- role.securableObjects()
- .forEach(
- securableObject -> {
- RangerPolicy policy;
- try {
- policy =
- rangerClient.getPolicy(
- RangerITEnv.RANGER_HIVE_REPO_NAME,
securableObject.fullName());
- LOG.info("policy: " + policy.toString());
- } catch (RangerServiceException e) {
- LOG.error("Failed to get policy: " +
securableObject.fullName());
- throw new RuntimeException(e);
- }
-
- securableObject
- .privileges()
- .forEach(
- gravitinoPrivilege -> {
- Set<String> mappedPrivileges =
-
rangerAuthPlugin.translatePrivilege(gravitinoPrivilege.name());
-
- boolean match =
- policy.getPolicyItems().stream()
- .filter(
- policyItem -> {
- // Filter Ranger policy item by
Gravitino privilege
- return policyItem.getAccesses().stream()
- .anyMatch(
- access -> {
- return
mappedPrivileges.contains(access.getType());
- });
- })
- .allMatch(
- policyItem -> {
- // Verify role name in Ranger policy item
- return
policyItem.getRoles().contains(role.name());
- });
- Assertions.assertTrue(match);
- });
- });
- }
-
/** Verify the Gravitino role in Ranger service */
private void verifyOwnerInRanger(
MetadataObject metadataObject,
@@ -1175,7 +1111,7 @@ public class RangerHiveIT extends RangerITEnv {
String policyName = metadataObject.fullName();
RangerPolicy policy;
try {
- policy = rangerClient.getPolicy(RangerITEnv.RANGER_HIVE_REPO_NAME,
policyName);
+ policy =
RangerITEnv.rangerClient.getPolicy(RangerITEnv.RANGER_HIVE_REPO_NAME,
policyName);
LOG.info("policy: " + policy.toString());
} catch (RangerServiceException e) {
LOG.error("Failed to get policy: " + policyName);
@@ -1259,29 +1195,11 @@ public class RangerHiveIT extends RangerITEnv {
verifyOwnerInRanger(metadataObject, includeUsers, null, null, null);
}
- private void verifyRoleInRanger(Role role) {
- verifyRoleInRanger(role, null, null, null, null);
- }
-
- private void verifyRoleInRanger(Role role, List<String>
includeRolesInPolicyItem) {
- verifyRoleInRanger(role, includeRolesInPolicyItem, null, null, null);
- }
-
- private void verifyRoleInRanger(
- Role role, List<String> includeRolesInPolicyItem, List<String>
excludeRolesInPolicyItem) {
- verifyRoleInRanger(role, includeRolesInPolicyItem,
excludeRolesInPolicyItem, null, null);
- }
-
- private void verifyRoleInRanger(
- Role role, List<String> includeUsers, List<String> excludeUsers,
List<String> includeGroups) {
- verifyRoleInRanger(role, includeUsers, excludeUsers, includeGroups, null);
- }
-
/** Currently we only test Ranger Hive, So wo Allow anyone to visit HDFS */
static void allowAnyoneAccessHDFS() {
String policyName = currentFunName();
try {
- if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS,
policyName)) {
+ if (null !=
RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS,
policyName)) {
return;
}
} catch (RangerServiceException e) {
@@ -1297,9 +1215,9 @@ public class RangerHiveIT extends RangerITEnv {
new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_READ),
new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE),
new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE)));
- updateOrCreateRangerPolicy(
+ RangerITEnv.updateOrCreateRangerPolicy(
RangerDefines.SERVICE_TYPE_HDFS,
- RANGER_HDFS_REPO_NAME,
+ RangerITEnv.RANGER_HDFS_REPO_NAME,
policyName,
policyResourceMap,
Collections.singletonList(policyItem));
@@ -1312,7 +1230,7 @@ public class RangerHiveIT extends RangerITEnv {
static void allowAnyoneAccessInformationSchema() {
String policyName = currentFunName();
try {
- if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE,
policyName)) {
+ if (null !=
RangerITEnv.rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE,
policyName)) {
return;
}
} catch (RangerServiceException e) {
@@ -1332,9 +1250,9 @@ public class RangerHiveIT extends RangerITEnv {
policyItem.setAccesses(
Arrays.asList(
new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT)));
- updateOrCreateRangerPolicy(
+ RangerITEnv.updateOrCreateRangerPolicy(
RangerDefines.SERVICE_TYPE_HIVE,
- RANGER_HIVE_REPO_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
policyName,
policyResourceMap,
Collections.singletonList(policyItem));
@@ -1353,9 +1271,9 @@ public class RangerHiveIT extends RangerITEnv {
policyItem.setUsers(Arrays.asList(adminUser));
policyItem.setAccesses(
Arrays.asList(new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL)));
- updateOrCreateRangerPolicy(
+ RangerITEnv.updateOrCreateRangerPolicy(
RangerDefines.SERVICE_TYPE_HIVE,
- RANGER_HIVE_REPO_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
"testAllowShowDatabase",
policyResourceMap,
Collections.singletonList(policyItem));
@@ -1383,9 +1301,9 @@ public class RangerHiveIT extends RangerITEnv {
policyItem.setUsers(Arrays.asList(adminUser, anonymousUser));
policyItem.setAccesses(
Arrays.asList(new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_ALL)));
- updateOrCreateRangerPolicy(
+ RangerITEnv.updateOrCreateRangerPolicy(
RangerDefines.SERVICE_TYPE_HIVE,
- RANGER_HIVE_REPO_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
"testAllowShowDatabase",
policyResourceMap,
Collections.singletonList(policyItem));
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index 784f91b2b..2808a2b79 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -20,10 +20,15 @@ package
org.apache.gravitino.authorization.ranger.integration.test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.stream.Collectors;
+import org.apache.gravitino.authorization.Role;
+import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
import org.apache.gravitino.authorization.ranger.RangerHelper;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
import org.apache.gravitino.integration.test.container.ContainerSuite;
@@ -32,11 +37,10 @@ import
org.apache.gravitino.integration.test.container.TrinoContainer;
import org.apache.ranger.RangerClient;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
+import org.apache.ranger.plugin.model.RangerRole;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.ranger.plugin.util.SearchFilter;
-import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -50,16 +54,25 @@ public class RangerITEnv {
protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev";
private static final String RANGER_HDFS_TYPE = "hdfs";
protected static RangerClient rangerClient;
-
+ private static volatile boolean initRangerService = false;
private static final ContainerSuite containerSuite =
ContainerSuite.getInstance();
- @BeforeAll
public static void setup() {
containerSuite.startRangerContainer();
rangerClient = containerSuite.getRangerContainer().rangerClient;
+
+ if (!initRangerService) {
+ synchronized (RangerITEnv.class) {
+ // No IP address set, no impact on testing
+ createRangerHdfsRepository("", true);
+ createRangerHiveRepository("", true);
+ allowAnyoneAccessHDFS();
+ allowAnyoneAccessInformationSchema();
+ initRangerService = true;
+ }
+ }
}
- @AfterAll
public static void cleanup() {
try {
if (rangerClient != null) {
@@ -75,6 +88,71 @@ public class RangerITEnv {
}
}
+ /** Currently we only test Ranger Hive, so we allow anyone to visit HDFS */
+ static void allowAnyoneAccessHDFS() {
+ String policyName = currentFunName();
+ try {
+ if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS,
policyName)) {
+ return;
+ }
+ } catch (RangerServiceException e) {
+ // If the policy doesn't exist, we will create it
+ LOG.warn("Error while fetching policy: {}", e.getMessage());
+ }
+
+ Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
+ ImmutableMap.of(RangerDefines.RESOURCE_PATH, new
RangerPolicy.RangerPolicyResource("/*"));
+ RangerPolicy.RangerPolicyItem policyItem = new
RangerPolicy.RangerPolicyItem();
+ policyItem.setUsers(Arrays.asList(RangerDefines.CURRENT_USER));
+ policyItem.setAccesses(
+ Arrays.asList(
+ new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_READ),
+ new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_WRITE),
+ new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HDFS_EXECUTE)));
+ updateOrCreateRangerPolicy(
+ RangerDefines.SERVICE_TYPE_HDFS,
+ RANGER_HDFS_REPO_NAME,
+ policyName,
+ policyResourceMap,
+ Collections.singletonList(policyItem));
+ }
+
+ /**
+ * Hive must have this policy to allow anyone to access the information
schema and to show
+ * `database`, `tables` and `columns`
+ */
+ static void allowAnyoneAccessInformationSchema() {
+ String policyName = currentFunName();
+ try {
+ if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE,
policyName)) {
+ return;
+ }
+ } catch (RangerServiceException e) {
+ // If the policy doesn't exist, we will create it
+ LOG.warn("Error while fetching policy: {}", e.getMessage());
+ }
+
+ Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
+ ImmutableMap.of(
+ RangerDefines.RESOURCE_DATABASE,
+ new RangerPolicy.RangerPolicyResource("information_schema"),
+ RangerDefines.RESOURCE_TABLE,
+ new RangerPolicy.RangerPolicyResource("*"),
+ RangerDefines.RESOURCE_COLUMN,
+ new RangerPolicy.RangerPolicyResource("*"));
+ RangerPolicy.RangerPolicyItem policyItem = new
RangerPolicy.RangerPolicyItem();
+ policyItem.setGroups(Arrays.asList(RangerDefines.PUBLIC_GROUP));
+ policyItem.setAccesses(
+ Arrays.asList(
+ new
RangerPolicy.RangerPolicyItemAccess(RangerDefines.ACCESS_TYPE_HIVE_SELECT)));
+ updateOrCreateRangerPolicy(
+ RangerDefines.SERVICE_TYPE_HIVE,
+ RANGER_HIVE_REPO_NAME,
+ policyName,
+ policyResourceMap,
+ Collections.singletonList(policyItem));
+ }
+
public void createRangerTrinoRepository(String trinoIp) {
String usernameKey = "username";
String usernameVal = "admin";
@@ -221,6 +299,134 @@ public class RangerITEnv {
}
}
+ protected static void verifyRoleInRanger(
+ RangerAuthorizationPlugin rangerAuthPlugin,
+ Role role,
+ List<String> includeUsers,
+ List<String> excludeUsers,
+ List<String> includeGroups,
+ List<String> excludeGroups) {
+ // Verify role in RangerRole
+ RangerRole rangerRole = null;
+ try {
+ rangerRole =
+ RangerITEnv.rangerClient.getRole(
+ role.name(), rangerAuthPlugin.rangerAdminName,
RangerITEnv.RANGER_HIVE_REPO_NAME);
+ LOG.info("rangerRole: " + rangerRole.toString());
+ } catch (RangerServiceException e) {
+ throw new RuntimeException(e);
+ }
+ rangerRole
+ .getUsers()
+ .forEach(
+ user -> {
+ if (includeUsers != null && !includeUsers.isEmpty()) {
+ Assertions.assertTrue(
+ includeUsers.contains(user.getName()),
+ "includeUsersInRole: " + includeUsers + ", user: " +
user.getName());
+ }
+ if (excludeUsers != null && !excludeUsers.isEmpty()) {
+ Assertions.assertFalse(
+ excludeUsers.contains(user.getName()),
+ "excludeUsersInRole: " + excludeUsers.toString() + ",
user: " + user.getName());
+ }
+ });
+ rangerRole
+ .getGroups()
+ .forEach(
+ group -> {
+ if (includeGroups != null && !includeGroups.isEmpty()) {
+ Assertions.assertTrue(
+ includeGroups.contains(group.getName()),
+ "includeGroupsInRole: "
+ + includeGroups.toString()
+ + ", group: "
+ + group.getName());
+ }
+ if (excludeGroups != null && !excludeGroups.isEmpty()) {
+ Assertions.assertFalse(
+ excludeGroups.contains(group.getName()),
+ "excludeGroupsInRole: "
+ + excludeGroups.toString()
+ + ", group: "
+ + group.getName());
+ }
+ });
+
+ // Verify role in RangerPolicy
+ role.securableObjects()
+ .forEach(
+ securableObject -> {
+ RangerPolicy policy;
+ try {
+ policy =
+ RangerITEnv.rangerClient.getPolicy(
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
securableObject.fullName());
+ LOG.info("policy: " + policy.toString());
+ } catch (RangerServiceException e) {
+ LOG.error("Failed to get policy: " +
securableObject.fullName());
+ throw new RuntimeException(e);
+ }
+
+ securableObject
+ .privileges()
+ .forEach(
+ gravitinoPrivilege -> {
+ Set<String> mappedPrivileges =
+
rangerAuthPlugin.translatePrivilege(gravitinoPrivilege.name());
+
+ boolean match =
+ policy.getPolicyItems().stream()
+ .filter(
+ policyItem -> {
+ // Filter Ranger policy item by
Gravitino privilege
+ return policyItem.getAccesses().stream()
+ .anyMatch(
+ access -> {
+ return
mappedPrivileges.contains(access.getType());
+ });
+ })
+ .allMatch(
+ policyItem -> {
+ // Verify role name in Ranger policy item
+ return
policyItem.getRoles().contains(role.name());
+ });
+ Assertions.assertTrue(match);
+ });
+ });
+ }
+
+ protected static void verifyRoleInRanger(RangerAuthorizationPlugin
rangerAuthPlugin, Role role) {
+ RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role, null, null, null,
null);
+ }
+
+ protected static void verifyRoleInRanger(
+ RangerAuthorizationPlugin rangerAuthPlugin,
+ Role role,
+ List<String> includeRolesInPolicyItem) {
+ RangerITEnv.verifyRoleInRanger(
+ rangerAuthPlugin, role, includeRolesInPolicyItem, null, null, null);
+ }
+
+ protected static void verifyRoleInRanger(
+ RangerAuthorizationPlugin rangerAuthPlugin,
+ Role role,
+ List<String> includeRolesInPolicyItem,
+ List<String> excludeRolesInPolicyItem) {
+ RangerITEnv.verifyRoleInRanger(
+ rangerAuthPlugin, role, includeRolesInPolicyItem,
excludeRolesInPolicyItem, null, null);
+ }
+
+ protected static void verifyRoleInRanger(
+ RangerAuthorizationPlugin rangerAuthPlugin,
+ Role role,
+ List<String> includeUsers,
+ List<String> excludeUsers,
+ List<String> includeGroups) {
+ RangerITEnv.verifyRoleInRanger(
+ rangerAuthPlugin, role, includeUsers, excludeUsers, includeGroups,
null);
+ }
+
protected static void updateOrCreateRangerPolicy(
String type,
String serviceName,
diff --git a/build.gradle.kts b/build.gradle.kts
index 1abcb7a4c..c19ee9945 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -543,7 +543,7 @@ tasks {
val outputDir = projectDir.dir("distribution")
val compileDistribution by registering {
- dependsOn("copySubprojectDependencies", "copyCatalogLibAndConfigs",
"copySubprojectLib", "iceberg:iceberg-rest-server:copyLibAndConfigs")
+ dependsOn("copySubprojectDependencies", "copyCatalogLibAndConfigs",
"copyAuthorizationLibAndConfigs", "copySubprojectLib",
"iceberg:iceberg-rest-server:copyLibAndConfigs")
group = "gravitino distribution"
outputs.dir(projectDir.dir("distribution/package"))
@@ -713,6 +713,7 @@ tasks {
register("copySubprojectDependencies", Copy::class) {
subprojects.forEach() {
if (!it.name.startsWith("catalog") &&
+ !it.name.startsWith("authorization") &&
!it.name.startsWith("client") && !it.name.startsWith("filesystem") &&
!it.name.startsWith("spark") && !it.name.startsWith("iceberg") && it.name !=
"trino-connector" &&
it.name != "integration-test" && it.name != "bundled-catalog" &&
it.name != "flink-connector"
) {
@@ -726,20 +727,16 @@ tasks {
subprojects.forEach() {
if (!it.name.startsWith("catalog") &&
!it.name.startsWith("client") &&
+ !it.name.startsWith("authorization") &&
!it.name.startsWith("filesystem") &&
!it.name.startsWith("spark") &&
!it.name.startsWith("iceberg") &&
!it.name.startsWith("integration-test") &&
- it.name != "authorizations" &&
it.name != "trino-connector" &&
it.name != "bundled-catalog" &&
it.name != "flink-connector"
) {
- if (it.name.startsWith("authorization-")) {
- dependsOn(":authorizations:${it.name}:build")
- } else {
- dependsOn("${it.name}:build")
- }
+ dependsOn("${it.name}:build")
from("${it.name}/build/libs")
into("distribution/package/libs")
include("*.jar")
@@ -757,7 +754,13 @@ tasks {
":catalogs:catalog-jdbc-mysql:copyLibAndConfig",
":catalogs:catalog-jdbc-postgresql:copyLibAndConfig",
":catalogs:catalog-hadoop:copyLibAndConfig",
- "catalogs:catalog-kafka:copyLibAndConfig"
+ ":catalogs:catalog-kafka:copyLibAndConfig"
+ )
+ }
+
+ register("copyAuthorizationLibAndConfigs", Copy::class) {
+ dependsOn(
+ ":authorizations:authorization-ranger:copyLibAndConfig"
)
}
diff --git
a/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java
b/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java
index 915417f8b..ca0275beb 100644
---
a/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java
+++
b/catalogs/catalog-jdbc-common/src/main/java/org/apache/gravitino/catalog/jdbc/JdbcCatalogPropertiesMetadata.java
@@ -23,6 +23,7 @@ import static
org.apache.gravitino.connector.PropertyEntry.stringOptionalPropert
import static org.apache.gravitino.connector.PropertyEntry.stringPropertyEntry;
import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.List;
import java.util.Map;
@@ -94,7 +95,11 @@ public class JdbcCatalogPropertiesMetadata extends
BaseCatalogPropertiesMetadata
JdbcConfig.POOL_MAX_SIZE.getDefaultValue(),
true /* hidden */,
false /* reserved */));
- PROPERTIES_METADATA = Maps.uniqueIndex(propertyEntries,
PropertyEntry::getName);
+ PROPERTIES_METADATA =
+ ImmutableMap.<String, PropertyEntry<?>>builder()
+ .putAll(BASIC_CATALOG_PROPERTY_ENTRIES)
+ .putAll(Maps.uniqueIndex(propertyEntries, PropertyEntry::getName))
+ .build();
}
@Override
diff --git
a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
index caff19942..00193a37a 100644
---
a/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
+++
b/catalogs/catalog-kafka/src/main/java/org/apache/gravitino/catalog/kafka/KafkaCatalogPropertiesMetadata.java
@@ -18,7 +18,7 @@
*/
package org.apache.gravitino.catalog.kafka;
-import java.util.Collections;
+import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.connector.BaseCatalogPropertiesMetadata;
import org.apache.gravitino.connector.PropertyEntry;
@@ -30,13 +30,17 @@ public class KafkaCatalogPropertiesMetadata extends
BaseCatalogPropertiesMetadat
public static final String BOOTSTRAP_SERVERS = "bootstrap.servers";
private static final Map<String, PropertyEntry<?>>
KAFKA_CATALOG_PROPERTY_ENTRIES =
- Collections.singletonMap(
- BOOTSTRAP_SERVERS,
- PropertyEntry.stringRequiredPropertyEntry(
+ ImmutableMap.<String, PropertyEntry<?>>builder()
+ .put(
BOOTSTRAP_SERVERS,
- "The Kafka broker(s) to connect to, allowing for multiple
brokers by comma-separating them",
- false /* immutable */,
- false /* hidden */));
+ PropertyEntry.stringRequiredPropertyEntry(
+ BOOTSTRAP_SERVERS,
+ "The Kafka broker(s) to connect to, allowing for multiple
brokers by "
+ + "comma-separating them",
+ false /* immutable */,
+ false /* hidden */))
+ .putAll(BASIC_CATALOG_PROPERTY_ENTRIES)
+ .build();
@Override
protected Map<String, PropertyEntry<?>> specificPropertyEntries() {
diff --git
a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
index 66e0ea385..f351a3271 100644
--- a/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
+++ b/core/src/main/java/org/apache/gravitino/catalog/CatalogManager.java
@@ -39,6 +39,7 @@ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.time.Instant;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
@@ -52,6 +53,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.CatalogChange;
@@ -701,14 +703,17 @@ public class CatalogManager implements CatalogDispatcher,
Closeable {
// Load Catalog class instance
BaseCatalog<?> catalog = createCatalogInstance(classLoader,
entity.getProvider());
catalog.withCatalogConf(entity.getProperties()).withCatalogEntity(entity);
+ catalog.initAuthorizationPluginInstance(classLoader);
return catalog;
}
private IsolatedClassLoader createClassLoader(String provider, Map<String,
String> conf) {
if (config.get(Configs.CATALOG_LOAD_ISOLATED)) {
- String pkgPath = buildPkgPath(conf, provider);
- String confPath = buildConfPath(conf, provider);
- return IsolatedClassLoader.buildClassLoader(Lists.newArrayList(pkgPath,
confPath));
+ String catalogPkgPath = buildPkgPath(conf, provider);
+ String catalogConfPath = buildConfPath(conf, provider);
+ ArrayList<String> libAndResourcesPaths =
Lists.newArrayList(catalogPkgPath, catalogConfPath);
+ buildAuthorizationPkgPath(conf).ifPresent(libAndResourcesPaths::add);
+ return IsolatedClassLoader.buildClassLoader(libAndResourcesPaths);
} else {
// This will use the current class loader, it is mainly used for test.
return new IsolatedClassLoader(
@@ -824,6 +829,37 @@ public class CatalogManager implements CatalogDispatcher,
Closeable {
return pkgPath;
}
+ private Optional<String> buildAuthorizationPkgPath(Map<String, String> conf)
{
+ String gravitinoHome = System.getenv("GRAVITINO_HOME");
+ Preconditions.checkArgument(gravitinoHome != null, "GRAVITINO_HOME not
set");
+ boolean testEnv = System.getenv("GRAVITINO_TEST") != null;
+
+ String authorizationProvider = conf.get(Catalog.AUTHORIZATION_PROVIDER);
+ if (StringUtils.isBlank(authorizationProvider)) {
+ return Optional.empty();
+ }
+
+ String pkgPath;
+ if (testEnv) {
+ // In test, the authorization package is under the build directory.
+ pkgPath =
+ String.join(
+ File.separator,
+ gravitinoHome,
+ "authorizations",
+ "authorization-" + authorizationProvider,
+ "build",
+ "libs");
+ } else {
+ // In real environment, the authorization package is under the
authorization directory.
+ pkgPath =
+ String.join(
+ File.separator, gravitinoHome, "authorizations",
authorizationProvider, "libs");
+ }
+
+ return Optional.of(pkgPath);
+ }
+
private Class<? extends CatalogProvider> lookupCatalogProvider(String
provider, ClassLoader cl) {
ServiceLoader<CatalogProvider> loader =
ServiceLoader.load(CatalogProvider.class, cl);
diff --git a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
index 45a7b9098..bb3c2f9bd 100644
--- a/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
+++ b/core/src/main/java/org/apache/gravitino/connector/BaseCatalog.java
@@ -38,6 +38,7 @@ import
org.apache.gravitino.connector.authorization.AuthorizationProvider;
import org.apache.gravitino.connector.authorization.BaseAuthorization;
import org.apache.gravitino.connector.capability.Capability;
import org.apache.gravitino.meta.CatalogEntity;
+import org.apache.gravitino.utils.IsolatedClassLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -181,51 +182,52 @@ public abstract class BaseCatalog<T extends BaseCatalog>
public AuthorizationPlugin getAuthorizationPlugin() {
if (authorization == null) {
- synchronized (this) {
- if (authorization == null) {
- BaseAuthorization<?> baseAuthorization =
createAuthorizationPluginInstance();
- if (baseAuthorization == null) {
- return null;
- }
- authorization = baseAuthorization;
- }
- }
+ return null;
}
return authorization.plugin(provider(), this.conf);
}
- private BaseAuthorization<?> createAuthorizationPluginInstance() {
- String authorizationProvider =
- catalogPropertiesMetadata().containsProperty(AUTHORIZATION_PROVIDER)
- ? (String) catalogPropertiesMetadata().getOrDefault(conf,
AUTHORIZATION_PROVIDER)
- : null;
+ public void initAuthorizationPluginInstance(IsolatedClassLoader classLoader)
{
+ if (authorization != null) {
+ return;
+ }
+ String authorizationProvider =
+ (String) catalogPropertiesMetadata().getOrDefault(conf,
AUTHORIZATION_PROVIDER);
if (authorizationProvider == null) {
LOG.info("Authorization provider is not set!");
- return null;
+ return;
}
- ServiceLoader<AuthorizationProvider> loader =
- ServiceLoader.load(
- AuthorizationProvider.class,
Thread.currentThread().getContextClassLoader());
-
- List<Class<? extends AuthorizationProvider>> providers =
- Streams.stream(loader.iterator())
- .filter(p -> p.shortName().equalsIgnoreCase(authorizationProvider))
- .map(AuthorizationProvider::getClass)
- .collect(Collectors.toList());
- if (providers.isEmpty()) {
- throw new IllegalArgumentException(
- "No authorization provider found for: " + authorizationProvider);
- } else if (providers.size() > 1) {
- throw new IllegalArgumentException(
- "Multiple authorization providers found for: " +
authorizationProvider);
- }
try {
- return (BaseAuthorization<?>)
-
Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance();
+ authorization =
+ classLoader.withClassLoader(
+ cl -> {
+ try {
+ ServiceLoader<AuthorizationProvider> loader =
+ ServiceLoader.load(AuthorizationProvider.class, cl);
+
+ List<Class<? extends AuthorizationProvider>> providers =
+ Streams.stream(loader.iterator())
+ .filter(p ->
p.shortName().equalsIgnoreCase(authorizationProvider))
+ .map(AuthorizationProvider::getClass)
+ .collect(Collectors.toList());
+ if (providers.isEmpty()) {
+ throw new IllegalArgumentException(
+ "No authorization provider found for: " +
authorizationProvider);
+ } else if (providers.size() > 1) {
+ throw new IllegalArgumentException(
+ "Multiple authorization providers found for: " +
authorizationProvider);
+ }
+ return (BaseAuthorization<?>)
+
Iterables.getOnlyElement(providers).getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
+ LOG.error("Failed to create authorization instance", e);
+ throw new RuntimeException(e);
+ }
+ });
} catch (Exception e) {
- LOG.error("Failed to create authorization instance", e);
+ LOG.error("Failed to load authorization with class loader", e);
throw new RuntimeException(e);
}
}
diff --git
a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
index 8ccf3a2de..554ef0cec 100644
---
a/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
+++
b/core/src/test/java/org/apache/gravitino/connector/authorization/TestAuthorization.java
@@ -20,6 +20,7 @@ package org.apache.gravitino.connector.authorization;
import com.google.common.collect.ImmutableMap;
import java.time.Instant;
+import java.util.Collections;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Namespace;
import org.apache.gravitino.TestCatalog;
@@ -27,6 +28,7 @@ import
org.apache.gravitino.connector.authorization.mysql.TestMySQLAuthorization
import
org.apache.gravitino.connector.authorization.ranger.TestRangerAuthorizationPlugin;
import org.apache.gravitino.meta.AuditInfo;
import org.apache.gravitino.meta.CatalogEntity;
+import org.apache.gravitino.utils.IsolatedClassLoader;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -54,6 +56,10 @@ public class TestAuthorization {
new TestCatalog()
.withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER,
"ranger"))
.withCatalogEntity(hiveCatalogEntity);
+ IsolatedClassLoader isolatedClassLoader =
+ new IsolatedClassLoader(
+ Collections.emptyList(), Collections.emptyList(),
Collections.emptyList());
+ hiveCatalog.initAuthorizationPluginInstance(isolatedClassLoader);
CatalogEntity mySQLEntity =
CatalogEntity.builder()
@@ -69,6 +75,7 @@ public class TestAuthorization {
new TestCatalog()
.withCatalogConf(ImmutableMap.of(Catalog.AUTHORIZATION_PROVIDER,
"mysql"))
.withCatalogEntity(mySQLEntity);
+ mySQLCatalog.initAuthorizationPluginInstance(isolatedClassLoader);
}
@Test
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index d9d0ab3cc..8cc2b0f48 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -79,6 +79,7 @@ flink = "1.18.0"
cglib = "2.2"
ranger = "2.4.0"
javax-jaxb-api = "2.3.1"
+javax-ws-rs-api = "2.1.1"
protobuf-plugin = "0.9.2"
spotless-plugin = '6.11.0'
gradle-extensions-plugin = '1.74'
@@ -196,6 +197,7 @@ cglib = { group = "cglib", name = "cglib", version.ref =
"cglib"}
ranger-intg = { group = "org.apache.ranger", name = "ranger-intg", version.ref
= "ranger" }
javax-jaxb-api = { group = "javax.xml.bind", name = "jaxb-api", version.ref =
"javax-jaxb-api" }
+javax-ws-rs-api = { group = "javax.ws.rs", name = "javax.ws.rs-api",
version.ref = "javax-ws-rs-api" }
selenium = { group = "org.seleniumhq.selenium", name = "selenium-java",
version.ref = "selenium" }
rauschig = { group = "org.rauschig", name = "jarchivelib", version.ref =
"rauschig" }
mybatis = { group = "org.mybatis", name = "mybatis", version.ref = "mybatis"}
diff --git
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
index 036235e4a..cc5cd53ab 100644
---
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
+++
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/MySQLContainer.java
@@ -129,7 +129,7 @@ public class MySQLContainer extends BaseContainer {
statement.execute(query);
LOG.info(String.format("MySQL container database %s has been created",
testDatabaseName));
} catch (Exception e) {
- LOG.error(e.getMessage(), e);
+ throw new RuntimeException("Failed to create database", e);
}
}