This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git
The following commit(s) were added to refs/heads/main by this push:
new 46a6a20cf [#4759] feat(auth-ranger): Use Spark verify Ranger
authorization Hive (#4948)
46a6a20cf is described below
commit 46a6a20cf56ac7a1eb60849dac1b928ce6d06f86
Author: Xun <[email protected]>
AuthorDate: Fri Sep 27 08:58:50 2024 +0800
[#4759] feat(auth-ranger): Use Spark verify Ranger authorization Hive
(#4948)
### What changes were proposed in this pull request?
1. Shadow integration-test-common module.
2. Add a Ranger authorization Hive integration test that uses Spark to
verify Ranger authorization of Hive.
### Why are the changes needed?
Fix: #4759
### Does this PR introduce _any_ user-facing change?
N/A
### How was this patch tested?
CI Passed.
---
.../authorization-ranger/build.gradle.kts | 42 +--
.../authorization/ranger/RangerHelper.java | 5 +-
.../ranger/integration/test/RangerHiveE2EIT.java | 294 ++++++++++++++-------
.../ranger/integration/test/RangerHiveIT.java | 103 +-------
.../ranger/integration/test/RangerITEnv.java | 33 ++-
.../src/test/resources/log4j2.properties | 73 +++++
.../resources/ranger-spark-security.xml.template | 45 ++++
integration-test-common/build.gradle.kts | 1 -
.../test/container/RangerContainer.java | 6 +-
.../integration/test/util/AbstractIT.java | 7 +-
10 files changed, 388 insertions(+), 221 deletions(-)
diff --git a/authorizations/authorization-ranger/build.gradle.kts
b/authorizations/authorization-ranger/build.gradle.kts
index 47ec7eba5..13f4cc753 100644
--- a/authorizations/authorization-ranger/build.gradle.kts
+++ b/authorizations/authorization-ranger/build.gradle.kts
@@ -24,6 +24,10 @@ plugins {
id("idea")
}
+val scalaVersion: String = project.properties["scalaVersion"] as? String ?:
extra["defaultScalaVersion"].toString()
+val sparkVersion: String = libs.versions.spark35.get()
+val kyuubiVersion: String = libs.versions.kyuubi4spark35.get()
+
dependencies {
implementation(project(":api")) {
exclude(group = "*")
@@ -69,24 +73,30 @@ dependencies {
testImplementation(libs.mockito.core)
testImplementation(libs.testcontainers)
testRuntimeOnly(libs.junit.jupiter.engine)
- testImplementation(libs.ranger.intg) {
- exclude("org.apache.hive", "hive-storage-api")
- exclude("org.apache.lucene")
- exclude("org.apache.solr")
- exclude("org.apache.kafka")
- exclude("org.eclipse.jetty")
- exclude("org.elasticsearch")
- exclude("org.elasticsearch.client")
- exclude("org.elasticsearch.plugin")
- exclude("javax.ws.rs")
- exclude("org.apache.ranger", "ranger-plugin-classloader")
- }
- testImplementation(libs.hive2.jdbc) {
- exclude("org.slf4j")
- exclude("org.eclipse.jetty.aggregate")
- }
testImplementation(libs.mysql.driver)
testImplementation(libs.postgresql.driver)
+ testImplementation(libs.postgresql.driver)
+ testImplementation("org.apache.spark:spark-hive_$scalaVersion:$sparkVersion")
+ testImplementation("org.apache.spark:spark-sql_$scalaVersion:$sparkVersion")
{
+ exclude("org.apache.avro")
+ exclude("org.apache.hadoop")
+ exclude("org.apache.zookeeper")
+ exclude("io.dropwizard.metrics")
+ exclude("org.rocksdb")
+ }
+
testImplementation("org.apache.kyuubi:kyuubi-spark-authz_$scalaVersion:$kyuubiVersion")
{
+ exclude("com.sun.jersey")
+ }
+ testImplementation(libs.hadoop3.client)
+ testImplementation(libs.hadoop3.common) {
+ exclude("com.sun.jersey")
+ exclude("javax.servlet", "servlet-api")
+ }
+ testImplementation(libs.hadoop3.hdfs) {
+ exclude("com.sun.jersey")
+ exclude("javax.servlet", "servlet-api")
+ exclude("io.netty")
+ }
}
tasks {
diff --git
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java
index e34fe5685..13f5a5cba 100644
---
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java
+++
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerHelper.java
@@ -36,6 +36,7 @@ import org.apache.gravitino.authorization.Privilege;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
+import org.apache.ranger.RangerClient;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerRole;
@@ -60,12 +61,12 @@ public class RangerHelper {
/** The policy search keys */
private final List<String> policyResourceDefines;
- private final RangerClientExtension rangerClient;
+ private final RangerClient rangerClient;
private final String rangerAdminName;
private final String rangerServiceName;
public RangerHelper(
- RangerClientExtension rangerClient,
+ RangerClient rangerClient,
String rangerAdminName,
String rangerServiceName,
Map<Privilege.Name, Set<RangerPrivilege>> privilegesMapping,
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
index 1c57a0001..2769d2fbc 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java
@@ -19,7 +19,8 @@
package org.apache.gravitino.authorization.ranger.integration.test;
import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
-import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_ADMIN_URL;
+import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
+import static
org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_AUTH_TYPE;
import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_PASSWORD;
import static
org.apache.gravitino.connector.AuthorizationPropertiesMeta.RANGER_SERVICE_NAME;
@@ -29,17 +30,21 @@ import static
org.apache.gravitino.integration.test.container.RangerContainer.RA
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.List;
import java.util.Map;
+import org.apache.commons.io.FileUtils;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Configs;
import org.apache.gravitino.MetadataObject;
-import org.apache.gravitino.NameIdentifier;
import org.apache.gravitino.Schema;
-import org.apache.gravitino.auth.AuthConstants;
+import org.apache.gravitino.auth.AuthenticatorType;
import org.apache.gravitino.authorization.Privileges;
-import org.apache.gravitino.authorization.Role;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
import org.apache.gravitino.authorization.ranger.RangerAuthorizationHivePlugin;
@@ -51,18 +56,12 @@ import
org.apache.gravitino.integration.test.container.HiveContainer;
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.AbstractIT;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
-import org.apache.gravitino.rel.Column;
-import org.apache.gravitino.rel.Table;
-import org.apache.gravitino.rel.expressions.NamedReference;
-import org.apache.gravitino.rel.expressions.distributions.Distribution;
-import org.apache.gravitino.rel.expressions.distributions.Distributions;
-import org.apache.gravitino.rel.expressions.distributions.Strategy;
-import org.apache.gravitino.rel.expressions.sorts.NullOrdering;
-import org.apache.gravitino.rel.expressions.sorts.SortDirection;
-import org.apache.gravitino.rel.expressions.sorts.SortOrder;
-import org.apache.gravitino.rel.expressions.sorts.SortOrders;
-import org.apache.gravitino.rel.expressions.transforms.Transforms;
-import org.apache.gravitino.rel.types.Types;
+import org.apache.gravitino.meta.AuditInfo;
+import org.apache.gravitino.meta.RoleEntity;
+import org.apache.gravitino.meta.UserEntity;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
@@ -77,63 +76,195 @@ public class RangerHiveE2EIT extends AbstractIT {
private static RangerAuthorizationPlugin rangerAuthPlugin;
public static final String metalakeName =
-
GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_metalake").toLowerCase();
public static final String catalogName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_catalog").toLowerCase();
public static final String schemaName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase();
- public static final String tableName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
-
- public static final String HIVE_COL_NAME1 = "hive_col_name1";
- public static final String HIVE_COL_NAME2 = "hive_col_name2";
- public static final String HIVE_COL_NAME3 = "hive_col_name3";
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_schema").toLowerCase();
private static GravitinoMetalake metalake;
private static Catalog catalog;
private static final String provider = "hive";
private static String HIVE_METASTORE_URIS;
+ private static SparkSession sparkSession = null;
+ private final AuditInfo auditInfo =
+
AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build();
+ private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+ private static final String TEST_USER_NAME = "e2e_it_user";
+
+ private static final String SQL_SHOW_DATABASES =
+ String.format("SHOW DATABASES like '%s'", schemaName);
+
+ private static String RANGER_ADMIN_URL = null;
+
@BeforeAll
public static void startIntegrationTest() throws Exception {
+ // Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
- configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+ configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+ configs.put(Configs.AUTHENTICATORS.getKey(),
AuthenticatorType.SIMPLE.name().toLowerCase());
+ configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
RangerITEnv.setup();
- containerSuite.startHiveContainer();
+ RangerITEnv.startHiveRangerContainer();
+
+ RANGER_ADMIN_URL =
+ String.format(
+ "http://%s:%d",
+ containerSuite.getRangerContainer().getContainerIpAddress(),
RANGER_SERVER_PORT);
+
HIVE_METASTORE_URIS =
String.format(
"thrift://%s:%d",
- containerSuite.getHiveContainer().getContainerIpAddress(),
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
HiveContainer.HIVE_METASTORE_PORT);
+ generateRangerSparkSecurityXML();
+
+ sparkSession =
+ SparkSession.builder()
+ .master("local[1]")
+ .appName("Ranger Hive E2E integration test")
+ .config("hive.metastore.uris", HIVE_METASTORE_URIS)
+ .config(
+ "spark.sql.warehouse.dir",
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse",
+
containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT))
+ .config("spark.sql.storeAssignmentPolicy", "LEGACY")
+ .config("mapreduce.input.fileinputformat.input.dir.recursive",
"true")
+ .config(
+ "spark.sql.extensions",
+
"org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension")
+ .enableHiveSupport()
+ .getOrCreate();
+
createMetalake();
createCatalogAndRangerAuthPlugin();
- createSchema();
- createHiveTable();
+ }
+
+ private static void generateRangerSparkSecurityXML() throws IOException {
+ String templatePath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "src",
+ "test",
+ "resources",
+ "ranger-spark-security.xml.template");
+ String xmlPath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "build",
+ "resources",
+ "test",
+ "ranger-spark-security.xml");
+
+ String templateContext =
+ FileUtils.readFileToString(new File(templatePath),
StandardCharsets.UTF_8);
+ templateContext =
+ templateContext
+ .replace("__REPLACE__RANGER_ADMIN_URL", RANGER_ADMIN_URL)
+ .replace("__REPLACE__RANGER_HIVE_REPO_NAME",
RangerITEnv.RANGER_HIVE_REPO_NAME);
+ FileUtils.writeStringToFile(new File(xmlPath), templateContext,
StandardCharsets.UTF_8);
}
@AfterAll
public static void stop() throws IOException {
+ if (client != null) {
+ Arrays.stream(catalog.asSchemas().listSchemas())
+ .filter(schema -> !schema.equals("default"))
+ .forEach(
+ (schema -> {
+ catalog.asSchemas().dropSchema(schema, true);
+ }));
+ Arrays.stream(metalake.listCatalogs())
+ .forEach(
+ (catalogName -> {
+ metalake.dropCatalog(catalogName);
+ }));
+ client.dropMetalake(metalakeName);
+ }
+ if (sparkSession != null) {
+ sparkSession.close();
+ }
+ try {
+ closer.close();
+ } catch (Exception e) {
+ LOG.error("Failed to close CloseableGroup", e);
+ }
+
AbstractIT.client = null;
}
@Test
- void testCreateRole() {
- String roleName = RangerITEnv.currentFunName();
- Map<String, String> properties = Maps.newHashMap();
- properties.put("k1", "v1");
+ void testAllowUseSchemaPrivilege() throws InterruptedException {
+ // First, create a schema use Gravitino client
+ createSchema();
- SecurableObject table1 =
+ // Use Spark to show this database (schema)
+ Dataset dataset1 = sparkSession.sql(SQL_SHOW_DATABASES);
+ dataset1.show();
+ List<Row> rows1 = dataset1.collectAsList();
+ // The schema should not be shown, because the user does not have the
permission
+ Assertions.assertEquals(
+ 0, rows1.stream().filter(row ->
row.getString(0).equals(schemaName)).count());
+
+ // Create a role with CREATE_SCHEMA privilege
+ SecurableObject securableObject1 =
SecurableObjects.parse(
- String.format("%s.%s.%s", catalogName, schemaName, tableName),
- MetadataObject.Type.TABLE,
- Lists.newArrayList(Privileges.SelectTable.allow()));
- Role role = metalake.createRole(roleName, properties,
Lists.newArrayList(table1));
- RangerITEnv.verifyRoleInRanger(rangerAuthPlugin, role);
+ String.format("%s.%s", catalogName, schemaName),
+ MetadataObject.Type.SCHEMA,
+ Lists.newArrayList(Privileges.CreateSchema.allow()));
+ RoleEntity role =
+ RoleEntity.builder()
+ .withId(1L)
+ .withName(currentFunName())
+ .withAuditInfo(auditInfo)
+ .withSecurableObjects(Lists.newArrayList(securableObject1))
+ .build();
+ rangerAuthPlugin.onRoleCreated(role);
+
+ // Grant this role to the Spark execution user `HADOOP_USER_NAME`
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ UserEntity userEntity1 =
+ UserEntity.builder()
+ .withId(1L)
+ .withName(userName1)
+ .withRoleNames(Collections.emptyList())
+ .withRoleIds(Collections.emptyList())
+ .withAuditInfo(auditInfo)
+ .build();
+ Assertions.assertTrue(
+ rangerAuthPlugin.onGrantedRolesToUser(Lists.newArrayList(role),
userEntity1));
+
+ // After Ranger authorization, we must wait a period of time for the Ranger
Spark plugin to update
+ // the policy. The sleep time must be greater than the policy update interval
+ // (ranger.plugin.spark.policy.pollIntervalMs) in
+ // `resources/ranger-spark-security.xml.template`
+ Thread.sleep(1000L);
+
+ // Use Spark to show this database (schema) again
+ Dataset dataset2 = sparkSession.sql(SQL_SHOW_DATABASES);
+ dataset2.show(100, 100);
+ List<Row> rows2 = dataset2.collectAsList();
+ rows2.stream()
+ .filter(row -> row.getString(0).equals(schemaName))
+ .findFirst()
+ .orElseThrow(() -> new IllegalStateException("Database not found: " +
schemaName));
+ // The schema should be shown, because the user has the permission
+ Assertions.assertEquals(
+ 1, rows2.stream().filter(row ->
row.getString(0).equals(schemaName)).count());
}
private static void createMetalake() {
@@ -153,31 +284,34 @@ public class RangerHiveE2EIT extends AbstractIT {
RangerAuthorizationHivePlugin.getInstance(
ImmutableMap.of(
AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
- String.format(
- "http://%s:%d",
-
containerSuite.getRangerContainer().getContainerIpAddress(),
- RangerContainer.RANGER_SERVER_PORT),
- AuthorizationPropertiesMeta.RANGER_AUTH_TYPE,
+ RANGER_ADMIN_URL,
+ RANGER_AUTH_TYPE,
RangerContainer.authType,
- AuthorizationPropertiesMeta.RANGER_USERNAME,
+ RANGER_USERNAME,
RangerContainer.rangerUserName,
- AuthorizationPropertiesMeta.RANGER_PASSWORD,
+ RANGER_PASSWORD,
RangerContainer.rangerPassword,
- AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
+ RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HIVE_REPO_NAME));
- Map<String, String> properties = Maps.newHashMap();
- properties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS);
- properties.put(AUTHORIZATION_PROVIDER, "ranger");
- properties.put(RANGER_SERVICE_NAME, RangerITEnv.RANGER_HIVE_REPO_NAME);
- properties.put(
- RANGER_ADMIN_URL,
- String.format(
- "http://localhost:%s",
-
containerSuite.getRangerContainer().getMappedPort(RANGER_SERVER_PORT)));
- properties.put(RANGER_AUTH_TYPE, RangerContainer.authType);
- properties.put(RANGER_USERNAME, RangerContainer.rangerUserName);
- properties.put(RANGER_PASSWORD, RangerContainer.rangerPassword);
+ Map<String, String> properties =
+ ImmutableMap.of(
+ HiveConstants.METASTORE_URIS,
+ HIVE_METASTORE_URIS,
+ IMPERSONATION_ENABLE,
+ "true",
+ AUTHORIZATION_PROVIDER,
+ "ranger",
+ RANGER_SERVICE_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
+ AuthorizationPropertiesMeta.RANGER_ADMIN_URL,
+ RANGER_ADMIN_URL,
+ RANGER_AUTH_TYPE,
+ RangerContainer.authType,
+ RANGER_USERNAME,
+ RangerContainer.rangerUserName,
+ RANGER_PASSWORD,
+ RangerContainer.rangerPassword);
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider,
"comment", properties);
catalog = metalake.loadCatalog(catalogName);
@@ -192,7 +326,7 @@ public class RangerHiveE2EIT extends AbstractIT {
"location",
String.format(
"hdfs://%s:%d/user/hive/warehouse/%s.db",
- containerSuite.getHiveContainer().getContainerIpAddress(),
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
HiveContainer.HDFS_DEFAULTFS_PORT,
schemaName.toLowerCase()));
String comment = "comment";
@@ -201,42 +335,4 @@ public class RangerHiveE2EIT extends AbstractIT {
Schema loadSchema = catalog.asSchemas().loadSchema(schemaName);
Assertions.assertEquals(schemaName.toLowerCase(), loadSchema.name());
}
-
- public static void createHiveTable() {
- // Create table from Gravitino API
- Column[] columns = createColumns();
- NameIdentifier nameIdentifier = NameIdentifier.of(schemaName, tableName);
-
- Distribution distribution =
- Distributions.of(Strategy.EVEN, 10,
NamedReference.field(HIVE_COL_NAME1));
-
- final SortOrder[] sortOrders =
- new SortOrder[] {
- SortOrders.of(
- NamedReference.field(HIVE_COL_NAME2),
- SortDirection.DESCENDING,
- NullOrdering.NULLS_FIRST)
- };
-
- Map<String, String> properties = ImmutableMap.of("key1", "val1", "key2",
"val2");
- Table createdTable =
- catalog
- .asTableCatalog()
- .createTable(
- nameIdentifier,
- columns,
- "table_comment",
- properties,
- Transforms.EMPTY_TRANSFORM,
- distribution,
- sortOrders);
- LOG.info("Table created: {}", createdTable);
- }
-
- private static Column[] createColumns() {
- Column col1 = Column.of(HIVE_COL_NAME1, Types.ByteType.get(),
"col_1_comment");
- Column col2 = Column.of(HIVE_COL_NAME2, Types.DateType.get(),
"col_2_comment");
- Column col3 = Column.of(HIVE_COL_NAME3, Types.StringType.get(),
"col_3_comment");
- return new Column[] {col1, col2, col3};
- }
}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
index 7f5579c47..97005e58c 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
@@ -19,18 +19,12 @@
package org.apache.gravitino.authorization.ranger.integration.test;
import static org.apache.gravitino.authorization.SecurableObjects.DOT_SPLITTER;
-import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.RESOURCE_DATABASE;
import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.currentFunName;
import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.rangerClient;
import static
org.apache.gravitino.authorization.ranger.integration.test.RangerITEnv.verifyRoleInRanger;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
@@ -75,12 +69,9 @@ public class RangerHiveIT {
private static final Logger LOG =
LoggerFactory.getLogger(RangerHiveIT.class);
private static final ContainerSuite containerSuite =
ContainerSuite.getInstance();
- private static Connection adminConnection;
- private static Connection anonymousConnection;
private static final String adminUser = "gravitino";
- private static final String anonymousUser = "anonymous";
private static RangerAuthorizationPlugin rangerAuthPlugin;
- private static RangerHelper rangerPolicyHelper;
+ private static RangerHelper rangerHelper;
private final AuditInfo auditInfo =
AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build();
@@ -121,7 +112,7 @@ public class RangerHiveIT {
RangerContainer.rangerPassword,
AuthorizationPropertiesMeta.RANGER_SERVICE_NAME,
RangerITEnv.RANGER_HIVE_REPO_NAME));
- rangerPolicyHelper =
+ rangerHelper =
new RangerHelper(
rangerClient,
RangerContainer.rangerUserName,
@@ -129,20 +120,6 @@ public class RangerHiveIT {
rangerAuthPlugin.privilegesMappingRule(),
rangerAuthPlugin.ownerMappingRule(),
rangerAuthPlugin.policyResourceDefinesRule());
-
- // Create hive connection
- String url =
- String.format(
- "jdbc:hive2://%s:%d/default",
- containerSuite.getHiveRangerContainer().getContainerIpAddress(),
- HiveContainer.HIVE_SERVICE_PORT);
- try {
- Class.forName("org.apache.hive.jdbc.HiveDriver");
- adminConnection = DriverManager.getConnection(url, adminUser, "");
- anonymousConnection = DriverManager.getConnection(url, anonymousUser,
"");
- } catch (ClassNotFoundException | SQLException e) {
- throw new RuntimeException(e);
- }
}
/**
@@ -201,7 +178,7 @@ public class RangerHiveIT {
role.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNull(rangerHelper.findManagedPolicy(securableObject)));
}
@Test
@@ -223,7 +200,7 @@ public class RangerHiveIT {
role.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject)));
}
@Test
@@ -273,14 +250,14 @@ public class RangerHiveIT {
String.format("catalog.%s3.tab1", dbName),
MetadataObject.Type.TABLE,
Lists.newArrayList(Privileges.CreateTable.allow()));
-
Assertions.assertNull(rangerPolicyHelper.findManagedPolicy(securableObject1));
+ Assertions.assertNull(rangerHelper.findManagedPolicy(securableObject1));
// Add a policy for `db3.tab1`
createHivePolicy(
Lists.newArrayList(String.format("%s3", dbName), "tab1"),
GravitinoITUtils.genRandomName(currentFunName()));
// findManagedPolicy function use precise search, so return not null
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject1));
+ Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject1));
}
static void createHivePolicy(List<String> metaObjects, String roleName) {
@@ -479,7 +456,7 @@ public class RangerHiveIT {
role.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject)));
verifyOwnerInRanger(oldMetadataObject, Lists.newArrayList(userName));
}
@@ -1101,15 +1078,15 @@ public class RangerHiveIT {
role1.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject)));
role2.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject)));
role3.securableObjects().stream()
.forEach(
securableObject ->
-
Assertions.assertNotNull(rangerPolicyHelper.findManagedPolicy(securableObject)));
+
Assertions.assertNotNull(rangerHelper.findManagedPolicy(securableObject)));
}
/** Verify the Gravitino role in Ranger service */
@@ -1208,64 +1185,4 @@ public class RangerHiveIT {
private void verifyOwnerInRanger(MetadataObject metadataObject, List<String>
includeUsers) {
verifyOwnerInRanger(metadataObject, includeUsers, null, null, null);
}
-
- @Test
- public void testCreateDatabase() throws Exception {
- String dbName = currentFunName().toLowerCase(); // Hive database name is
case-insensitive
-
- // Only allow admin user to operation database `db1`
- // Other users can't see the database `db1`
- Map<String, RangerPolicy.RangerPolicyResource> policyResourceMap =
- ImmutableMap.of(RESOURCE_DATABASE, new
RangerPolicy.RangerPolicyResource(dbName));
- RangerPolicy.RangerPolicyItem policyItem = new
RangerPolicy.RangerPolicyItem();
- policyItem.setUsers(Arrays.asList(adminUser));
- policyItem.setAccesses(
- Arrays.asList(
- new RangerPolicy.RangerPolicyItemAccess(
- RangerPrivilege.RangerHivePrivilege.ALL.toString())));
- RangerITEnv.updateOrCreateRangerPolicy(
- RangerDefines.SERVICE_TYPE_HIVE,
- RangerITEnv.RANGER_HIVE_REPO_NAME,
- "testAllowShowDatabase",
- policyResourceMap,
- Collections.singletonList(policyItem));
-
- Statement adminStmt = adminConnection.createStatement();
- adminStmt.execute(String.format("CREATE DATABASE %s", dbName));
- String sql = "show databases";
- ResultSet adminRS = adminStmt.executeQuery(sql);
- List<String> adminDbs = new ArrayList<>();
- while (adminRS.next()) {
- adminDbs.add(adminRS.getString(1));
- }
- Assertions.assertTrue(adminDbs.contains(dbName), "adminDbs : " + adminDbs);
-
- // Anonymous user can't see the database `db1`
- Statement anonymousStmt = anonymousConnection.createStatement();
- ResultSet anonymousRS = anonymousStmt.executeQuery(sql);
- List<String> anonymousDbs = new ArrayList<>();
- while (anonymousRS.next()) {
- anonymousDbs.add(anonymousRS.getString(1));
- }
- Assertions.assertFalse(anonymousDbs.contains(dbName), "anonymous : " +
anonymousDbs);
-
- // Allow anonymous user to see the database `db1`
- policyItem.setUsers(Arrays.asList(adminUser, anonymousUser));
- policyItem.setAccesses(
- Arrays.asList(
- new RangerPolicy.RangerPolicyItemAccess(
- RangerPrivilege.RangerHivePrivilege.ALL.toString())));
- RangerITEnv.updateOrCreateRangerPolicy(
- RangerDefines.SERVICE_TYPE_HIVE,
- RangerITEnv.RANGER_HIVE_REPO_NAME,
- "testAllowShowDatabase",
- policyResourceMap,
- Collections.singletonList(policyItem));
- anonymousRS = anonymousStmt.executeQuery(sql);
- anonymousDbs.clear();
- while (anonymousRS.next()) {
- anonymousDbs.add(anonymousRS.getString(1));
- }
- Assertions.assertTrue(anonymousDbs.contains(dbName));
- }
}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
index 9a9d713f7..563ec7fd1 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerITEnv.java
@@ -29,13 +29,14 @@ import java.util.Set;
import java.util.stream.Collectors;
import org.apache.gravitino.authorization.Role;
import org.apache.gravitino.authorization.ranger.RangerAuthorizationPlugin;
-import org.apache.gravitino.authorization.ranger.RangerClientExtension;
import org.apache.gravitino.authorization.ranger.RangerHelper;
import org.apache.gravitino.authorization.ranger.RangerPrivilege;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
import org.apache.gravitino.integration.test.container.ContainerSuite;
import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.container.TrinoContainer;
+import org.apache.ranger.RangerClient;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerRole;
@@ -54,7 +55,8 @@ public class RangerITEnv {
private static final String RANGER_HIVE_TYPE = "hive";
protected static final String RANGER_HDFS_REPO_NAME = "hdfsDev";
private static final String RANGER_HDFS_TYPE = "hdfs";
- protected static RangerClientExtension rangerClient;
+ protected static RangerClient rangerClient;
+ protected static final String HADOOP_USER_NAME = "gravitino";
private static volatile boolean initRangerService = false;
private static final ContainerSuite containerSuite =
ContainerSuite.getInstance();
@@ -106,11 +108,30 @@ public class RangerITEnv {
}
}
+ static void startHiveRangerContainer() {
+ containerSuite.startHiveRangerContainer(
+ new HashMap<>(
+ ImmutableMap.of(
+ HiveContainer.HIVE_RUNTIME_VERSION,
+ HiveContainer.HIVE3,
+ RangerContainer.DOCKER_ENV_RANGER_SERVER_URL,
+ String.format(
+ "http://%s:%d",
+
containerSuite.getRangerContainer().getContainerIpAddress(),
+ RangerContainer.RANGER_SERVER_PORT),
+ RangerContainer.DOCKER_ENV_RANGER_HIVE_REPOSITORY_NAME,
+ RangerITEnv.RANGER_HIVE_REPO_NAME,
+ RangerContainer.DOCKER_ENV_RANGER_HDFS_REPOSITORY_NAME,
+ RangerITEnv.RANGER_HDFS_REPO_NAME,
+ HiveContainer.HADOOP_USER_NAME,
+ HADOOP_USER_NAME)));
+ }
+
/** Currently we only test Ranger Hive, so we allow anyone to access HDFS */
static void allowAnyoneAccessHDFS() {
String policyName = currentFunName();
try {
- if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HDFS,
policyName)) {
+ if (null != rangerClient.getPolicy(RANGER_HDFS_REPO_NAME, policyName)) {
return;
}
} catch (RangerServiceException e) {
@@ -131,7 +152,7 @@ public class RangerITEnv {
new RangerPolicy.RangerPolicyItemAccess(
RangerPrivilege.RangerHdfsPrivilege.EXECUTE.toString())));
updateOrCreateRangerPolicy(
- RangerDefines.SERVICE_TYPE_HDFS,
+ RANGER_HDFS_TYPE,
RANGER_HDFS_REPO_NAME,
policyName,
policyResourceMap,
@@ -145,7 +166,7 @@ public class RangerITEnv {
static void allowAnyoneAccessInformationSchema() {
String policyName = currentFunName();
try {
- if (null != rangerClient.getPolicy(RangerDefines.SERVICE_TYPE_HIVE,
policyName)) {
+ if (null != rangerClient.getPolicy(RANGER_HIVE_REPO_NAME, policyName)) {
return;
}
} catch (RangerServiceException e) {
@@ -168,7 +189,7 @@ public class RangerITEnv {
new RangerPolicy.RangerPolicyItemAccess(
RangerPrivilege.RangerHivePrivilege.SELECT.toString())));
updateOrCreateRangerPolicy(
- RangerDefines.SERVICE_TYPE_HIVE,
+ RANGER_HIVE_TYPE,
RANGER_HIVE_REPO_NAME,
policyName,
policyResourceMap,
diff --git
a/authorizations/authorization-ranger/src/test/resources/log4j2.properties
b/authorizations/authorization-ranger/src/test/resources/log4j2.properties
new file mode 100644
index 000000000..8bda5f6e8
--- /dev/null
+++ b/authorizations/authorization-ranger/src/test/resources/log4j2.properties
@@ -0,0 +1,73 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Set to debug or trace if log4j initialization is failing
+status = info
+
+# Name of the configuration
+name = ConsoleLogConfig
+
+# Console appender configuration
+appender.console.type = Console
+appender.console.name = consoleLogger
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p [%t] %c{1}:%L -
%m%n
+
+# Log files location
+property.logPath =
${sys:gravitino.log.path:-build/authorization-ranger-integration-test.log}
+
+# File appender configuration
+appender.file.type = File
+appender.file.name = fileLogger
+appender.file.fileName = ${logPath}
+appender.file.layout.type = PatternLayout
+appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %-5p %c - %m%n
+
+# Root logger level
+rootLogger.level = info
+
+# Root logger referring to console and file appenders
+rootLogger.appenderRef.stdout.ref = consoleLogger
+rootLogger.appenderRef.file.ref = fileLogger
+
+# File appender configuration for testcontainers
+appender.testcontainersFile.type = File
+appender.testcontainersFile.name = testcontainersLogger
+appender.testcontainersFile.fileName = build/testcontainers.log
+appender.testcontainersFile.layout.type = PatternLayout
+appender.testcontainersFile.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} [%t]
%-5p %c - %m%n
+
+# Logger for testcontainers
+logger.testcontainers.name = org.testcontainers
+logger.testcontainers.level = debug
+logger.testcontainers.additivity = false
+logger.testcontainers.appenderRef.file.ref = testcontainersLogger
+
+logger.tc.name = tc
+logger.tc.level = debug
+logger.tc.additivity = false
+logger.tc.appenderRef.file.ref = testcontainersLogger
+
+logger.docker.name = com.github.dockerjava
+logger.docker.level = warn
+logger.docker.additivity = false
+logger.docker.appenderRef.file.ref = testcontainersLogger
+
+logger.http.name =
com.github.dockerjava.zerodep.shaded.org.apache.hc.client5.http.wire
+logger.http.level = off
diff --git
a/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template
b/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template
new file mode 100644
index 000000000..eb7f2b5e8
--- /dev/null
+++
b/authorizations/authorization-ranger/src/test/resources/ranger-spark-security.xml.template
@@ -0,0 +1,45 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+ -->
+<configuration>
+ <property>
+ <name>ranger.plugin.spark.policy.rest.url</name>
+ <value>__REPLACE__RANGER_ADMIN_URL</value>
+ </property>
+
+ <property>
+ <name>ranger.plugin.spark.service.name</name>
+ <value>__REPLACE__RANGER_HIVE_REPO_NAME</value>
+ </property>
+
+ <property>
+ <name>ranger.plugin.spark.policy.cache.dir</name>
+ <value>/tmp/policycache</value>
+ </property>
+
+ <property>
+ <name>ranger.plugin.spark.policy.pollIntervalMs</name>
+ <value>500</value>
+ </property>
+
+ <property>
+ <name>ranger.plugin.spark.policy.source.impl</name>
+ <value>org.apache.ranger.admin.client.RangerAdminRESTClient</value>
+ </property>
+
+</configuration>
\ No newline at end of file
diff --git a/integration-test-common/build.gradle.kts
b/integration-test-common/build.gradle.kts
index 449c38efc..a25ad4cff 100644
--- a/integration-test-common/build.gradle.kts
+++ b/integration-test-common/build.gradle.kts
@@ -32,7 +32,6 @@ dependencies {
testImplementation(project(":core"))
testImplementation(project(":server"))
testImplementation(project(":server-common"))
- testImplementation(project(":authorizations:authorization-ranger"))
testImplementation(libs.bundles.jetty)
testImplementation(libs.bundles.jersey)
testImplementation(libs.bundles.jwt)
diff --git
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/RangerContainer.java
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/RangerContainer.java
index 1aa91e086..54b2afc0c 100644
---
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/RangerContainer.java
+++
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/container/RangerContainer.java
@@ -25,7 +25,7 @@ import com.google.common.collect.ImmutableSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
-import org.apache.gravitino.authorization.ranger.RangerClientExtension;
+import org.apache.ranger.RangerClient;
import org.apache.ranger.RangerServiceException;
import org.rnorth.ducttape.Preconditions;
import org.slf4j.Logger;
@@ -38,7 +38,7 @@ public class RangerContainer extends BaseContainer {
public static final String DEFAULT_IMAGE =
System.getenv("GRAVITINO_CI_RANGER_DOCKER_IMAGE");
public static final String HOST_NAME = "gravitino-ci-ranger";
public static final int RANGER_SERVER_PORT = 6080;
- public RangerClientExtension rangerClient;
+ public RangerClient rangerClient;
private String rangerUrl;
/**
@@ -83,7 +83,7 @@ public class RangerContainer extends BaseContainer {
super.start();
rangerUrl = String.format("http://localhost:%s",
this.getMappedPort(RANGER_SERVER_PORT));
- rangerClient = new RangerClientExtension(rangerUrl, authType,
rangerUserName, rangerPassword);
+ rangerClient = new RangerClient(rangerUrl, authType, rangerUserName,
rangerPassword, null);
Preconditions.check("Ranger container startup failed!",
checkContainerStatus(10));
}
diff --git
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java
index 6644e1f64..71d92d6c9 100644
---
a/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java
+++
b/integration-test-common/src/test/java/org/apache/gravitino/integration/test/util/AbstractIT.java
@@ -340,7 +340,12 @@ public class AbstractIT {
if (authenticators.contains(AuthenticatorType.OAUTH.name().toLowerCase()))
{
client =
GravitinoAdminClient.builder(serverUri).withOAuth(mockDataProvider).build();
} else if
(authenticators.contains(AuthenticatorType.SIMPLE.name().toLowerCase())) {
- client =
GravitinoAdminClient.builder(serverUri).withSimpleAuth().build();
+ String userName = customConfigs.get("SimpleAuthUserName");
+ if (userName != null) {
+ client =
GravitinoAdminClient.builder(serverUri).withSimpleAuth(userName).build();
+ } else {
+ client =
GravitinoAdminClient.builder(serverUri).withSimpleAuth().build();
+ }
} else if
(authenticators.contains(AuthenticatorType.KERBEROS.name().toLowerCase())) {
serverUri = "http://localhost:" + jettyServerConfig.getHttpPort();
client = null;