xunliu commented on code in PR #4948:
URL: https://github.com/apache/gravitino/pull/4948#discussion_r1773302725
##########
core/src/main/java/org/apache/gravitino/storage/relational/database/H2Database.java:
##########
@@ -54,6 +54,12 @@ public String startH2Database(Config config) {
String connectionUrl = constructH2URI(originalJDBCUrl, storagePath);
+ try {
+ Class.forName("org.h2.Driver");
Review Comment:
I used `mergeServiceFiles()` to fix this problem.
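For background: `mergeServiceFiles()` is the Gradle Shadow plugin's way of concatenating `META-INF/services` entries from all dependencies into the shaded jar, so `ServiceLoader`-based registration (such as the `java.sql.Driver` entry that registers `org.h2.Driver`) keeps working, which is typically why an explicit `Class.forName("org.h2.Driver")` fallback would otherwise be needed. A minimal sketch of what that looks like in a `build.gradle.kts` (plugin version and layout here are illustrative, not taken from this PR):

```kotlin
plugins {
    `java-library`
    // Shadow plugin provides the shadowJar task; the version is illustrative.
    id("com.github.johnrengelman.shadow") version "8.1.1"
}

tasks.shadowJar {
    // Merge META-INF/services files from all dependencies so that
    // ServiceLoader registrations (e.g. the java.sql.Driver entry for
    // org.h2.Driver) are preserved in the shaded jar.
    mergeServiceFiles()
}
```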
##########
authorizations/authorization-ranger/build.gradle.kts:
##########
@@ -57,36 +61,26 @@ dependencies {
exclude("net.java.dev.jna")
exclude("javax.ws.rs")
exclude("org.eclipse.jetty")
+ exclude("com.amazonaws", "aws-java-sdk-bundle")
Review Comment:
`integration-test-common` will only compile the test jar, not the release
jar.
##########
authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java:
##########
@@ -77,63 +76,195 @@ public class RangerHiveE2EIT extends AbstractIT {
private static RangerAuthorizationPlugin rangerAuthPlugin;
public static final String metalakeName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_metalake").toLowerCase();
public static final String catalogName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_catalog").toLowerCase();
public static final String schemaName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase();
- public static final String tableName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
-
- public static final String HIVE_COL_NAME1 = "hive_col_name1";
- public static final String HIVE_COL_NAME2 = "hive_col_name2";
- public static final String HIVE_COL_NAME3 = "hive_col_name3";
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_schema").toLowerCase();
private static GravitinoMetalake metalake;
private static Catalog catalog;
private static final String provider = "hive";
private static String HIVE_METASTORE_URIS;
+ private static SparkSession sparkSession = null;
+ private final AuditInfo auditInfo =
+ AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build();
+ private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+ private static final String TEST_USER_NAME = "e2e_it_user";
+
+ private static final String SQL_SHOW_DATABASES =
+ String.format("SHOW DATABASES like '%s'", schemaName);
+
+ private static String RANGER_ADMIN_URL = null;
+
@BeforeAll
public static void startIntegrationTest() throws Exception {
+ // Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
- configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+ configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+ configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
+ configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
RangerITEnv.setup();
- containerSuite.startHiveContainer();
+ RangerITEnv.startHiveRangerContainer();
+
+ RANGER_ADMIN_URL =
+ String.format(
+ "http://%s:%d",
+ containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT);
+
HIVE_METASTORE_URIS =
String.format(
"thrift://%s:%d",
- containerSuite.getHiveContainer().getContainerIpAddress(),
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
HiveContainer.HIVE_METASTORE_PORT);
+ generateRangerSparkSecurityXML();
+
+ sparkSession =
+ SparkSession.builder()
+ .master("local[1]")
+ .appName("Hive Catalog integration test")
+ .config("hive.metastore.uris", HIVE_METASTORE_URIS)
+ .config(
+ "spark.sql.warehouse.dir",
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse",
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT))
+ .config("spark.sql.storeAssignmentPolicy", "LEGACY")
+ .config("mapreduce.input.fileinputformat.input.dir.recursive",
"true")
+ .config(
+ "spark.sql.extensions",
+ "org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension")
+ .enableHiveSupport()
+ .getOrCreate();
+
createMetalake();
createCatalogAndRangerAuthPlugin();
- createSchema();
- createHiveTable();
+ }
+
+ private static void generateRangerSparkSecurityXML() throws IOException {
+ String templatePath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "src",
+ "test",
+ "resources",
+ "ranger-spark-security.xml.template");
+ String xmlPath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "build",
+ "resources",
+ "test",
+ "ranger-spark-security.xml");
+
+ String templateContext =
+ FileUtils.readFileToString(new File(templatePath), StandardCharsets.UTF_8);
+ templateContext =
+ templateContext
+ .replace("__REPLACE__RANGER_ADMIN_URL", RANGER_ADMIN_URL)
+ .replace("__REPLACE__RANGER_HIVE_REPO_NAME",
RangerITEnv.RANGER_HIVE_REPO_NAME);
+ FileUtils.writeStringToFile(new File(xmlPath), templateContext, StandardCharsets.UTF_8);
}
@AfterAll
public static void stop() throws IOException {
+ if (client != null) {
+ Arrays.stream(catalog.asSchemas().listSchemas())
+ .filter(schema -> !schema.equals("default"))
+ .forEach(
+ (schema -> {
+ catalog.asSchemas().dropSchema(schema, true);
+ }));
+ Arrays.stream(metalake.listCatalogs())
+ .forEach(
+ (catalogName -> {
+ metalake.dropCatalog(catalogName);
+ }));
+ client.dropMetalake(metalakeName);
+ }
+ if (sparkSession != null) {
+ sparkSession.close();
+ }
+ try {
+ closer.close();
+ } catch (Exception e) {
+ LOG.error("Failed to close CloseableGroup", e);
+ }
+
AbstractIT.client = null;
}
@Test
- void testCreateRole() {
- String roleName = RangerITEnv.currentFunName();
- Map<String, String> properties = Maps.newHashMap();
- properties.put("k1", "v1");
+ void testAllowUseSchemaPrivilege() throws InterruptedException {
Review Comment:
I plan to build a framework that uses Spark to verify Ranger authorization for Hive. Some community members are already interested in adding test cases for it.
##########
authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java:
##########
@@ -77,63 +76,195 @@ public class RangerHiveE2EIT extends AbstractIT {
private static RangerAuthorizationPlugin rangerAuthPlugin;
public static final String metalakeName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_metalake").toLowerCase();
public static final String catalogName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_catalog").toLowerCase();
public static final String schemaName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase();
- public static final String tableName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
-
- public static final String HIVE_COL_NAME1 = "hive_col_name1";
- public static final String HIVE_COL_NAME2 = "hive_col_name2";
- public static final String HIVE_COL_NAME3 = "hive_col_name3";
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_schema").toLowerCase();
private static GravitinoMetalake metalake;
private static Catalog catalog;
private static final String provider = "hive";
private static String HIVE_METASTORE_URIS;
+ private static SparkSession sparkSession = null;
+ private final AuditInfo auditInfo =
+ AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build();
+ private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+ private static final String TEST_USER_NAME = "e2e_it_user";
+
+ private static final String SQL_SHOW_DATABASES =
+ String.format("SHOW DATABASES like '%s'", schemaName);
+
+ private static String RANGER_ADMIN_URL = null;
+
@BeforeAll
public static void startIntegrationTest() throws Exception {
+ // Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
- configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+ configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+ configs.put(Configs.AUTHENTICATORS.getKey(), AuthenticatorType.SIMPLE.name().toLowerCase());
+ configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
RangerITEnv.setup();
- containerSuite.startHiveContainer();
+ RangerITEnv.startHiveRangerContainer();
+
+ RANGER_ADMIN_URL =
+ String.format(
+ "http://%s:%d",
+ containerSuite.getRangerContainer().getContainerIpAddress(), RANGER_SERVER_PORT);
+
HIVE_METASTORE_URIS =
String.format(
"thrift://%s:%d",
- containerSuite.getHiveContainer().getContainerIpAddress(),
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
HiveContainer.HIVE_METASTORE_PORT);
+ generateRangerSparkSecurityXML();
+
+ sparkSession =
+ SparkSession.builder()
+ .master("local[1]")
+ .appName("Hive Catalog integration test")
Review Comment:
DONE
##########
.github/workflows/python-integration-test.yml:
##########
@@ -48,7 +48,7 @@ jobs:
needs: changes
if: needs.changes.outputs.source_changes == 'true'
runs-on: ubuntu-latest
- timeout-minutes: 30
+ timeout-minutes: 45
Review Comment:
I have rolled back this change.
##########
gradle/libs.versions.toml:
##########
@@ -217,7 +218,7 @@ datanucleus-core = { group = "org.datanucleus", name = "datanucleus-core", versi
datanucleus-api-jdo = { group = "org.datanucleus", name = "datanucleus-api-jdo", version.ref = "datanucleus-api-jdo" }
datanucleus-rdbms = { group = "org.datanucleus", name = "datanucleus-rdbms", version.ref = "datanucleus-rdbms" }
datanucleus-jdo = { group = "org.datanucleus", name = "javax.jdo", version.ref = "datanucleus-jdo" }
-
+apiguardian-api = { group = "org.apiguardian", name = "apiguardian-api", version.ref = "apiguardian-api" }
Review Comment:
DONE
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]