jerqi commented on code in PR #5190:
URL: https://github.com/apache/gravitino/pull/5190#discussion_r1818496023
##########
authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java:
##########
@@ -204,52 +229,327 @@ public void stop() {
}
@Test
- void testAllowUseSchemaPrivilege() throws InterruptedException {
- // First, create a schema use Gravitino client
- createSchema();
+ void testCreateSchema() throws InterruptedException {
+ // First, fail to create the schema
+ Assertions.assertThrows(
+ AccessControlException.class, () ->
sparkSession.sql(SQL_CREATE_SCHEMA));
- // Use Spark to show this databases(schema)
- Dataset dataset1 = sparkSession.sql(SQL_SHOW_DATABASES);
- dataset1.show();
- List<Row> rows1 = dataset1.collectAsList();
- // The schema should not be shown, because the user does not have the
permission
- Assertions.assertEquals(
- 0, rows1.stream().filter(row ->
row.getString(0).equals(schemaName)).count());
+ // Second, grant the `CREATE_SCHEMA` role
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ String roleName = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName, Lists.newArrayList(Privileges.CreateSchema.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+ waitForUpdatingPolicies();
+
+ // Third, succeed to create the schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Clean up
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(roleName);
+ }
+
+ @Test
+ void testCreateTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String createSchemaRole = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(Privileges.UseSchema.allow(),
Privileges.CreateSchema.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(
+ createSchemaRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(createSchemaRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, fail to create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_CREATE_TABLE));
+
+ // Fourth, create a role for creating a table and grant to the user
+ String createTableRole = "createTableRole";
+ securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName, Lists.newArrayList(Privileges.CreateTable.allow()));
+ metalake.createRole(
+ createTableRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(createTableRole), userName1);
+ waitForUpdatingPolicies();
+
+ // Fifth, succeed to create a table
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(createTableRole);
+ metalake.deleteRole(createSchemaRole);
+ }
+
+ @Test
+ void testReadWriteTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readWriteRole = "readWriteRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow(),
+ Privileges.ModifyTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readWriteRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readWriteRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Succeed to insert data into table
+ sparkSession.sql(SQL_INSERT_TABLE);
+
+ // case 2: Succeed to select data from the table
+ sparkSession.sql(SQL_SELECT_TABLE).collectAsList();
+
+    // case 3: Fail to update data in the table, because Hive doesn't support it.
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+
+    // case 4: Fail to delete data from the table, because Hive doesn't support it.
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Succeed to alter the table
+ sparkSession.sql(SQL_ALTER_TABLE);
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readWriteRole);
+ }
+
+ @Test
+ void testReadOnlyTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readOnlyRole = "readOnlyRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readOnlyRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readOnlyRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Fail to insert data into table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_INSERT_TABLE));
+
+ // case 2: Succeed to select data from the table
+ sparkSession.sql(SQL_SELECT_TABLE).collectAsList();
+
+    // case 3: Fail to update data in the table
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+
+ // case 4: Fail to delete data from the table
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Fail to alter the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_ALTER_TABLE));
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readOnlyRole);
+ }
+
+ @Test
+ void testWriteOnlyTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readOnlyRole = "writeOnlyRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.ModifyTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readOnlyRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readOnlyRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Succeed to insert data into the table
+ sparkSession.sql(SQL_INSERT_TABLE);
+
+ // case 2: Fail to select data from the table
+ Assertions.assertThrows(
+ AccessControlException.class, () ->
sparkSession.sql(SQL_SELECT_TABLE).collectAsList());
+
+    // case 3: Fail to update data in the table, because Hive doesn't support it.
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+    // case 4: Fail to delete data from the table, because Hive doesn't support it.
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Succeed to alter the table
+ sparkSession.sql(SQL_ALTER_TABLE);
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readOnlyRole);
+ }
+
+ @Test
+ void testCreateAllPrivilegesRole() throws InterruptedException {
+ String roleName = "allPrivilegesRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.CreateCatalog.allow(),
+ Privileges.UseCatalog.allow(),
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateFileset.allow(),
+ Privileges.ReadFileset.allow(),
+ Privileges.WriteFileset.allow(),
+ Privileges.CreateTopic.allow(),
+ Privileges.ConsumeTopic.allow(),
+ Privileges.ProduceTopic.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow(),
+ Privileges.ModifyTable.allow(),
+ Privileges.ManageUsers.allow(),
+ Privileges.ManageGroups.allow(),
+ Privileges.CreateRole.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+
+ // Granted this role to the spark execution user `HADOOP_USER_NAME`
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+
+ waitForUpdatingPolicies();
+
+ // Test to create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+    // Test to create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(roleName);
+ }
+
+ @Test
+ void testDeleteAndRecreateRole() {
// Create a role with CREATE_SCHEMA privilege
- SecurableObject securableObject1 =
+ String roleName = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.parse(
+ String.format("%s", catalogName),
+ MetadataObject.Type.CATALOG,
+ Lists.newArrayList(Privileges.UseCatalog.allow(),
Privileges.CreateSchema.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+
+ // Granted this role to the spark execution user `HADOOP_USER_NAME`
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+
+ // Delete the role
+ metalake.deleteRole(roleName);
+
Review Comment:
I added.
##########
authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java:
##########
@@ -204,52 +229,327 @@ public void stop() {
}
@Test
- void testAllowUseSchemaPrivilege() throws InterruptedException {
- // First, create a schema use Gravitino client
- createSchema();
+ void testCreateSchema() throws InterruptedException {
+ // First, fail to create the schema
+ Assertions.assertThrows(
+ AccessControlException.class, () ->
sparkSession.sql(SQL_CREATE_SCHEMA));
- // Use Spark to show this databases(schema)
- Dataset dataset1 = sparkSession.sql(SQL_SHOW_DATABASES);
- dataset1.show();
- List<Row> rows1 = dataset1.collectAsList();
- // The schema should not be shown, because the user does not have the
permission
- Assertions.assertEquals(
- 0, rows1.stream().filter(row ->
row.getString(0).equals(schemaName)).count());
+ // Second, grant the `CREATE_SCHEMA` role
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ String roleName = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName, Lists.newArrayList(Privileges.CreateSchema.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+ waitForUpdatingPolicies();
+
+ // Third, succeed to create the schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Clean up
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(roleName);
+ }
+
+ @Test
+ void testCreateTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String createSchemaRole = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(Privileges.UseSchema.allow(),
Privileges.CreateSchema.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(
+ createSchemaRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(createSchemaRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, fail to create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_CREATE_TABLE));
+
+ // Fourth, create a role for creating a table and grant to the user
+ String createTableRole = "createTableRole";
+ securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName, Lists.newArrayList(Privileges.CreateTable.allow()));
+ metalake.createRole(
+ createTableRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(createTableRole), userName1);
+ waitForUpdatingPolicies();
+
+ // Fifth, succeed to create a table
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(createTableRole);
+ metalake.deleteRole(createSchemaRole);
+ }
+
+ @Test
+ void testReadWriteTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readWriteRole = "readWriteRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow(),
+ Privileges.ModifyTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readWriteRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readWriteRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Succeed to insert data into table
+ sparkSession.sql(SQL_INSERT_TABLE);
+
+ // case 2: Succeed to select data from the table
+ sparkSession.sql(SQL_SELECT_TABLE).collectAsList();
+
+    // case 3: Fail to update data in the table, because Hive doesn't support it.
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+
+    // case 4: Fail to delete data from the table, because Hive doesn't support it.
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Succeed to alter the table
+ sparkSession.sql(SQL_ALTER_TABLE);
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readWriteRole);
+ }
+
+ @Test
+ void testReadOnlyTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readOnlyRole = "readOnlyRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readOnlyRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readOnlyRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Fail to insert data into table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_INSERT_TABLE));
+
+ // case 2: Succeed to select data from the table
+ sparkSession.sql(SQL_SELECT_TABLE).collectAsList();
+
+    // case 3: Fail to update data in the table
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+
+ // case 4: Fail to delete data from the table
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Fail to alter the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_ALTER_TABLE));
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readOnlyRole);
+ }
+
+ @Test
+ void testWriteOnlyTable() throws InterruptedException {
+ // First, create a role for creating a database and grant role to the user
+ String readOnlyRole = "writeOnlyRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.ModifyTable.allow()));
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.createRole(readOnlyRole, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+ metalake.grantRolesToUser(Lists.newArrayList(readOnlyRole), userName1);
+ waitForUpdatingPolicies();
+ // Second, create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+ // Third, create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // case 1: Succeed to insert data into the table
+ sparkSession.sql(SQL_INSERT_TABLE);
+
+ // case 2: Fail to select data from the table
+ Assertions.assertThrows(
+ AccessControlException.class, () ->
sparkSession.sql(SQL_SELECT_TABLE).collectAsList());
+
+    // case 3: Fail to update data in the table, because Hive doesn't support it.
+ Assertions.assertThrows(
+ SparkUnsupportedOperationException.class, () ->
sparkSession.sql(SQL_UPDATE_TABLE));
+    // case 4: Fail to delete data from the table, because Hive doesn't support it.
+ Assertions.assertThrows(AnalysisException.class, () ->
sparkSession.sql(SQL_DELETE_TABLE));
+
+ // case 5: Succeed to alter the table
+ sparkSession.sql(SQL_ALTER_TABLE);
+
+ // case 6: Fail to drop the table
+ Assertions.assertThrows(AccessControlException.class, () ->
sparkSession.sql(SQL_DROP_TABLE));
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(readOnlyRole);
+ }
+
+ @Test
+ void testCreateAllPrivilegesRole() throws InterruptedException {
+ String roleName = "allPrivilegesRole";
+ SecurableObject securableObject =
+ SecurableObjects.ofMetalake(
+ metalakeName,
+ Lists.newArrayList(
+ Privileges.CreateCatalog.allow(),
+ Privileges.UseCatalog.allow(),
+ Privileges.UseSchema.allow(),
+ Privileges.CreateSchema.allow(),
+ Privileges.CreateFileset.allow(),
+ Privileges.ReadFileset.allow(),
+ Privileges.WriteFileset.allow(),
+ Privileges.CreateTopic.allow(),
+ Privileges.ConsumeTopic.allow(),
+ Privileges.ProduceTopic.allow(),
+ Privileges.CreateTable.allow(),
+ Privileges.SelectTable.allow(),
+ Privileges.ModifyTable.allow(),
+ Privileges.ManageUsers.allow(),
+ Privileges.ManageGroups.allow(),
+ Privileges.CreateRole.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+
+ // Granted this role to the spark execution user `HADOOP_USER_NAME`
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+
+ waitForUpdatingPolicies();
+
+ // Test to create a schema
+ sparkSession.sql(SQL_CREATE_SCHEMA);
+
+    // Test to create a table
+ sparkSession.sql(SQL_USE_SCHEMA);
+ sparkSession.sql(SQL_CREATE_TABLE);
+
+ // Clean up
+ catalog.asTableCatalog().dropTable(NameIdentifier.of(schemaName,
tableName));
+ catalog.asSchemas().dropSchema(schemaName, true);
+ metalake.deleteRole(roleName);
+ }
+
+ @Test
+ void testDeleteAndRecreateRole() {
// Create a role with CREATE_SCHEMA privilege
- SecurableObject securableObject1 =
+ String roleName = "createSchemaRole";
+ SecurableObject securableObject =
+ SecurableObjects.parse(
+ String.format("%s", catalogName),
+ MetadataObject.Type.CATALOG,
+ Lists.newArrayList(Privileges.UseCatalog.allow(),
Privileges.CreateSchema.allow()));
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+
+ // Granted this role to the spark execution user `HADOOP_USER_NAME`
+ String userName1 = System.getenv(HADOOP_USER_NAME);
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+
+ // Delete the role
+ metalake.deleteRole(roleName);
+
+ // Create the role again
+ metalake.createRole(roleName, Collections.emptyMap(),
Lists.newArrayList(securableObject));
+
+ // Grant the role again
+ metalake.grantRolesToUser(Lists.newArrayList(roleName), userName1);
+
Review Comment:
I added.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]