This is an automated email from the ASF dual-hosted git repository.
roryqi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git
The following commit(s) were added to refs/heads/main by this push:
new 4aaff793ac [#6693] improvement(authz): Support to rename Hive table in
the Ranger authz plugin (#6770)
4aaff793ac is described below
commit 4aaff793aca3177a60155c759733daf2fd89253d
Author: roryqi <[email protected]>
AuthorDate: Tue Apr 1 14:02:54 2025 +0800
[#6693] improvement(authz): Support to rename Hive table in the Ranger
authz plugin (#6770)
### What changes were proposed in this pull request?
Support renaming Hive tables in the Ranger authz plugin
### Why are the changes needed?
Fix: #6697
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Add an IT.
---
.../authorization/MetadataObjectChange.java | 30 ++-
.../ranger/RangerAuthorizationHDFSPlugin.java | 159 ++++++------
.../ranger/RangerAuthorizationPlugin.java | 93 ++++---
.../test/RangerAuthorizationHDFSPluginIT.java | 31 +--
.../integration/test/RangerHiveHdfsE2EIT.java | 268 +++++++++++++++++++++
.../ranger/integration/test/RangerHiveIT.java | 8 +-
.../authorization/AuthorizationUtils.java | 12 +-
.../gravitino/catalog/OperationDispatcher.java | 10 +
.../catalog/SchemaOperationDispatcher.java | 13 +-
.../catalog/TableOperationDispatcher.java | 13 +-
.../apache/gravitino/hook/TableHookDispatcher.java | 9 +-
11 files changed, 495 insertions(+), 151 deletions(-)
diff --git
a/api/src/main/java/org/apache/gravitino/authorization/MetadataObjectChange.java
b/api/src/main/java/org/apache/gravitino/authorization/MetadataObjectChange.java
index db14cd4b0d..f63daff764 100644
---
a/api/src/main/java/org/apache/gravitino/authorization/MetadataObjectChange.java
+++
b/api/src/main/java/org/apache/gravitino/authorization/MetadataObjectChange.java
@@ -19,6 +19,7 @@
package org.apache.gravitino.authorization;
import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
import java.util.List;
import java.util.Objects;
import org.apache.gravitino.MetadataObject;
@@ -34,11 +35,12 @@ public interface MetadataObjectChange {
*
* @param metadataObject The metadata object.
* @param newMetadataObject The new metadata object.
+ * @param locations The locations of the metadata object.
* @return return a MetadataObjectChange for the rename metadata object.
*/
static MetadataObjectChange rename(
- MetadataObject metadataObject, MetadataObject newMetadataObject) {
- return new RenameMetadataObject(metadataObject, newMetadataObject);
+ MetadataObject metadataObject, MetadataObject newMetadataObject,
List<String> locations) {
+ return new RenameMetadataObject(metadataObject, newMetadataObject,
locations);
}
/**
@@ -56,8 +58,14 @@ public interface MetadataObjectChange {
final class RenameMetadataObject implements MetadataObjectChange {
private final MetadataObject metadataObject;
private final MetadataObject newMetadataObject;
+ private final List<String> locations;
private RenameMetadataObject(MetadataObject metadataObject, MetadataObject
newMetadataObject) {
+ this(metadataObject, newMetadataObject, null);
+ }
+
+ private RenameMetadataObject(
+ MetadataObject metadataObject, MetadataObject newMetadataObject,
List<String> locations) {
Preconditions.checkArgument(
!metadataObject.fullName().equals(newMetadataObject.fullName()),
"The metadata object must be different!");
@@ -67,6 +75,12 @@ public interface MetadataObjectChange {
this.metadataObject = metadataObject;
this.newMetadataObject = newMetadataObject;
+ if (locations != null) {
+ this.locations = Lists.newArrayList(locations);
+ this.locations.sort(String::compareTo);
+ } else {
+ this.locations = null;
+ }
}
/**
@@ -87,6 +101,15 @@ public interface MetadataObjectChange {
return newMetadataObject;
}
+ /**
+ * Return the locations of the metadata object
+ *
+ * @return return the locations of the metadata object
+ */
+ public List<String> locations() {
+ return locations;
+ }
+
/**
* Compares this RenameMetadataObject instance with another object for
equality. The comparison
* is based on the old metadata entity and new metadata entity.
@@ -100,7 +123,8 @@ public interface MetadataObjectChange {
if (o == null || getClass() != o.getClass()) return false;
RenameMetadataObject that = (RenameMetadataObject) o;
return metadataObject.equals(that.metadataObject)
- && newMetadataObject.equals(that.newMetadataObject);
+ && newMetadataObject.equals(that.newMetadataObject)
+ && locations.equals(that.locations);
}
/**
diff --git
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
index 2406fe5581..588e5df18a 100644
---
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
+++
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationHDFSPlugin.java
@@ -22,8 +22,8 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -36,7 +36,6 @@ import org.apache.gravitino.GravitinoEnv;
import org.apache.gravitino.MetadataObject;
import org.apache.gravitino.NameIdentifier;
import org.apache.gravitino.Namespace;
-import org.apache.gravitino.Schema;
import org.apache.gravitino.authorization.AuthorizationMetadataObject;
import org.apache.gravitino.authorization.AuthorizationPrivilege;
import org.apache.gravitino.authorization.AuthorizationSecurableObject;
@@ -50,7 +49,9 @@ import
org.apache.gravitino.authorization.common.PathBasedSecurableObject;
import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
+import org.apache.gravitino.rel.Table;
import org.apache.gravitino.utils.MetadataObjectUtil;
+import org.apache.gravitino.utils.NameIdentifierUtil;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.util.SearchFilter;
@@ -257,6 +258,9 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
removeTableMetadataObject(authzMetadataObject);
} else if
(authzMetadataObject.type().equals(PathBasedMetadataObject.FILESET_PATH)) {
removePolicyByMetadataObject(authzMetadataObject);
+ } else if (authzMetadataObject.metadataObjectType() ==
MetadataObject.Type.METALAKE
+ || authzMetadataObject.metadataObjectType() ==
MetadataObject.Type.CATALOG) {
+ // Do nothing
} else {
throw new IllegalArgumentException(
"Unsupported authorization metadata object type: " +
authzMetadataObject.type());
@@ -271,81 +275,7 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
Preconditions.checkArgument(
authzMetadataObject instanceof PathBasedMetadataObject,
"The metadata object must be a PathBasedMetadataObject");
- Preconditions.checkArgument(
- authzMetadataObject.type().equals(PathBasedMetadataObject.SCHEMA_PATH),
- "The metadata object type must be a schema");
- Preconditions.checkArgument(
- authzMetadataObject.names().size() == 1, "The metadata object's size
must be 1.");
- if (RangerHelper.RESOURCE_ALL.equals(authzMetadataObject.name())) {
- // Remove all schema in this catalog
- String catalogName = authzMetadataObject.names().get(0);
- NameIdentifier[] schemas =
- GravitinoEnv.getInstance()
- .schemaDispatcher()
- .listSchemas(Namespace.of(metalake, catalogName));
- Arrays.asList(schemas)
- .forEach(
- schema -> {
- List<String> schemaLocations =
- AuthorizationUtils.getMetadataObjectLocation(
- NameIdentifier.of(metalake, catalogName,
schema.name()),
- Entity.EntityType.SCHEMA);
- schemaLocations.forEach(
- locationPath -> {
- List<String> names = ImmutableList.of(metalake,
catalogName, schema.name());
- AuthorizationMetadataObject schemaMetadataObject =
- new PathBasedMetadataObject(
-
AuthorizationMetadataObject.getParentFullName(names),
- AuthorizationMetadataObject.getLastName(names),
- locationPath,
- PathBasedMetadataObject.SCHEMA_PATH);
- removeSchemaMetadataObject(schemaMetadataObject);
- });
- });
- } else {
- // Remove all table in this schema
- NameIdentifier[] tables =
- GravitinoEnv.getInstance()
- .tableDispatcher()
- .listTables(Namespace.of(authzMetadataObject.name()));
- Arrays.asList(tables)
- .forEach(
- table -> {
- NameIdentifier identifier =
- NameIdentifier.of(authzMetadataObject.name(),
table.name());
- List<String> tabLocations =
- AuthorizationUtils.getMetadataObjectLocation(
- identifier, Entity.EntityType.TABLE);
- tabLocations.forEach(
- locationPath -> {
- AuthorizationMetadataObject tableMetadataObject =
- new PathBasedMetadataObject(
- authzMetadataObject.name(),
- table.name(),
- locationPath,
- PathBasedMetadataObject.TABLE_PATH);
- removeTableMetadataObject(tableMetadataObject);
- });
- });
- // Remove schema
- Schema schema =
- GravitinoEnv.getInstance()
- .schemaDispatcher()
- .loadSchema(NameIdentifier.of(authzMetadataObject.name()));
- List<String> schemaLocations =
- AuthorizationUtils.getMetadataObjectLocation(
- NameIdentifier.parse(authzMetadataObject.fullName()),
Entity.EntityType.SCHEMA);
- schemaLocations.forEach(
- locationPath -> {
- AuthorizationMetadataObject schemaMetadataObject =
- new PathBasedMetadataObject(
- authzMetadataObject.name(),
- schema.name(),
- locationPath,
- PathBasedMetadataObject.SCHEMA_PATH);
- removePolicyByMetadataObject(schemaMetadataObject);
- });
- }
+ // TODO: The remove schema logic will be implemented when schema is
supported
}
/**
@@ -507,6 +437,24 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
case SELECT_TABLE:
case CREATE_TABLE:
case MODIFY_TABLE:
+ AuthorizationUtils.getMetadataObjectLocation(
+ identifier,
MetadataObjectUtil.toEntityType(securableObject))
+ .forEach(
+ locationPath -> {
+ PathBasedMetadataObject pathBaseMetadataObject =
+ new PathBasedMetadataObject(
+ securableObject.parent(),
+ securableObject.name(),
+ locationPath,
+
PathBasedMetadataObject.PathType.get(securableObject.type()));
+
pathBaseMetadataObject.validateAuthorizationMetadataObject();
+ rangerSecurableObjects.add(
+ generateAuthorizationSecurableObject(
+ pathBaseMetadataObject.names(),
+ locationPath,
+
PathBasedMetadataObject.PathType.get(securableObject.type()),
+ rangerPrivileges));
+ });
break;
case CREATE_FILESET:
// Ignore the Gravitino privilege `CREATE_FILESET` in the
@@ -599,6 +547,7 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
case SCHEMA:
break;
case FILESET:
+ case TABLE:
translateMetadataObject(gravitinoMetadataObject)
.forEach(
metadataObject -> {
@@ -611,7 +560,7 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
generateAuthorizationSecurableObject(
pathBasedMetadataObject.names(),
getAuthorizationPath(pathBasedMetadataObject),
- PathBasedMetadataObject.FILESET_PATH,
+ pathBasedMetadataObject.type(),
ownerMappingRule()));
});
break;
@@ -649,10 +598,10 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
public Boolean onMetadataUpdated(MetadataObjectChange... changes) throws
RuntimeException {
for (MetadataObjectChange change : changes) {
if (change instanceof MetadataObjectChange.RenameMetadataObject) {
- MetadataObject metadataObject =
- ((MetadataObjectChange.RenameMetadataObject)
change).metadataObject();
- MetadataObject newMetadataObject =
- ((MetadataObjectChange.RenameMetadataObject)
change).newMetadataObject();
+ MetadataObjectChange.RenameMetadataObject renameChange =
+ (MetadataObjectChange.RenameMetadataObject) change;
+ MetadataObject metadataObject = renameChange.metadataObject();
+ MetadataObject newMetadataObject = renameChange.newMetadataObject();
Preconditions.checkArgument(
metadataObject.type() == newMetadataObject.type(),
"The old and new metadata object types must be equal!");
@@ -665,8 +614,47 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
// Did not need to update the Ranger policy
continue;
}
- List<AuthorizationMetadataObject> oldAuthzMetadataObjects =
- translateMetadataObject(metadataObject);
+
+ // Like topics and filesets, their locations don't change, we don't
need to modify the
+ // policies
+ if (renameChange.locations() == null ||
renameChange.locations().isEmpty()) {
+ continue;
+ }
+
+ // Only Hive managed tables will change the location
+ if (metadataObject.type() == MetadataObject.Type.TABLE) {
+ NameIdentifier ident = MetadataObjectUtil.toEntityIdent(metalake,
newMetadataObject);
+ NameIdentifier catalogIdent =
NameIdentifierUtil.getCatalogIdentifier(ident);
+ if (GravitinoEnv.getInstance()
+ .catalogDispatcher()
+ .loadCatalog(catalogIdent)
+ .provider()
+ .equals("hive")) {
+ Table table =
GravitinoEnv.getInstance().tableDispatcher().loadTable(ident);
+ if (table.properties().get("table-type").equals("EXTERNAL_TABLE"))
{
+ continue;
+ }
+ } else {
+ // Iceberg and other lake houses don't need to change the
privileges of locations
+ continue;
+ }
+ }
+
+ List<AuthorizationMetadataObject> oldAuthzMetadataObjects =
Lists.newArrayList();
+ renameChange
+ .locations()
+ .forEach(
+ locationPath -> {
+ PathBasedMetadataObject pathBaseMetadataObject =
+ new PathBasedMetadataObject(
+ metadataObject.parent(),
+ metadataObject.name(),
+ locationPath,
+
PathBasedMetadataObject.PathType.get(metadataObject.type()));
+ pathBaseMetadataObject.validateAuthorizationMetadataObject();
+ oldAuthzMetadataObjects.add(pathBaseMetadataObject);
+ });
+
List<AuthorizationMetadataObject> newAuthzMetadataObjects =
translateMetadataObject(newMetadataObject);
Preconditions.checkArgument(
@@ -697,7 +685,8 @@ public class RangerAuthorizationHDFSPlugin extends
RangerAuthorizationPlugin {
changeMetadataObject.metadataObject().parent(),
changeMetadataObject.metadataObject().name(),
locationPath,
- PathBasedMetadataObject.FILESET_PATH);
+ PathBasedMetadataObject.PathType.get(
+ changeMetadataObject.metadataObject().type()));
pathBaseMetadataObject.validateAuthorizationMetadataObject();
authzMetadataObjects.add(pathBaseMetadataObject);
});
diff --git
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
index f4d19df233..5007444e10 100644
---
a/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
+++
b/authorizations/authorization-ranger/src/main/java/org/apache/gravitino/authorization/ranger/RangerAuthorizationPlugin.java
@@ -28,9 +28,11 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
+import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.gravitino.MetadataObject;
import org.apache.gravitino.authorization.AuthorizationMetadataObject;
import org.apache.gravitino.authorization.AuthorizationPrivilege;
@@ -722,35 +724,40 @@ public abstract class RangerAuthorizationPlugin
@Override
public Boolean onUserAdded(User user) throws AuthorizationPluginException {
- VXUserList list = rangerClient.searchUser(ImmutableMap.of("name",
user.name()));
- if (list.getListSize() > 0) {
- LOG.warn("The user({}) already exists in the Ranger!", user.name());
- return Boolean.FALSE;
- }
-
- VXUser rangerUser =
VXUser.builder().withName(user.name()).withDescription(user.name()).build();
- return rangerClient.createUser(rangerUser);
+ return getUserId(user.name())
+ .map(
+ id -> {
+ LOG.warn("The user({}) already exists in the Ranger!",
user.name());
+ return Boolean.FALSE;
+ })
+ .orElseGet(
+ () -> {
+ VXUser rangerUser =
+
VXUser.builder().withName(user.name()).withDescription(user.name()).build();
+ return rangerClient.createUser(rangerUser);
+ });
}
@Override
public Boolean onUserRemoved(User user) throws AuthorizationPluginException {
- VXUserList list = rangerClient.searchUser(ImmutableMap.of("name",
user.name()));
- if (list.getListSize() == 0) {
- LOG.warn("The user({}) doesn't exist in the Ranger!", user);
- return Boolean.FALSE;
- }
- rangerClient.deleteUser(list.getList().get(0).getId());
- return Boolean.TRUE;
+ return getUserId(user.name())
+ .map(id -> rangerClient.deleteUser(id))
+ .orElseGet(
+ () -> {
+ LOG.warn("The user({}) doesn't exist in the Ranger!",
user.name());
+ return Boolean.FALSE;
+ });
}
@Override
public Boolean onUserAcquired(User user) throws AuthorizationPluginException
{
- VXUserList list = rangerClient.searchUser(ImmutableMap.of("name",
user.name()));
- if (list.getListSize() == 0) {
- LOG.warn("The user({}) doesn't exist in the Ranger!", user);
- return Boolean.FALSE;
- }
- return Boolean.TRUE;
+ return getUserId(user.name())
+ .map(id -> Boolean.TRUE)
+ .orElseGet(
+ () -> {
+ LOG.warn("The user({}) doesn't exist in the Ranger!", user);
+ return Boolean.FALSE;
+ });
}
@Override
@@ -761,18 +768,19 @@ public abstract class RangerAuthorizationPlugin
@Override
public Boolean onGroupRemoved(Group group) throws
AuthorizationPluginException {
- VXGroupList list = rangerClient.searchGroup(ImmutableMap.of("name",
group.name()));
- if (list.getListSize() == 0) {
- LOG.warn("The group({}) doesn't exist in the Ranger!", group);
- return Boolean.FALSE;
- }
- return rangerClient.deleteGroup(list.getList().get(0).getId());
+ Optional<Long> groupId = getGroupId(group.name());
+ return groupId
+ .map(id -> rangerClient.deleteGroup(id))
+ .orElseGet(
+ () -> {
+ LOG.warn("The group({}) doesn't exist in the Ranger!",
group.name());
+ return Boolean.FALSE;
+ });
}
@Override
public Boolean onGroupAcquired(Group group) {
- VXGroupList vxGroupList = rangerClient.searchGroup(ImmutableMap.of("name",
group.name()));
- if (vxGroupList.getListSize() == 0) {
+ if (!getGroupId(group.name()).isPresent()) {
LOG.warn("The group({}) doesn't exist in the Ranger!", group);
return Boolean.FALSE;
}
@@ -1044,4 +1052,31 @@ public abstract class RangerAuthorizationPlugin
return match.get();
});
}
+
+ private Optional<Long> getUserId(String name) {
+ VXUserList list = rangerClient.searchUser(ImmutableMap.of("name", name));
+ if (list.getListSize() > 0) {
+ for (VXUser vxUser : list.getList()) {
+ if (vxUser.getName().equals(name)) {
+ return Optional.of(vxUser.getId());
+ }
+ }
+ }
+ return Optional.empty();
+ }
+
+ private Optional<Long> getGroupId(String name) {
+ VXGroupList vxGroupList = rangerClient.searchGroup(ImmutableMap.of("name",
name));
+ try {
+ for (VXGroup group : vxGroupList.getList()) {
+ String value = (String) FieldUtils.readField(group, "name", true);
+ if (name.equals(value)) {
+ return Optional.of(group.getId());
+ }
+ }
+ } catch (Exception e) {
+ throw new AuthorizationPluginException("Fail to get the field name of
class VXGroup");
+ }
+ return Optional.empty();
+ }
}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java
index 9b399eeabb..056f5292b9 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerAuthorizationHDFSPluginIT.java
@@ -75,7 +75,7 @@ public class RangerAuthorizationHDFSPluginIT {
withMockedAuthorizationUtils(
() -> {
MetadataObject metalake =
- MetadataObjects.parse(String.format("metalake1"),
MetadataObject.Type.METALAKE);
+ MetadataObjects.parse("metalake1", MetadataObject.Type.METALAKE);
rangerAuthPlugin
.translateMetadataObject(metalake)
.forEach(
@@ -89,8 +89,7 @@ public class RangerAuthorizationHDFSPluginIT {
Assertions.assertEquals("/test",
pathBasedMetadataObject.path());
});
- MetadataObject catalog =
- MetadataObjects.parse(String.format("catalog1"),
MetadataObject.Type.CATALOG);
+ MetadataObject catalog = MetadataObjects.parse("catalog1",
MetadataObject.Type.CATALOG);
rangerAuthPlugin
.translateMetadataObject(catalog)
.forEach(
@@ -104,7 +103,7 @@ public class RangerAuthorizationHDFSPluginIT {
});
MetadataObject schema =
- MetadataObjects.parse(String.format("catalog1.schema1"),
MetadataObject.Type.SCHEMA);
+ MetadataObjects.parse("catalog1.schema1",
MetadataObject.Type.SCHEMA);
rangerAuthPlugin
.translateMetadataObject(schema)
.forEach(
@@ -118,8 +117,7 @@ public class RangerAuthorizationHDFSPluginIT {
});
MetadataObject table =
- MetadataObjects.parse(
- String.format("catalog1.schema1.tab1"),
MetadataObject.Type.TABLE);
+ MetadataObjects.parse("catalog1.schema1.tab1",
MetadataObject.Type.TABLE);
rangerAuthPlugin
.translateMetadataObject(table)
.forEach(
@@ -133,8 +131,7 @@ public class RangerAuthorizationHDFSPluginIT {
});
MetadataObject fileset =
- MetadataObjects.parse(
- String.format("catalog1.schema1.fileset1"),
MetadataObject.Type.FILESET);
+ MetadataObjects.parse("catalog1.schema1.fileset1",
MetadataObject.Type.FILESET);
rangerAuthPlugin
.translateMetadataObject(fileset)
.forEach(
@@ -155,7 +152,7 @@ public class RangerAuthorizationHDFSPluginIT {
() -> {
SecurableObject filesetInMetalake =
SecurableObjects.parse(
- String.format("metalake1"),
+ "metalake1",
MetadataObject.Type.METALAKE,
Lists.newArrayList(
Privileges.CreateFileset.allow(),
@@ -167,7 +164,7 @@ public class RangerAuthorizationHDFSPluginIT {
SecurableObject filesetInCatalog =
SecurableObjects.parse(
- String.format("catalog1"),
+ "catalog1",
MetadataObject.Type.CATALOG,
Lists.newArrayList(
Privileges.CreateFileset.allow(),
@@ -179,7 +176,7 @@ public class RangerAuthorizationHDFSPluginIT {
SecurableObject filesetInSchema =
SecurableObjects.parse(
- String.format("catalog1.schema1"),
+ "catalog1.schema1",
MetadataObject.Type.SCHEMA,
Lists.newArrayList(
Privileges.CreateFileset.allow(),
@@ -191,7 +188,7 @@ public class RangerAuthorizationHDFSPluginIT {
SecurableObject filesetInFileset =
SecurableObjects.parse(
- String.format("catalog1.schema1.fileset1"),
+ "catalog1.schema1.fileset1",
MetadataObject.Type.FILESET,
Lists.newArrayList(
Privileges.CreateFileset.allow(),
@@ -218,25 +215,23 @@ public class RangerAuthorizationHDFSPluginIT {
withMockedAuthorizationUtils(
() -> {
MetadataObject metalake =
- MetadataObjects.parse(String.format("metalake1"),
MetadataObject.Type.METALAKE);
+ MetadataObjects.parse("metalake1", MetadataObject.Type.METALAKE);
List<AuthorizationSecurableObject> metalakeOwner =
rangerAuthPlugin.translateOwner(metalake);
Assertions.assertEquals(0, metalakeOwner.size());
- MetadataObject catalog =
- MetadataObjects.parse(String.format("catalog1"),
MetadataObject.Type.CATALOG);
+ MetadataObject catalog = MetadataObjects.parse("catalog1",
MetadataObject.Type.CATALOG);
List<AuthorizationSecurableObject> catalogOwner =
rangerAuthPlugin.translateOwner(catalog);
Assertions.assertEquals(0, catalogOwner.size());
MetadataObject schema =
- MetadataObjects.parse(String.format("catalog1.schema1"),
MetadataObject.Type.SCHEMA);
+ MetadataObjects.parse("catalog1.schema1",
MetadataObject.Type.SCHEMA);
List<AuthorizationSecurableObject> schemaOwner =
rangerAuthPlugin.translateOwner(schema);
Assertions.assertEquals(0, schemaOwner.size());
MetadataObject fileset =
- MetadataObjects.parse(
- String.format("catalog1.schema1.fileset1"),
MetadataObject.Type.FILESET);
+ MetadataObjects.parse("catalog1.schema1.fileset1",
MetadataObject.Type.FILESET);
List<AuthorizationSecurableObject> filesetOwner =
rangerAuthPlugin.translateOwner(fileset);
filesetOwner.forEach(
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveHdfsE2EIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveHdfsE2EIT.java
new file mode 100644
index 0000000000..94a1722736
--- /dev/null
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveHdfsE2EIT.java
@@ -0,0 +1,268 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.gravitino.authorization.ranger.integration.test;
+
+import static org.apache.gravitino.Catalog.AUTHORIZATION_PROVIDER;
+import static
org.apache.gravitino.catalog.hive.HiveConstants.IMPERSONATION_ENABLE;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import org.apache.gravitino.Catalog;
+import org.apache.gravitino.Configs;
+import org.apache.gravitino.NameIdentifier;
+import org.apache.gravitino.auth.AuthConstants;
+import org.apache.gravitino.auth.AuthenticatorType;
+import org.apache.gravitino.authorization.Privileges;
+import org.apache.gravitino.authorization.SecurableObject;
+import org.apache.gravitino.authorization.SecurableObjects;
+import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
+import org.apache.gravitino.catalog.hive.HiveConstants;
+import org.apache.gravitino.client.GravitinoMetalake;
+import org.apache.gravitino.exceptions.UserAlreadyExistsException;
+import org.apache.gravitino.integration.test.container.HiveContainer;
+import org.apache.gravitino.integration.test.container.RangerContainer;
+import org.apache.gravitino.integration.test.util.BaseIT;
+import org.apache.gravitino.integration.test.util.GravitinoITUtils;
+import org.apache.gravitino.rel.Column;
+import org.apache.gravitino.rel.TableCatalog;
+import org.apache.gravitino.rel.TableChange;
+import org.apache.gravitino.rel.types.Types;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class RangerHiveHdfsE2EIT extends BaseIT {
+
+ private static final Logger LOG =
LoggerFactory.getLogger(RangerHiveHdfsE2EIT.class);
+ private static final String provider = "hive";
+ private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+ public static final String HIVE_COL_NAME1 = "hive_col_name1";
+ public static final String HIVE_COL_NAME2 = "hive_col_name2";
+ public static final String HIVE_COL_NAME3 = "hive_col_name3";
+ private static String metalakeName;
+ private static GravitinoMetalake metalake;
+ private static String catalogName;
+ private static String HIVE_METASTORE_URIS;
+ private static Catalog catalog;
+
+ private static String DEFAULT_FS;
+
+ @BeforeAll
+ public void startIntegrationTest() throws Exception {
+ metalakeName = GravitinoITUtils.genRandomName("metalake").toLowerCase();
+ catalogName = GravitinoITUtils.genRandomName("catalog").toLowerCase();
+
+ // Enable Gravitino Authorization mode
+ Map<String, String> configs = Maps.newHashMap();
+ configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
+ configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+ configs.put(Configs.AUTHENTICATORS.getKey(),
AuthenticatorType.SIMPLE.name().toLowerCase());
+ configs.put("SimpleAuthUserName", AuthConstants.ANONYMOUS_USER);
+ registerCustomConfigs(configs);
+
+ super.startIntegrationTest();
+ RangerITEnv.init(RangerBaseE2EIT.metalakeName, false);
+ RangerITEnv.startHiveRangerContainer();
+
+ DEFAULT_FS =
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse",
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT);
+
+ HIVE_METASTORE_URIS =
+ String.format(
+ "thrift://%s:%d",
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HIVE_METASTORE_PORT);
+
+ createMetalake();
+ createCatalog();
+
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", DEFAULT_FS);
+
+ RangerITEnv.cleanup();
+ try {
+ metalake.addUser(System.getenv(HADOOP_USER_NAME));
+ } catch (UserAlreadyExistsException e) {
+ LOG.error("Failed to add user: {}", System.getenv(HADOOP_USER_NAME), e);
+ }
+ }
+
+ @AfterAll
+ void cleanIT() {
+ if (client != null) {
+ Arrays.stream(catalog.asSchemas().listSchemas())
+ .filter(schema -> !schema.equals("default"))
+ .forEach(
+ (schema -> {
+ catalog.asSchemas().dropSchema(schema, false);
+ }));
+
+ // The `dropCatalog` call will invoke the catalog metadata object to
remove privileges
+ Arrays.stream(metalake.listCatalogs())
+ .forEach((catalogName -> metalake.dropCatalog(catalogName, true)));
+ client.disableMetalake(metalakeName);
+ client.dropMetalake(metalakeName);
+ }
+
+ try {
+ closer.close();
+ } catch (Exception e) {
+ LOG.error("Failed to close CloseableGroup", e);
+ }
+ client = null;
+ RangerITEnv.cleanup();
+ }
+
+ protected void createCatalog() {
+ Map<String, String> properties =
+ ImmutableMap.of(
+ HiveConstants.METASTORE_URIS,
+ HIVE_METASTORE_URIS,
+ IMPERSONATION_ENABLE,
+ "true",
+ AUTHORIZATION_PROVIDER,
+ "ranger",
+ RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
+ "HDFS",
+ RangerAuthorizationProperties.RANGER_SERVICE_NAME,
+ RangerITEnv.RANGER_HDFS_REPO_NAME,
+ RangerAuthorizationProperties.RANGER_ADMIN_URL,
+ RangerITEnv.RANGER_ADMIN_URL,
+ RangerAuthorizationProperties.RANGER_AUTH_TYPE,
+ RangerContainer.authType,
+ RangerAuthorizationProperties.RANGER_USERNAME,
+ RangerContainer.rangerUserName,
+ RangerAuthorizationProperties.RANGER_PASSWORD,
+ RangerContainer.rangerPassword);
+
+ metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider,
"comment", properties);
+ catalog = metalake.loadCatalog(catalogName);
+ LOG.info("Catalog created: {}", catalog);
+ }
+
+ private void createMetalake() {
+ GravitinoMetalake[] gravitinoMetalakes = client.listMetalakes();
+ Assertions.assertEquals(0, gravitinoMetalakes.length);
+
+ client.createMetalake(metalakeName, "comment", Collections.emptyMap());
+ GravitinoMetalake loadMetalake = client.loadMetalake(metalakeName);
+ Assertions.assertEquals(metalakeName, loadMetalake.name());
+
+ metalake = loadMetalake;
+ }
+
+ @Test
+ public void testRenameTable() throws Exception {
+ // 1. Create a table
+ TableCatalog tableCatalog = catalog.asTableCatalog();
+ tableCatalog.createTable(
+ NameIdentifier.of("default", "test"), createColumns(), "comment1",
ImmutableMap.of());
+
+ // 2. check the privileges, should throw an exception
+ String userName = "test";
+ metalake.addUser(userName);
+ UserGroupInformation.createProxyUser(userName,
UserGroupInformation.getCurrentUser())
+ .doAs(
+ (PrivilegedExceptionAction<Void>)
+ () -> {
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", DEFAULT_FS);
+ String path = String.format("%s/test", DEFAULT_FS);
+ FileSystem userFileSystem = FileSystem.get(conf);
+ Exception e =
+ Assertions.assertThrows(
+ Exception.class,
+ () ->
+ userFileSystem.mkdirs(
+ new Path(String.format("%s/%s", path,
"test1"))));
+ Assertions.assertTrue(e.getMessage().contains("Permission
denied"));
+ userFileSystem.close();
+ return null;
+ });
+
+ // 3. Grant the privileges to the table
+ SecurableObject catalogObject =
+ SecurableObjects.ofCatalog(catalogName, Collections.emptyList());
+ SecurableObject schemaObject =
+ SecurableObjects.ofSchema(catalogObject, "default",
Collections.emptyList());
+ SecurableObject tableObject =
+ SecurableObjects.ofTable(
+ schemaObject, "test",
Lists.newArrayList(Privileges.ModifyTable.allow()));
+ metalake.createRole(
+ "hdfs_rename_role", Collections.emptyMap(),
Lists.newArrayList(tableObject));
+
+ metalake.grantRolesToUser(Lists.newArrayList("hdfs_rename_role"),
userName);
+ RangerBaseE2EIT.waitForUpdatingPolicies();
+
+ UserGroupInformation.createProxyUser(userName,
UserGroupInformation.getCurrentUser())
+ .doAs(
+ (PrivilegedExceptionAction<Void>)
+ () -> {
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", DEFAULT_FS);
+ String path = String.format("%s/test", DEFAULT_FS);
+ FileSystem userFileSystem = FileSystem.get(conf);
+ Assertions.assertDoesNotThrow(() ->
userFileSystem.listStatus(new Path(path)));
+ Assertions.assertDoesNotThrow(
+ () -> userFileSystem.mkdirs(new
Path(String.format("%s/%s", path, "test1"))));
+ userFileSystem.close();
+ return null;
+ });
+ // 4. Rename the table
+ tableCatalog.alterTable(NameIdentifier.of("default", "test"),
TableChange.rename("test1"));
+
+ // 5. Check the privileges
+ UserGroupInformation.createProxyUser(userName,
UserGroupInformation.getCurrentUser())
+ .doAs(
+ (PrivilegedExceptionAction<Void>)
+ () -> {
+ Configuration conf = new Configuration();
+ conf.set("fs.defaultFS", DEFAULT_FS);
+ String path = String.format("%s/test1", DEFAULT_FS);
+ FileSystem userFileSystem = FileSystem.get(conf);
+ Assertions.assertDoesNotThrow(() ->
userFileSystem.listStatus(new Path(path)));
+ Assertions.assertDoesNotThrow(
+ () -> userFileSystem.mkdirs(new
Path(String.format("%s/%s", path, "test1"))));
+ userFileSystem.close();
+ return null;
+ });
+ }
+
+ private Column[] createColumns() {
+ Column col1 = Column.of(HIVE_COL_NAME1, Types.ByteType.get(),
"col_1_comment");
+ Column col2 = Column.of(HIVE_COL_NAME2, Types.DateType.get(),
"col_2_comment");
+ Column col3 = Column.of(HIVE_COL_NAME3, Types.StringType.get(),
"col_3_comment");
+ return new Column[] {col1, col2, col3};
+ }
+}
diff --git
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
index 37370edf4e..871d4c538b 100644
---
a/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
+++
b/authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveIT.java
@@ -749,7 +749,7 @@ public class RangerHiveIT {
MetadataObject newMetadataObject = MetadataObjects.parse(newMetalake,
oldMetadataObject.type());
Assertions.assertTrue(
rangerAuthHivePlugin.onMetadataUpdated(
- MetadataObjectChange.rename(oldMetadataObject,
newMetadataObject)));
+ MetadataObjectChange.rename(oldMetadataObject, newMetadataObject,
null)));
SecurableObject newSecurableObject1 =
SecurableObjects.parse(
newMetadataObject.fullName(),
@@ -792,7 +792,7 @@ public class RangerHiveIT {
String.format("catalog-new-%s", currentFunName),
oldMetadataObject.type());
Assertions.assertTrue(
rangerAuthHivePlugin.onMetadataUpdated(
- MetadataObjectChange.rename(oldMetadataObject,
newMetadataObject)));
+ MetadataObjectChange.rename(oldMetadataObject, newMetadataObject,
null)));
SecurableObject newSecurableObject1 =
SecurableObjects.parse(
newMetadataObject.fullName(),
@@ -834,7 +834,7 @@ public class RangerHiveIT {
String.format("catalog.new-%s", currentFunName),
oldMetadataObject.type());
Assertions.assertTrue(
rangerAuthHivePlugin.onMetadataUpdated(
- MetadataObjectChange.rename(oldMetadataObject,
newMetadataObject)));
+ MetadataObjectChange.rename(oldMetadataObject, newMetadataObject,
null)));
assertFindManagedPolicyItems(role, false);
SecurableObject newSecurableObject1 =
SecurableObjects.parse(
@@ -877,7 +877,7 @@ public class RangerHiveIT {
String.format("catalog.schema1.new-%s", currentFunName),
oldMetadataObject.type());
Assertions.assertTrue(
rangerAuthHivePlugin.onMetadataUpdated(
- MetadataObjectChange.rename(oldMetadataObject,
newMetadataObject)));
+ MetadataObjectChange.rename(oldMetadataObject, newMetadataObject,
null)));
assertFindManagedPolicyItems(role, false);
SecurableObject newSecurableObject1 =
SecurableObjects.parse(
diff --git
a/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java
b/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java
index 4df0270c6c..0400666bb7 100644
---
a/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java
+++
b/core/src/main/java/org/apache/gravitino/authorization/AuthorizationUtils.java
@@ -287,14 +287,20 @@ public class AuthorizationUtils {
public static void authorizationPluginRenamePrivileges(
NameIdentifier ident, Entity.EntityType type, String newName) {
+ authorizationPluginRenamePrivileges(ident, type, newName, null);
+ }
+
+ public static void authorizationPluginRenamePrivileges(
+ NameIdentifier ident, Entity.EntityType type, String newName,
List<String> locations) {
// If we enable authorization, we should rename the privileges about the
entity in the
// authorization plugin.
if (GravitinoEnv.getInstance().accessControlDispatcher() != null) {
MetadataObject oldMetadataObject =
NameIdentifierUtil.toMetadataObject(ident, type);
MetadataObject newMetadataObject =
NameIdentifierUtil.toMetadataObject(NameIdentifier.of(ident.namespace(),
newName), type);
- MetadataObjectChange renameObject =
- MetadataObjectChange.rename(oldMetadataObject, newMetadataObject);
+
+ MetadataObjectChange renameChange =
+ MetadataObjectChange.rename(oldMetadataObject, newMetadataObject,
locations);
String metalake = type == Entity.EntityType.METALAKE ? newName :
ident.namespace().level(0);
@@ -304,7 +310,7 @@ public class AuthorizationUtils {
metalake,
newMetadataObject,
authorizationPlugin -> {
- authorizationPlugin.onMetadataUpdated(renameObject);
+ authorizationPlugin.onMetadataUpdated(renameChange);
});
}
}
diff --git
a/core/src/main/java/org/apache/gravitino/catalog/OperationDispatcher.java
b/core/src/main/java/org/apache/gravitino/catalog/OperationDispatcher.java
index 3e2ed6c1b1..87daa584f8 100644
--- a/core/src/main/java/org/apache/gravitino/catalog/OperationDispatcher.java
+++ b/core/src/main/java/org/apache/gravitino/catalog/OperationDispatcher.java
@@ -242,6 +242,16 @@ public abstract class OperationDispatcher {
}
}
+ protected <E extends Entity & HasIdentifier> E getEntity(
+ NameIdentifier ident, Entity.EntityType type, Class<E> entityClass) {
+ try {
+ return store.get(ident, type, entityClass);
+ } catch (Exception e) {
+ LOG.error(FormattedErrorMessages.STORE_OP_FAILURE, "get", ident, e);
+ throw new RuntimeException("Fail to check if entity is existed", e);
+ }
+ }
+
private <T> Map<String, String> getPropertiesForSet(T... t) {
Map<String, String> properties = Maps.newHashMap();
for (T item : t) {
diff --git
a/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java
b/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java
index b9f382e46a..b898192c00 100644
---
a/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java
+++
b/core/src/main/java/org/apache/gravitino/catalog/SchemaOperationDispatcher.java
@@ -240,8 +240,8 @@ public class SchemaOperationDispatcher extends
OperationDispatcher implements Sc
}
StringIdentifier stringId =
getStringIdFromProperties(alteredSchema.properties());
- // Case 1: The schema is not created by Gravitino.
- if (stringId == null) {
+ // Case 1: The schema was not created by Gravitino and has never been imported.
+ if (stringId == null && !isEntityExist(ident, SCHEMA)) {
return EntityCombinedSchema.of(alteredSchema)
.withHiddenProperties(
getHiddenPropertyNames(
@@ -250,6 +250,13 @@ public class SchemaOperationDispatcher extends
OperationDispatcher implements Sc
alteredSchema.properties()));
}
+ long schemaId;
+ if (stringId != null) {
+ schemaId = stringId.id();
+ } else {
+ schemaId = getEntity(ident, SCHEMA, SchemaEntity.class).id();
+ }
+
SchemaEntity updatedSchemaEntity =
operateOnEntity(
ident,
@@ -273,7 +280,7 @@ public class SchemaOperationDispatcher extends
OperationDispatcher implements Sc
.build())
.build()),
"UPDATE",
- stringId.id());
+ schemaId);
return EntityCombinedSchema.of(alteredSchema, updatedSchemaEntity)
.withHiddenProperties(
diff --git
a/core/src/main/java/org/apache/gravitino/catalog/TableOperationDispatcher.java
b/core/src/main/java/org/apache/gravitino/catalog/TableOperationDispatcher.java
index 49e892196c..312184e2b4 100644
---
a/core/src/main/java/org/apache/gravitino/catalog/TableOperationDispatcher.java
+++
b/core/src/main/java/org/apache/gravitino/catalog/TableOperationDispatcher.java
@@ -221,8 +221,8 @@ public class TableOperationDispatcher extends
OperationDispatcher implements Tab
IllegalArgumentException.class);
StringIdentifier stringId =
getStringIdFromProperties(alteredTable.properties());
- // Case 1: The table is not created by Gravitino.
- if (stringId == null) {
+ // Case 1: The table was not created by Gravitino and has never been imported.
+ if (stringId == null && !isEntityExist(ident, TABLE)) {
return EntityCombinedTable.of(alteredTable)
.withHiddenProperties(
getHiddenPropertyNames(
@@ -231,6 +231,13 @@ public class TableOperationDispatcher extends
OperationDispatcher implements Tab
alteredTable.properties()));
}
+ long tableId;
+ if (stringId != null) {
+ tableId = stringId.id();
+ } else {
+ tableId = getEntity(ident, TABLE, TableEntity.class).id();
+ }
+
TableEntity updatedTableEntity =
operateOnEntity(
ident,
@@ -266,7 +273,7 @@ public class TableOperationDispatcher extends
OperationDispatcher implements Tab
.build();
}),
"UPDATE",
- stringId.id());
+ tableId);
return EntityCombinedTable.of(alteredTable, updatedTableEntity)
.withHiddenProperties(
diff --git
a/core/src/main/java/org/apache/gravitino/hook/TableHookDispatcher.java
b/core/src/main/java/org/apache/gravitino/hook/TableHookDispatcher.java
index 903f3d1534..ea3299d3bb 100644
--- a/core/src/main/java/org/apache/gravitino/hook/TableHookDispatcher.java
+++ b/core/src/main/java/org/apache/gravitino/hook/TableHookDispatcher.java
@@ -97,18 +97,21 @@ public class TableHookDispatcher implements TableDispatcher
{
@Override
public Table alterTable(NameIdentifier ident, TableChange... changes)
throws NoSuchTableException, IllegalArgumentException {
-
- Table alteredTable = dispatcher.alterTable(ident, changes);
TableChange.RenameTable lastRenameChange = null;
+ List<String> locations = null;
for (TableChange change : changes) {
if (change instanceof TableChange.RenameTable) {
lastRenameChange = (TableChange.RenameTable) change;
}
}
+ if (lastRenameChange != null) {
+ locations = AuthorizationUtils.getMetadataObjectLocation(ident,
Entity.EntityType.TABLE);
+ }
+ Table alteredTable = dispatcher.alterTable(ident, changes);
if (lastRenameChange != null) {
AuthorizationUtils.authorizationPluginRenamePrivileges(
- ident, Entity.EntityType.TABLE, lastRenameChange.getNewName());
+ ident, Entity.EntityType.TABLE, lastRenameChange.getNewName(),
locations);
}
return alteredTable;