jerqi commented on code in PR #3852:
URL: https://github.com/apache/gravitino/pull/3852#discussion_r1679089279


##########
catalogs/catalog-hadoop/src/main/java/org/apache/gravitino/catalog/hadoop/SecureHadoopCatalogOperations.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.gravitino.catalog.hadoop;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.gravitino.Entity;
+import org.apache.gravitino.EntityStore;
+import org.apache.gravitino.NameIdentifier;
+import org.apache.gravitino.Namespace;
+import org.apache.gravitino.Schema;
+import org.apache.gravitino.SchemaChange;
+import org.apache.gravitino.catalog.hadoop.HadoopCatalogOperations.UserInfo;
+import 
org.apache.gravitino.catalog.hadoop.authentication.kerberos.KerberosConfig;
+import org.apache.gravitino.connector.CatalogInfo;
+import org.apache.gravitino.connector.CatalogOperations;
+import org.apache.gravitino.connector.HasPropertyMetadata;
+import org.apache.gravitino.connector.SupportsSchemas;
+import org.apache.gravitino.exceptions.FilesetAlreadyExistsException;
+import org.apache.gravitino.exceptions.NoSuchCatalogException;
+import org.apache.gravitino.exceptions.NoSuchEntityException;
+import org.apache.gravitino.exceptions.NoSuchFilesetException;
+import org.apache.gravitino.exceptions.NoSuchSchemaException;
+import org.apache.gravitino.exceptions.NonEmptySchemaException;
+import org.apache.gravitino.exceptions.SchemaAlreadyExistsException;
+import org.apache.gravitino.file.Fileset;
+import org.apache.gravitino.file.FilesetCatalog;
+import org.apache.gravitino.file.FilesetChange;
+import org.apache.gravitino.meta.FilesetEntity;
+import org.apache.gravitino.meta.SchemaEntity;
+import org.apache.gravitino.utils.PrincipalUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * SecureHadoopCatalogOperations is a secure version of 
HadoopCatalogOperations that can manage
+ * Schema and fileset level of user authentication.
+ */
+public class SecureHadoopCatalogOperations
+    implements CatalogOperations, SupportsSchemas, FilesetCatalog {
+
+  public static final Logger LOG = 
LoggerFactory.getLogger(SecureHadoopCatalogOperations.class);
+
+  private final HadoopCatalogOperations hadoopCatalogOperations;
+
+  public SecureHadoopCatalogOperations() {
+    this.hadoopCatalogOperations = new HadoopCatalogOperations();
+  }
+
+  public SecureHadoopCatalogOperations(EntityStore store) {
+    this.hadoopCatalogOperations = new HadoopCatalogOperations(store);
+  }
+
+  public HadoopCatalogOperations getHadoopCatalogOperations() {
+    return hadoopCatalogOperations;
+  }
+
+  public String getKerberosRealm() {
+    return hadoopCatalogOperations.getKerberosRealm();
+  }
+
+  public void setProxyPlugin(HadoopProxyPlugin plugin) {
+    hadoopCatalogOperations.setProxyPlugin(plugin);
+  }
+
+  // We have overridden the createFileset, dropFileset, createSchema, 
dropSchema method to reset
+  // the current user based on the name identifier.
+
+  @Override
+  public Fileset createFileset(
+      NameIdentifier ident,
+      String comment,
+      Fileset.Type type,
+      String storageLocation,
+      Map<String, String> properties)
+      throws NoSuchSchemaException, FilesetAlreadyExistsException {
+    UserGroupInformation currentUser = getUGIByIdent(properties, ident);
+    return doAs(
+        currentUser,
+        () ->
+            hadoopCatalogOperations.createFileset(
+                ident, comment, type, storageLocation, properties),
+        ident);
+  }
+
+  @Override
+  public boolean dropFileset(NameIdentifier ident) {
+    FilesetEntity filesetEntity;
+    try {
+      filesetEntity =
+          hadoopCatalogOperations
+              .getStore()
+              .get(ident, Entity.EntityType.FILESET, FilesetEntity.class);
+    } catch (NoSuchEntityException e) {
+      LOG.warn("Fileset {} does not exist", ident);
+      return false;
+    } catch (IOException ioe) {
+      throw new RuntimeException("Failed to delete fileset " + ident, ioe);
+    }
+
+    // Reset the current user based on the name identifier.
+    UserGroupInformation currentUser = 
getUGIByIdent(filesetEntity.properties(), ident);
+
+    boolean r = doAs(currentUser, () -> 
hadoopCatalogOperations.dropFileset(ident), ident);
+    cleanUserInfo(ident);
+    return r;
+  }
+
+  @Override
+  public Schema createSchema(NameIdentifier ident, String comment, Map<String, 
String> properties)
+      throws NoSuchCatalogException, SchemaAlreadyExistsException {
+    // Reset the current user based on the name identifier and properties.
+    UserGroupInformation currentUser = getUGIByIdent(properties, ident);
+
+    return doAs(
+        currentUser, () -> hadoopCatalogOperations.createSchema(ident, 
comment, properties), ident);
+  }
+
+  @Override
+  public boolean dropSchema(NameIdentifier ident, boolean cascade) throws 
NonEmptySchemaException {
+    try {
+      SchemaEntity schemaEntity =
+          hadoopCatalogOperations
+              .getStore()
+              .get(ident, Entity.EntityType.SCHEMA, SchemaEntity.class);
+      Map<String, String> properties =
+          
Optional.ofNullable(schemaEntity.properties()).orElse(Collections.emptyMap());
+
+      // Reset the current user based on the name identifier.
+      UserGroupInformation user = getUGIByIdent(properties, ident);
+
+      boolean r = doAs(user, () -> hadoopCatalogOperations.dropSchema(ident, 
cascade), ident);
+      cleanUserInfo(ident);
+      return r;
+    } catch (IOException ioe) {
+      throw new RuntimeException("Failed to delete schema " + ident, ioe);
+    }
+  }
+
+  @Override
+  public void initialize(
+      Map<String, String> config, CatalogInfo info, HasPropertyMetadata 
propertiesMetadata)
+      throws RuntimeException {
+    hadoopCatalogOperations.initialize(config, info, propertiesMetadata);
+  }
+
+  @Override
+  public Fileset alterFileset(NameIdentifier ident, FilesetChange... changes)
+      throws NoSuchFilesetException, IllegalArgumentException {
+    try {
+      return hadoopCatalogOperations.alterFileset(ident, changes);
+    } finally {
+      String finalName = ident.name();
+      for (FilesetChange change : changes) {

Review Comment:
   There are two rename operations:
   ```
   Rename A succeeds,
   Rename B fails.
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to