This is an automated email from the ASF dual-hosted git repository.

gsaihemanth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 8848d1bece2 HIVE-29211: add LDAP callback handlers to enforce group filter for ke… (#6085)
8848d1bece2 is described below

commit 8848d1bece27093005d4b44de79a37d46e4a3e12
Author: Hazel Jiang <107082535+jjiang...@users.noreply.github.com>
AuthorDate: Fri Sep 26 12:14:12 2025 -0700

    HIVE-29211: add LDAP callback handlers to enforce group filter for ke… (#6085)
---
 .../java/org/apache/hadoop/hive/conf/HiveConf.java |   7 +
 .../apache/hive/service/auth/HiveAuthFactory.java  |  22 ++-
 .../auth/LdapAuthenticationProviderImpl.java       |   2 +-
 .../auth/ldap/KerberosLdapFilterEnforcer.java      | 105 ++++++++++
 .../auth/ldap/LdapGroupCallbackHandler.java        |  81 ++++++++
 .../hive/service/cli/thrift/ThriftHttpServlet.java |  25 ++-
 .../auth/TestLdapKerberosWithGroupFilter.java      | 186 ++++++++++++++++++
 .../auth/ldap/TestLdapGroupCallbackHandler.java    | 191 ++++++++++++++++++
 .../thrift/TestThriftHttpKerberosLdapFilter.java   | 214 +++++++++++++++++++++
 .../metastore/security/HadoopThriftAuthBridge.java |  17 +-
 10 files changed, 838 insertions(+), 12 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index e0e1bdbcb5c..813ff00d8a5 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4353,6 +4353,13 @@ public static enum ConfVars {
     HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD("hive.server2.authentication.ldap.bindpw", null,
         "The password for the bind user, to be used to search for the full name of the user being authenticated.\n" +
         "If the username is specified, this parameter must also be specified."),
+    HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS(
+        "hive.server2.authentication.ldap.enableGroupCheckAfterKerberos", false,
+        "If set to true, LDAP user and group filters are applied to Kerberos-authenticated users.\n" +
+            "Uses the same filter resolution as LDAP authentication (userSearchFilter, groupSearchFilter,\n" +
+            "customLDAPQuery, userFilter, groupFilter). Filters are not applied to authorized proxy users.\n" +
+            "Requires valid LDAP bind credentials to be configured.\n" +
+            "Default value is false."),
     HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS("hive.server2.custom.authentication.class", null,
         "Custom authentication class. Used when property\n" +
         "'hive.server2.authentication' is set to 'CUSTOM'. Provided class\n" +
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 8ff99cfcb1f..9e9d47837fd 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -23,6 +23,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.login.LoginException;
 import javax.security.sasl.AuthenticationException;
 import javax.security.sasl.Sasl;
@@ -40,6 +41,7 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hive.service.auth.ldap.LdapGroupCallbackHandler;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.rpc.thrift.TCLIService;
 import org.apache.thrift.TProcessorFactory;
@@ -69,11 +71,21 @@ public HiveAuthFactory(HiveConf conf, boolean isHttpMode) throws TTransportExcep
     hadoopAuth = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
     authType = AuthType.authTypeFromConf(conf, isHttpMode);
     if (isSASLWithKerberizedHadoop()) {
-      saslServer =
-          HadoopThriftAuthBridge.getBridge().createServer(
-              conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
-              conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL),
-              conf.getVar(ConfVars.HIVE_SERVER2_CLIENT_KERBEROS_PRINCIPAL));
+      boolean enableLdapGroupCheck = conf.getBoolVar(
+          ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS);
+      if (enableLdapGroupCheck) {
+        CallbackHandler callbackHandler = new LdapGroupCallbackHandler(conf);
+        saslServer = HadoopThriftAuthBridge.getBridge().createServer(
+            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL),
+            conf.getVar(ConfVars.HIVE_SERVER2_CLIENT_KERBEROS_PRINCIPAL),
+            callbackHandler);
+      } else {
+        saslServer = HadoopThriftAuthBridge.getBridge().createServer(
+            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL),
+            conf.getVar(ConfVars.HIVE_SERVER2_CLIENT_KERBEROS_PRINCIPAL));
+      }
 
       // Start delegation token manager
       delegationTokenManager = new MetastoreDelegationTokenManager();
diff --git a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
index cd8dedc01d0..d0a7337a9bf 100644
--- a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -130,7 +130,7 @@ private DirSearch createDirSearch(String user, String password) throws Authentic
         String.format("No candidate principals for %s was found.", user));
   }
 
-  private static Filter resolveFilter(HiveConf conf) {
+  public static Filter resolveFilter(HiveConf conf) {
     for (FilterFactory filterProvider : FILTER_FACTORIES) {
       Filter filter = filterProvider.getInstance(conf);
       if (filter != null) {
diff --git a/service/src/java/org/apache/hive/service/auth/ldap/KerberosLdapFilterEnforcer.java b/service/src/java/org/apache/hive/service/auth/ldap/KerberosLdapFilterEnforcer.java
new file mode 100644
index 00000000000..b48da4c513a
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/ldap/KerberosLdapFilterEnforcer.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth.ldap;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hive.service.auth.LdapAuthenticationProviderImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.sasl.AuthenticationException;
+import java.io.IOException;
+
+/**
+ * Helper that encapsulates LDAP filter resolution and enforcement for Kerberos-authenticated users.
+ */
+public final class KerberosLdapFilterEnforcer {
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosLdapFilterEnforcer.class);
+
+  private final HiveConf conf;
+  private final DirSearchFactory dirSearchFactory;
+  private final boolean enableLdapGroupCheck;
+  private final Filter filter;
+
+  public KerberosLdapFilterEnforcer(HiveConf conf, DirSearchFactory dirSearchFactory) {
+    this.conf = conf;
+    this.dirSearchFactory = dirSearchFactory;
+    this.enableLdapGroupCheck = conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS);
+    this.filter = enableLdapGroupCheck ? LdapAuthenticationProviderImpl.resolveFilter(conf) : null;
+
+    if (enableLdapGroupCheck && filter == null) {
+      LOG.warn("LDAP group check enabled but no filters configured");
+    }
+  }
+
+  /**
+   * Applies configured LDAP filters to authenticate a user principal.
+   *
+   * @param principal Kerberos principal to validate
+   * @return {@code true} if the principal passes all configured filters; {@code false} otherwise
+   */
+  public boolean applyLdapFilter(String principal) {
+    if (!enableLdapGroupCheck) {
+      return true;
+    }
+    if (filter == null) {
+      LOG.warn("LDAP group check enabled but no filters configured");
+      return false;
+    }
+
+    String user = extractUserName(principal);
+    try (DirSearch dirSearch = createDirSearch()) {
+      filter.apply(dirSearch, user);
+      LOG.debug("Principal {} passed LDAP filter validation", principal);
+      return true;
+    } catch (Exception e) {
+      LOG.warn("Principal {} failed LDAP filter validation", principal, e);
+      return false;
+    }
+  }
+
+  public boolean isFilterConfigured() {
+    return enableLdapGroupCheck && filter != null;
+  }
+
+  private DirSearch createDirSearch() throws AuthenticationException {
+    String bindDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER);
+    char[] passwordChars;
+    try {
+      passwordChars = conf.getPassword(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD.varname);
+    } catch (IOException e) {
+      throw new AuthenticationException("Failed to retrieve LDAP bind password");
+    }
+    String bindPassword = passwordChars == null ? null : new String(passwordChars);
+
+    if (StringUtils.isBlank(bindDN) || StringUtils.isBlank(bindPassword)) {
+      throw new AuthenticationException("LDAP bind credentials not configured");
+    }
+
+    return dirSearchFactory.getInstance(conf, bindDN, bindPassword);
+  }
+
+  @VisibleForTesting
+  public static String extractUserName(String principal) {
+    String[] parts = SaslRpcServer.splitKerberosName(principal);
+    return parts.length > 0 ? parts[0] : principal;
+  }
+}
diff --git a/service/src/java/org/apache/hive/service/auth/ldap/LdapGroupCallbackHandler.java b/service/src/java/org/apache/hive/service/auth/ldap/LdapGroupCallbackHandler.java
new file mode 100644
index 00000000000..77ee904264c
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/ldap/LdapGroupCallbackHandler.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth.ldap;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import java.io.IOException;
+
+/**
+ * Callback handler that enforces LDAP filters on Kerberos-authenticated users.
+ * This handler applies the same LDAP filter resolution used for LDAP authentication
+ * to Kerberos users, ensuring consistent authorization policies.
+ */
+public class LdapGroupCallbackHandler implements CallbackHandler {
+  private static final Logger LOG = LoggerFactory.getLogger(LdapGroupCallbackHandler.class);
+
+  private final CallbackHandler delegateHandler;
+  private final KerberosLdapFilterEnforcer filterEnforcer;
+
+  public LdapGroupCallbackHandler(HiveConf conf) {
+    this(conf, new LdapSearchFactory(), new SaslRpcServer.SaslGssCallbackHandler());
+  }
+
+  @VisibleForTesting
+  private LdapGroupCallbackHandler(HiveConf conf, DirSearchFactory dirSearchFactory, CallbackHandler delegateHandler) {
+    this.delegateHandler = delegateHandler;
+    this.filterEnforcer = new KerberosLdapFilterEnforcer(conf, dirSearchFactory);
+  }
+
+  @VisibleForTesting
+  public static LdapGroupCallbackHandler createForTesting(HiveConf conf, DirSearchFactory dirSearchFactory,
+      CallbackHandler delegateHandler) {
+    return new LdapGroupCallbackHandler(conf, dirSearchFactory, delegateHandler);
+  }
+
+  @Override
+  public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+    delegateHandler.handle(callbacks);
+
+    for (Callback callback : callbacks) {
+      if (!(callback instanceof AuthorizeCallback)) {
+        continue;
+      }
+
+      AuthorizeCallback ac = (AuthorizeCallback) callback;
+
+      if (!ac.isAuthorized()) {
+        LOG.debug("Delegate callback rejected {}; skipping LDAP filter", 
ac.getAuthenticationID());
+        continue;
+      }
+
+      String authenticationID = ac.getAuthenticationID();
+
+      boolean authorized = filterEnforcer.applyLdapFilter(authenticationID);
+      ac.setAuthorized(authorized);
+    }
+  }
+}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index ea774e2eb05..67ebb605d90 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -60,7 +60,10 @@
 import org.apache.hive.service.auth.PasswdAuthenticationProvider;
 import org.apache.hive.service.auth.PlainSaslHelper;
 import org.apache.hive.service.auth.jwt.JWTValidator;
+import org.apache.hive.service.auth.ldap.DirSearchFactory;
 import org.apache.hive.service.auth.ldap.HttpEmptyAuthenticationException;
+import org.apache.hive.service.auth.ldap.KerberosLdapFilterEnforcer;
+import org.apache.hive.service.auth.ldap.LdapSearchFactory;
 import org.apache.hive.service.auth.HttpAuthService;
 import org.apache.hive.service.auth.saml.HiveSaml2Client;
 import org.apache.hive.service.auth.saml.HiveSamlRelayStateStore;
@@ -290,7 +293,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
           LOG.error("Login attempt is failed for user : " +
               httpAuthService.getUsername(request) + ". Error Message :" + e.getMessage());
         } catch (Exception ex) {
-          // Failed logging an exception message, ignoring exception, but response status is set to 401/unauthorized  
+          // Failed logging an exception message, ignoring exception, but response status is set to 401/unauthorized
         }
       }
       response.getWriter().println("Authentication Error: " + e.getMessage());
@@ -473,6 +476,9 @@ String doKerberosAuth(HttpServletRequest request)
   class HttpKerberosServerAction implements PrivilegedExceptionAction<String> {
     HttpServletRequest request;
     UserGroupInformation serviceUGI;
+    private final DirSearchFactory dirSearchFactory = new LdapSearchFactory();
+    private final KerberosLdapFilterEnforcer filterEnforcer =
+        new KerberosLdapFilterEnforcer(hiveConf, dirSearchFactory);
 
     HttpKerberosServerAction(HttpServletRequest request,
         UserGroupInformation serviceUGI) {
@@ -516,7 +522,19 @@ public String run() throws HttpAuthenticationException {
               "unable to establish context with the service ticket " +
               "provided by the client.");
         } else {
-          return getPrincipalWithoutRealmAndHost(gssContext.getSrcName().toString());
+          String principal = gssContext.getSrcName().toString();
+          String shortName = getPrincipalWithoutRealmAndHost(principal);
+          LOG.debug("Kerberos authentication successful");
+          if (hiveConf.getBoolVar(
+              HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS)) {
+            boolean authorized = filterEnforcer.applyLdapFilter(principal);
+            if (!authorized) {
+              LOG.warn("User {} failed LDAP filter", principal);
+              throw new HttpAuthenticationException("LDAP filter check failed for user " + principal);
+            }
+            LOG.debug("User {} passed LDAP filter validation", principal);
+          }
+          return shortName;
         }
       } catch (GSSException e) {
         if (gssContext != null) {
@@ -539,7 +557,6 @@ public String run() throws HttpAuthenticationException {
         }
       }
     }
-
     private String getPrincipalWithoutRealm(String fullPrincipal)
         throws HttpAuthenticationException {
       KerberosNameShim fullKerberosName;
@@ -649,5 +666,3 @@ private static String getDoAsQueryParam(String queryString) {
   }
 
 }
-
-
diff --git a/service/src/test/org/apache/hive/service/auth/TestLdapKerberosWithGroupFilter.java b/service/src/test/org/apache/hive/service/auth/TestLdapKerberosWithGroupFilter.java
new file mode 100644
index 00000000000..755236e9dfc
--- /dev/null
+++ b/service/src/test/org/apache/hive/service/auth/TestLdapKerberosWithGroupFilter.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.auth.ldap.DirSearch;
+import org.apache.hive.service.auth.ldap.DirSearchFactory;
+import org.apache.hive.service.auth.ldap.LdapGroupCallbackHandler;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.sasl.AuthorizeCallback;
+
+import java.util.Collections;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
+
+/**
+ * Tests for Kerberos authentication with LDAP group filtering.
+ * This test uses mocks to avoid the need for real LDAP or Kerberos servers.
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestLdapKerberosWithGroupFilter {
+
+  private static final String GROUP1_NAME = "group1";
+  private static final String GROUP2_NAME = "group2";
+  private static final String USER1_ID = "user1";
+  private static final String USER2_ID = "user2";
+  private static final String USER1_PRINCIPAL = USER1_ID + "@TEST.REALM";
+  private static final String USER2_PRINCIPAL = USER2_ID + "@TEST.REALM";
+
+  @Mock
+  private DirSearch dirSearch;
+
+  @Mock
+  private DirSearchFactory dirSearchFactory;
+
+  @Mock
+  private CallbackHandler delegateHandler;
+
+  private HiveConf conf;
+
+  @Before
+  public void setup() throws Exception {
+    conf = new HiveConf();
+    conf.set("hive.root.logger", "DEBUG,console");
+
+    // Setup LDAP connection parameters
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL, "ldap://localhost:10389");
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER, "cn=admin,dc=example,dc=com");
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD, "admin");
+
+    // Configure Kerberos auth
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, "KERBEROS");
+
+    // Reset mocks before each test
+    reset(dirSearchFactory, dirSearch, delegateHandler);
+
+    // Set up the default mock behavior
+    when(dirSearchFactory.getInstance(any(HiveConf.class), anyString(), anyString()))
+        .thenReturn(dirSearch);
+
+    doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      if (args.length > 0 && args[0] instanceof Callback[]) {
+        for (Callback callback : (Callback[]) args[0]) {
+          if (callback instanceof AuthorizeCallback) {
+            AuthorizeCallback authorizeCallback = (AuthorizeCallback) callback;
+            String authId = authorizeCallback.getAuthenticationID();
+            String authzId = authorizeCallback.getAuthorizationID();
+            authorizeCallback.setAuthorized(authId != null && authId.equals(authzId));
+          }
+        }
+      }
+      return null;
+    }).when(delegateHandler).handle(any(Callback[].class));
+  }
+
+  @After
+  public void tearDown() {
+    conf = null;
+  }
+
+  @Test
+  public void testKerberosAuthWithLdapGroupCheckPositive() throws Exception {
+    // Configure LDAP to allow only users in group1
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, GROUP1_NAME);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    String userDn = "uid=user1,dc=example,dc=com";
+    String groupDn = "cn=group1,dc=example,dc=com";
+
+    // Mock the DirSearch to succeed for both UserSearchFilter and GroupMembershipKeyFilter
+    when(dirSearch.findUserDn(USER1_ID)).thenReturn(userDn);
+    when(dirSearch.findGroupsForUser(eq(userDn))).thenReturn(Collections.singletonList(groupDn));
+
+    // Create the callback handler with our test configuration
+    LdapGroupCallbackHandler callbackHandler = LdapGroupCallbackHandler.createForTesting(
+        conf, dirSearchFactory, delegateHandler);
+
+    // Create an AuthorizeCallback as would be done by Kerberos authentication
+    AuthorizeCallback ac = new AuthorizeCallback(USER1_PRINCIPAL, USER1_PRINCIPAL);
+    Callback[] callbacks = {ac};
+    callbackHandler.handle(callbacks);
+
+    assertTrue(ac.isAuthorized());
+
+    // Verify LDAP operations occurred
+    verify(dirSearchFactory).getInstance(eq(conf), eq("cn=admin,dc=example,dc=com"), eq("admin")); // More specific
+    verify(dirSearch, times(2)).findUserDn(USER1_ID);
+    verify(dirSearch).findGroupsForUser(eq(userDn)); // Changed from anyString()
+
+  }
+
+  @Test
+  public void testKerberosAuthWithLdapGroupCheckNegative() throws Exception {
+    // Configure LDAP to allow only users in group1
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, GROUP1_NAME);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+
+    String userDn = "uid=user2,dc=example,dc=com";
+    String wrongGroupDn = "cn=group3,dc=example,dc=com";
+
+    when(dirSearch.findUserDn(USER2_ID)).thenReturn(userDn);
+    when(dirSearch.findGroupsForUser(eq(userDn))).thenReturn(Collections.singletonList(wrongGroupDn));
+
+    LdapGroupCallbackHandler callbackHandler = LdapGroupCallbackHandler.createForTesting(
+        conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac = new AuthorizeCallback(USER2_PRINCIPAL, USER2_PRINCIPAL);
+    Callback[] callbacks = {ac};
+    callbackHandler.handle(callbacks);
+
+    assertFalse(ac.isAuthorized());
+
+    verify(dirSearch, times(2)).findUserDn(USER2_ID);
+    verify(dirSearch).findGroupsForUser(eq(userDn));
+  }
+
+  @Test
+  public void testKerberosAuthWithDisabledLdapGroupCheck() throws Exception {
+    // Disable LDAP group check
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, false);
+    // Even if a group filter is set, it should be ignored
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, GROUP1_NAME);
+
+    LdapGroupCallbackHandler callbackHandler = LdapGroupCallbackHandler.createForTesting(
+        conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac1 = new AuthorizeCallback(USER1_PRINCIPAL, USER1_PRINCIPAL);
+    AuthorizeCallback ac2 = new AuthorizeCallback(USER2_PRINCIPAL, USER2_PRINCIPAL);
+
+    callbackHandler.handle(new Callback[]{ac1, ac2});
+
+    // Both users should be authorized since group check is disabled
+    assertTrue(ac1.isAuthorized());
+    assertTrue(ac2.isAuthorized());
+
+    // Ensure no LDAP interactions occurred
+    verifyNoInteractions(dirSearch);
+  }
+
+}
diff --git a/service/src/test/org/apache/hive/service/auth/ldap/TestLdapGroupCallbackHandler.java b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapGroupCallbackHandler.java
new file mode 100644
index 00000000000..e8e29aafd65
--- /dev/null
+++ b/service/src/test/org/apache/hive/service/auth/ldap/TestLdapGroupCallbackHandler.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth.ldap;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+
+import static org.junit.Assert.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.mockito.Mockito.*;
+
+@RunWith(MockitoJUnitRunner.class)
+public class TestLdapGroupCallbackHandler {
+
+  private static final String TEST_USER = "user";
+  private static final String TEST_PRINCIPAL = TEST_USER + "@TEST.REALM";
+
+  @Mock
+  private DirSearch dirSearch;
+
+  @Mock
+  private DirSearchFactory dirSearchFactory;
+
+  @Mock
+  private javax.security.auth.callback.CallbackHandler delegateHandler;
+
+  private HiveConf conf;
+  private LdapGroupCallbackHandler callbackHandler;
+
+  @Before
+  public void setup() throws Exception {
+    conf = new HiveConf();
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER, "bindUser");
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD, "bindPassword");
+    doAnswer(invocation -> {
+      Object[] args = invocation.getArguments();
+      if (args.length > 0 && args[0] instanceof Callback[]) {
+        Callback[] callbackArray = (Callback[]) args[0];
+        for (Callback callback : callbackArray) {
+          if (callback instanceof AuthorizeCallback) {
+            AuthorizeCallback authorizeCallback = (AuthorizeCallback) callback;
+            String authId = authorizeCallback.getAuthenticationID();
+            String authzId = authorizeCallback.getAuthorizationID();
+            authorizeCallback.setAuthorized(authId != null && authId.equals(authzId));
+          }
+        }
+      }
+      return null;
+    }).when(delegateHandler).handle(any(Callback[].class));
+  }
+
+  @Test
+  public void testAuthorizeWithNoLdapFilter() throws Exception {
+    // Disable LDAP filter check
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, false);
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac = new AuthorizeCallback(TEST_PRINCIPAL, TEST_PRINCIPAL);
+    callbackHandler.handle(new Callback[]{ac});
+
+    // Expect immediate authorization when no LDAP filtering is enabled
+    assertTrue(ac.isAuthorized());
+    verifyNoInteractions(dirSearch);
+  }
+
+  @Test
+  public void testAuthorizeWithNoFilterConfigured() throws Exception {
+    // Enable group check but don't configure any filters
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    // No filters configured - resolveFilter will return null
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac = new AuthorizeCallback(TEST_PRINCIPAL, TEST_PRINCIPAL);
+    callbackHandler.handle(new Callback[]{ac});
+
+    // Should reject since group check is enabled but misconfigured
+    assertFalse(ac.isAuthorized());
+    verifyNoInteractions(dirSearch);
+  }
+
+  @Test
+  public void testDelegationWithDifferentAuthIds() throws Exception {
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+
+    String authorizationId = "anotheruser@TEST.REALM";
+    AuthorizeCallback ac = new AuthorizeCallback(TEST_PRINCIPAL, authorizationId);
+    Callback[] callbacks = {ac};
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+    callbackHandler.handle(callbacks);
+
+    // Since authentication and authorization IDs differ, the handler should delegate
+    verify(delegateHandler).handle(argThat(callbackArray ->
+        callbackArray.length == 1 && callbackArray[0] == ac));
+    verifyNoInteractions(dirSearch);
+  }
+
+  @Test
+  public void testAuthorizeWithMissingBindCredentials() throws Exception {
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "group1");
+    conf.unset(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER.varname);
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac = new AuthorizeCallback(TEST_PRINCIPAL, TEST_PRINCIPAL);
+    callbackHandler.handle(new Callback[]{ac});
+
+    // Missing bind credentials should cause authorization failure
+    assertFalse(ac.isAuthorized());
+    verifyNoInteractions(dirSearch);
+  }
+
+  @Test(expected = UnsupportedCallbackException.class)
+  public void testHandleUnsupportedCallback() throws Exception {
+    Callback unsupportedCallback = mock(Callback.class);
+    Callback[] callbacks = {unsupportedCallback};
+
+    doThrow(new UnsupportedCallbackException(unsupportedCallback))
+        .when(delegateHandler).handle(any(Callback[].class));
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    callbackHandler.handle(callbacks);
+  }
+
+  @Test
+  public void testHandleMixedCallbacks() throws Exception {
+    // Enable group check and configure a filter
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER, TEST_USER);
+
+    AuthorizeCallback ac = new AuthorizeCallback(TEST_PRINCIPAL, TEST_PRINCIPAL);
+    Callback unsupportedCallback = mock(Callback.class);
+    Callback[] callbacks = {ac, unsupportedCallback};
+
+    doThrow(new UnsupportedCallbackException(unsupportedCallback))
+        .when(delegateHandler).handle(any(Callback[].class));
+
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    try {
+      callbackHandler.handle(callbacks);
+      fail("Expected UnsupportedCallbackException");
+    } catch (UnsupportedCallbackException e) {
+      assertEquals(unsupportedCallback, e.getCallback());
+      assertFalse(ac.isAuthorized());
+      verify(delegateHandler).handle(any(Callback[].class));
+      verifyNoInteractions(dirSearch);
+    }
+  }
+
+  @Test
+  public void testHandleDelegatesWhenAuthIdsMissing() throws Exception {
+    callbackHandler = LdapGroupCallbackHandler.createForTesting(conf, dirSearchFactory, delegateHandler);
+
+    AuthorizeCallback ac = new AuthorizeCallback(null, TEST_PRINCIPAL);
+
+    callbackHandler.handle(new Callback[]{ac});
+
+    verify(delegateHandler).handle(any(Callback[].class));
+    verifyNoInteractions(dirSearch);
+    assertFalse(ac.isAuthorized());
+  }
+}
diff --git a/service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpKerberosLdapFilter.java b/service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpKerberosLdapFilter.java
new file mode 100644
index 00000000000..fee4b7806d3
--- /dev/null
+++ b/service/src/test/org/apache/hive/service/cli/thrift/TestThriftHttpKerberosLdapFilter.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.cli.thrift;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.service.auth.HttpAuthenticationException;
+import org.apache.hive.service.auth.ldap.DirSearch;
+import org.apache.hive.service.auth.ldap.DirSearchFactory;
+import org.apache.hive.service.auth.ldap.KerberosLdapFilterEnforcer;
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.GSSName;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import java.util.Collections;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.*;
+
+/**
+ * Tests for the HTTP Kerberos authentication with additional LDAP group filtering.
+ * Uses Mockito for mocking LDAP and Kerberos components.
+ */
+@RunWith(MockitoJUnitRunner.class)
+public class TestThriftHttpKerberosLdapFilter {
+
+  private static final String TEST_USER = "user";
+  private static final String TEST_REALM = "TEST.REALM";
+  private static final String TEST_PRINCIPAL = TEST_USER + "@" + TEST_REALM;
+
+  @Mock
+  private GSSContext gssContext;
+
+  @Mock
+  private GSSName gssName;
+
+  @Mock
+  private DirSearch dirSearch;
+
+  @Mock
+  private DirSearchFactory dirSearchFactory;
+
+  private HiveConf hiveConf;
+  private TestableKerberosAuthHandler authHandler;
+
+  @Before
+  public void setup() throws Exception {
+    hiveConf = new HiveConf();
+
+    // Set up bind user and password
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER, "bindUser");
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD, "bindPassword");
+
+    // Mock GSSContext to return a test principal
+    when(gssName.toString()).thenReturn(TEST_PRINCIPAL);
+    when(gssContext.getSrcName()).thenReturn(gssName);
+
+    // Mock DirSearchFactory
+    when(dirSearchFactory.getInstance(any(HiveConf.class), anyString(), anyString())).thenReturn(dirSearch);
+
+    authHandler = null;
+  }
+
+  private void createAuthHandler() {
+    authHandler = new TestableKerberosAuthHandler(hiveConf, dirSearchFactory);
+  }
+
+  @Test
+  public void testKerberosAuthWithNoLdapFilters() throws Exception {
+    // Disable filters
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, false);
+
+    createAuthHandler();
+    // Run authentication
+    String username = authHandler.run();
+
+    assertEquals(TEST_USER, username);
+    verifyNoInteractions(dirSearch);
+  }
+
+  @Test
+  public void testKerberosAuthWithGroupFilterEnabled() throws Exception {
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "group1,group2");
+
+    String userDn = "uid=user,dc=example,dc=com";
+    String groupDn = "cn=group1,dc=example,dc=com";
+
+    when(dirSearch.findUserDn(TEST_USER)).thenReturn(userDn);
+    when(dirSearch.findGroupsForUser(eq(userDn))).thenReturn(Collections.singletonList(groupDn));
+
+    createAuthHandler();
+    String username = authHandler.run();
+    assertEquals(TEST_USER, username);
+
+    verify(dirSearchFactory).getInstance(eq(hiveConf), eq("bindUser"), eq("bindPassword"));
+    verify(dirSearch, times(2)).findUserDn(TEST_USER);
+    verify(dirSearch).findGroupsForUser(eq(userDn));
+  }
+
+  @Test(expected = HttpAuthenticationException.class)
+  public void testKerberosAuthWithGroupFilterFailure() throws Exception {
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "group1");
+
+    String userDn = "uid=user,dc=example,dc=com";
+    String wrongGroupDn = "cn=group3,dc=example,dc=com";
+
+    when(dirSearch.findUserDn(TEST_USER)).thenReturn(userDn);
+    when(dirSearch.findGroupsForUser(eq(userDn))).thenReturn(Collections.singletonList(wrongGroupDn));
+
+    createAuthHandler();
+    authHandler.run();
+  }
+
+  @Test(expected = HttpAuthenticationException.class)
+  public void testKerberosAuthWithNoFilterConfigured() throws Exception {
+    // Enable group check but don't configure any filters
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    // No filters configured - resolveFilter will return null
+
+    createAuthHandler();
+    authHandler.run();
+  }
+
+  @Test(expected = HttpAuthenticationException.class)
+  public void testKerberosAuthWithMissingBindCredentials() throws Exception {
+    // Enable group check but remove bind credentials
+    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
+    hiveConf.setVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "group1");
+    hiveConf.unset(ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER.varname);
+
+    createAuthHandler();
+    // Run authentication - should throw exception
+    authHandler.run();
+  }
+
+  /**
+   * A custom implementation for testing Kerberos authentication with LDAP filters
+   */
+  private class TestableKerberosAuthHandler {
+    private final HiveConf hiveConf;
+    private final DirSearchFactory mockDirSearchFactory;
+    private final KerberosLdapFilterEnforcer filterEnforcer;
+
+    public TestableKerberosAuthHandler(HiveConf hiveConf,
+                                       DirSearchFactory dirSearchFactory) {
+      this.hiveConf = hiveConf;
+      this.mockDirSearchFactory = dirSearchFactory;
+      this.filterEnforcer = new KerberosLdapFilterEnforcer(hiveConf, dirSearchFactory);
+    }
+
+    private void enforceLdapFilters(String principal) throws HttpAuthenticationException {
+      // Implementation that delegates to KerberosLdapFilterEnforcer
+      boolean enableGroupCheck = hiveConf.getBoolVar(
+          HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS);
+
+      if (!enableGroupCheck) {
+        return;
+      }
+
+      boolean authorized = filterEnforcer.applyLdapFilter(principal);
+      if (!authorized) {
+        throw new HttpAuthenticationException("LDAP filter check failed for user " + principal);
+      }
+    }
+
+    public String run() throws HttpAuthenticationException {
+      try {
+        // Simulate successful GSS context establishment
+        if (gssContext == null) {
+          throw new HttpAuthenticationException("GSS Context is null");
+        }
+
+        // Get principal name from GSS context
+        GSSName srcName = gssContext.getSrcName();
+        if (srcName == null) {
+          throw new HttpAuthenticationException("Kerberos authentication failed: Could not obtain user principal from GSS Context");
+        }
+
+        String principal = srcName.toString();
+        String shortName = KerberosLdapFilterEnforcer.extractUserName(principal);
+        enforceLdapFilters(principal);
+
+        return shortName;
+      } catch (GSSException e) {
+        throw new HttpAuthenticationException("Kerberos authentication failed", e);
+      }
+    }
+  }
+}
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
index a5e71d75cf5..4d9c52430c0 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/security/HadoopThriftAuthBridge.java
@@ -115,6 +115,13 @@ public Server createServer(String keytabFile, String principalConf, String clien
     return new Server(keytabFile, principalConf, clientConf);
   }
 
+  public Server createServer(String keytabFile, String principalConf, String clientConf,
+      CallbackHandler callbackHandler) throws TTransportException {
+    Server server = new Server(keytabFile, principalConf, clientConf);
+    server.setCallbackHandler(callbackHandler);
+    return server;
+  }
+
 
   public String getServerPrincipal(String principalConfig, String host)
       throws IOException {
@@ -324,6 +331,7 @@ public enum ServerMode {
     protected final UserGroupInformation realUgi;
     protected final UserGroupInformation clientValidationUGI;
     protected DelegationTokenSecretManager secretManager;
+    private CallbackHandler callbackHandler;
 
     public Server() throws TTransportException {
       try {
@@ -378,6 +386,10 @@ public void setSecretManager(DelegationTokenSecretManager secretManager) {
       this.secretManager = secretManager;
     }
 
+    public void setCallbackHandler(CallbackHandler callbackHandler) {
+      this.callbackHandler = callbackHandler;
+    }
+
     /**
      * Create a TTransportFactory that, upon connection of a client socket,
      * negotiates a Kerberized SASL transport. The resulting TTransportFactory
@@ -411,11 +423,14 @@ public TSaslServerTransport.Factory createSaslServerTransportFactory(
       }
 
       TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory();
+      CallbackHandler kerberosCallbackHandler = this.callbackHandler != null
+        ? this.callbackHandler
+        : new SaslRpcServer.SaslGssCallbackHandler();
       transFactory.addServerDefinition(
           AuthMethod.KERBEROS.getMechanismName(),
           names[0], names[1],  // two parts of kerberos principal
           saslProps,
-          new SaslRpcServer.SaslGssCallbackHandler());
+          kerberosCallbackHandler);
       transFactory.addServerDefinition(AuthMethod.DIGEST.getMechanismName(),
           null, SaslRpcServer.SASL_DEFAULT_REALM,
           saslProps, new SaslDigestCallbackHandler(secretManager));
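
For reference only (not part of the committed patch): a minimal sketch of how the new flag might be enabled alongside the existing LDAP settings it reuses, mirroring the test setup above. The class name, LDAP URL, bind DN/password, and group name below are illustrative placeholders, not values taken from this commit.

    import org.apache.hadoop.hive.conf.HiveConf;

    /** Illustrative sketch only; assumes HiveServer2 already authenticates clients with Kerberos. */
    public class EnableLdapGroupCheckExample {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Kerberos remains the primary authentication mechanism.
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, "KERBEROS");
        // The group check reuses the existing LDAP connection and filter settings
        // (placeholder URL, bind credentials, and group name).
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL, "ldap://ldap.example.com:389");
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_USER, "cn=admin,dc=example,dc=com");
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BIND_PASSWORD, "admin");
        conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "hive-users");
        // New in HIVE-29211: also apply the LDAP filters to Kerberos-authenticated users.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LDAP_ENABLE_GROUP_CHECK_AFTER_KERBEROS, true);
      }
    }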

