This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 3ed7c556e19 branch-3.0: [feat](catalog)Replace HadoopUGI with HadoopKerberosAuthenticator to Support Kerberos Ticket Auto-Renewal #44916 (#45137)
3ed7c556e19 is described below
commit 3ed7c556e1969de11c1a37a8513e19748c6c13d7
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Fri Dec 6 22:21:24 2024 -0800
branch-3.0: [feat](catalog)Replace HadoopUGI with HadoopKerberosAuthenticator to Support Kerberos Ticket Auto-Renewal #44916 (#45137)
Cherry-picked from #44916
Co-authored-by: Calvin Kirs <[email protected]>
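For readers skimming the diff: the removed static helper HadoopUGI.ugiDoAs(config, action) resolved a UGI on each call, while the new pattern obtains a HadoopAuthenticator from the AuthenticationConfig and runs privileged actions through its doAs(...), which per this change also keeps the Kerberos ticket renewed. A minimal sketch of the new call pattern, assuming a Kerberos-enabled Hadoop Configuration; the readFileSize helper and the FileSystem call are illustrative assumptions, not code from this patch:

    import org.apache.doris.common.security.authentication.AuthenticationConfig;
    import org.apache.doris.common.security.authentication.HadoopAuthenticator;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    import java.io.IOException;

    public class AuthenticatorSketch {
        // Hypothetical helper showing the call pattern used throughout this patch.
        public static long readFileSize(Configuration conf, String path) {
            AuthenticationConfig authConf = AuthenticationConfig.getKerberosConfig(conf);
            HadoopAuthenticator authenticator = HadoopAuthenticator.getHadoopAuthenticator(authConf);
            try {
                // doAs runs the action as the authenticated user; per this change,
                // the authenticator also takes care of ticket renewal.
                return authenticator.doAs(() -> FileSystem.get(conf)
                        .getFileStatus(new Path(path)).getLen());
            } catch (IOException e) {
                // doAs declares IOException; call sites in this patch wrap it the same way.
                throw new RuntimeException("Failed to read file size: " + path, e);
            }
        }
    }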
---
.../java/org/apache/doris/hudi/HudiJniScanner.java | 16 ++--
.../src/main/java/org/apache/doris/hudi/Utils.java | 12 ++-
.../common/security/authentication/HadoopUGI.java | 103 ---------------------
.../datasource/hive/HiveMetaStoreClientHelper.java | 20 ++--
.../datasource/paimon/PaimonExternalCatalog.java | 88 +++++++++++-------
5 files changed, 84 insertions(+), 155 deletions(-)
diff --git a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
index a284c7adcdd..bc082e56732 100644
--- a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
+++ b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/HudiJniScanner.java
@@ -17,11 +17,10 @@
package org.apache.doris.hudi;
-
import org.apache.doris.common.jni.JniScanner;
import org.apache.doris.common.jni.vec.ColumnType;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopUGI;
+import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.avro.generic.GenericDatumReader;
@@ -160,14 +159,15 @@ public class HudiJniScanner extends JniScanner {
cleanResolverLock.readLock().lock();
try {
lastUpdateTime.set(System.currentTimeMillis());
+ AuthenticationConfig authenticationConfig = AuthenticationConfig.getKerberosConfig(split.hadoopConf());
+ HadoopAuthenticator hadoopAuthenticator = HadoopAuthenticator
+ .getHadoopAuthenticator(authenticationConfig);
if (split.incrementalRead()) {
- recordIterator = HadoopUGI.ugiDoAs(AuthenticationConfig.getKerberosConfig(
- split.hadoopConf()),
- () -> new MORIncrementalSplitReader(split).buildScanIterator(new Filter[0]));
+ recordIterator = hadoopAuthenticator.doAs(() -> new MORIncrementalSplitReader(split)
+ .buildScanIterator(new Filter[0]));
} else {
- recordIterator = HadoopUGI.ugiDoAs(AuthenticationConfig.getKerberosConfig(
- split.hadoopConf()),
- () -> new MORSnapshotSplitReader(split).buildScanIterator(new Filter[0]));
+ recordIterator = hadoopAuthenticator.doAs(() -> new MORSnapshotSplitReader(split)
+ .buildScanIterator(new Filter[0]));
}
if (AVRO_RESOLVER_CACHE != null && AVRO_RESOLVER_CACHE.get() != null) {
cachedResolvers.computeIfAbsent(Thread.currentThread().getId(),
diff --git a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
index 3e07c891790..c0fbec633e8 100644
--- a/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
+++ b/fe/be-java-extensions/hudi-scanner/src/main/java/org/apache/doris/hudi/Utils.java
@@ -18,7 +18,7 @@
package org.apache.doris.hudi;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopUGI;
+import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
@@ -77,7 +77,13 @@ public class Utils {
public static HoodieTableMetaClient getMetaClient(Configuration conf, String basePath) {
HadoopStorageConfiguration hadoopStorageConfiguration = new HadoopStorageConfiguration(conf);
- return HadoopUGI.ugiDoAs(AuthenticationConfig.getKerberosConfig(conf), () -> HoodieTableMetaClient.builder()
- .setConf(hadoopStorageConfiguration).setBasePath(basePath).build());
+ AuthenticationConfig authenticationConfig = AuthenticationConfig.getKerberosConfig(conf);
+ HadoopAuthenticator hadoopAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
+ try {
+ return hadoopAuthenticator.doAs(() -> HoodieTableMetaClient.builder()
+ .setConf(hadoopStorageConfiguration).setBasePath(basePath).build());
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to get HoodieTableMetaClient", e);
+ }
}
}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
deleted file mode 100644
index 2f73440ecfa..00000000000
--- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/HadoopUGI.java
+++ /dev/null
@@ -1,103 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.common.security.authentication;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
-
-@Deprecated
-public class HadoopUGI {
- private static final Logger LOG = LogManager.getLogger(HadoopUGI.class);
-
- /**
- * login and return hadoop ugi
- * @param config auth config
- * @return ugi
- */
- private static UserGroupInformation loginWithUGI(AuthenticationConfig config) {
- if (config == null || !config.isValid()) {
- return null;
- }
- if (config instanceof KerberosAuthenticationConfig) {
- try {
- // TODO: remove after iceberg and hudi kerberos test case pass
- try {
- // login hadoop with keytab and try checking TGT
- UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
- LOG.debug("Current login user: {}", ugi.getUserName());
- String principal = ((KerberosAuthenticationConfig) config).getKerberosPrincipal();
- if (ugi.hasKerberosCredentials() && StringUtils.equals(ugi.getUserName(), principal)) {
- // if the current user is logged by kerberos and is the same user
- // just use checkTGTAndReloginFromKeytab because this method will only relogin
- // when the TGT is expired or is close to expiry
- ugi.checkTGTAndReloginFromKeytab();
- return ugi;
- }
- } catch (IOException e) {
- LOG.warn("A SecurityException occurs with kerberos, do login immediately.", e);
- }
- return new HadoopKerberosAuthenticator((KerberosAuthenticationConfig) config).getUGI();
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- } else {
- String hadoopUserName = ((SimpleAuthenticationConfig) config).getUsername();
- if (hadoopUserName == null) {
- hadoopUserName = "hadoop";
- ((SimpleAuthenticationConfig) config).setUsername(hadoopUserName);
- LOG.debug(AuthenticationConfig.HADOOP_USER_NAME + " is unset, use default user: hadoop");
- }
-
- UserGroupInformation ugi;
- try {
- ugi = UserGroupInformation.getLoginUser();
- if (ugi.getUserName().equals(hadoopUserName)) {
- return ugi;
- }
- } catch (IOException e) {
- LOG.warn("A SecurityException occurs with simple, do login immediately.", e);
- }
-
- ugi = UserGroupInformation.createRemoteUser(hadoopUserName);
- UserGroupInformation.setLoginUser(ugi);
- LOG.debug("Login by proxy user, hadoop.username: {}", hadoopUserName);
- return ugi;
- }
- }
-
- public static <T> T ugiDoAs(AuthenticationConfig authConf, PrivilegedExceptionAction<T> action) {
- UserGroupInformation ugi = HadoopUGI.loginWithUGI(authConf);
- try {
- if (ugi != null) {
- if (authConf instanceof KerberosAuthenticationConfig) {
- ugi.checkTGTAndReloginFromKeytab();
- }
- return ugi.doAs(action);
- } else {
- return action.run();
- }
- } catch (Exception e) {
- throw new RuntimeException(e.getMessage(), e);
- }
- }
-}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
index 97f86612a49..884cfbee45b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
@@ -40,7 +40,7 @@ import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopUGI;
+import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.fs.remote.dfs.DFSFileSystem;
import org.apache.doris.thrift.TExprOpcode;
@@ -68,6 +68,7 @@ import org.apache.hudi.storage.hadoop.HadoopStorageConfiguration;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.time.LocalDateTime;
import java.time.ZoneId;
@@ -823,19 +824,22 @@ public class HiveMetaStoreClientHelper {
public static <T> T ugiDoAs(Configuration conf, PrivilegedExceptionAction<T> action) {
// if hive config is not ready, then use hadoop kerberos to login
- AuthenticationConfig krbConfig = AuthenticationConfig.getKerberosConfig(conf,
- AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
- AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
- return HadoopUGI.ugiDoAs(krbConfig, action);
+ AuthenticationConfig authenticationConfig = AuthenticationConfig.getKerberosConfig(conf);
+ HadoopAuthenticator hadoopAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authenticationConfig);
+ try {
+ return hadoopAuthenticator.doAs(action);
+ } catch (IOException e) {
+ LOG.warn("HiveMetaStoreClientHelper ugiDoAs failed.", e);
+ throw new RuntimeException(e);
+ }
}
public static HoodieTableMetaClient getHudiClient(HMSExternalTable table) {
String hudiBasePath = table.getRemoteTable().getSd().getLocation();
Configuration conf = getConfiguration(table);
HadoopStorageConfiguration hadoopStorageConfiguration = new HadoopStorageConfiguration(conf);
- return HadoopUGI.ugiDoAs(AuthenticationConfig.getKerberosConfig(conf),
- () -> HoodieTableMetaClient.builder().setConf(hadoopStorageConfiguration).setBasePath(hudiBasePath)
- .build());
+ return ugiDoAs(conf, () -> HoodieTableMetaClient.builder().setConf(hadoopStorageConfiguration)
+ .setBasePath(hudiBasePath).build());
}
public static Configuration getConfiguration(HMSExternalTable table) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
index 5a9e6feb5ad..eb25336ab0b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalCatalog.java
@@ -19,7 +19,7 @@ package org.apache.doris.datasource.paimon;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopUGI;
+import org.apache.doris.common.security.authentication.HadoopAuthenticator;
import org.apache.doris.datasource.CatalogProperty;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.datasource.InitCatalogLog;
@@ -40,6 +40,7 @@ import org.apache.paimon.catalog.CatalogFactory;
import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.options.Options;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -53,6 +54,7 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
protected String catalogType;
protected Catalog catalog;
protected AuthenticationConfig authConf;
+ protected HadoopAuthenticator hadoopAuthenticator;
private static final List<String> REQUIRED_PROPERTIES = ImmutableList.of(
PaimonProperties.WAREHOUSE
@@ -71,9 +73,8 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
for (Map.Entry<String, String> propEntry : this.catalogProperty.getHadoopProperties().entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
}
- authConf = AuthenticationConfig.getKerberosConfig(conf,
- AuthenticationConfig.HADOOP_KERBEROS_PRINCIPAL,
- AuthenticationConfig.HADOOP_KERBEROS_KEYTAB);
+ authConf = AuthenticationConfig.getKerberosConfig(conf);
+ hadoopAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(authConf);
}
public String getCatalogType() {
@@ -82,40 +83,57 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
}
protected List<String> listDatabaseNames() {
- return HadoopUGI.ugiDoAs(authConf, () -> new ArrayList<>(catalog.listDatabases()));
+ try {
+ return hadoopAuthenticator.doAs(() -> new ArrayList<>(catalog.listDatabases()));
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to list databases names, catalog name: " + getName(), e);
+ }
}
@Override
public boolean tableExist(SessionContext ctx, String dbName, String tblName) {
makeSureInitialized();
- return HadoopUGI.ugiDoAs(authConf, () -> catalog.tableExists(Identifier.create(dbName, tblName)));
+ try {
+ return hadoopAuthenticator.doAs(() -> catalog.tableExists(Identifier.create(dbName, tblName)));
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to check table existence, catalog name: " + getName(), e);
+ }
}
@Override
public List<String> listTableNames(SessionContext ctx, String dbName) {
makeSureInitialized();
- return HadoopUGI.ugiDoAs(authConf, () -> {
- List<String> tableNames = null;
- try {
- tableNames = catalog.listTables(dbName);
- } catch (Catalog.DatabaseNotExistException e) {
- LOG.warn("DatabaseNotExistException", e);
- }
- return tableNames;
- });
+ try {
+ return hadoopAuthenticator.doAs(() -> {
+ List<String> tableNames = null;
+ try {
+ tableNames = catalog.listTables(dbName);
+ } catch (Catalog.DatabaseNotExistException e) {
+ LOG.warn("DatabaseNotExistException", e);
+ }
+ return tableNames;
+ });
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to list table names, catalog name: " + getName(), e);
+ }
}
public org.apache.paimon.table.Table getPaimonTable(String dbName, String tblName) {
makeSureInitialized();
- return HadoopUGI.ugiDoAs(authConf, () -> {
- org.apache.paimon.table.Table table = null;
- try {
- table = catalog.getTable(Identifier.create(dbName, tblName));
- } catch (Catalog.TableNotExistException e) {
- LOG.warn("TableNotExistException", e);
- }
- return table;
- });
+ try {
+ return hadoopAuthenticator.doAs(() -> {
+ org.apache.paimon.table.Table table = null;
+ try {
+ table = catalog.getTable(Identifier.create(dbName, tblName));
+ } catch (Catalog.TableNotExistException e) {
+ LOG.warn("TableNotExistException", e);
+ }
+ return table;
+ });
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to get Paimon table, catalog name: " + getName() + ", db: "
+ + dbName + ", table: " + tblName, e);
+ }
}
protected String getPaimonCatalogType(String catalogType) {
@@ -127,15 +145,19 @@ public abstract class PaimonExternalCatalog extends ExternalCatalog {
}
protected Catalog createCatalog() {
- return HadoopUGI.ugiDoAs(authConf, () -> {
- Options options = new Options();
- Map<String, String> paimonOptionsMap = getPaimonOptionsMap();
- for (Map.Entry<String, String> kv : paimonOptionsMap.entrySet()) {
- options.set(kv.getKey(), kv.getValue());
- }
- CatalogContext context = CatalogContext.create(options, getConfiguration());
- return createCatalogImpl(context);
- });
+ try {
+ return hadoopAuthenticator.doAs(() -> {
+ Options options = new Options();
+ Map<String, String> paimonOptionsMap = getPaimonOptionsMap();
+ for (Map.Entry<String, String> kv : paimonOptionsMap.entrySet()) {
+ options.set(kv.getKey(), kv.getValue());
+ }
+ CatalogContext context = CatalogContext.create(options, getConfiguration());
+ return createCatalogImpl(context);
+ });
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to create catalog, catalog name: " + getName(), e);
+ }
}
protected Catalog createCatalogImpl(CatalogContext context) {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]