This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-1.2-lts
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-1.2-lts by this push:
     new 4db6aa67b3 [improvement](libhdfs) Use keytab and principal to login 
kerberos (#19841)
4db6aa67b3 is described below

commit 4db6aa67b38c75ecac9e10bedb662bf60ff9aa30
Author: Long Zhao <[email protected]>
AuthorDate: Mon May 22 10:54:21 2023 +0800

    [improvement](libhdfs) Use keytab and principal to login kerberos (#19841)
    
    Use keytab and principal to log in to Kerberos.
    The user no longer needs to execute kinit manually.
---
 be/CMakeLists.txt          |  2 +-
 be/src/io/hdfs_builder.cpp | 34 ++++++++++++++++++++++++++++------
 2 files changed, 29 insertions(+), 7 deletions(-)

diff --git a/be/CMakeLists.txt b/be/CMakeLists.txt
index aa40e61040..12621403fb 100644
--- a/be/CMakeLists.txt
+++ b/be/CMakeLists.txt
@@ -739,7 +739,7 @@ set(COMMON_THIRDPARTY
     simdjson
 )
 
-if (ARCH_AMD64 AND OS_LINUX)
+if ((ARCH_AMD64 OR ARCH_AARCH64) AND OS_LINUX)
     add_library(hadoop_hdfs STATIC IMPORTED)
     set_target_properties(hadoop_hdfs PROPERTIES IMPORTED_LOCATION 
${THIRDPARTY_DIR}/lib/hadoop_hdfs/native/libhdfs.a)
 
diff --git a/be/src/io/hdfs_builder.cpp b/be/src/io/hdfs_builder.cpp
index 51a7e539b6..ebed8b8c5b 100644
--- a/be/src/io/hdfs_builder.cpp
+++ b/be/src/io/hdfs_builder.cpp
@@ -47,9 +47,17 @@ Status HDFSCommonBuilder::run_kinit() {
         return Status::InvalidArgument("Invalid hdfs_kerberos_principal or 
hdfs_kerberos_keytab");
     }
     std::string ticket_path = TICKET_CACHE_PATH + generate_uuid_string();
+    const char* krb_home = getenv("KRB_HOME");
+    std::string krb_home_str(krb_home ? krb_home : "");
     fmt::memory_buffer kinit_command;
-    fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", ticket_path, 
hdfs_kerberos_keytab,
-                   hdfs_kerberos_principal);
+    if (krb_home_str.empty()) {
+        fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", 
ticket_path,
+                       hdfs_kerberos_keytab, hdfs_kerberos_principal);
+    } else {
+        // Assign kerberos home in env, get kinit in kerberos home
+        fmt::format_to(kinit_command, krb_home_str + "/bin/kinit -c {} -R -t 
{} -k {}", ticket_path,
+                       hdfs_kerberos_keytab, hdfs_kerberos_principal);
+    }
     VLOG_NOTICE << "kinit command: " << fmt::to_string(kinit_command);
     std::string msg;
     AgentUtils util;
@@ -59,8 +67,9 @@ Status HDFSCommonBuilder::run_kinit() {
     }
 #ifdef USE_LIBHDFS3
     hdfsBuilderSetPrincipal(hdfs_builder, hdfs_kerberos_principal.c_str());
-    hdfsBuilderSetKerbTicketCachePath(hdfs_builder, ticket_path.c_str());
 #endif
+    hdfsBuilderConfSetStr(hdfs_builder, 
"hadoop.security.kerberos.ticket.cache.path",
+                          ticket_path.c_str());
     return Status::OK();
 }
 
@@ -103,24 +112,37 @@ Status createHDFSBuilder(const THdfsParams& hdfsParams, 
HDFSCommonBuilder* build
     if (hdfsParams.__isset.hdfs_kerberos_principal) {
         builder->need_kinit = true;
         builder->hdfs_kerberos_principal = hdfsParams.hdfs_kerberos_principal;
+        hdfsBuilderSetUserName(builder->get(), 
hdfsParams.hdfs_kerberos_principal.c_str());
+    } else if (hdfsParams.__isset.user) {
+        hdfsBuilderSetUserName(builder->get(), hdfsParams.user.c_str());
+#ifdef USE_HADOOP_HDFS
+        hdfsBuilderSetKerb5Conf(builder->get(), nullptr);
+        hdfsBuilderSetKeyTabFile(builder->get(), nullptr);
+#endif
     }
     if (hdfsParams.__isset.hdfs_kerberos_keytab) {
         builder->need_kinit = true;
         builder->hdfs_kerberos_keytab = hdfsParams.hdfs_kerberos_keytab;
+#ifdef USE_HADOOP_HDFS
+        hdfsBuilderSetKeyTabFile(builder->get(), 
hdfsParams.hdfs_kerberos_keytab.c_str());
+#endif
     }
     // set other conf
     if (hdfsParams.__isset.hdfs_conf) {
         for (const THdfsConf& conf : hdfsParams.hdfs_conf) {
             hdfsBuilderConfSetStr(builder->get(), conf.key.c_str(), 
conf.value.c_str());
+#ifdef USE_HADOOP_HDFS
+            // Set krb5.conf, we should define java.security.krb5.conf in 
catalog properties
+            if (strcmp(conf.key.c_str(), "java.security.krb5.conf") == 0) {
+                hdfsBuilderSetKerb5Conf(builder->get(), conf.value.c_str());
+            }
+#endif
         }
     }
 
     hdfsBuilderConfSetStr(builder->get(), 
"ipc.client.fallback-to-simple-auth-allowed", "true");
     if (builder->is_need_kinit()) {
         RETURN_IF_ERROR(builder->run_kinit());
-    } else if (hdfsParams.__isset.user) {
-        // set hdfs user
-        hdfsBuilderSetUserName(builder->get(), hdfsParams.user.c_str());
     }
 
     return Status::OK();


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to