This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch hadoop-3.4.2
in repository https://gitbox.apache.org/repos/asf/doris-thirdparty.git

commit 5086dc901f366ad249abbbe68ce01f7ae6a3464b
Author: Long Zhao <[email protected]>
AuthorDate: Thu May 18 11:18:55 2023 +0800

    [feature] C API for using a principal and keytab to log in to Kerberos (#69)
---
 .../src/main/native/libhdfs/exception.c            |  10 ++
 .../src/main/native/libhdfs/hdfs.c                 | 143 +++++++++++++++++++--
 .../src/main/native/libhdfs/include/hdfs/hdfs.h    |   9 ++
 .../src/main/native/libhdfs/jclasses.c             |   4 +
 .../src/main/native/libhdfs/jclasses.h             |   4 +
 .../src/main/native/libhdfs/jni_helper.c           |  17 ++-
 6 files changed, 170 insertions(+), 17 deletions(-)
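
For readers of this change, a minimal usage sketch of the new builder calls might look like the following. It is not part of the patch; the namenode URI, krb5.conf path, keytab path, and principal are placeholders. The principal is supplied through hdfsBuilderSetUserName, which hdfsBuilderConnect hands to UserGroupInformation.loginUserFromKeytab once both the krb5 conf path and the keytab have been set:

    #include <stdio.h>
    #include "hdfs/hdfs.h"

    int main(void) {
        struct hdfsBuilder *bld = hdfsNewBuilder();
        hdfsBuilderSetNameNode(bld, "hdfs://namenode:8020");      /* placeholder URI */
        /* Point the JVM at the krb5 config; this also switches the builder to Kerberos auth. */
        hdfsBuilderSetKerb5Conf(bld, "/etc/krb5.conf");
        /* The keytab is used together with the user name (the principal) at connect time. */
        hdfsBuilderSetKeyTabFile(bld, "/etc/security/doris.keytab");
        hdfsBuilderSetUserName(bld, "doris@EXAMPLE.COM");          /* placeholder principal */
        hdfsFS fs = hdfsBuilderConnect(bld);   /* the builder is freed inside hdfsBuilderConnect */
        if (!fs) {
            fprintf(stderr, "kerberos login / connect failed\n");
            return 1;
        }
        hdfsDisconnect(fs);
        return 0;
    }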

diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/exception.c b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/exception.c
index fec9a103b4e..ed61b77fcad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/exception.c
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/exception.c
@@ -90,6 +90,16 @@ static const struct ExceptionInfo gExceptionInfo[] = {
         0,
         ESTALE,
     },
+    {
+        "org.apache.hadoop.security.KerberosAuthException",
+        NOPRINT_EXC_ACCESS_CONTROL,
+        ETIME
+    },
+    {
+        "java.net.ConnectException",
+        0,
+        EFAULT
+    },
 };
 
 void getExceptionInfo(const char *excName, int noPrintFlags,
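
The two entries added above map org.apache.hadoop.security.KerberosAuthException to ETIME and java.net.ConnectException to EFAULT. Since libhdfs reports failures through errno, a caller should be able to tell an authentication failure apart from a plain connection failure. A hedged sketch of that check follows (the helper name and error strings are illustrative only, not part of the patch):

    #include <errno.h>
    #include <stdio.h>
    #include "hdfs/hdfs.h"

    static hdfsFS connectOrReport(struct hdfsBuilder *bld) {
        hdfsFS fs = hdfsBuilderConnect(bld);
        if (!fs) {
            if (errno == ETIME) {
                /* Mapped from KerberosAuthException by the table above. */
                fprintf(stderr, "Kerberos authentication failed\n");
            } else if (errno == EFAULT) {
                /* Mapped from java.net.ConnectException by the table above. */
                fprintf(stderr, "could not reach the namenode\n");
            } else {
                fprintf(stderr, "hdfsBuilderConnect failed, errno=%d\n", errno);
            }
        }
        return fs;
    }
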
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
index ed150925cdb..ea00393cfc7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
@@ -25,6 +25,7 @@
 #include <fcntl.h>
 #include <inttypes.h>
 #include <stdio.h>
+#include <stdlib.h>
 #include <string.h>
 
 #define JAVA_VOID       "V"
@@ -495,6 +496,11 @@ done:
     return ret;
 }
 
+struct hdfsBuilderConfFileOpt {
+    struct hdfsBuilderConfFileOpt *next;
+    const char *currentPath;
+};
+
 struct hdfsBuilderConfOpt {
     struct hdfsBuilderConfOpt *next;
     const char *key;
@@ -506,10 +512,26 @@ struct hdfsBuilder {
     const char *nn;
     tPort port;
     const char *kerbTicketCachePath;
+    const char *kerb5ConfPath;
+    const char *keyTabFile;
     const char *userName;
     struct hdfsBuilderConfOpt *opts;
+    struct hdfsBuilderConfFileOpt *fileOpts;
 };
 
+void hdfsBuilderSetKerb5Conf(struct hdfsBuilder *bld, const char *kerb5ConfPath) {
+    bld->kerb5ConfPath = kerb5ConfPath;
+    if (bld->kerb5ConfPath) {
+        systemPropertySetStr("java.security.krb5.conf", bld->kerb5ConfPath);
+        hdfsBuilderConfSetStr(bld, "hadoop.security.authorization", "true");
+        hdfsBuilderConfSetStr(bld, "hadoop.security.authentication", "kerberos");
+    }
+}
+
+void hdfsBuilderSetKeyTabFile(struct hdfsBuilder *bld, const char *keyTabFile) {
+    bld->keyTabFile = keyTabFile;
+}
+
 struct hdfsBuilder *hdfsNewBuilder(void)
 {
     struct hdfsBuilder *bld = calloc(1, sizeof(struct hdfsBuilder));
@@ -520,6 +542,42 @@ struct hdfsBuilder *hdfsNewBuilder(void)
     return bld;
 }
 
+int systemPropertySetStr(const char *key, const char *val) {
+    JNIEnv *env = 0;
+    jobject jRet = NULL;
+    jvalue  jVal;
+    jthrowable jthr;
+    int ret = EINTERNAL;
+    jstring jkey = NULL, jvalue0 = NULL;
+    env = getJNIEnv();
+    if (env == NULL) {
+        ret = EINTERNAL;
+        return ret;
+    }
+    jthr = newJavaStr(env, key, &jkey);
+    if (jthr) {
+        ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"System::setProperty::new String");
+        goto done;
+    }
+    jthr = newJavaStr(env, val, &jvalue0);
+    if (jthr) {
+        ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"System::setProperty::new String");
+        goto done;
+    }
+    jthr = invokeMethod(env, &jVal, STATIC, NULL, JC_SYSTEM, "setProperty", "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;", jkey, jvalue0);
+    jRet = jVal.l;
+    ret = 0;
+    if (jthr) {
+        ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"System::setProperty");
+        goto done;
+    }
+    done:
+        destroyLocalReference(env, jRet);
+        destroyLocalReference(env, jkey);
+        destroyLocalReference(env, jvalue0);
+        return ret;
+}
+
 int hdfsBuilderConfSetStr(struct hdfsBuilder *bld, const char *key,
                           const char *val)
 {
@@ -546,6 +604,13 @@ void hdfsFreeBuilder(struct hdfsBuilder *bld)
         free(cur);
         cur = next;
     }
+    struct hdfsBuilderConfFileOpt *cur0, *next0;
+    cur0 = bld->fileOpts;
+    for (cur0 = bld->fileOpts; cur0;) {
+        next0 = cur0->next;
+        free(cur0);
+        cur0 = next0;
+    }
     free(bld);
 }
 
@@ -688,17 +753,32 @@ static const char *hdfsBuilderToStr(const struct hdfsBuilder *bld,
     return buf;
 }
 
+jthrowable hadoopConfFileAdd(JNIEnv *env, jobject jConfiguration, const char *path) {
+    /* Create an object of org.apache.hadoop.fs.Path */
+    jobject jPath = NULL;
+    jthrowable jthr;
+    jthr = constructNewObjectOfPath(env, path, &jPath);
+    if (jthr) {
+        goto done;
+    }
+    jthr = invokeMethod(env, NULL, INSTANCE, jConfiguration, JC_CONFIGURATION, "addResource", JMETHOD1(JPARAM(HADOOP_PATH), JAVA_VOID), jPath);
+    done:
+    destroyLocalReference(env, jPath);
+    return jthr;
+}
+
 hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
 {
     JNIEnv *env = 0;
     jobject jConfiguration = NULL, jFS = NULL, jURI = NULL, jCachePath = NULL;
-    jstring jURIString = NULL, jUserString = NULL;
+    jstring jURIString = NULL, jUserString = NULL, jKeyTabString = NULL;
     jvalue  jVal;
     jthrowable jthr = NULL;
     char *cURI = 0, buf[512];
     int ret;
     jobject jRet = NULL;
     struct hdfsBuilderConfOpt *opt;
+    struct hdfsBuilderConfFileOpt *fileOpt;
 
     //Get the JNIEnv* corresponding to current thread
     env = getJNIEnv();
@@ -715,6 +795,18 @@ hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
             "hdfsBuilderConnect(%s)", hdfsBuilderToStr(bld, buf, sizeof(buf)));
         goto done;
     }
+
+    for (fileOpt = bld->fileOpts; fileOpt; fileOpt = fileOpt->next) {
+        // conf.addResource(new Path(fileOpt->path))
+        jthr = hadoopConfFileAdd(env, jConfiguration, fileOpt->currentPath);
+        if (jthr) {
+            ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
+                                        "hdfsBuilderConnect(%s): error adding conffile '%s'",
+                                        hdfsBuilderToStr(bld, buf, sizeof(buf)), fileOpt->currentPath);
+            goto done;
+        }
+    }
+
     // set configuration values
     for (opt = bld->opts; opt; opt = opt->next) {
         jthr = hadoopConfSetStr(env, jConfiguration, opt->key, opt->val);
@@ -794,7 +886,30 @@ hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
             jURI = jVal.l;
         }
 
-        if (bld->kerbTicketCachePath) {
+        jthr = newJavaStr(env, bld->userName, &jUserString);
+        if (jthr) {
+            ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
+                                        "hdfsBuilderConnect(%s)",
+                                        hdfsBuilderToStr(bld, buf, sizeof(buf)));
+            goto done;
+        }
+        if (bld->kerb5ConfPath && bld->keyTabFile) {
+            jthr = invokeMethod(env, NULL, STATIC, NULL, JC_SECURITY_CONFIGURATION, "setConfiguration", JMETHOD1(JPARAM(HADOOP_CONF),JAVA_VOID), jConfiguration);
+            if (jthr) {
+                ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"hdfsBuilderConnect(%s)", hdfsBuilderToStr(bld, buf, sizeof(buf)));
+                goto done;
+            }
+            jthr = newJavaStr(env, bld->keyTabFile, &jKeyTabString);
+            if (jthr) {
+                ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"hdfsBuilderConnect(%s)", hdfsBuilderToStr(bld, buf, sizeof(buf)));
+                goto done;
+            }
+            jthr = invokeMethod(env, NULL, STATIC, NULL, JC_SECURITY_CONFIGURATION, "loginUserFromKeytab", JMETHOD2(JPARAM(JAVA_STRING), JPARAM(JAVA_STRING), JAVA_VOID), jUserString, jKeyTabString);
+            if (jthr) {
+                ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,"hdfsBuilderConnect(%s)", hdfsBuilderToStr(bld, buf, sizeof(buf)));
+                goto done;
+            }
+        } else if (bld->kerbTicketCachePath) {
             jthr = hadoopConfSetStr(env, jConfiguration,
                 KERBEROS_TICKET_CACHE_PATH, bld->kerbTicketCachePath);
             if (jthr) {
@@ -803,13 +918,8 @@ hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
                     hdfsBuilderToStr(bld, buf, sizeof(buf)));
                 goto done;
             }
-        }
-        jthr = newJavaStr(env, bld->userName, &jUserString);
-        if (jthr) {
-            ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
-                "hdfsBuilderConnect(%s)",
-                hdfsBuilderToStr(bld, buf, sizeof(buf)));
-            goto done;
+            invokeMethod(env, &jVal, STATIC, NULL, JC_SECURITY_CONFIGURATION, "setConfiguration",
+            JMETHOD1(JPARAM(HADOOP_CONF),JAVA_VOID), jConfiguration);
         }
         if (bld->forceNewInstance) {
             jthr = invokeMethod(env, &jVal, STATIC, NULL,
@@ -825,11 +935,15 @@ hdfsFS hdfsBuilderConnect(struct hdfsBuilder *bld)
             }
             jFS = jVal.l;
         } else {
-            jthr = invokeMethod(env, &jVal, STATIC, NULL,
-                    JC_FILE_SYSTEM, "get",
-                    JMETHOD3(JPARAM(JAVA_NET_URI), JPARAM(HADOOP_CONF),
-                            JPARAM(JAVA_STRING), JPARAM(HADOOP_FS)), jURI,
-                            jConfiguration, jUserString);
+            if (bld->keyTabFile && bld->kerb5ConfPath) {
+                jthr = invokeMethod(env, &jVal, STATIC, NULL, JC_FILE_SYSTEM, "get", JMETHOD1(JPARAM(HADOOP_CONF),
+                JPARAM(HADOOP_FS)), jConfiguration);
+            } else {
+                jthr = invokeMethod(env, &jVal, STATIC, NULL, JC_FILE_SYSTEM, "get",
+                                    JMETHOD3(JPARAM(JAVA_NET_URI), JPARAM(HADOOP_CONF),
+                                             JPARAM(JAVA_STRING), JPARAM(HADOOP_FS)),
+                                    jURI, jConfiguration, jUserString);
+            }
             if (jthr) {
                 ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
                     "hdfsBuilderConnect(%s)",
@@ -856,6 +970,7 @@ done:
     destroyLocalReference(env, jCachePath);
     destroyLocalReference(env, jURIString);
     destroyLocalReference(env, jUserString);
+    destroyLocalReference(env, jKeyTabString);
     free(cURI);
     hdfsFreeBuilder(bld);
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include/hdfs/hdfs.h b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include/hdfs/hdfs.h
index eba50ff6eb2..ac2de2d7467 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include/hdfs/hdfs.h
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/include/hdfs/hdfs.h
@@ -294,6 +294,9 @@ extern  "C" {
     LIBHDFS_EXTERNAL
     struct hdfsBuilder *hdfsNewBuilder(void);
 
+    LIBHDFS_EXTERNAL
+    int systemPropertySetStr(const char *key, const char *val);
+
     /**
      * Force the builder to always create a new instance of the FileSystem,
      * rather than possibly finding one in the cache.
@@ -357,6 +360,12 @@ extern  "C" {
     void hdfsBuilderSetKerbTicketCachePath(struct hdfsBuilder *bld,
                                    const char *kerbTicketCachePath);
 
+    LIBHDFS_EXTERNAL
+    void hdfsBuilderSetKerb5Conf(struct hdfsBuilder *bld, const char *kerb5ConfPath);
+
+    LIBHDFS_EXTERNAL
+    void hdfsBuilderSetKeyTabFile(struct hdfsBuilder *bld, const char *keyTabFile);
+
     /**
      * Free an HDFS builder.
      *
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.c b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.c
index 9f589ac257a..44acb08d27f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.c
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.c
@@ -112,6 +112,10 @@ jthrowable initCachedClasses(JNIEnv* env) {
                 "org/apache/commons/lang3/exception/ExceptionUtils";
         cachedJavaClasses[JC_CFUTURE].className =
                 "java/util/concurrent/CompletableFuture";
+        cachedJavaClasses[JC_SYSTEM].className =
+                "java/lang/System";
+        cachedJavaClasses[JC_SECURITY_CONFIGURATION].className =
+                "org/apache/hadoop/security/UserGroupInformation";
 
         // Create and set the jclass objects based on the class names set above
         jthrowable jthr;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.h b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.h
index 0b174e1fecc..4f28ef2c027 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.h
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jclasses.h
@@ -61,6 +61,8 @@ typedef enum {
     JC_ENUM_SET,
     JC_EXCEPTION_UTILS,
     JC_CFUTURE,
+    JC_SYSTEM,
+    JC_SECURITY_CONFIGURATION,
     // A special marker enum that counts the number of cached jclasses
     NUM_CACHED_CLASSES
 } CachedJavaClass;
@@ -101,6 +103,7 @@ const char *getClassName(CachedJavaClass cachedJavaClass);
 #define HADOOP_FS_BLDR  "org/apache/hadoop/fs/FSBuilder"
 #define HADOOP_RO       "org/apache/hadoop/fs/ReadOption"
 #define HADOOP_DS       "org/apache/hadoop/net/unix/DomainSocket"
+#define HADOOP_USER_INFORMATION "org/apache/hadoop/security/UserGroupInformation"
 
 /* Some frequently used Java class names */
 #define JAVA_NET_ISA    "java/net/InetSocketAddress"
@@ -111,6 +114,7 @@ const char *getClassName(CachedJavaClass cachedJavaClass);
 #define JAVA_CFUTURE    "java/util/concurrent/CompletableFuture"
 #define JAVA_TIMEUNIT   "java/util/concurrent/TimeUnit"
 #define JAVA_OBJECT     "java/lang/Object"
+#define JAVA_SYSTEM     "java/lang/System"
 
 /* Some frequently used third-party class names */
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
index 47dce0086a9..49f93fc3c36 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
@@ -656,7 +656,7 @@ static char* getClassPath()
 static JNIEnv* getGlobalJNIEnv(void)
 {
     JavaVM* vmBuf[VM_BUF_LENGTH]; 
-    JNIEnv *env;
+    JNIEnv *env = NULL;
     jint rv = 0; 
     jint noVMs = 0;
     jthrowable jthr;
@@ -816,8 +816,10 @@ JNIEnv* getJNIEnv(void)
     if (!state) {
       mutexUnlock(&jvmMutex);
       fprintf(stderr, "getJNIEnv: Unable to create ThreadLocalState\n");
-      return NULL;
+      goto fail;
     }
+    state->env = NULL;
+    THREAD_LOCAL_STORAGE_SET_QUICK(state);
 
     state->env = getGlobalJNIEnv();
     if (!state->env) {
@@ -825,6 +827,12 @@ JNIEnv* getJNIEnv(void)
         goto fail;
     }
 
+    // when the env is created, the state can be set.
+    if (threadLocalStorageSet(state)) {
+        mutexUnlock(&jvmMutex);
+        goto fail;
+    }
+
     jthrowable jthr = NULL;
     jthr = initCachedClasses(state->env);
     if (jthr) {
@@ -847,7 +855,10 @@ JNIEnv* getJNIEnv(void)
 
 fail:
     fprintf(stderr, "getJNIEnv: getGlobalJNIEnv failed\n");
-    hdfsThreadDestructor(state);
+    THREAD_LOCAL_STORAGE_SET_QUICK(NULL);
+    if (state) {
+        hdfsThreadDestructor(state);
+    }
     return NULL;
 }
 

