This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch hadoop-3.4.0
in repository https://gitbox.apache.org/repos/asf/doris-thirdparty.git

commit 1b4fde83eda6a6d8eb8635f778fd24309b1371e9
Author: Mingyu Chen <[email protected]>
AuthorDate: Thu May 18 11:30:17 2023 +0800

    [script] add build script (#71)
    
    1. Set `THIRDPARTY_INSTALLED` in `env.h`
        Point to the Doris thirdparty installed dir.
    
    2. Run `sh build.sh`
---
 build.sh                                           | 60 ++++++++++++++++++++++
 env.sh                                             | 20 ++++++++
 .../hadoop-hdfs-native-client/pom.xml              |  2 +
 .../src/main/native/libhdfs/jni_helper.c           |  2 +-
 .../src/main/native/libhdfspp/CMakeLists.txt       |  5 ++
 .../src/CMakeLists.txt                             |  1 +
 6 files changed, 89 insertions(+), 1 deletion(-)

diff --git a/build.sh b/build.sh
new file mode 100755
index 00000000000..342f2af5bd0
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+##############################################################
+# This script is used to build hadoop
+##############################################################
+
+set -eo pipefail
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)"
+
+export SRC_HOME="${ROOT}"
+
+. "${SRC_HOME}/env.sh"
+
+if [[ -z "${THIRDPARTY_INSTALLED}" ]]; then
+    echo "Must set 'THIRDPARTY_INSTALLED' in env.sh"
+    exit -1
+fi
+
+DIST_DIR=${SRC_HOME}/hadoop-dist/target/hadoop-3.3.4/
+LIBHDFS_DIST_DIR=${SRC_HOME}/hadoop-dist/target/hadoop-libhdfs-3.3.4/
+rm -rf ${DIST_DIR}
+rm -rf ${LIBHDFS_DIST_DIR}
+
+export PATH=${THIRDPARTY_INSTALLED}/bin:$PATH
+mvn clean package -Pnative,dist -DskipTests -Dthirdparty.installed=${THIRDPARTY_INSTALLED}/ -Dopenssl.lib=${THIRDPARTY_INSTALLED}/lib/ -e
+
+if [[ ! -d "${DIST_DIR}" ]]; then
+    echo "${DIST_DIR} is missing. Build failed."
+    exit -1
+fi
+
+echo "Finished. Begin to package for libhdfs..."
+mkdir -p ${LIBHDFS_DIST_DIR}/common
+mkdir -p ${LIBHDFS_DIST_DIR}/hdfs
+mkdir -p ${LIBHDFS_DIST_DIR}/include
+cp -r ${DIST_DIR}/share/hadoop/common/* ${LIBHDFS_DIST_DIR}/common/
+cp -r ${DIST_DIR}/share/hadoop/hdfs/* ${LIBHDFS_DIST_DIR}/hdfs/
+cp -r ${DIST_DIR}/include/hdfs.h ${LIBHDFS_DIST_DIR}/include/
+
+echo "Done!"
+echo "The full dist package is under: ${DIST_DIR}"
+echo "The LIBHDFS dist package is under: ${LIBHDFS_DIST_DIR}"
+
diff --git a/env.sh b/env.sh
new file mode 100644
index 00000000000..dd0cd38edc5
--- /dev/null
+++ b/env.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+THIRDPARTY_INSTALLED=
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
index cb3838934bf..e9be65d4938 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
@@ -237,6 +237,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd";>
                     
<CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
                     <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
                     
<CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
+                    
<CUSTOM_THIRDPARTY_INSTALLED>${thirdparty.installed}</CUSTOM_THIRDPARTY_INSTALLED>
                   </vars>
                   <output>${project.build.directory}</output>
                 </configuration>
@@ -306,6 +307,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd";>
                     
<CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
                     <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
                     
<CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
+                    
<CUSTOM_THIRDPARTY_INSTALLED>${thirdparty.installed}</CUSTOM_THIRDPARTY_INSTALLED>
                   </vars>
                   <output>${project.build.directory}/clang</output>
                 </configuration>
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
index f1f7715e0e1..e754a57664f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
@@ -238,7 +238,7 @@ jthrowable invokeMethod(JNIEnv *env, jvalue *retval, 
MethType methType,
 static jthrowable constructNewObjectOfJclass(JNIEnv *env,
         jobject *out, jclass cls, const char *className,
                 const char *ctorSignature, va_list args) {
-    jmethodID mid;
+    jmethodID mid = 0;
     jobject jobj;
     jthrowable jthr;
 
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt
 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt
index a42ee2e2114..9fd2ec0e526 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/CMakeLists.txt
@@ -34,6 +34,11 @@ enable_testing()
 set(CMAKE_CXX_STANDARD 17)
 include (CTest)
 
+set(CMAKE_INCLUDE_PATH ${CMAKE_INCLUDE_PATH} "${CUSTOM_THIRDPARTY_INSTALLED}/include")
+set(CMAKE_LIBRARY_PATH ${CMAKE_LIBRARY_PATH} "${CUSTOM_THIRDPARTY_INSTALLED}/lib")
+set(HDFSPP_LIBRARY_ONLY 1)
+SET(Boost_USE_STATIC_RUNTIME ON)
+
 SET(BUILD_SHARED_HDFSPP TRUE CACHE STRING "BUILD_SHARED_HDFSPP defaulting to 'TRUE'")
 SET(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake" ${CMAKE_MODULE_PATH})
 
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 4c32838afb0..b9a011ef821 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -61,6 +61,7 @@ check_function_exists(strtoul HAVE_STRTOUL)
 
 # Require snappy.
 set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
+set(CUSTOM_SNAPPY_PREFIX "${CUSTOM_THIRDPARTY_INSTALLED}")
 hadoop_set_find_shared_library_version("1")
 find_library(SNAPPY_LIBRARY
     NAMES snappy


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to