[
https://issues.apache.org/jira/browse/TRAFODION-1521?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14955207#comment-14955207
]
ASF GitHub Bot commented on TRAFODION-1521:
-------------------------------------------
Github user robertamarton commented on a diff in the pull request:
https://github.com/apache/incubator-trafodion/pull/118#discussion_r41888772
--- Diff: core/sqf/sql/scripts/get_libhdfs_files ---
@@ -0,0 +1,186 @@
#!/bin/bash
# NOTE: bash, not sh -- this script uses bash-only constructs ([[ ]], PIPESTATUS)

# This script downloads and/or makes the required libhdfs files
# to be able to build Trafodion, which acts as a libhdfs client.
#
# Basically, what we need are three files:
#
#   hdfs.h        (copied to $TGT_INC_DIR)
#   libhdfs.so    (copied to $TGT_LIB_DIR)
#   libhadoop.so  (copied to $TGT_LIB_DIR)

# Working dir in the Trafodion source tree to extract and build libhdfs files
# (can be specified as an environment variable)
if [[ -z ${LIBHDFS_TEMP_DIR} ]]; then
  LIBHDFS_TEMP_DIR=${MY_SQROOT}/sql/libhdfs_files
fi
LOGFILE=${LIBHDFS_TEMP_DIR}/build.log

# Hadoop source tar file to build libhdfs from
HADOOP_SRC_MIRROR_URL=https://archive.apache.org/dist/hadoop/common/hadoop-2.6.0
HADOOP_ID=hadoop-2.6.0
HADOOP_SRC_ID=${HADOOP_ID}-src
HADOOP_SRC_TAR=${HADOOP_SRC_ID}.tar.gz

# files to build required version of Google Protocol Buffers
PROTOBUF_MIRROR_URL=https://github.com/google/protobuf/releases/download/v2.5.0
PROTOBUF_ID=protobuf-2.5.0
PROTOBUF_TAR=${PROTOBUF_ID}.tar.gz

# result of protobuf build
PROTOBUF_TGT_ID=protobuf-tgt

# Directories to copy the built libhdfs library and corresponding include file
TGT_INC_DIR=$MY_SQROOT/export/include
TGT_LIB_DIR=$MY_SQROOT/export/lib${SQ_MBTYPE}

FORCE_BUILD=false
VERBOSE=false

USAGE="Usage $0 [ -f | --force ]\
 [ -v | --verbose ]\
 [ -d <temp dir> | --tempDir <temp dir> ]"
+
# Parse command line options.
# NB: use an arithmetic comparison here -- inside [[ ]], ">" and "<" compare
# strings lexically, so e.g. [[ 10 > 9 ]] is false and parsing would stop early.
while (( $# > 0 )); do
  arg="$1"

  case $arg in
    -f|--force)
      FORCE_BUILD=true
      ;;

    -v|--verbose)
      VERBOSE=true
      ;;

    -d|--tempDir)
      shift
      if (( $# < 1 )); then
        echo "Expecting argument after -d or --tempDir" >&2
        exit 1
      fi
      LIBHDFS_TEMP_DIR="$1"
      ;;

    *)
      # print the full usage text, not just the -f option
      echo "Unknown command line option: $arg" >&2
      echo "$USAGE" >&2
      exit 1
      ;;
  esac

  shift
done
+
+
# Build and install protobuf + libhdfs, unless the results already exist
# (or -f/--force was given).
if [[ $FORCE_BUILD == true || \
      ! -e ${TGT_INC_DIR}/hdfs.h || \
      ! -e ${TGT_LIB_DIR}/libhdfs.so ]]; then

  # -p: create parents as needed, succeed if the dir already exists
  mkdir -p $LIBHDFS_TEMP_DIR

  cd $LIBHDFS_TEMP_DIR || exit 1

  if [[ ! -f ${PROTOBUF_TAR} ]]; then
    echo "Downloading Google Protocol Buffers..." | tee -a ${LOGFILE}
    # append (">>") so earlier log entries are kept; wget writes its
    # progress/errors to stderr, so capture that into the log as well
    wget ${PROTOBUF_MIRROR_URL}/${PROTOBUF_TAR} >>${LOGFILE} 2>&1
  fi

  if [[ $FORCE_BUILD == true ]]; then
    rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_ID}
    rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
  fi

  if [[ ! -d ${PROTOBUF_ID} ]]; then
    echo "Unpacking Google Protocol Buffer tar file..." | tee -a ${LOGFILE}
    # remove a stale build result so the rebuild below is not skipped
    rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
    tar -xzf ${PROTOBUF_TAR} >>${LOGFILE} 2>&1
  fi

  if [[ ! -d $PROTOBUF_TGT_ID ]]; then
    cd ${PROTOBUF_ID} || exit 1
    echo "Building Google Protocol Buffers, this could take a while..." | tee -a ${LOGFILE}
    if [[ $VERBOSE == true ]]; then
      ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} 2>&1 | tee -a ${LOGFILE}
      # after a pipeline, $? is tee's status; PIPESTATUS[0] is configure's
      rc=${PIPESTATUS[0]}
    else
      # ">>file 2>&1" (in this order) sends both stdout and stderr to the log;
      # the reversed "2>&1 >>file" would leave stderr on the console
      ./configure --prefix=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID} >>${LOGFILE} 2>&1
      rc=$?
    fi
    if [[ $rc != 0 ]]; then
      echo "Error during configure step, exiting" | tee -a ${LOGFILE}
      exit 1
    fi
    make >>${LOGFILE} 2>&1
    if [[ $? != 0 ]]; then
      echo "Error during make step, exiting" | tee -a ${LOGFILE}
      exit 1
    fi
    # skip the tests (the status check is commented out along with the
    # command, so a leftover $? test cannot misreport the previous step)
    # make check >>${LOGFILE} 2>&1
    # if [[ $? != 0 ]]; then
    #   echo "Error during check step, exiting" | tee -a ${LOGFILE}
    #   exit 1
    # fi
    make install >>${LOGFILE} 2>&1
    if [[ $? != 0 ]]; then
      echo "Error during install step, exiting" | tee -a ${LOGFILE}
      # remove partial results, if any
      rm -rf ${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}
      exit 1
    fi
  fi

  cd $LIBHDFS_TEMP_DIR || exit 1
  # the Hadoop native build must use our locally built protoc
  export HADOOP_PROTOC_PATH=${LIBHDFS_TEMP_DIR}/${PROTOBUF_TGT_ID}/bin/protoc

  if [[ ! -f ${HADOOP_SRC_TAR} ]]; then
    echo "Downloading Hadoop tar file ${HADOOP_SRC_TAR}..." | tee -a ${LOGFILE}
    wget ${HADOOP_SRC_MIRROR_URL}/${HADOOP_SRC_TAR} >>${LOGFILE} 2>&1
  fi

  if [[ $FORCE_BUILD == true ]]; then
    rm -rf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}
  fi

  if [[ ! -d ${HADOOP_SRC_ID} ]]; then
    echo "Unpacking Hadoop tar file..." | tee -a ${LOGFILE}
    tar -xzf ${HADOOP_SRC_TAR}
  fi

  if [[ ! -d ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target ]]; then
    cd ${HADOOP_SRC_ID} || exit 1
    echo "Building native library, this will take several minutes..." | tee -a ${LOGFILE}
    if [[ $VERBOSE == true ]]; then
      mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar 2>&1 | tee -a ${LOGFILE}
      # take mvn's exit status, not tee's
      rc=${PIPESTATUS[0]}
    else
      mvn package -Pdist,native -Dmaven.javadoc.skip=true -DskipTests -Dtar >>${LOGFILE} 2>&1
      rc=$?
    fi
    if [[ $rc != 0 ]]; then
      echo "Error during Maven build step for libhdfs, exiting" | tee -a ${LOGFILE}
      exit 1
    fi
  fi

  echo "Copying include file and built libraries to Trafodion export dir..." | tee -a ${LOGFILE}
  if [[ $VERBOSE == true ]]; then
    set -x
  fi
  cp -f ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/include/hdfs.h ${TGT_INC_DIR}
  # -P: copy the symlinks themselves (libhdfs.so -> libhdfs.so.0.0.0 etc.)
  cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhdfs*.so* ${TGT_LIB_DIR}
  cp -Pf ${LIBHDFS_TEMP_DIR}/${HADOOP_SRC_ID}/hadoop-dist/target/${HADOOP_ID}/lib/native/libhadoop*.so* ${TGT_LIB_DIR}
  if [[ $VERBOSE == true ]]; then
    set +x
  fi

  ls -l ${TGT_INC_DIR}/hdfs.h >> ${LOGFILE}
  ls -l ${TGT_LIB_DIR}/libhdfs.so >> ${LOGFILE}
  ls -l ${TGT_LIB_DIR}/libhadoop.so >> ${LOGFILE}

  # Final check whether all the needed files are there
  # (all three files from the header comment, including libhadoop.so)
  if [[ ! -r ${TGT_INC_DIR}/hdfs.h || \
        ! -r ${TGT_LIB_DIR}/libhdfs.so || \
        ! -r ${TGT_LIB_DIR}/libhadoop.so ]]; then
    echo "Error, not all files were created" | tee -a ${LOGFILE}
    ls -l ${TGT_INC_DIR}/hdfs.h
    ls -l ${TGT_LIB_DIR}/libhdfs.so
    ls -l ${TGT_LIB_DIR}/libhadoop.so
    exit 1
  fi
fi
--- End diff ---
This is great - thanks for adding this make step.
> Trafodion build should not require HBase to be installed
> --------------------------------------------------------
>
> Key: TRAFODION-1521
> URL: https://issues.apache.org/jira/browse/TRAFODION-1521
> Project: Apache Trafodion
> Issue Type: Bug
> Components: sql-general
> Affects Versions: 1.1 (pre-incubation)
> Environment: Any
> Reporter: Hans Zeller
> Assignee: Hans Zeller
> Labels: build
> Fix For: 1.2-incubating
>
> Original Estimate: 96h
> Remaining Estimate: 96h
>
> When we build Trafodion in a source tree, it should not be necessary to
> install HBase first.
> Until now, several build steps relied on an HBase installation:
> 1. Files in core/sql/executor include the libhdfs header file, hdfs.h. This
> file isn't available in the Trafodion source tree.
> 2. When we link certain executables like mxosrvr and sqlci, we specify
> -lhdfs, requiring the libhdfs.so DLL as well as the DLLs it depends on,
> libhadoop.so and libjvm.so.
> 3. Until recently, executor Java (core/sql/executor) files were compiled with
> javac invocations from a Makefile, requiring the HBase client jar files in
> the CLASSPATH. This got changed with TRAFODION-1502.
> 4. I missed some more Java files in the fix for TRAFODION-1502:
> core/sqf/src/seatrans/tm/hbasetmlib2. Those should also be moved to Maven.
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)