On Mon, Jun 11, 2007 at 04:20:48PM -0700, Neeraj Mahajan wrote:
>If you look at the gcc options, you can see that both the 32-bit and the
>64-bit environments are specified. Probably only one of them should be
>there. To fix this you might have to edit the Makefile in src/c++/libhdfs
>and replace or remove the -m32 option.
>
>~ Neeraj
>
>On 6/11/07, Phantom <[EMAIL PROTECTED]> wrote:
>>
>>Hi
>>
>>I am assuming that if I need a C/C++ interface to HDFS I must build
>>libhdfs.

Yep.
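
For what it's worth, once libhdfs is built the C side is just hdfs.h plus
libhdfs.so. Below is a rough, untested sketch of a smoke test -- the
"default"/0 connect arguments and the /tmp path are placeholders, and you'd
compile it with the same JNI include paths the Makefile uses and link against
libhdfs (with the JVM shared library on your library path).

#include "hdfs.h"   /* from src/c++/libhdfs */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>

int main(void) {
    /* "default"/0 picks up fs.default.name from the Hadoop config on the
     * classpath; substitute your namenode host/port if needed. */
    hdfsFS fs = hdfsConnect("default", 0);
    if (fs == NULL) {
        fprintf(stderr, "hdfsConnect failed\n");
        return 1;
    }

    /* Hypothetical test path -- anything writable will do. */
    const char* path = "/tmp/libhdfs_smoke_test";
    hdfsFile f = hdfsOpenFile(fs, path, O_WRONLY | O_CREAT, 0, 0, 0);
    if (f == NULL) {
        fprintf(stderr, "hdfsOpenFile failed for %s\n", path);
        hdfsDisconnect(fs);
        return 1;
    }

    const char* msg = "hello from libhdfs\n";
    hdfsWrite(fs, f, (void*)msg, strlen(msg));
    hdfsCloseFile(fs, f);
    hdfsDisconnect(fs);
    return 0;
}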

>>This may be a problem specific to my environment, but I would appreciate it
>>if someone could tell me what is going on. In order to build libhdfs I am
>>running the command ant compile-libhdfs, but I get the following output:
>>
>>compile-libhdfs:
>>     [exec] gcc -g -Wall -O2 -fPIC -m64 -m32

Neeraj is right: there is a bug in the libhdfs Makefile, i.e. it doesn't handle 
64-bit platforms correctly. The hard-coded -m32 makes gcc attempt a 32-bit 
compile, so it goes looking for the 32-bit glibc headers (gnu/stubs-32.h), which 
aren't installed on your 64-bit box...

Could you try the attached patch and check whether it works for you? Once you 
confirm it does, I'll go ahead and get it committed...
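
By the way, the hdfs_test.c hunks in the patch are just a small cleanup:
hdfsGetWorkingDirectory() returns a char* (the buffer on success, NULL on
failure), so the test should check the pointer rather than assigning it to
the int 'result'. The corrected pattern, in isolation:

    char buffer[256];
    if (hdfsGetWorkingDirectory(fs, buffer, sizeof(buffer)) != NULL) {
        fprintf(stderr, "hdfsGetWorkingDirectory: %s\n", buffer);
    } else {
        fprintf(stderr, "hdfsGetWorkingDirectory: Failed!\n");
    }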

thanks,
Arun

>>-I/home/alakshman/jdk1.6.0_01/include
>>-I/home/alakshman/jdk1.6.0_01/include/linux -c hdfs.c -o
>>/home/alakshman/FB-Projects/hadoop-0.13.0/build/libhdfs/hdfs.o
>>     [exec] In file included from /usr/include/features.h:337,
>>     [exec]                  from /usr/include/sys/types.h:27,
>>     [exec]                  from hdfs.h:22,
>>     [exec]                  from hdfs.c:19:
>>     [exec] /usr/include/gnu/stubs.h:7:27: error: gnu/stubs-32.h: No such file or directory
>>     [exec] make: *** [/home/alakshman/FB-Projects/hadoop-0.13.0/build/libhdfs/hdfs.o] Error 1
>>     [exec] Result: 2
>>
>>BUILD SUCCESSFUL
>>Total time: 1 second
>>
>>What is going on? I have set my JAVA_HOME correctly, and the kernel version
>>of my Linux box is 2.6.12-1.1398_FC4smp.
>>
>>Thanks
>>A
>>
Index: build.xml
===================================================================
--- build.xml   (revision 546368)
+++ build.xml   (working copy)
@@ -744,9 +744,10 @@
   <!-- ================================================================== -->
   <target name="compile-libhdfs" depends="init">
     <mkdir dir="${build.libhdfs}"/>
-    <exec dir="${libhdfs.src}" executable="make">
+    <exec dir="${libhdfs.src}" executable="make" failonerror="true">
       <env key="OS_NAME" value="${os.name}"/>
       <env key="OS_ARCH" value="${os.arch}"/>
+         <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
       <env key="SHLIB_VERSION" value="${libhdfs.version}"/>
       <env key="LIBHDFS_BUILD_DIR" value="${build.libhdfs}"/>
     </exec>
@@ -761,6 +762,7 @@
     <exec dir="${libhdfs.src}" executable="make" failonerror="true">
         <env key="OS_NAME" value="${os.name}"/>
         <env key="OS_ARCH" value="${os.arch}"/>
+           <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
         <env key="SHLIB_VERSION" value="${libhdfs.version}"/>
         <env key="LIBHDFS_BUILD_DIR" value="${build.libhdfs}"/>
         <env key="HADOOP_HOME" value="${basedir}"/>
Index: src/c++/libhdfs/hdfs_test.c
===================================================================
--- src/c++/libhdfs/hdfs_test.c (revision 546368)
+++ src/c++/libhdfs/hdfs_test.c (working copy)
@@ -149,11 +149,11 @@
         totalResult += result;
 
         char buffer[256];
-        fprintf(stderr, "hdfsGetWorkingDirectory: %s\n", ((result = 
hdfsGetWorkingDirectory(fs, buffer, sizeof(buffer))) ? buffer : "Failed!"));
+        fprintf(stderr, "hdfsGetWorkingDirectory: %s\n", 
((hdfsGetWorkingDirectory(fs, buffer, sizeof(buffer)) != NULL) ? buffer : 
"Failed!"));
         totalResult += (result ? 0 : 1);
         fprintf(stderr, "hdfsSetWorkingDirectory: %s\n", ((result = 
hdfsSetWorkingDirectory(fs, slashTmp)) ? "Failed!" : "Success!"));
         totalResult += result;
-        fprintf(stderr, "hdfsGetWorkingDirectory: %s\n", ((result = 
hdfsGetWorkingDirectory(fs, buffer, sizeof(buffer))) ? buffer : "Failed!"));
+        fprintf(stderr, "hdfsGetWorkingDirectory: %s\n", 
((hdfsGetWorkingDirectory(fs, buffer, sizeof(buffer)) != NULL) ? buffer : 
"Failed!"));
         totalResult += (result ? 0 : 1);
 
         fprintf(stderr, "hdfsGetDefaultBlockSize: %Ld\n", 
hdfsGetDefaultBlockSize(fs));
Index: src/c++/libhdfs/Makefile
===================================================================
--- src/c++/libhdfs/Makefile    (revision 546368)
+++ src/c++/libhdfs/Makefile    (working copy)
@@ -19,7 +19,8 @@
 # a) JAVA_HOME
 # b) OS_NAME
 # c) OS_ARCH
-# d) LIBHDFS_BUILD_DIR
+# d) JVM_DATA_MODEL 
+# e) LIBHDFS_BUILD_DIR
 # All these are passed by build.xml.
 #
 
@@ -26,9 +27,9 @@
 CC = gcc
 LD = gcc
 CFLAGS =  -g -Wall -O2 -fPIC
-LDFLAGS = -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm -shared -m32 -Wl,-x
+LDFLAGS = -L$(JAVA_HOME)/jre/lib/$(OS_ARCH)/server -ljvm -shared -m$(JVM_DATA_MODEL) -Wl,-x
 PLATFORM = $(shell echo $$OS_NAME | tr [A-Z] [a-z])
-CPPFLAGS = -m32 -I$(JAVA_HOME)/include -I$(JAVA_HOME)/include/$(PLATFORM)
+CPPFLAGS = -m$(JVM_DATA_MODEL) -I$(JAVA_HOME)/include -I$(JAVA_HOME)/include/$(PLATFORM)
 
 LIB_NAME = hdfs
 SO_TARGET = $(LIBHDFS_BUILD_DIR)/lib$(LIB_NAME).so.$(SHLIB_VERSION)
