Author: eli
Date: Fri Jul 29 01:46:17 2011
New Revision: 1152070

URL: http://svn.apache.org/viewvc?rev=1152070&view=rev
Log:
MAPREDUCE-2127. svn merge -c 1152068 from trunk

Modified:
    hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt
    hadoop/common/branches/branch-0.22/mapreduce/src/c++/utils/m4/hadoop_utils.m4

Modified: hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt?rev=1152070&r1=1152069&r2=1152070&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.22/mapreduce/CHANGES.txt Fri Jul 29 01:46:17 2011
@@ -587,6 +587,9 @@ Release 0.22.0 - Unreleased
     MAPREDUCE-2571. CombineFileInputFormat.getSplits throws a
     java.lang.ArrayStoreException. (Bochun Bai via todd)
 
+    MAPREDUCE-2127. mapreduce trunk builds are failing on hudson.
+    (Bruno Mahé via eli)
+
 Release 0.21.1 - Unreleased
 
   NEW FEATURES

Modified: hadoop/common/branches/branch-0.22/mapreduce/src/c++/utils/m4/hadoop_utils.m4
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.22/mapreduce/src/c%2B%2B/utils/m4/hadoop_utils.m4?rev=1152070&r1=1152069&r2=1152070&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.22/mapreduce/src/c++/utils/m4/hadoop_utils.m4 (original)
+++ hadoop/common/branches/branch-0.22/mapreduce/src/c++/utils/m4/hadoop_utils.m4 Fri Jul 29 01:46:17 2011
@@ -51,8 +51,8 @@ AC_CHECK_HEADERS([pthread.h], [], 
   AC_MSG_ERROR(Please check if you have installed the pthread library)) 
 AC_CHECK_LIB([pthread], [pthread_create], [], 
   AC_MSG_ERROR(Cannot find libpthread.so, please check))
-AC_CHECK_LIB([ssl], [HMAC_Init], [], 
-  AC_MSG_ERROR(Cannot find libssl.so, please check))
+AC_CHECK_LIB([crypto], [HMAC_Init], [], 
+  AC_MSG_ERROR(Cannot find libcrypto.so, please check))
 ])
 
 # define a macro for using hadoop pipes
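For context on the m4 change above: HMAC_Init is provided by OpenSSL's libcrypto rather than libssl, so probing libssl for that symbol can fail on systems where the two libraries are split, which is what the switch to AC_CHECK_LIB([crypto], ...) addresses. A minimal sketch, not part of this commit (file name, build command, and HMAC-SHA1 parameters are illustrative only), showing that the HMAC family of symbols resolves when linking against -lcrypto:

    /* hmac_check.c -- illustrative sketch, assuming an OpenSSL version
     * that still ships the legacy HMAC interfaces in libcrypto.
     * Hypothetical build: cc hmac_check.c -lcrypto -o hmac_check */
    #include <stdio.h>
    #include <string.h>
    #include <openssl/evp.h>
    #include <openssl/hmac.h>

    int main(void) {
        unsigned char digest[EVP_MAX_MD_SIZE];
        unsigned int len = 0;
        const char *key = "key";
        const char *msg = "message";

        /* One-shot HMAC-SHA1; like HMAC_Init, HMAC() lives in libcrypto,
         * so a successful link against -lcrypto mirrors what the
         * AC_CHECK_LIB probe in hadoop_utils.m4 verifies at configure time. */
        HMAC(EVP_sha1(), key, (int)strlen(key),
             (const unsigned char *)msg, strlen(msg), digest, &len);

        printf("HMAC-SHA1 digest length: %u bytes\n", len);
        return 0;
    }

If the link step here fails, the corrected AC_CHECK_LIB([crypto], [HMAC_Init], ...) probe would fail for the same reason, which is exactly the condition the configure check guards against.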

