Author: cutting
Date: Wed May 16 12:23:48 2007
New Revision: 538693

URL: http://svn.apache.org/viewvc?view=rev&rev=538693
Log: HADOOP-234. Add pipes facility, which permits writing MapReduce programs in C++.
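For readers coming to this change cold: a Pipes job is an ordinary C++ executable that links against the libhadooppipes.a library (and the companion utils library) built from the sources below and implements the Mapper and Reducer interfaces declared in api/hadoop/Pipes.hh. The sketch that follows is modeled loosely on the wordcount-simple.cc example added in this commit; the exact class, method, and helper names used here (HadoopPipes::Mapper, MapContext::emit(), HadoopUtils::splitString(), and so on) are assumptions to be checked against the checked-in Pipes.hh, TemplateFactory.hh, and StringUtils.hh headers, not a definitive listing.

  #include "hadoop/Pipes.hh"
  #include "hadoop/TemplateFactory.hh"
  #include "hadoop/StringUtils.hh"

  #include <string>
  #include <vector>

  // Mapper: emit <word, "1"> for each whitespace-separated token of the input value.
  class WordCountMap: public HadoopPipes::Mapper {
  public:
    WordCountMap(HadoopPipes::TaskContext& context) {}
    void map(HadoopPipes::MapContext& context) {
      std::vector<std::string> words =
        HadoopUtils::splitString(context.getInputValue(), " ");
      for (unsigned int i = 0; i < words.size(); ++i) {
        context.emit(words[i], "1");
      }
    }
  };

  // Reducer: sum the counts emitted for each word.
  class WordCountReduce: public HadoopPipes::Reducer {
  public:
    WordCountReduce(HadoopPipes::TaskContext& context) {}
    void reduce(HadoopPipes::ReduceContext& context) {
      int sum = 0;
      while (context.nextValue()) {
        sum += HadoopUtils::toInt(context.getInputValue());
      }
      context.emit(context.getInputKey(), HadoopUtils::toString(sum));
    }
  };

  int main(int argc, char *argv[]) {
    // runTask speaks the binary protocol with the Java parent task;
    // TemplateFactory wires the map and reduce classes into that loop.
    return HadoopPipes::runTask(
        HadoopPipes::TemplateFactory<WordCountMap, WordCountReduce>());
  }

Such a binary is not run by hand: it is submitted with the new "bin/hadoop pipes" command wired up in the bin/hadoop diff below, and on the Java side Application.java launches the executable for each task while BinaryProtocol.java, DownwardProtocol.java, and UpwardProtocol.java carry keys and values between the task and the C++ process.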
Added:
    lucene/hadoop/trunk/src/c++/pipes/
    lucene/hadoop/trunk/src/c++/pipes/.autom4te.cfg
    lucene/hadoop/trunk/src/c++/pipes/Makefile.am
    lucene/hadoop/trunk/src/c++/pipes/Makefile.in
    lucene/hadoop/trunk/src/c++/pipes/aclocal.m4
    lucene/hadoop/trunk/src/c++/pipes/api/
    lucene/hadoop/trunk/src/c++/pipes/api/hadoop/
    lucene/hadoop/trunk/src/c++/pipes/api/hadoop/Pipes.hh
    lucene/hadoop/trunk/src/c++/pipes/api/hadoop/TemplateFactory.hh
    lucene/hadoop/trunk/src/c++/pipes/compile
    lucene/hadoop/trunk/src/c++/pipes/config.guess
    lucene/hadoop/trunk/src/c++/pipes/config.sub
    lucene/hadoop/trunk/src/c++/pipes/configure
    lucene/hadoop/trunk/src/c++/pipes/configure.ac
    lucene/hadoop/trunk/src/c++/pipes/depcomp
    lucene/hadoop/trunk/src/c++/pipes/impl/
    lucene/hadoop/trunk/src/c++/pipes/impl/HadoopPipes.cc
    lucene/hadoop/trunk/src/c++/pipes/impl/config.h.in
    lucene/hadoop/trunk/src/c++/pipes/install-sh
    lucene/hadoop/trunk/src/c++/pipes/ltmain.sh
    lucene/hadoop/trunk/src/c++/pipes/missing
    lucene/hadoop/trunk/src/c++/utils/
    lucene/hadoop/trunk/src/c++/utils/.autom4te.cfg
    lucene/hadoop/trunk/src/c++/utils/Makefile.am
    lucene/hadoop/trunk/src/c++/utils/Makefile.in
    lucene/hadoop/trunk/src/c++/utils/aclocal.m4
    lucene/hadoop/trunk/src/c++/utils/api/
    lucene/hadoop/trunk/src/c++/utils/api/hadoop/
    lucene/hadoop/trunk/src/c++/utils/api/hadoop/SerialUtils.hh
    lucene/hadoop/trunk/src/c++/utils/api/hadoop/StringUtils.hh
    lucene/hadoop/trunk/src/c++/utils/config.guess
    lucene/hadoop/trunk/src/c++/utils/config.sub
    lucene/hadoop/trunk/src/c++/utils/configure
    lucene/hadoop/trunk/src/c++/utils/configure.ac
    lucene/hadoop/trunk/src/c++/utils/depcomp
    lucene/hadoop/trunk/src/c++/utils/impl/
    lucene/hadoop/trunk/src/c++/utils/impl/SerialUtils.cc
    lucene/hadoop/trunk/src/c++/utils/impl/StringUtils.cc
    lucene/hadoop/trunk/src/c++/utils/impl/config.h.in
    lucene/hadoop/trunk/src/c++/utils/install-sh
    lucene/hadoop/trunk/src/c++/utils/ltmain.sh
    lucene/hadoop/trunk/src/c++/utils/m4/
    lucene/hadoop/trunk/src/c++/utils/m4/hadoop_utils.m4
    lucene/hadoop/trunk/src/c++/utils/missing
    lucene/hadoop/trunk/src/examples/pipes/
    lucene/hadoop/trunk/src/examples/pipes/.autom4te.cfg
    lucene/hadoop/trunk/src/examples/pipes/Makefile.am
    lucene/hadoop/trunk/src/examples/pipes/Makefile.in
    lucene/hadoop/trunk/src/examples/pipes/README.txt
    lucene/hadoop/trunk/src/examples/pipes/aclocal.m4
    lucene/hadoop/trunk/src/examples/pipes/conf/
    lucene/hadoop/trunk/src/examples/pipes/conf/word-part.xml
    lucene/hadoop/trunk/src/examples/pipes/conf/word.xml
    lucene/hadoop/trunk/src/examples/pipes/config.guess
    lucene/hadoop/trunk/src/examples/pipes/config.sub
    lucene/hadoop/trunk/src/examples/pipes/configure
    lucene/hadoop/trunk/src/examples/pipes/configure.ac
    lucene/hadoop/trunk/src/examples/pipes/depcomp
    lucene/hadoop/trunk/src/examples/pipes/impl/
    lucene/hadoop/trunk/src/examples/pipes/impl/config.h.in
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-nopipe.cc
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-part.cc
    lucene/hadoop/trunk/src/examples/pipes/impl/wordcount-simple.cc
    lucene/hadoop/trunk/src/examples/pipes/install-sh
    lucene/hadoop/trunk/src/examples/pipes/ltmain.sh
    lucene/hadoop/trunk/src/examples/pipes/missing
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/Application.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/BinaryProtocol.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/DownwardProtocol.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/OutputHandler.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesMapRunner.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesPartitioner.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/PipesReducer.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/Submitter.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/UpwardProtocol.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/pipes/package.html
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/pipes/
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/pipes/TestPipes.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/pipes/WordCountInputFormat.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/bin/hadoop
    lucene/hadoop/trunk/build.xml

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=538693&r1=538692&r2=538693
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed May 16 12:23:48 2007
@@ -24,6 +24,9 @@
 7. HADOOP-1357. Fix CopyFiles to correctly avoid removing "/".
 (Arun C Murthy via cutting)
+ 8. HADOOP-234. Add pipes facility, which permits writing MapReduce
+ programs in C++.
+
 Branch 0.13 (unreleased changes)

Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/hadoop?view=diff&rev=538693&r1=538692&r2=538693
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Wed May 16 12:23:48 2007
@@ -40,6 +40,7 @@
 echo " fsck run a DFS filesystem checking utility"
 echo " fs run a generic filesystem user client"
 echo " jobtracker run the MapReduce job Tracker node"
+ echo " pipes run a Pipes job"
 echo " tasktracker run a MapReduce task Tracker node"
 echo " job manipulate MapReduce jobs"
 echo " version print the version"
@@ -181,6 +182,8 @@
 CLASS=org.apache.hadoop.mapred.TaskTracker
 elif [ "$COMMAND" = "job" ] ; then
 CLASS=org.apache.hadoop.mapred.JobClient
+elif [ "$COMMAND" = "pipes" ] ; then
+ CLASS=org.apache.hadoop.mapred.pipes.Submitter
 elif [ "$COMMAND" = "version" ] ; then
 CLASS=org.apache.hadoop.util.VersionInfo
 elif [ "$COMMAND" = "jar" ] ; then

Modified: lucene/hadoop/trunk/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/build.xml?view=diff&rev=538693&r1=538692&r2=538693
==============================================================================
--- lucene/hadoop/trunk/build.xml (original)
+++ lucene/hadoop/trunk/build.xml Wed May 16 12:23:48 2007
@@ -22,7 +22,11 @@
 <property name="docs.dir" value="${basedir}/docs"/>
 <property name="contrib.dir" value="${basedir}/src/contrib"/>
 <property name="docs.src" value="${basedir}/src/web"/>
- <property name="libhdfs.src" value="${basedir}/src/c++/libhdfs"/>
+ <property name="c++.src" value="${basedir}/src/c++"/>
+ <property name="c++.utils.src" value="${c++.src}/utils"/>
+ <property name="c++.pipes.src" value="${c++.src}/pipes"/>
+ <property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
+ <property name="libhdfs.src" value="${c++.src}/libhdfs"/>
 <property name="build.dir" value="${basedir}/build"/>
 <property name="build.classes" value="${build.dir}/classes"/>
@@ -30,11 +34,20 @@
 <property name="build.webapps" value="${build.dir}/webapps"/>
 <property name="build.examples" value="${build.dir}/examples"/>
 <property name="build.libhdfs" value="${build.dir}/libhdfs"/>
- <property name="build.platform" value="${os.name}-${os.arch}-${sun.arch.data.model}"/>
+ <property name="build.platform"
+ value="${os.name}-${os.arch}-${sun.arch.data.model}"/>
 <property name="build.native" value="${build.dir}/native/${build.platform}"/>
+ <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
+ <property name="build.c++.utils" value="${build.c++}/utils"/>
+ <property name="build.c++.pipes" value="${build.c++}/pipes"/>
+ <property name="build.c++.examples.pipes"
+ value="${build.c++}/examples/pipes"/>
 <property name="build.docs" value="${build.dir}/docs"/>
 <property name="build.javadoc" value="${build.docs}/api"/>
 <property name="build.encoding" value="ISO-8859-1"/>
+ <property name="install.c++" value="${build.dir}/c++/${build.platform}"/>
+ <property name="install.c++.examples"
+ value="${build.dir}/c++-examples/${build.platform}"/>
 <property name="test.src.dir" value="${basedir}/src/test"/>
 <property name="test.build.dir" value="${build.dir}/test"/>
@@ -109,6 +122,7 @@
 <mkdir dir="${build.webapps}/dfs/WEB-INF"/>
 <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
 <mkdir dir="${build.examples}"/>
+ <mkdir dir="${build.dir}/c++"/>
 <mkdir dir="${test.build.dir}"/>
 <mkdir dir="${test.build.classes}"/>
@@ -295,7 +309,8 @@
 </target>
- <target name="compile-core" depends="compile-core-classes,compile-core-native">
+ <target name="compile-core"
+ depends="compile-core-classes,compile-core-native,compile-c++">
 </target>
 <target name="compile-contrib" depends="compile-core">
@@ -307,7 +322,8 @@
 <target name="compile" depends="compile-core, compile-contrib">
 </target>
- <target name="compile-examples" depends="compile-core">
+ <target name="compile-examples"
+ depends="compile-core,compile-c++-examples">
 <javac encoding="${build.encoding}" srcdir="${examples.dir}"
@@ -464,6 +480,11 @@
 <sysproperty key="test.src.dir" value="${test.src.dir}"/>
 <sysproperty key="java.library.path" value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
 <classpath refid="${test.classpath.id}"/>
 <formatter type="${test.junit.output.format}" />
 <batchtest todir="${test.build.dir}" unless="testcase">
@@ -639,6 +660,10 @@
 <fileset dir="src" excludes="**/*.template"/>
 </copy>
+ <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
+ <fileset dir="${build.dir}/c++"/>
+ </copy>
+
 <copy todir="${dist.dir}/" file="build.xml"/>
+
 <chmod perm="ugo+x" type="file">
@@ -754,4 +779,94 @@
 </exec>
 </target>
+ <target name="create-c++-configure" depends="init" if="compile.c++">
+ <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
+ searchpath="yes" failonerror="yes">
+ <arg value="-if"/>
+ </exec>
+ </target>
+
+ <target name="check-c++-makefiles" depends="init" if="compile.c++">
+ <condition property="need.c++.utils.makefile">
+ <not> <available file="${build.c++.utils}/Makefile"/> </not>
+ </condition>
+ <condition property="need.c++.pipes.makefile">
+ <not> <available file="${build.c++.pipes}/Makefile"/> </not>
+ </condition>
+ <condition property="need.c++.examples.pipes.makefile">
+ <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
+ </condition>
+ </target>
+
+ <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
+ if="need.c++.utils.makefile">
+ <mkdir dir="${build.c++.utils}"/>
+ <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-utils" depends="create-c++-utils-makefile"
+ if="compile.c++">
+ <exec executable="make" dir="${build.c++.utils}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
+ if="need.c++.pipes.makefile">
+ <mkdir dir="${build.c++.pipes}"/>
+ <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-pipes"
+ depends="create-c++-pipes-makefile,compile-c++-utils"
+ if="compile.c++">
+ <exec executable="make" dir="${build.c++.pipes}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++"
+ depends="compile-c++-pipes"/>
+
+ <target name="create-c++-examples-pipes-makefile"
+ depends="check-c++-makefiles"
+ if="need.c++.examples.pipes.makefile">
+ <mkdir dir="${build.c++.examples.pipes}"/>
+ <exec executable="${c++.examples.pipes.src}/configure"
+ dir="${build.c++.examples.pipes}"
+ failonerror="yes">
+ <arg value="--prefix=${install.c++.examples}"/>
+ <arg value="--with-hadoop-utils=${install.c++}"/>
+ <arg value="--with-hadoop-pipes=${install.c++}"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-examples-pipes"
+ depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
+ if="compile.c++">
+ <exec executable="make" dir="${build.c++.examples.pipes}" searchpath="yes"
+ failonerror="yes">
+ <arg value="install"/>
+ </exec>
+ </target>
+
+ <target name="compile-c++-examples"
+ depends="compile-c++-examples-pipes"/>
+
 </project>

Added: lucene/hadoop/trunk/src/c++/pipes/.autom4te.cfg
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/.autom4te.cfg?view=auto&rev=538693
==============================================================================
--- lucene/hadoop/trunk/src/c++/pipes/.autom4te.cfg (added)
+++ lucene/hadoop/trunk/src/c++/pipes/.autom4te.cfg Wed May 16 12:23:48 2007
@@ -0,0 +1,42 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +# +# autom4te configuration for hadoop utils library +# + +begin-language: "Autoheader-preselections" +args: --no-cache +end-language: "Autoheader-preselections" + +begin-language: "Automake-preselections" +args: --no-cache +end-language: "Automake-preselections" + +begin-language: "Autoreconf-preselections" +args: --no-cache +end-language: "Autoreconf-preselections" + +begin-language: "Autoconf-without-aclocal-m4" +args: --no-cache +end-language: "Autoconf-without-aclocal-m4" + +begin-language: "Autoconf" +args: --no-cache +end-language: "Autoconf" + Added: lucene/hadoop/trunk/src/c++/pipes/Makefile.am URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/Makefile.am?view=auto&rev=538693 ============================================================================== --- lucene/hadoop/trunk/src/c++/pipes/Makefile.am (added) +++ lucene/hadoop/trunk/src/c++/pipes/Makefile.am Wed May 16 12:23:48 2007 @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +ACLOCAL_AMFLAGS = -I ../utils/m4 +AM_CXXFLAGS=-I$(srcdir)/api -Wall -I$(HADOOP_UTILS_PREFIX)/include + +# List the api header files and where they will be installed +apidir = $(includedir)/hadoop +api_HEADERS = \ + api/hadoop/Pipes.hh \ + api/hadoop/TemplateFactory.hh + +# Define the libaries that need to be built +lib_LIBRARIES = libhadooppipes.a + +# Define the sources for lib +libhadooppipes_a_SOURCES = \ + impl/HadoopPipes.cc + Added: lucene/hadoop/trunk/src/c++/pipes/Makefile.in URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/c%2B%2B/pipes/Makefile.in?view=auto&rev=538693 ============================================================================== --- lucene/hadoop/trunk/src/c++/pipes/Makefile.in (added) +++ lucene/hadoop/trunk/src/c++/pipes/Makefile.in Wed May 16 12:23:48 2007 @@ -0,0 +1,523 @@ +# Makefile.in generated by automake 1.9 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, +# 2003, 2004 Free Software Foundation, Inc. +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + [EMAIL PROTECTED]@ + + +SOURCES = $(libhadooppipes_a_SOURCES) + +srcdir = @srcdir@ +top_srcdir = @top_srcdir@ +VPATH = @srcdir@ +pkgdatadir = $(datadir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +top_builddir = . 
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +INSTALL = @INSTALL@ +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +host_triplet = @host@ +DIST_COMMON = config.guess config.sub $(srcdir)/Makefile.in \ + $(srcdir)/Makefile.am $(top_srcdir)/configure \ + $(am__configure_deps) $(top_srcdir)/impl/config.h.in depcomp \ + ltmain.sh config.guess config.sub $(api_HEADERS) +subdir = . +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/../utils/m4/hadoop_utils.m4 \ + $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ + configure.lineno configure.status.lineno +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/impl/config.h +CONFIG_CLEAN_FILES = +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; +am__installdirs = "$(DESTDIR)$(libdir)" "$(DESTDIR)$(apidir)" +libLIBRARIES_INSTALL = $(INSTALL_DATA) +LIBRARIES = $(lib_LIBRARIES) +ARFLAGS = cru +libhadooppipes_a_AR = $(AR) $(ARFLAGS) +libhadooppipes_a_LIBADD = +am__dirstamp = $(am__leading_dot)dirstamp +am_libhadooppipes_a_OBJECTS = impl/HadoopPipes.$(OBJEXT) +libhadooppipes_a_OBJECTS = $(am_libhadooppipes_a_OBJECTS) +DEFAULT_INCLUDES = -I. -I$(srcdir) -I$(top_builddir)/impl +depcomp = $(SHELL) $(top_srcdir)/depcomp +am__depfiles_maybe = depfiles +CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ + $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) +LTCXXCOMPILE = $(LIBTOOL) --mode=compile --tag=CXX $(CXX) $(DEFS) \ + $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ + $(AM_CXXFLAGS) $(CXXFLAGS) +CXXLD = $(CXX) +CXXLINK = $(LIBTOOL) --mode=link --tag=CXX $(CXXLD) $(AM_CXXFLAGS) \ + $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@ +SOURCES = $(libhadooppipes_a_SOURCES) +apiHEADERS_INSTALL = $(INSTALL_HEADER) +HEADERS = $(api_HEADERS) +ETAGS = etags +CTAGS = ctags +ACLOCAL = @ACLOCAL@ +AMDEP_FALSE = @AMDEP_FALSE@ +AMDEP_TRUE = @AMDEP_TRUE@ +AMTAR = @AMTAR@ +AR = @AR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +CC = @CC@ +CCDEPMODE = @CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CXX = @CXX@ +CXXCPP = @CXXCPP@ +CXXDEPMODE = @CXXDEPMODE@ +CXXFLAGS = @CXXFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +ECHO = @ECHO@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +F77 = @F77@ +FFLAGS = @FFLAGS@ +HADOOP_UTILS_PREFIX = @HADOOP_UTILS_PREFIX@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +LDFLAGS = @LDFLAGS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBTOOL = @LIBTOOL@ +LN_S = @LN_S@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +OBJEXT = @OBJEXT@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +RANLIB = 
@RANLIB@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +STRIP = @STRIP@ +VERSION = @VERSION@ +ac_ct_AR = @ac_ct_AR@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_CXX = @ac_ct_CXX@ +ac_ct_F77 = @ac_ct_F77@ +ac_ct_RANLIB = @ac_ct_RANLIB@ +ac_ct_STRIP = @ac_ct_STRIP@ +am__fastdepCC_FALSE = @am__fastdepCC_FALSE@ +am__fastdepCC_TRUE = @am__fastdepCC_TRUE@ +am__fastdepCXX_FALSE = @am__fastdepCXX_FALSE@ +am__fastdepCXX_TRUE = @am__fastdepCXX_TRUE@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +datadir = @datadir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +ACLOCAL_AMFLAGS = -I ../utils/m4 +AM_CXXFLAGS = -I$(srcdir)/api -Wall -I$(HADOOP_UTILS_PREFIX)/include + +# List the api header files and where they will be installed +apidir = $(includedir)/hadoop +api_HEADERS = \ + api/hadoop/Pipes.hh \ + api/hadoop/TemplateFactory.hh + + +# Define the libaries that need to be built +lib_LIBRARIES = libhadooppipes.a + +# Define the sources for lib +libhadooppipes_a_SOURCES = \ + impl/HadoopPipes.cc + +all: all-am + +.SUFFIXES: +.SUFFIXES: .cc .lo .o .obj +am--refresh: + @: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ + cd $(srcdir) && $(AUTOMAKE) --foreign \ + && exit 0; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ + cd $(top_srcdir) && \ + $(AUTOMAKE) --foreign Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' 
in \ + *config.status*) \ + echo ' $(SHELL) ./config.status'; \ + $(SHELL) ./config.status;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + $(SHELL) ./config.status --recheck + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(srcdir) && $(AUTOCONF) +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) + +impl/config.h: impl/stamp-h1 + @if test ! -f $@; then \ + rm -f impl/stamp-h1; \ + $(MAKE) impl/stamp-h1; \ + else :; fi + +impl/stamp-h1: $(top_srcdir)/impl/config.h.in $(top_builddir)/config.status + @rm -f impl/stamp-h1 + cd $(top_builddir) && $(SHELL) ./config.status impl/config.h +$(top_srcdir)/impl/config.h.in: $(am__configure_deps) + cd $(top_srcdir) && $(AUTOHEADER) + rm -f impl/stamp-h1 + touch $@ + +distclean-hdr: + -rm -f impl/config.h impl/stamp-h1 +install-libLIBRARIES: $(lib_LIBRARIES) + @$(NORMAL_INSTALL) + test -z "$(libdir)" || $(mkdir_p) "$(DESTDIR)$(libdir)" + @list='$(lib_LIBRARIES)'; for p in $$list; do \ + if test -f $$p; then \ + f=$(am__strip_dir) \ + echo " $(libLIBRARIES_INSTALL) '$$p' '$(DESTDIR)$(libdir)/$$f'"; \ + $(libLIBRARIES_INSTALL) "$$p" "$(DESTDIR)$(libdir)/$$f"; \ + else :; fi; \ + done + @$(POST_INSTALL) + @list='$(lib_LIBRARIES)'; for p in $$list; do \ + if test -f $$p; then \ + p=$(am__strip_dir) \ + echo " $(RANLIB) '$(DESTDIR)$(libdir)/$$p'"; \ + $(RANLIB) "$(DESTDIR)$(libdir)/$$p"; \ + else :; fi; \ + done + +uninstall-libLIBRARIES: + @$(NORMAL_UNINSTALL) + @list='$(lib_LIBRARIES)'; for p in $$list; do \ + p=$(am__strip_dir) \ + echo " rm -f '$(DESTDIR)$(libdir)/$$p'"; \ + rm -f "$(DESTDIR)$(libdir)/$$p"; \ + done + +clean-libLIBRARIES: + -test -z "$(lib_LIBRARIES)" || rm -f $(lib_LIBRARIES) +impl/$(am__dirstamp): + @$(mkdir_p) impl + @: > impl/$(am__dirstamp) +impl/$(DEPDIR)/$(am__dirstamp): + @$(mkdir_p) impl/$(DEPDIR) + @: > impl/$(DEPDIR)/$(am__dirstamp) +impl/HadoopPipes.$(OBJEXT): impl/$(am__dirstamp) \ + impl/$(DEPDIR)/$(am__dirstamp) +libhadooppipes.a: $(libhadooppipes_a_OBJECTS) $(libhadooppipes_a_DEPENDENCIES) + -rm -f libhadooppipes.a + $(libhadooppipes_a_AR) libhadooppipes.a $(libhadooppipes_a_OBJECTS) $(libhadooppipes_a_LIBADD) + $(RANLIB) libhadooppipes.a + +mostlyclean-compile: + -rm -f *.$(OBJEXT) + -rm -f impl/HadoopPipes.$(OBJEXT) + +distclean-compile: + -rm -f *.tab.c + [EMAIL PROTECTED]@@am__include@ @[EMAIL PROTECTED]/$(DEPDIR)/[EMAIL PROTECTED]@ + +.cc.o: [EMAIL PROTECTED]@ depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`; \ [EMAIL PROTECTED]@ if $(CXXCOMPILE) -MT $@ -MD -MP -MF "$$depbase.Tpo" -c -o $@ $<; \ [EMAIL PROTECTED]@ then mv -f "$$depbase.Tpo" "$$depbase.Po"; else rm -f "$$depbase.Tpo"; exit 1; fi [EMAIL PROTECTED]@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ [EMAIL PROTECTED]@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ [EMAIL PROTECTED]@ $(CXXCOMPILE) -c -o $@ $< + +.cc.obj: [EMAIL PROTECTED]@ depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`; \ [EMAIL PROTECTED]@ if $(CXXCOMPILE) -MT $@ -MD -MP -MF "$$depbase.Tpo" -c -o $@ `$(CYGPATH_W) '$<'`; \ [EMAIL PROTECTED]@ then mv -f "$$depbase.Tpo" "$$depbase.Po"; else rm -f "$$depbase.Tpo"; exit 1; fi [EMAIL PROTECTED]@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ [EMAIL 
PROTECTED]@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ [EMAIL PROTECTED]@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` + +.cc.lo: [EMAIL PROTECTED]@ depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`; \ [EMAIL PROTECTED]@ if $(LTCXXCOMPILE) -MT $@ -MD -MP -MF "$$depbase.Tpo" -c -o $@ $<; \ [EMAIL PROTECTED]@ then mv -f "$$depbase.Tpo" "$$depbase.Plo"; else rm -f "$$depbase.Tpo"; exit 1; fi [EMAIL PROTECTED]@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ [EMAIL PROTECTED]@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ [EMAIL PROTECTED]@ $(LTCXXCOMPILE) -c -o $@ $< + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs + +distclean-libtool: + -rm -f libtool +uninstall-info-am: +install-apiHEADERS: $(api_HEADERS) + @$(NORMAL_INSTALL) + test -z "$(apidir)" || $(mkdir_p) "$(DESTDIR)$(apidir)" + @list='$(api_HEADERS)'; for p in $$list; do \ + if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ + f=$(am__strip_dir) \ + echo " $(apiHEADERS_INSTALL) '$$d$$p' '$(DESTDIR)$(apidir)/$$f'"; \ + $(apiHEADERS_INSTALL) "$$d$$p" "$(DESTDIR)$(apidir)/$$f"; \ + done + +uninstall-apiHEADERS: + @$(NORMAL_UNINSTALL) + @list='$(api_HEADERS)'; for p in $$list; do \ + f=$(am__strip_dir) \ + echo " rm -f '$(DESTDIR)$(apidir)/$$f'"; \ + rm -f "$(DESTDIR)$(apidir)/$$f"; \ + done + +ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) + list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) ' { files[$$0] = 1; } \ + END { for (i in files) print i; }'`; \ + mkid -fID $$unique +tags: TAGS + +TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ + $(TAGS_FILES) $(LISP) + tags=; \ + here=`pwd`; \ + list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) ' { files[$$0] = 1; } \ + END { for (i in files) print i; }'`; \ + if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ + test -n "$$unique" || unique=$$empty_fix; \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$tags $$unique; \ + fi +ctags: CTAGS +CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ + $(TAGS_FILES) $(LISP) + tags=; \ + here=`pwd`; \ + list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | \ + $(AWK) ' { files[$$0] = 1; } \ + END { for (i in files) print i; }'`; \ + test -z "$(CTAGS_ARGS)$$tags$$unique" \ + || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ + $$tags $$unique + +GTAGS: + here=`$(am__cd) $(top_builddir) && pwd` \ + && cd $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) $$here + +distclean-tags: + -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags +check-am: all-am +check: check-am +all-am: Makefile $(LIBRARIES) $(HEADERS) +installdirs: + for dir in "$(DESTDIR)$(libdir)" "$(DESTDIR)$(apidir)"; do \ + test -z "$$dir" || $(mkdir_p) "$$dir"; \ + done +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + `test -z '$(STRIP)' || \ + echo 
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -rm -f impl/$(DEPDIR)/$(am__dirstamp) + -rm -f impl/$(am__dirstamp) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." +clean: clean-am + +clean-am: clean-generic clean-libLIBRARIES clean-libtool \ + mostlyclean-am + +distclean: distclean-am + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -rf impl/$(DEPDIR) + -rm -f Makefile +distclean-am: clean-am distclean-compile distclean-generic \ + distclean-hdr distclean-libtool distclean-tags + +dvi: dvi-am + +dvi-am: + +html: html-am + +info: info-am + +info-am: + +install-data-am: install-apiHEADERS + +install-exec-am: install-libLIBRARIES + +install-info: install-info-am + +install-man: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f $(am__CONFIG_DISTCLEAN_FILES) + -rm -rf $(top_srcdir)/autom4te.cache + -rm -rf impl/$(DEPDIR) + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-compile mostlyclean-generic \ + mostlyclean-libtool + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-apiHEADERS uninstall-info-am \ + uninstall-libLIBRARIES + +.PHONY: CTAGS GTAGS all all-am am--refresh check check-am clean \ + clean-generic clean-libLIBRARIES clean-libtool ctags distclean \ + distclean-compile distclean-generic distclean-hdr \ + distclean-libtool distclean-tags dvi dvi-am html html-am info \ + info-am install install-am install-apiHEADERS install-data \ + install-data-am install-exec install-exec-am install-info \ + install-info-am install-libLIBRARIES install-man install-strip \ + installcheck installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-compile \ + mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ + tags uninstall uninstall-am uninstall-apiHEADERS \ + uninstall-info-am uninstall-libLIBRARIES + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: