I don't know if it's necessary, but here is the gridmix_config.xml:
[code]
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="nutch-conf.xsl"?>
<!-- Put site-specific property overrides in this file. -->
<configuration>

<property> <name>GRID_MIX_DATA</name> <value>/gridmix/data</value> <description></description> </property>
<property> <name>FIXCOMPTEXT</name> <value>${GRID_MIX_DATA}/EntropySimulationCompressed</value> <description></description> </property>
<property> <name>VARINFLTEXT</name> <value>${GRID_MIX_DATA}/SortUncompressed</value> <description></description> </property>
<property> <name>FIXCOMPSEQ</name> <value>${GRID_MIX_DATA}/MonsterQueryBlockCompressed</value> <description></description> </property>
<property> <name>VARCOMPSEQ</name> <value>${GRID_MIX_DATA}/WebSimulationBlockCompressed</value> <description></description> </property>

<!-- small -->
<property> <name>streamSort.smallJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>streamSort.smallJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>streamSort.smallJobs.numOfReduces</name> <value>0</value> <description></description> </property>
<property> <name>streamSort.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>streamSort.smallJobs.numOfOutputCompressed</name> <value>0</value> <description></description> </property>

<!-- medium -->
<property> <name>streamSort.mediumJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>streamSort.mediumJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>streamSort.mediumJobs.numOfReduces</name> <value></value> <description></description> </property>
<property> <name>streamSort.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>streamSort.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>streamSort.largeJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>streamSort.largeJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>streamSort.largeJobs.numOfReduces</name> <value></value> <description></description> </property>
<property> <name>streamSort.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>streamSort.largeJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- small -->
<property> <name>javaSort.smallJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.smallJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>javaSort.smallJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>javaSort.smallJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- medium -->
<property> <name>javaSort.mediumJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.mediumJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>javaSort.mediumJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>javaSort.largeJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.largeJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>javaSort.largeJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>javaSort.largeJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- small -->
<property> <name>combiner.smallJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>combiner.smallJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>combiner.smallJobs.numOfReduces</name> <value>0</value> <description></description> </property>
<property> <name>combiner.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>combiner.smallJobs.numOfOutputCompressed</name> <value>0</value> <description></description> </property>

<!-- medium -->
<property> <name>combiner.mediumJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>combiner.mediumJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>combiner.mediumJobs.numOfReduces</name> <value>0</value> <description></description> </property>
<property> <name>combiner.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>combiner.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>combiner.largeJobs.numOfJobs</name> <value>0</value> <description></description> </property>
<property> <name>combiner.largeJobs.inputFiles</name> <value>${VARINFLTEXT}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>combiner.largeJobs.numOfReduces</name> <value>0</value> <description></description> </property>
<property> <name>combiner.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>combiner.largeJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- small -->
<property> <name>monsterQuery.smallJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.smallJobs.inputFiles</name> <value>${FIXCOMPSEQ}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>monsterQuery.smallJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>monsterQuery.smallJobs.numOfOutputCompressed</name> <value>0</value> <description></description> </property>

<!-- medium -->
<property> <name>monsterQuery.mediumJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.mediumJobs.inputFiles</name> <value>${FIXCOMPSEQ}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>monsterQuery.mediumJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>monsterQuery.largeJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.largeJobs.inputFiles</name> <value>${FIXCOMPSEQ}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>monsterQuery.largeJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>monsterQuery.largeJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- small -->
<property> <name>webdataScan.smallJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.smallJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>webdataScan.smallJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>webdataScan.smallJobs.numOfOutputCompressed</name> <value>0</value> <description></description> </property>

<!-- medium -->
<property> <name>webdataScan.mediumJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.mediumJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>webdataScan.mediumJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>webdataScan.largeJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.largeJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>webdataScan.largeJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>webdataScan.largeJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- small -->
<property> <name>webdataSort.smallJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.smallJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-00000_0_0,part-00001_0_0,part-00002_0_0}</value> <description></description> </property>
<property> <name>webdataSort.smallJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.smallJobs.numOfMapoutputCompressed</name> <value>0</value> <description></description> </property>
<property> <name>webdataSort.smallJobs.numOfOutputCompressed</name> <value>0</value> <description></description> </property>

<!-- medium -->
<property> <name>webdataSort.mediumJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-000*0_0_0,part-000*1_0_0,part-000*2_0_0}</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.mediumJobs.numOfOutputCompressed</name> <value>5</value> <description></description> </property>

<!-- large -->
<property> <name>webdataSort.largeJobs.numOfJobs</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.largeJobs.inputFiles</name> <value>${VARCOMPSEQ}/{part-*_0_0,part-*_0_0,part-*_0_0}</value> <description></description> </property>
<property> <name>webdataSort.largeJobs.numOfReduces</name> <value>5</value> <description></description> </property>
<property> <name>webdataSort.largeJobs.numOfMapoutputCompressed</name> <value>5</value> <description></description> </property>
</configuration>
[/code]

On Wed, Mar 23, 2011 at 12:03 PM, Pedro Costa <psdc1...@gmail.com> wrote:
> Hi,
>
> When I'm running the Gridmix2 examples, the tests halt during execution and the following error is displayed:
>
> [code]
> 11/03/23 12:52:06 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:06 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:06 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0042/job.split
> 11/03/23 12:52:06 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:06 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:06 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0043/job.split
> 11/03/23 12:52:06 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:06 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:06 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0044/job.split
> 11/03/23 12:52:06 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:06 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:07 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0045/job.split
> 11/03/23 12:52:07 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:07 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:07 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0046/job.split
> 11/03/23 12:52:07 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:07 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:07 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0047/job.split
> 11/03/23 12:52:07 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:07 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:07 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0048/job.split
> 11/03/23 12:52:07 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:07 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:08 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0049/job.split
> 11/03/23 12:52:08 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:08 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:08 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0050/job.split
> 11/03/23 12:52:08 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:08 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:08 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0051/job.split
> 11/03/23 12:52:08 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:08 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:08 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0052/job.split
> 11/03/23 12:52:08 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:08 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:09 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0053/job.split
> 11/03/23 12:52:09 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:09 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:09 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0054/job.split
> 11/03/23 12:52:09 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:09 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:09 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0055/job.split
> 11/03/23 12:52:09 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:09 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:10 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0056/job.split
> 11/03/23 12:52:10 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:10 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:10 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0057/job.split
> 11/03/23 12:52:10 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:10 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:10 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0058/job.split
> 11/03/23 12:52:10 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:10 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:10 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0059/job.split
> 11/03/23 12:52:10 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:10 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:10 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0060/job.split
> 11/03/23 12:52:10 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:10 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:11 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0061/job.split
> 11/03/23 12:52:11 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:11 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:11 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0062/job.split
> 11/03/23 12:52:11 WARN mapred.JobClient:544 Use GenericOptionsParser for parsing the arguments. Applications should implement Tool for the same.
> 11/03/23 12:52:11 DEBUG mapred.JobClient:573 default FileSystem: hdfs://chinqchint-41.lille.grid5000.fr:54310
> 11/03/23 12:52:11 DEBUG mapred.JobClient:789 Creating splits at hdfs://chinqchint-41.lille.grid5000.fr:54310/tmp/hadoop-pcosta/mapred/system/job_201103231247_0063/job.split
> java.lang.RuntimeException: null is null
>     at org.apache.hadoop.mapred.GridMixRunner.getStatForJob(GridMixRunner.java:497)
>     at org.apache.hadoop.mapred.GridMixRunner.printStatsForJobs(GridMixRunner.java:621)
>     at org.apache.hadoop.mapred.GridMixRunner.run(GridMixRunner.java:658)
>     at org.apache.hadoop.mapred.GridMixRunner.main(GridMixRunner.java:670)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>     at java.lang.reflect.Method.invoke(Method.java:597)
>     at org.apache.hadoop.util.RunJar.main(RunJar.java:166)
> Exception in thread "main" java.lang.NullPointerException
>     at org.apache.hadoop.mapred.GridMixRunner.getStatForJob(GridMixRunner.java:502)
>     at org.apache.hadoop.mapred.GridMixRunner.printStatsForJobs(GridMixRunner.java:621)
>     at org.apache.hadoop.mapred.GridMixRunner.run(GridMixRunner.java:658)
>     at org.apache.hadoop.mapred.GridMixRunner.main(GridMixRunner.java:670)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>     at java.lang.reflect.Method.invoke(Method.java:597)
>     at org.apache.hadoop.util.RunJar.main(RunJar.java:166)
>
> [/code]
>
> What's happening? Why do the tests stop?
>
> Thanks,
> --
> Pedro
>

--
Pedro
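A few things in that config stand out, by the way, though I can't say for sure that any of them is what makes GridMixRunner fail: streamSort.mediumJobs.numOfReduces and streamSort.largeJobs.numOfReduces have empty <value> elements, the webdataSort.mediumJobs inputFiles/numOfReduces pair is declared twice, and webdataSort.largeJobs.numOfOutputCompressed is missing. If it helps, here is a small standalone sketch (not part of Gridmix; the class name and the local file path are just placeholders) that loads the file with Hadoop's Configuration, which also expands the ${GRID_MIX_DATA}-style references, and prints what each setting resolves to:

[code]
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class GridmixConfigCheck {
  public static void main(String[] args) {
    // Load only gridmix_config.xml, without the default Hadoop resources.
    Configuration conf = new Configuration(false);
    conf.addResource(new Path("gridmix_config.xml")); // adjust to wherever the file lives

    String[] keys = {
        "VARINFLTEXT",                                 // get() expands ${GRID_MIX_DATA}
        "streamSort.smallJobs.inputFiles",
        "streamSort.mediumJobs.numOfReduces",          // empty <value> in the file above
        "webdataSort.largeJobs.numOfOutputCompressed"  // not present in the file above
    };
    for (String key : keys) {
      String value = conf.get(key);
      System.out.println(key + " = "
          + (value == null ? "<missing>" : value.isEmpty() ? "<empty>" : value));
    }
  }
}
[/code]

Run with the Hadoop jars on the classpath, it prints each resolved value, which at least confirms what GridMixRunner actually sees before it starts submitting jobs.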
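As for the "null is null" RuntimeException: the stack trace shows getStatForJob() failing while GridMixRunner is printing per-job statistics, which suggests that the statistics lookup for one of the jobs came back null (for example because the JobTracker has no record of it). That is only a guess on my part, but it is easy to see what the JobTracker reports for one of the job IDs from the log with a throwaway probe like this (again a hypothetical helper, not Gridmix code):

[code]
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;

public class JobStatProbe {
  public static void main(String[] args) throws Exception {
    // Uses the cluster settings from the mapred-site.xml on the classpath.
    JobClient client = new JobClient(new JobConf());
    // One of the job IDs that appears in the log above; getJob() returns null
    // when the JobTracker does not know the job.
    RunningJob job = client.getJob(JobID.forName("job_201103231247_0042"));
    if (job == null) {
      System.out.println("JobTracker returned null for this job id");
    } else {
      System.out.println(job.getJobName() + ": complete=" + job.isComplete()
          + ", successful=" + job.isSuccessful());
    }
  }
}
[/code]

If that prints null for jobs that clearly show up in the log, the problem is on the job-tracking side rather than in the stats printing itself.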