(MapReduce) ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to ....io.Text
----------------------------------------------------------------------------------------------
Key: MAHOUT-751
URL: https://issues.apache.org/jira/browse/MAHOUT-751
Project: Mahout
Issue Type: Bug
Components: build
Environment: ubuntu 10.04/hadoop 0.21.0
Reporter: patrick.J
Could somebody help me?
My MapReduce program throws an exception like this:
11/07/01 20:18:06 INFO mapreduce.Job: Task Id : attempt_201107011635_0005_m_000000_0, Status : FAILED
java.lang.ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to org.apache.hadoop.io.Text
at Sum$MapClass.map(Sum.java:15)
at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:144)
at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:652)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:328)
at org.apache.hadoop.mapred.Child$4.run(Child.java:217)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:742)
at org.apache.hadoop.mapred.Child.main(Child.java:211)
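(The frame at Sum$MapClass.map(Sum.java:15) appears to be the map() signature in the code below; the framework seems to be passing a LongWritable key where the mapper declares Text. A possible fix is sketched after the code.)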
Here's the code:
import java.io.*;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.split.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;

public class Sum extends Configured implements Tool {

    // The input key is declared as Text, but TextInputFormat (set in run()
    // below) delivers LongWritable byte offsets as keys.
    public static class MapClass extends Mapper<Text, Text, Text, IntWritable> {
        private static int i;
        private static int j;
        private static int a;
        private static int b;
        private String user;

        public void map(Text key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] user_score = value.toString().split(" ");
            // Tokens from index 1 onward are (user, score) pairs; emit the
            // product of the scores for every pair of users.
            for (i = 1; i < user_score.length - 1; i += 2)
                for (j = i + 2; j < user_score.length - 1; j += 2) {
                    user = user_score[i] + " " + user_score[j];
                    a = Integer.parseInt(user_score[i + 1]);
                    b = Integer.parseInt(user_score[j + 1]);
                    context.write(new Text(user), new IntWritable(a * b));
                }
        }
    }

    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        Job job = new Job(conf, "Myjob");
        job.setJarByClass(Sum.class);

        Path in = new Path(args[0]);
        Path out = new Path(args[1]);
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);

        job.setMapperClass(MapClass.class);
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);
        // TextInputFormat produces <LongWritable, Text> records.
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new Sum(), args);
        System.exit(res);
    }
}
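For reference, the usual cause of this exact exception: TextInputFormat's record reader emits <LongWritable, Text> pairs, where the key is the byte offset of each line in the file, so a mapper declared as Mapper<Text, Text, Text, IntWritable> fails with this cast as soon as the framework hands it a key. A minimal sketch of the likely fix, assuming the input is plain text read line by line and the offset key can simply be ignored (local variables replace the static fields, which are unsafe to share across map() calls anyway):

// Sketch: declare the input key as LongWritable to match TextInputFormat.
public static class MapClass extends Mapper<LongWritable, Text, Text, IntWritable> {
    @Override
    public void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // key is the byte offset of the line; only the line's text is used.
        String[] user_score = value.toString().split(" ");
        for (int i = 1; i < user_score.length - 1; i += 2) {
            for (int j = i + 2; j < user_score.length - 1; j += 2) {
                String user = user_score[i] + " " + user_score[j];
                int a = Integer.parseInt(user_score[i + 1]);
                int b = Integer.parseInt(user_score[j + 1]);
                context.write(new Text(user), new IntWritable(a * b));
            }
        }
    }
}

Alternatively, if the first token of each line is meant to arrive as the key, KeyValueTextInputFormat (ported to the new org.apache.hadoop.mapreduce.lib.input API in 0.21) yields <Text, Text> records and would match the original Mapper<Text, Text, ...> declaration unchanged.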