[ 
https://issues.apache.org/jira/browse/MRUNIT-156?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13476849#comment-13476849
 ] 

Bertrand Dechoux commented on MRUNIT-156:
-----------------------------------------

I can't reproduce it. Maybe a project configuration error? What is the exact 
jar you are using for MRUnit? And what version/distribution of hadoop? Which 
jdk (should not matter but...)?

I have this configuration using Windows, but the OS shouldn't matter:
<dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <version>1.0.3</version>
        <scope>provided</scope>
</dependency>

<dependency>
        <groupId>org.apache.mrunit</groupId>
        <artifactId>mrunit</artifactId>
        <version>0.9.0-incubating</version>
        <classifier>hadoop1</classifier>
        <scope>test</scope>
</dependency>

Which gives me the following output:
ERROR [main] (TestDriver.java:352) - Missing expected output (hello, 1): 
Mismatch in key class: expected: class org.apache.hadoop.io.Text actual: class 
org.apache.hadoop.io.LongWritable
ERROR [main] (TestDriver.java:352) - Received unexpected output (3243, hello): 
Mismatch in key class: expected: class org.apache.hadoop.io.Text actual: class 
org.apache.hadoop.io.LongWritable
ERROR [main] (TestDriver.java:352) - Missing expected output (posa, 6) at 
position 0.
ERROR [main] (TestDriver.java:352) - Received unexpected output (posa, 3) at 
position 0.

Could you provide a minimal pom.xml for the test?
                
> java.lang.IncompatibleClassChangeError: Found class 
> org.apache.hadoop.mapreduce.TaskInputOutputContext, but interface was expected
> ----------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: MRUNIT-156
>                 URL: https://issues.apache.org/jira/browse/MRUNIT-156
>             Project: MRUnit
>          Issue Type: Bug
>    Affects Versions: 0.9.0
>         Environment: fedora,eclipse
>            Reporter: posa Wu
>            Priority: Minor
>
> This is my first time using MRUnit; the following program is the 
> WordCount example. I cannot figure out why it throws 
> java.lang.IncompatibleClassChangeError: Found class 
> org.apache.hadoop.mapreduce.TaskInputOutputContext, but interface was 
> expected.
> I have already checked all similar issues but couldn't fix it.
> Thanks in advance.
> *********************************************************************************************************************
> package tmp;
> import java.io.IOException;
> import java.util.StringTokenizer;
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.io.IntWritable;
> import org.apache.hadoop.io.LongWritable;
> import org.apache.hadoop.io.Text;
> import org.apache.hadoop.mapreduce.Job;
> import org.apache.hadoop.mapreduce.Mapper;
> import org.apache.hadoop.mapreduce.Reducer;
> import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
> import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
> import org.apache.hadoop.util.GenericOptionsParser;
> public class WordCount{
>       public static class TokenizerMapper 
>               extends Mapper<LongWritable, Text, Text, IntWritable>
>       {
>               private final static IntWritable one = new IntWritable(1);
>               private Text word = new Text();
>               
>               public void map(Object key, Text value, Context context)
>                       throws IOException, InterruptedException{
>                       StringTokenizer itr = new 
> StringTokenizer(value.toString());
>                       while(itr.hasMoreTokens()){
>                               word.set(itr.nextToken());
>                               context.write(word, one);
>                       }
>               }
>       }
>       
>       public static class IntSumReducer
>               extends Reducer<Text, IntWritable, Text, IntWritable>
>       {
>               private IntWritable result = new IntWritable();
>               public void reduce(Text key, Iterable<IntWritable> values, 
> Context context)
>                       throws IOException, InterruptedException{
>                       int sum = 0;
>                       for(IntWritable val: values){
>                               sum += val.get();
>                       }
>                       result.set(sum);
>                       context.write(key, result);
>               }
>       }
>       
>       public static void main(String args[]) throws IOException, 
> ClassNotFoundException, InterruptedException{
>               Configuration conf = new Configuration();
>               String[] otherArgs = new GenericOptionsParser(conf, 
> args).getRemainingArgs();
>               if(otherArgs.length != 2){
>                       System.err.println("Usage: wordcount <int> <out>");
>                       System.exit(2);
>               }
>               
>               Job job = new Job(conf, "word count");
>               job.setJarByClass(WordCount.class);
>               job.setMapperClass(TokenizerMapper.class);
>               job.setCombinerClass(IntSumReducer.class);
>               job.setReducerClass(IntSumReducer.class);
>               job.setOutputKeyClass(Text.class);
>               job.setOutputValueClass(IntWritable.class);
>               FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
>               FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
>               System.exit(job.waitForCompletion(true)?0:1);
>       }
> }
> package test;
> import java.util.ArrayList;
> import java.util.List;
> import org.apache.hadoop.io.IntWritable;
> import org.apache.hadoop.io.LongWritable;
> import org.apache.hadoop.io.Text;
> import org.apache.hadoop.mrunit.mapreduce.MapDriver;
> import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
> import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
> import org.junit.Before;
> import org.junit.Test;
> import tmp.WordCount.IntSumReducer;
> import tmp.WordCount.TokenizerMapper;
> public class WordCountTest {
>       MapDriver<LongWritable, Text, Text, IntWritable> mapDriver;
>       ReduceDriver<Text, IntWritable, Text, IntWritable> reduceDriver;
>       MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, 
> IntWritable> mapReduceDriver;
>       
>       @Before
>       public void setUp(){
>               TokenizerMapper mapper = new TokenizerMapper();
>               IntSumReducer reducer = new IntSumReducer();
>               
>               mapDriver = MapDriver.newMapDriver(mapper);
>               reduceDriver = ReduceDriver.newReduceDriver(reducer);
>               mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, 
> reducer);
>       }
>       @Test
>       public void testMapper(){
>               mapDriver.withInput(new LongWritable(3243), new Text("hello"));
>               mapDriver.withOutput(new Text("hello"), new IntWritable(1));
>               mapDriver.runTest();
>       }
>       
>       @Test
>       public void testReducer(){
>               List<IntWritable> values = new ArrayList<IntWritable>();
>               values.add(new IntWritable(1));
>               values.add(new IntWritable(2));
>               reduceDriver.withInput(new Text("posa"), values);
>               reduceDriver.withOutput(new Text("posa"), new IntWritable(6));
>               reduceDriver.runTest();
>       }
> }
> java.lang.IncompatibleClassChangeError: Found class 
> org.apache.hadoop.mapreduce.TaskInputOutputContext, but interface was expected
>       at 
> org.apache.hadoop.mrunit.mapreduce.mock.MockContextWrapper.createCommon(MockContextWrapper.java:53)
>       at 
> org.apache.hadoop.mrunit.mapreduce.mock.MockMapContextWrapper.create(MockMapContextWrapper.java:70)
>       at 
> org.apache.hadoop.mrunit.mapreduce.mock.MockMapContextWrapper.<init>(MockMapContextWrapper.java:62)
>       at org.apache.hadoop.mrunit.mapreduce.MapDriver.run(MapDriver.java:217)
>       at 
> org.apache.hadoop.mrunit.MapDriverBase.runTest(MapDriverBase.java:150)
>       at org.apache.hadoop.mrunit.TestDriver.runTest(TestDriver.java:137)
>       at test.WordCountTest.testMapper(WordCountTest.java:41)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:601)
>       at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
>       at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
>       at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
>       at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
>       at 
> org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
>       at 
> org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:73)
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:46)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:180)
>       at org.junit.runners.ParentRunner.access$000(ParentRunner.java:41)
>       at org.junit.runners.ParentRunner$1.evaluate(ParentRunner.java:173)
>       at 
> org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
>       at 
> org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
>       at 
> org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:49)
>       at 
> org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38)
>       at 
> org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:467)
>       at 
> org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683)
>       at 
> org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390)
>       at 
> org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197)

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators
For more information on JIRA, see: http://www.atlassian.com/software/jira

Reply via email to