Please use the fully qualified class name when you run the jar:

  hadoop jar myjob.jar myjob.MyJob input output

MyJob is declared in package myjob, so the class inside the jar is myjob/MyJob.class; passing only "MyJob" is what produces the "NoClassDefFoundError: MyJob (wrong name: myjob/MyJob)" you are seeing.
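In case the packaging is also part of the problem, here is one way to compile and build the jar so the myjob/ package directory ends up inside it. This is only a sketch of a typical setup; the myjob/ source directory layout and the use of "hadoop classpath" are assumptions about your environment:

  # compile against the Hadoop jars; MyJob.java is assumed to sit in a myjob/ source directory
  mkdir -p classes
  javac -classpath "$(hadoop classpath)" -d classes myjob/MyJob.java

  # package the class files, keeping the myjob/ directory, then run with the qualified name
  jar cvf myjob.jar -C classes .
  hadoop jar myjob.jar myjob.MyJob input output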
On Tue, Apr 15, 2014 at 3:06 PM, [email protected] <[email protected]> wrote:
> Hello everyone,
> I am new to Hadoop, and I am reading Hadoop in Action.
> When I tried to run a demo from this book, I got a problem and could not
> find an answer on the net. Can you help me with this?
>
> Below is the error info:
>
> $ hadoop jar myjob.jar MyJob input output
> Exception in thread "main" java.lang.NoClassDefFoundError: MyJob (wrong name: myjob/MyJob)
>         at java.lang.ClassLoader.defineClass1(Native Method)
>         at java.lang.ClassLoader.defineClass(ClassLoader.java:791)
>         at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
>         at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
>         at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
>         at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
>         at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:423)
>         at java.lang.ClassLoader.loadClass(ClassLoader.java:356)
>         at java.lang.Class.forName0(Native Method)
>         at java.lang.Class.forName(Class.java:264)
>         at org.apache.hadoop.util.RunJar.main(RunJar.java:149)
>
> And this is the command I used to compile the .java (see the attached screenshot). I compiled on Win7 and ran on Ubuntu.
>
> Below is MyJob.java:
>
> package myjob;
>
> import java.io.IOException;
> import java.util.Iterator;
>
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.conf.Configured;
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.io.Text;
> import org.apache.hadoop.mapred.FileInputFormat;
> import org.apache.hadoop.mapred.FileOutputFormat;
> import org.apache.hadoop.mapred.JobClient;
> import org.apache.hadoop.mapred.JobConf;
> import org.apache.hadoop.mapred.KeyValueTextInputFormat;
> import org.apache.hadoop.mapred.MapReduceBase;
> import org.apache.hadoop.mapred.Mapper;
> import org.apache.hadoop.mapred.OutputCollector;
> import org.apache.hadoop.mapred.Reducer;
> import org.apache.hadoop.mapred.Reporter;
> import org.apache.hadoop.mapred.TextOutputFormat;
> import org.apache.hadoop.util.Tool;
> import org.apache.hadoop.util.ToolRunner;
>
> public class MyJob extends Configured implements Tool {
>
>     @Override
>     public int run(String[] args) throws Exception {
>         Configuration conf = getConf();
>         JobConf job = new JobConf(conf, MyJob.class);
>         Path in = new Path(args[0]);
>         Path out = new Path(args[1]);
>         FileInputFormat.setInputPaths(job, in);
>         FileOutputFormat.setOutputPath(job, out);
>         job.setJobName("MyJob");
>         job.setJarByClass(MyJob.class);
>         job.setMapperClass(MapClass.class);
>         job.setReducerClass(Reduce.class);
>
>         job.setInputFormat(KeyValueTextInputFormat.class);
>         job.setOutputFormat(TextOutputFormat.class);
>         job.setOutputKeyClass(Text.class);
>         job.setOutputValueClass(Text.class);
>         job.set("key.value.separator.in.input.line", ",");
>         JobClient.runJob(job);
>         return 0;
>     }
>
>     public static class MapClass extends MapReduceBase implements
>             Mapper<Text, Text, Text, Text> {
>
>         @Override
>         public void map(Text key, Text value, OutputCollector<Text, Text> output,
>                 Reporter reporter) throws IOException {
>             output.collect(value, key);
>         }
>     }
>
>     public static class Reduce extends MapReduceBase implements
>             Reducer<Text, Text, Text, Text> {
>
>         @Override
>         public void reduce(Text key, Iterator<Text> values,
>                 OutputCollector<Text, Text> output, Reporter reporter)
>                 throws IOException {
>             String csv = "";
>             while (values.hasNext()) {
>                 if (csv.length() > 0)
>                     csv += ",";
>                 csv += values.next().toString();
>             }
>             output.collect(key, new Text(csv));
>         }
>     }
>
>     public static void main(String[] args) throws Exception {
>         int res = ToolRunner.run(new Configuration(), new MyJob(), args);
>         System.exit(res);
>     }
> }
> ------------------------------
> Thank you for your kind help!
>
<<inline: 2014-04-15_150135.png>>
