java - Hadoop LzopCodec not found


I'm trying to create a simple MapReduce example. Here's the code:

    public class Main {
        public static void main(String... args) throws IOException, ClassNotFoundException, InterruptedException {
            Job job = new Job();
            job.setMapperClass(MyMapper.class);
            job.setReducerClass(MyReducer.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(LongWritable.class);
            job.setInputFormatClass(TextInputFormat.class);
            job.setNumReduceTasks(5);
            job.setJarByClass(Main.class);

            FileInputFormat.setInputPaths(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1]));
            FileOutputFormat.setCompressOutput(job, false);
            job.waitForCompletion(true);
            System.out.println("done");
        }
    }

Here's MyMapper:

    public class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            context.write(value, new LongWritable(1));
        }
    }

and MyReducer:

    public class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        private MultipleOutputs<Text, LongWritable> mos;

        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            long sum = 0;
            Iterator<LongWritable> iterator = values.iterator();
            while (iterator.hasNext()) {
                sum += iterator.next().get();
            }
            mos.write(key, new LongWritable(sum), "tabyretka");
        }

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            super.setup(context);
            mos = new MultipleOutputs<Text, LongWritable>(context);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            mos.close();
        }
    }

When I run the program locally it works fine! But when I run it on the Hadoop machine, it gives me the following error:

    java.lang.IllegalArgumentException: Compression codec com.hadoop.compression.lzo.LzopCodec not found.
        at org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(CompressionCodecFactory.java:116)
        at org.apache.hadoop.io.compress.CompressionCodecFactory.<init>(CompressionCodecFactory.java:156)
        at org.apache.hadoop.mapreduce.lib.input.TextInputFormat.isSplitable(TextInputFormat.java:51)
        at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:254)
        at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:950)
        at org.apache.hadoop.mapred.JobClient.writeSplits(JobClient.java:967)
        at org.apache.hadoop.mapred.JobClient.access$500(JobClient.java:170)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:880)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:833)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1177)
        at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:833)
        at org.apache.hadoop.mapreduce.Job.submit(Job.java:476)
        at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:506)
        at org.apache.hadoop.examples.WordCount.main(WordCount.java:67)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:68)
        at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:139)
        at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:64)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:197)
    Caused by: java.lang.ClassNotFoundException: com.hadoop.compression.lzo.LzopCodec
        at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:249)
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:951)
        at org.apache.hadoop.io.compress.CompressionCodecFactory.getCodecClasses(CompressionCodecFactory.java:109)
        ... 27 more
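From the trace, the exception is thrown while the input splits are being computed, before any of my classes run: TextInputFormat builds a CompressionCodecFactory to decide whether the input files are splittable. As far as I can tell from the trace, isSplitable does roughly this (my paraphrase, not the exact Hadoop source):

    // Paraphrased from the stack trace, not copied from Hadoop itself:
    protected boolean isSplitable(JobContext context, Path file) {
        // The CompressionCodecFactory constructor instantiates every codec
        // class listed in the io.compression.codecs property, whether or not
        // the job actually uses compression.
        CompressionCodec codec =
                new CompressionCodecFactory(context.getConfiguration()).getCodec(file);
        return codec == null;
    }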

But I don't use LzopCodec anywhere. I've tried to disable codec usage by creating a Configuration and setting these properties:

conf.set("mapred.compress.map.output","false"); conf.set("mapred.output.compress","false");  

and passing that Configuration to the Job. It still fails.
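For reference, this is roughly how I wire the Configuration in (a sketch of my attempt; I set the properties before creating the Job, since the Job constructor copies the Configuration it is given):

    Configuration conf = new Configuration();
    conf.set("mapred.compress.map.output", "false");
    conf.set("mapred.output.compress", "false");

    // Pass the Configuration to the Job constructor so the settings take effect.
    Job job = new Job(conf);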

Does anyone have a suggestion why it tries to use LzopCodec and how I can disable it?
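My current guess is that the cluster's core-site.xml lists com.hadoop.compression.lzo.LzopCodec in io.compression.codecs (the property that CompressionCodecFactory.getCodecClasses reads), but the LZO jar isn't on my job's classpath. If that's right, would it be safe to override the property with only the codecs that ship with Hadoop, something like this sketch?

    // Assumption: the cluster's io.compression.codecs includes LzopCodec;
    // override it with only the codecs bundled with Hadoop itself.
    conf.set("io.compression.codecs",
            "org.apache.hadoop.io.compress.DefaultCodec,"
          + "org.apache.hadoop.io.compress.GzipCodec,"
          + "org.apache.hadoop.io.compress.BZip2Codec");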

