mapreduce - Error: java.lang.NoClassDefFoundError: org/apache/hadoop/util/StopWatch -
I am building a custom input formatter, and I get the following exception when running the program:
error message
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/util/StopWatch
    at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:381)
    at org.apache.hadoop.mapreduce.JobSubmitter.writeNewSplits(JobSubmitter.java:304)
    at org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:321)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:199)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1290)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1287)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1287)
    at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1308)
    at com.oodles.hadoop.inputformat.MyFile.main(MyFile.java:32)
myfile
import java.io.ioexception; import org.apache.hadoop.conf.configuration; import org.apache.hadoop.fs.path; import org.apache.hadoop.io.text; import org.apache.hadoop.mapreduce.job; import org.apache.hadoop.mapreduce.lib.input.fileinputformat; import org.apache.hadoop.mapreduce.lib.output.fileoutputformat; public class myfile { public static void main(string[] args) throws ioexception, classnotfoundexception, interruptedexception { configuration conf = new configuration(); path outputpath = new path("/home/oodles/output"); outputpath.getfilesystem(conf).delete(outputpath); job job = new job(conf,"custom input formate"); job.setjarbyclass(myfile.class); job.setjobname("customtest"); job.setnumreducetasks(0); job.setmapperclass(mymapper.class); job.setmapoutputkeyclass(text.class); job.setmapoutputvalueclass(text.class); job.setinputformatclass(customeinputformat.class); fileinputformat.addinputpath(job, new path("/home/oodles/input")); fileoutputformat.setoutputpath(job, outputpath); job.waitforcompletion(true); } }
customeinputformat
public class customeinputformat extends fileinputformat<mykey, myvalue>{ @override public recordreader<mykey, myvalue> createrecordreader(inputsplit split, taskattemptcontext context) throws ioexception, interruptedexception { // todo auto-generated method stub return new myrecordreader(); } }
mymapper
public class mymapper extends mapper<mykey,myvalue,text,text>{ protected void map(mykey key,myvalue value,context context) throws ioexception, interruptedexception{ string sensor = key.getsensortype().tostring(); if(sensor.tolowercase().equals("a")){ context.write(value.getvalue1(),value.getvalue2()); } } }
mykey
public class mykey implements writablecomparable{ private text sensortype,timestamp,status; public mykey(){ this.sensortype = new text(); this.timestamp = new text(); this.status = new text(); } public mykey(text sensortype,text timestamp,text status){ this.sensortype = sensortype; this.timestamp = timestamp; this.status = status; } public void readfields(datainput in) throws ioexception { // todo auto-generated method stub sensortype.readfields(in); timestamp.readfields(in); status.readfields(in); } public void write(dataoutput out) throws ioexception { // todo auto-generated method stub sensortype.write(out); timestamp.write(out); status.write(out); } public int compareto(object o) { // todo auto-generated method stub mykey other = (mykey)o; int cmp = sensortype.compareto(other.sensortype); if(cmp != 0){ return cmp; } cmp = timestamp.compareto(other.timestamp); if(cmp != 0){ return cmp; } return status.compareto(other.status); } public text getsensortype() { return sensortype; } public void setsensortype(text sensortype) { sensortype = sensortype; } public text gettimestamp() { return timestamp; } public void settimestamp(text timestamp) { this.timestamp = timestamp; } public text getstatus() { return status; } public void setstatus(text status) { this.status = status; } }
myvalue
public class myvalue implements writablecomparable{ private text value1,value2; public myvalue(){ this.value1 = new text(); this.value2 = new text(); } public myvalue(text value1,text value2){ this.value1 = value1; this.value2 = value2; } public void readfields(datainput in) throws ioexception { // todo auto-generated method stub value1.readfields(in); value2.readfields(in); } public void write(dataoutput out) throws ioexception { // todo auto-generated method stub value1.write(out); value2.write(out); } public int compareto(object o) { myvalue other = (myvalue)o; int cmp = value1.compareto(other.value1); if(cmp != 0){ return cmp; } return value2.compareto(other.value2); } public text getvalue1() { return value1; } public void setvalue1(text value1) { this.value1 = value1; } public text getvalue2() { return value2; } public void setvalue2(text value2) { this.value2 = value2; } }
myrecordreader
public class myrecordreader extends recordreader<mykey, myvalue>{ private mykey key; private myvalue value; private linerecordreader reader = new linerecordreader(); @override public void close() throws ioexception { // todo auto-generated method stub reader.close(); } @override public mykey getcurrentkey() throws ioexception, interruptedexception { // todo auto-generated method stub return key; } @override public myvalue getcurrentvalue() throws ioexception, interruptedexception { // todo auto-generated method stub return value; } @override public float getprogress() throws ioexception, interruptedexception { // todo auto-generated method stub return reader.getprogress(); } @override public void initialize(inputsplit is, taskattemptcontext tac) throws ioexception, interruptedexception { reader.initialize(is, tac); } @override public boolean nextkeyvalue() throws ioexception, interruptedexception { // todo auto-generated method stub boolean gotnextkeyvalue = reader.nextkeyvalue(); if(gotnextkeyvalue){ if(key==null){ key = new mykey(); } if(value == null){ value = new myvalue(); } text line = reader.getcurrentvalue(); string[] tokens = line.tostring().split("\t"); key.setsensortype(new text(tokens[0])); key.settimestamp(new text(tokens[1])); key.setstatus(new text(tokens[2])); value.setvalue1(new text(tokens[3])); value.setvalue2(new text(tokens[4])); } else { key = null; value = null; } return gotnextkeyvalue; } }
I am using hadoop-client 2.6.0 and hadoop-mapreduce-client-core 2.7.0. My guess is that the error is triggered by the line job.waitForCompletion(true); in MyFile, but I am not sure!
I solved the problem: it was a version incompatibility between the two Hadoop artifacts. Changing hadoop-client from 2.6.0 to 2.7.0, so that it matches hadoop-mapreduce-client-core, fixed the NoClassDefFoundError (org.apache.hadoop.util.StopWatch only exists from Hadoop 2.7.0 onward).
Comments
Post a Comment