java.lang.ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to org.apache.hadoop.hbase.io.ImmutableBytesWritable
I'm new to Hadoop. I use Hadoop 2.3.0 and HBase 0.98.3. I'm trying to extract data from a text file and write it to an HBase table using MapReduce. Although I set the outputKeyClass and outputValueClass of the job, I get a ClassCastException. Can you help me?
Here's the code:
    public static void main(String[] args) {
        Configuration config = HBaseConfiguration.create();
        Job job;
        try {
            job = new Job(config, "LogBulkLoader");
            job.setJarByClass(Main.class);
            job.setMapperClass(LogMapper.class);
            job.setOutputFormatClass(TableOutputFormat.class);
            job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, "fatih");
            job.setOutputKeyClass(ImmutableBytesWritable.class);
            job.setOutputValueClass(Put.class);
            FileInputFormat.addInputPath(job, new Path(userActionsTestFile));
            job.setNumReduceTasks(0);
            job.waitForCompletion(true);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    public static class LogMapper extends TableMapper<ImmutableBytesWritable, Put> {

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
        }

        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            try {
                String[] l = value.toString().split(",");
                String[] t = l[4].split(" ");
                String[] date = t[0].split("-");
                String[] time = t[1].split(":");
                GregorianCalendar gc = new GregorianCalendar(
                        Integer.parseInt(date[0]), Integer.parseInt(date[1]), Integer.parseInt(date[2]),
                        Integer.parseInt(time[0]), Integer.parseInt(time[1]), Integer.parseInt(time[2]));
                Put put = new Put(Bytes.toBytes(l[0]));
                put.add(Bytes.toBytes("song"), Bytes.toBytes(l[1]), gc.getTimeInMillis(), Bytes.toBytes(l[6]));
                put.add(Bytes.toBytes("album"), Bytes.toBytes(l[1]), gc.getTimeInMillis(), Bytes.toBytes(l[5]));
                put.add(Bytes.toBytes("album"), Bytes.toBytes(l[2]), gc.getTimeInMillis(), Bytes.toBytes(l[5]));
                put.add(Bytes.toBytes("singer"), Bytes.toBytes(l[1]), gc.getTimeInMillis(), Bytes.toBytes(l[5]));
                put.add(Bytes.toBytes("singer"), Bytes.toBytes(l[2]), gc.getTimeInMillis(), Bytes.toBytes(l[5]));
                put.add(Bytes.toBytes("singer"), Bytes.toBytes(l[3]), gc.getTimeInMillis(), Bytes.toBytes(l[5]));
                context.write(new ImmutableBytesWritable(l[0].getBytes()), put);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

I get the following exception:
    java.lang.Exception: java.lang.ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to org.apache.hadoop.hbase.io.ImmutableBytesWritable
        at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:403)
    Caused by: java.lang.ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to org.apache.hadoop.hbase.io.ImmutableBytesWritable
        at com.argedor.module1.Main$LogMapper.map(Main.java:1)
        at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:145)
        at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:763)
        at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
        at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:235)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:744)
Add the following to the job configuration:

    job.setMapOutputKeyClass(ImmutableBytesWritable.class);
    job.setMapOutputValueClass(Put.class);
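Note also why the cast fails where it does: LogMapper extends TableMapper, whose map() expects an ImmutableBytesWritable key and a Result value, but the job reads a plain text file through FileInputFormat, which hands the mapper LongWritable byte offsets and Text lines. Here is a minimal sketch of a mapper whose input signature matches text input (an illustration, assuming the same comma-separated layout as in the question; only the "song" column is shown for brevity):

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    // Text input delivers (byte offset, line) pairs, so the input types are
    // LongWritable/Text rather than TableMapper's ImmutableBytesWritable/Result.
    public class LogMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] l = value.toString().split(",");
            // Build the Put from the parsed fields as in the question;
            // the remaining columns would be added the same way.
            Put put = new Put(Bytes.toBytes(l[0]));
            put.add(Bytes.toBytes("song"), Bytes.toBytes(l[1]), Bytes.toBytes(l[6]));
            context.write(new ImmutableBytesWritable(Bytes.toBytes(l[0])), put);
        }
    }

With the mapper declared this way, and the map output classes set as above, TableOutputFormat receives the ImmutableBytesWritable/Put pairs it expects.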
Tags: java, hadoop, mapreduce, hbase