Java - NoSuchMethodError: JobConf.getCredentials()
I am trying to import a table from MySQL into Hive. I tested the program by making a connection to MySQL, and the table info was wrapped into a jar. After that I got a NoSuchMethodError. MySQL is running on Windows 8, and Hadoop and Hive are running in a VirtualBox Hortonworks Sandbox.
NoSuchMethodError: org.apache.hadoop.mapred.JobConf.getCredentials()
error:
INFO: Initializing JVM Metrics with processName=JobTracker, sessionId=
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.hadoop.mapred.JobConf.getCredentials()Lorg/apache/hadoop/security/Credentials;
    at org.apache.sqoop.mapreduce.db.DBConfiguration.setPassword(DBConfiguration.java:158)
    at org.apache.sqoop.mapreduce.db.DBConfiguration.configureDB(DBConfiguration.java:144)
    at org.apache.sqoop.mapreduce.DataDrivenImportJob.configureInputFormat(DataDrivenImportJob.java:171)
    at org.apache.sqoop.mapreduce.ImportJobBase.runImport(ImportJobBase.java:231)
    at org.apache.sqoop.manager.SqlManager.importTable(SqlManager.java:600)
    at org.apache.sqoop.tool.ImportTool.importTable(ImportTool.java:413)
    at org.apache.sqoop.tool.ImportTool.run(ImportTool.java:502)
    at SqoopJavaInterface.importToHive(SqoopJavaInterface.java:66)
    at SqoopJavaInterface.main(SqoopJavaInterface.java:32)
This is the source code:
import java.io.ioexception; import org.apache.hadoop.conf.configuration; import org.apache.hadoop.fs.filesystem; import org.apache.hadoop.fs.path; import org.apache.sqoop.tool.importtool; import com.cloudera.sqoop.sqoopoptions; public class sqoopjavainterface { private static final string job_name = "sqoop hive job"; private static final string mapreduce_job = "hive map cut down job"; private static final string dburl = "jdbc:mysql://localhost:3316/db"; private static final string driver = "com.mysql.jdbc.driver"; private static final string username = "user"; private static final string password = "password"; private static final string hadoop_home = "/home/master/apps/hadoop-1.0.4"; private static final string jar_output_dir = "/home/master/data"; private static final string hive_home = "/home/master/apps/hive-0.10.0"; private static final string hive_dir = "/user/hive/warehouse/"; private static final string warehouse_dir = "hdfs://localhost:9000/user/hive/warehouse/student"; private static final string success = "success !!!"; private static final string fail = "fail !!!"; /** * @param table * @throws ioexception */ public static void main(string args[]) throws ioexception{ importtohive("some_table"); } public static void importtohive(string table) throws ioexception { system.out.println("sqoopoptions loading ....."); configuration config = new configuration(); // hive connection parameters config.addresource(new path(hadoop_home+"/conf/core-site.xml")); config.addresource(new path(hadoop_home+"/conf/hdfs-site.xml")); config.addresource(new path(hive_home+"/conf/hive-site.xml")); filesystem dfs =filesystem.get(config); /* mysql connection parameters */ sqoopoptions options = new sqoopoptions(config); options.setconnectstring(dburl); options.settablename(table); options.setdriverclassname(driver); options.setusername(username); options.setpassword(password); options.sethadoopmapredhome(hadoop_home); options.sethivehome(hive_home); options.sethiveimport(true); 
options.sethivetablename(table); options.setoverwritehivetable(true); options.setfailifhivetableexists(false); options.setfieldsterminatedby(','); options.setoverwritehivetable(true); options.setdirectmode(true); options.setnummappers(1); // no. of mappers launched job options.setwarehousedir(warehouse_dir); options.setjobname(job_name); options.setmapreducejobname(mapreduce_job); options.settablename(table); options.setjaroutputdir(jar_output_dir); system.out.println("import tool running ...."); importtool = new importtool(); int retval = it.run(options); if (retval == 0) { system.out.println(success); } else { system.out.println(fail); } } }
Fortunately, I found the answer to my question myself. I had to include the jar file hadoop-0.20.2+737-core.jar in the build path instead of hadoop-0.20.2-core.jar. It looks like a modified version of the same file, containing a JobConf class that includes the getCredentials() method.
The problem is solved, but I am still confused about the two versions. Does anybody know what the actual difference between them is?
java mysql hadoop hive sqoop
No comments:
Post a Comment