在tomcat中运行java springmvc应用程序(调用spark sql api),java.sql.SQLException:未找到适合jdbc的驱动程序:mysql:
我曾尝试使用函数setjars[],spark.driver.extraLibraryPath,spark.executor.extraLibraryPath,它也没有函数在tomcat中运行java springmvc应用程序(调用spark sql api),java.sql.SQLException:未找到适合jdbc的驱动程序:mysql:,tomcat,apache-spark,bigdata,Tomcat,Apache Spark,Bigdata,我曾尝试使用函数setjars[],spark.driver.extraLibraryPath,spark.executor.extraLibraryPath,它也没有函数 @ResponseBody public String index() throws Exception { SparkConf conf = new SparkConf().setAppName("Simple Application") .setMaster("local");
@ResponseBody
public String index() throws Exception {
SparkConf conf = new SparkConf().setAppName("Simple Application")
.setMaster("local");
JavaSparkContext sc = new JavaSparkContext(conf);
sc.setLogLevel("ERROR");
SQLContext sqlContext = new SQLContext(sc);
Map<String, String> options = new HashMap<String, String>();
options.put("url",
"jdbc:mysql://127.0.0.1:3306/msp?user=root&password=root");
options.put("dbtable", "user_info");
DataFrame df = sqlContext.read().format("jdbc").options(options).load();
System.out.println(df.count());
return "index";
}
@ResponseBody
公共字符串索引()引发异常{
SparkConf conf=new SparkConf().setAppName(“简单应用程序”)
.setMaster(“本地”);
JavaSparkContext sc=新的JavaSparkContext(conf);
sc.setLogLevel(“错误”);
SQLContext SQLContext=新的SQLContext(sc);
Map options=newhashmap();
options.put(“url”,
“jdbc:mysql://127.0.0.1:3306/msp?user=root&password=root");
看跌期权(“dbtable”、“user_info”);
DataFrame df=sqlContext.read().format(“jdbc”).options(options).load();
System.out.println(df.count());
返回“索引”;
}
相同的代码,右键单击以作为java应用程序运行,就可以了!将spark-servlet.jar添加到类加载器中,因为spark-assembly-1.6.0-hadoop2.6.0.jar和tomcat servlet冲突,我将servlet从spark-assembly-1.6.0-hadoop2.6.0.jar中删除
public static void main(String[] args) throws Exception {
File file = new File("/home/miaosipeng/ext_jars/spark-servlet.jar");
Method method = URLClassLoader.class.getDeclaredMethod("addURL",
URL.class);
boolean accessible = method.isAccessible();
try {
if (accessible == false) {
method.setAccessible(true);
}
URLClassLoader classLoader = (URLClassLoader) ClassLoader
.getSystemClassLoader();
URL url = file.toURI().toURL();
try {
method.invoke(classLoader, url);
System.out.println("读取jar文件[name={}]" + file.getName());
} catch (Exception e) {
System.out.println("读取jar文件[name={}]失败" + file.getName());
}
} finally {
method.setAccessible(accessible);
}
SparkConf conf = new SparkConf()
.setAppName("Simple Application")
.setJars(
new String[] { "/home/miaosipeng/ext_jars/spark-servlet.jar" })
.setMaster("local");
JavaSparkContext sc = new JavaSparkContext(conf);
sc.setLogLevel("ERROR");
SQLContext sqlContext = new SQLContext(sc);
Map<String, String> options = new HashMap<String, String>();
options.put("url",
"jdbc:mysql://127.0.0.1:3306/msp?user=root&password=root");
options.put("dbtable", "user_info");
DataFrame df = sqlContext.read().format("jdbc").options(options).load();
System.out.println(df.count());
}
publicstaticvoidmain(字符串[]args)引发异常{
File File=new文件(“/home/miaosipeng/ext_jars/spark servlet.jar”);
方法Method=URLClassLoader.class.getDeclaredMethod(“addURL”,
URL.class);
boolean accessible=method.isAccessible();
试一试{
if(可访问==false){
方法setAccessible(true);
}
URLClassLoader类加载器=(URLClassLoader)类加载器
.getSystemClassLoader();
URL=file.toURI().toURL();
试一试{
调用(类加载器,url);
System.out.println(“读取罐子文件[name={}]“+file.getName());
}捕获(例外e){
System.out.println(“读取罐子文件[名称={}]失败" + getName());
}
}最后{
方法:setAccessible(可访问);
}
SparkConf conf=新的SparkConf()
.setAppName(“简单应用程序”)
塞贾尔斯先生(
新字符串[]{“/home/miaosipeng/ext_jars/spark servlet.jar”})
.setMaster(“本地”);
JavaSparkContext sc=新的JavaSparkContext(conf);
sc.setLogLevel(“错误”);
SQLContext SQLContext=新的SQLContext(sc);
Map options=newhashmap();
options.put(“url”,
“jdbc:mysql://127.0.0.1:3306/msp?user=root&password=root");
看跌期权(“dbtable”、“user_info”);
DataFrame df=sqlContext.read().format(“jdbc”).options(options).load();
System.out.println(df.count());
}