Apache spark 通过Spark连接配置单元JDBC(空指针异常)

Apache spark 通过Spark连接Hive JDBC(空指针异常),apache-spark,hive,apache-spark-sql,hiveql,Apache Spark,Hive,Apache Spark Sql,Hiveql,我正在尝试通过 Hive JDBC 连接运行 Spark,并遇到空指针异常。同样的命令在我的另一个集群中运行良好。 我在 Spark shell 中运行这个: val jdbcDF = spark.read.format("jdbc").option("url", "jdbc:hive2://bl.com:10000").option("dbtable", "cds.txn_fact").option("user", "user").option("password", "pwd").option("fetchsize","20").load()

我正在尝试通过 Hive JDBC 连接运行 Spark,并遇到空指针异常。同样的命令在我的另一个集群中运行良好。

我在Spark shell中运行这个

val jdbcDF = spark.read.format("jdbc").option("url", "jdbc:hive2://bl.com:10000").option("dbtable", "cds.txn_fact").option("user", "user").option("password", "pwd").option("fetchsize","20").load()
这就是我得到的错误

org.apache.hive.service.cli.HiveSQLException: Error running query: java.lang.NullPointerException
at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:255)
at org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java:241)
at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:254)
at org.apache.hive.jdbc.HiveStatement.executeQuery(HiveStatement.java:392)
at org.apache.hive.jdbc.HivePreparedStatement.executeQuery(HivePreparedStatement.java:109)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:60)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:115)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
... 51 elided
Caused by: org.apache.hive.service.cli.HiveSQLException: Error running query: java.lang.NullPointerException
at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:180)
at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:228)
at org.apache.hive.service.cli.operation.Operation.run(Operation.java:264)
at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:479)
at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:466)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
at org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
at org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
at org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
at com.sun.proxy.$Proxy47.executeStatementAsync(Unknown Source)
at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:315)
at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:509)
at org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1377)
at org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1362)
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.NullPointerException: null
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1227)
at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:146)
... 27 more

org.apache.hive.service.cli.HiveSQLException:运行查询时出错:java.lang.NullPointerException
位于org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:255)
位于org.apache.hive.jdbc.Utils.verifySuccessWithInfo(Utils.java:241)
位于org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:254)
位于org.apache.hive.jdbc.HiveStatement.executeQuery(HiveStatement.java:392)
位于org.apache.hive.jdbc.HivePreparedStatement.executeQuery(HivePreparedStatement.java:109)
位于org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:60)
位于org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.&lt;init&gt;(JDBCRelation.scala:115)
位于org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:52)
位于org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340)
位于org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
位于org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
位于org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
... 省略 51 帧
原因:org.apache.hive.service.cli.HiveSQLException:运行查询时出错:java.lang.NullPointerException
位于org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:180)
位于org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:228)
位于org.apache.hive.service.cli.operation.Operation.run(Operation.java:264)
位于org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:479)
位于org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:466)
在sun.reflect.NativeMethodAccessorImpl.invoke0(本机方法)处
位于sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
在sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)中
位于java.lang.reflect.Method.invoke(Method.java:498)
位于org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:78)
位于org.apache.hive.service.cli.session.HiveSessionProxy.access$000(HiveSessionProxy.java:36)
位于org.apache.hive.service.cli.session.HiveSessionProxy$1.run(HiveSessionProxy.java:63)
位于java.security.AccessController.doPrivileged(本机方法)
位于javax.security.auth.Subject.doAs(Subject.java:422)
位于org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1869)
位于org.apache.hive.service.cli.session.HiveSessionProxy.invoke(HiveSessionProxy.java:59)
位于com.sun.proxy.$Proxy47.executeStatementAsync(未知来源)
位于org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:315)
位于org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:509)
位于org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1377)
位于org.apache.hive.service.cli.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1362)
位于org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
位于org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
位于org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:56)
位于org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
位于java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
位于java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
位于java.lang.Thread.run(Thread.java:748)
原因:java.lang.NullPointerException:null
位于org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1237)
位于org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1227)
位于org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:146)
... 还有27个

我使用的是 HDP2。在 HDP2 中,Hive 不支持来自 Spark 的并发 JDBC 调用,因此会抛出空指针异常。

这个问题在 HDP3 中已经修复,现在我可以毫无错误地运行上述代码。