Apache Spark 2.3.0给出错误:提供程序org.apache.spark.ml.source.libsvm.LibSVMFileFormat不是子类型

Apache Spark 2.3.0给出错误:提供程序org.apache.spark.ml.source.libsvm.LibSVMFileFormat不是子类型,apache-spark,Apache Spark,我正在运行Spark 2.3.0,并尝试从MySql(MariaDB)加载数据集,使用mysql-connector-java版本6.0.6 val dataSet:Dataset[Row] = spark .read .format("jdbc") .options(Map("url" -> jdbcUrl ,"user" -> username ,"password" -> password

我正在运行Spark 2.3.0,并尝试从MySql(MariaDB)加载数据集 使用:mysql连接器java:version 6.0.6

  // Load the source table over JDBC; connection settings are supplied one at a
  // time via .option(...) — equivalent to passing a single Map to .options(...).
  val dataSet: Dataset[Row] = spark
    .read
    .format("jdbc")
    .option("url", jdbcUrl)                    // JDBC connection URL (MySQL/MariaDB)
    .option("user", username)
    .option("password", password)
    .option("dbtable", dataSourceTableName)    // table (or subquery) to read
    .option("driver", driver)                  // e.g. com.mysql.cj.jdbc.Driver
    .load()
当我使用spark submit运行时,我得到以下错误

Exception in thread "main" java.util.ServiceConfigurationError: org.apache.spark.sql.sources.DataSourceRegister: Provider org.apache.spark.ml.source.libsvm.LibSVMFileFormat not a subtype
        at java.util.ServiceLoader.fail(ServiceLoader.java:239)
        at java.util.ServiceLoader.access$300(ServiceLoader.java:185)
        at java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:376)
        at java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
        at java.util.ServiceLoader$1.next(ServiceLoader.java:480)
        at scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43)
        at scala.collection.Iterator$class.foreach(Iterator.scala:742)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
        at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
        at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
        at scala.collection.TraversableLike$class.filterImpl(TraversableLike.scala:258)
        at scala.collection.TraversableLike$class.filter(TraversableLike.scala:270)
        at scala.collection.AbstractTraversable.filter(Traversable.scala:104)
        at org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:614)
        at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:190)
        at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
        at analytics.utils.QuantumDataSets$.loadDataSet(QuantumDataSets.scala:93)
        at analytics.utils.QuantumDataSets$.<init>(QuantumDataSets.scala:50)
        at analytics.utils.QuantumDataSets$.<clinit>(QuantumDataSets.scala)
        at analytics.utils.QuantumDataSets.main(QuantumDataSets.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:879)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:197)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:227)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:136)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
线程“main”java.util.ServiceConfigurationError:org.apache.spark.sql.sources.DataSourceRegister:Provider org.apache.spark.ml.source.libsvm.LibSVMFileFormat不是子类型
在java.util.ServiceLoader.fail处(ServiceLoader.java:239)
在java.util.ServiceLoader.access$300(ServiceLoader.java:185)
位于java.util.ServiceLoader$LazyIterator.nextService(ServiceLoader.java:376)
位于java.util.ServiceLoader$LazyIterator.next(ServiceLoader.java:404)
位于java.util.ServiceLoader$1.next(ServiceLoader.java:480)
位于scala.collection.convert.Wrappers$JIteratorWrapper.next(Wrappers.scala:43)
位于scala.collection.Iterator$class.foreach(Iterator.scala:742)
位于scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
位于scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
位于scala.collection.AbstractIterable.foreach(Iterable.scala:54)
位于scala.collection.TraversableLike$class.filterImpl(TraversableLike.scala:258)
位于scala.collection.TraversableLike$class.filter(TraversableLike.scala:270)
位于scala.collection.AbstractTraversable.filter(Traversable.scala:104)
位于org.apache.spark.sql.execution.datasources.DataSource$.lookupDataSource(DataSource.scala:614)
位于org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:190)
位于org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
位于analytics.utils.QuantumDataSets$.loadDataSet(QuantumDataSets.scala:93)
位于analytics.utils.QuantumDataSets$.&lt;init&gt;(QuantumDataSets.scala:50)
位于analytics.utils.QuantumDataSets$.&lt;clinit&gt;(QuantumDataSets.scala)
位于analytics.utils.QuantumDataSets.main(QuantumDataSets.scala)
在sun.reflect.NativeMethodAccessorImpl.invoke0(本机方法)处
位于sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
在sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)中
位于java.lang.reflect.Method.invoke(Method.java:498)
位于org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
位于org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:879)
位于org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:197)
位于org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:227)
位于org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:136)
位于org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

您是否能够解决此问题?我面临着同样的问题。@Bhaskar,是的,我解决了这个问题。问题是由依赖项版本不匹配引起的。查找并使用所有依赖项的兼容版本是一个痛苦的过程;对我有效的版本组合未必对你有效,所以请继续尝试更换版本。