Apache Spark + Ignite：尝试将 Spark DataFrame 保存到 Ignite 时出错

Apache Spark + Ignite：尝试将 Spark DataFrame 保存到 Ignite 时出错。标签：apache-spark, ignite, apache-zeppelin。我试图在 Spark 上使用 Ignite 并运行一些测试，看看它是否对我们的特定用例有用，并能够使用下面的代码启动 Ignite 节点。这段代码在我的本地机器上运行良好，但在 Spark 集群上运行时出错。所有代码都是通过 Zeppelin 执行的。非常感谢您的帮助。

我试图在 Spark 上使用 Ignite 并运行一些测试，看看它是否对我们的特定用例有用，并能够使用以下代码启动 Ignite 节点。这段代码在我的本地机器上运行良好，但在 Spark 集群上运行时出错。所有这些代码都是通过 Zeppelin 执行的。非常感谢您的帮助。

import org.apache.ignite.{Ignite, IgniteCache, Ignition}
import org.apache.ignite.configuration.CacheConfiguration
import org.apache.ignite.spark.IgniteDataFrameSettings._
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

// Location of the Ignite XML configuration file (must exist on the node running this code).
val configPath = "/mnt/yarn/data/example-ignite.xml"
// Name of the cache we want to create or reuse.
val cacheName = "KiCache"

// Start an Ignite node from the XML configuration.
// NOTE(review): parsing the XML config requires the ignite-spring module on the classpath.
val ignite = Ignition.start(configPath)

// Configure the cache and expose it under the PUBLIC SQL schema
// (setSqlSchema returns the same CacheConfiguration instance, so this is equivalent
// to the fluent one-liner).
val ccfg = new CacheConfiguration[Any, Any](cacheName)
ccfg.setSqlSchema("PUBLIC")
//ccfg.setSqlEscapeAll(true)

// Create the cache if it does not exist yet, otherwise return the existing one.
val cache = ignite.getOrCreateCache(ccfg)
之后，在尝试将 Spark DataFrame 保存为 Ignite 表时遇到了错误：

import org.apache.ignite.Ignite
import org.apache.ignite.spark.IgniteContext
import org.apache.ignite.cache.query.SqlFieldsQuery
import org.apache.ignite.spark.IgniteDataFrameSettings._
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
//val igniteContext = new IgniteContext(sc, configPath)
// Persist the DataFrame as an Ignite table named "Test".
// The Ignite data source reads the node configuration from the XML file,
// uses "mtAccountNumber" as the primary key, and creates the table with
// the replicated cache template.
val igniteWriter = df.write
  .format(FORMAT_IGNITE)
  .option(OPTION_CONFIG_FILE, configPath)
  .option(OPTION_TABLE, "Test")
  .option(OPTION_CREATE_TABLE_PRIMARY_KEY_FIELDS, "mtAccountNumber")
  .option(OPTION_CREATE_TABLE_PARAMETERS, "template=replicated")
igniteWriter.save()
错误:

org.apache.ignite.IgniteCheckedException: Failed to create Ignite component (consider adding ignite-spring module to classpath) [component=SPRING, cls=org.apache.ignite.internal.util.spring.IgniteSpringHelperImpl]
  at org.apache.ignite.internal.IgniteComponentType.componentException(IgniteComponentType.java:320)
  at org.apache.ignite.internal.IgniteComponentType.create0(IgniteComponentType.java:296)
  at org.apache.ignite.internal.IgniteComponentType.create(IgniteComponentType.java:207)
  at org.apache.ignite.internal.IgnitionEx.loadConfigurations(IgnitionEx.java:742)
  at org.apache.ignite.internal.IgnitionEx.loadConfigurations(IgnitionEx.java:783)
  at org.apache.ignite.internal.IgnitionEx.loadConfiguration(IgnitionEx.java:823)
  at org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$1.apply(IgniteRelationProvider.scala:216)
  at org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$1.apply(IgniteRelationProvider.scala:213)
  at org.apache.ignite.spark.Once.apply(IgniteContext.scala:222)
  at org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:144)
  at org.apache.ignite.spark.IgniteContext.<init>(IgniteContext.scala:63)
  at org.apache.ignite.spark.IgniteContext$.apply(IgniteContext.scala:192)
  at org.apache.ignite.spark.impl.IgniteRelationProvider.igniteContext(IgniteRelationProvider.scala:236)
  at org.apache.ignite.spark.impl.IgniteRelationProvider.createRelation(IgniteRelationProvider.scala:100)
  at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
  at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
  at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654)
  at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267)
  ... 51 elided
Caused by: java.lang.NoClassDefFoundError: org/springframework/beans/factory/ListableBeanFactory
  at java.lang.Class.forName0(Native Method)
  at java.lang.Class.forName(Class.java:264)
  at org.apache.ignite.internal.IgniteComponentType.create0(IgniteComponentType.java:282)
  ... 81 more
Caused by: java.lang.ClassNotFoundException: org.springframework.beans.factory.ListableBeanFactory
  at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  ... 84 more
org.apache.ignite.IgniteCheckedException：未能创建 Ignite 组件（考虑将 ignite-spring 模块添加到类路径）[component=SPRING, cls=org.apache.ignite.internal.util.spring.IgniteSpringHelperImpl]
位于org.apache.ignite.internal.IgniteComponentType.componentException(IgniteComponentType.java:320)
位于org.apache.ignite.internal.IgniteComponentType.create0(IgniteComponentType.java:296)
位于org.apache.ignite.internal.IgniteComponentType.create(IgniteComponentType.java:207)
位于org.apache.ignite.internal.IgnitionEx.loadConfigurations(IgnitionEx.java:742)
位于org.apache.ignite.internal.IgnitionEx.loadConfigurations(IgnitionEx.java:783)
位于org.apache.ignite.internal.IgnitionEx.loadConfiguration(IgnitionEx.java:823)
在org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$1.apply上(IgniteRelationProvider.scala:216)
在org.apache.ignite.spark.impl.IgniteRelationProvider$$anonfun$configProvider$1$1.apply上(IgniteRelationProvider.scala:213)
在org.apache.ignite.spark.Once.apply上(IgniteContext.scala:222)
位于org.apache.ignite.spark.IgniteContext.ignite(IgniteContext.scala:144)
位于org.apache.ignite.spark.IgniteContext.&lt;init&gt;(IgniteContext.scala:63)
位于org.apache.ignite.spark.IgniteContext$.apply(IgniteContext.scala:192)
位于org.apache.ignite.spark.impl.IgniteRelationProvider.igniteContext(IgniteRelationProvider.scala:236)
位于org.apache.ignite.spark.impl.IgniteRelationProvider.createRelation(IgniteRelationProvider.scala:100)
位于org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
位于org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
位于org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
位于org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
位于org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
位于org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
位于org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
位于org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
位于org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
位于org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
位于org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
位于org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
位于org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
位于org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
位于org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
位于org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654)
位于org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273)
位于org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267)
... 51删去
原因:java.lang.NoClassDefFoundError:org/springframework/beans/factory/ListableBeanFactory
位于java.lang.Class.forName0(本机方法)
位于java.lang.Class.forName(Class.java:264)
位于org.apache.ignite.internal.IgniteComponentType.create0(IgniteComponentType.java:282)
... 81多
原因:java.lang.ClassNotFoundException:org.springframework.beans.factory.ListableBeanFactory
位于java.net.URLClassLoader.findClass(URLClassLoader.java:381)
位于java.lang.ClassLoader.loadClass(ClassLoader.java:424)
位于sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
位于java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 84多

看起来您的类路径中缺少
ignite-spring
模块。以下是 Spark shell 的示例：

谢谢您的回复。ignite-spring 已加载，我可以在日志中看到它。我们通过 Livy 提交 Spark 作业，不确定是否与此有关。—— 嗯，您肯定是因为某种原因缺少了某些 Spring 类。实际上，Ignite 在解析 XML 配置时会使用
ignite-spring
模块，而该模块又依赖于 Spring 的 JAR 包。请确认您添加的是完整的
ignite-spring
Maven 依赖项，而不是只添加了
ignite-spring.jar
本身、却没有添加它所依赖的 Spring JAR 包？