
Spark Solr fat jar assembly issue


I am using the spark-solr client found here.

I am packaging my fat jar with the sbt-assembly plugin (), following the instructions in this post.

My build.sbt file is:

name := "SolrSpark"

version := "1.0"

scalaVersion := "2.10.4"


libraryDependencies ++= Seq(
  "com.lucidworks.spark" % "spark-solr" % "2.0.0"
)

mergeStrategy in assembly <<= (mergeStrategy in assembly) { (old) => {
  case PathList("META-INF", xs@_*) => MergeStrategy.discard
  case x => MergeStrategy.first
}
}
The jar builds without errors, but when I try to run it with

java -jar SolrSpark-assembly-1.0.jar 
I get the following error:

 [main] ERROR SparkContext  - Error initializing SparkContext.
com.typesafe.config.ConfigException$Missing: No configuration setting found for key 'akka.version'
    at com.typesafe.config.impl.SimpleConfig.findKey(SimpleConfig.java:124)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:145)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:151)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:159)
    at com.typesafe.config.impl.SimpleConfig.find(SimpleConfig.java:164)
    at com.typesafe.config.impl.SimpleConfig.getString(SimpleConfig.java:206)
    at akka.actor.ActorSystem$Settings.<init>(ActorSystem.scala:169)
    at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:505)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:142)
    at akka.actor.ActorSystem$.apply(ActorSystem.scala:119)
    at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
    at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
    at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1988)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
    at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1979)
    at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:266)
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:457)
    at com.xendo.solr.SolrSparkWordCount$delayedInit$body.apply(SolrSparkWordCount.scala:15)
    at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
    at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
    at scala.App$$anonfun$main$1.apply(App.scala:71)
    at scala.App$$anonfun$main$1.apply(App.scala:71)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:32)
    at scala.App$class.main(App.scala:71)
    at com.xendo.solr.SolrSparkWordCount$.main(SolrSparkWordCount.scala:7)
    at com.xendo.solr.SolrSparkWordCount.main(SolrSparkWordCount.scala)

Does anyone know what is causing this error?

Launch your application with the spark-submit script, since it takes care of setting up the classpath with Spark and its required dependencies.

Below are the launch command arguments for Spark 1.6.x; see the Spark documentation for more details:

./bin/spark-submit \
  --class <main-class> \
  --master <master-url> \
  --deploy-mode <deploy-mode> \
  --conf <key>=<value> \
  ... # other options
  <application-jar> \
  [application-arguments]
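
As a concrete illustration (not part of the original answer: the main class is taken from the stack trace above, and local[*] is only an assumed master URL), the assembly built here could be launched like this:

./bin/spark-submit \
  --class com.xendo.solr.SolrSparkWordCount \
  --master local[*] \
  SolrSpark-assembly-1.0.jar

When launching through spark-submit, the Spark dependency is usually marked as "provided" in build.sbt so that it is not bundled into the assembly jar.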

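A side note that is not part of the original answer: when a Spark fat jar must be run directly with java -jar, the "No configuration setting found for key 'akka.version'" error usually means the Typesafe reference.conf files were clobbered during assembly, because MergeStrategy.first keeps only one of them. A hedged sketch of a merge strategy that concatenates those files instead, written in the same old sbt-assembly syntax as the build.sbt above:

mergeStrategy in assembly <<= (mergeStrategy in assembly) { (old) => {
  // keep every library's reference.conf intact (Akka's copy carries akka.version)
  case "reference.conf" => MergeStrategy.concat
  case PathList("META-INF", xs@_*) => MergeStrategy.discard
  case x => MergeStrategy.first
}
}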