
Scala: unresolved dependency problem when compiling a Spark project with sbt


I am trying to compile a very simple Spark project with sbt 0.13.8. Its only functionality lives in the following file:

Test.scala

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf

object SimpleApp {
  def main(args: Array[String]) {
    val logFile = "YOUR_SPARK_HOME/README.md" // Should be some file on your system
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    val logData = sc.textFile(logFile, 2).cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    val numBs = logData.filter(line => line.contains("b")).count()
    println("Lines with a: %s, Lines with b: %s".format(numAs, numBs))
  }
}
The build.sbt file in the project root looks like this:

name := "Test"

version := "1.0"

scalaVersion := "2.11.7"

libraryDependencies += "org.apache.spark" %% "spark-core" % "1.5.2"

resolvers ++= Seq(
  "Apache Repository" at "https://repository.apache.org/content/repositories/releases/",
  "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/",
  Resolver.sonatypeRepo("public")
)
Running sbt compile returns the following error:

[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn]  ::          UNRESOLVED DEPENDENCIES         ::
[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn]  :: oro#oro;2.0.8: configuration not found in oro#oro;2.0.8: 'master(compile)'. Missing configuration: 'compile'. It was required from org.apache.spark#spark-core_2.11;1.5.2 compile
[warn]  ::::::::::::::::::::::::::::::::::::::::::::::
[warn] 
[warn]  Note: Unresolved dependencies path:
[warn]      oro:oro:2.0.8
[warn]        +- org.apache.spark:spark-core_2.11:1.5.2 (/home/osboxes/Documents/bookings/Test/build.sbt#L7-8)
[warn]        +- default:test_2.11:1.0
sbt.ResolveException: unresolved dependency: oro#oro;2.0.8: configuration not found in oro#oro;2.0.8: 'master(compile)'. Missing configuration: 'compile'. It was required from org.apache.spark#spark-core_2.11;1.5.2 compile
    at sbt.IvyActions$.sbt$IvyActions$$resolve(IvyActions.scala:291)
    at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:188)
    at sbt.IvyActions$$anonfun$updateEither$1.apply(IvyActions.scala:165)
    at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:155)
    at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:155)
    at sbt.IvySbt$$anonfun$withIvy$1.apply(Ivy.scala:132)
    at sbt.IvySbt.sbt$IvySbt$$action$1(Ivy.scala:57)
    at sbt.IvySbt$$anon$4.call(Ivy.scala:65)
    at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:93)
    at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:78)
    at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:97)
    at xsbt.boot.Using$.withResource(Using.scala:10)
    at xsbt.boot.Using$.apply(Using.scala:9)
    at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:58)
    at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:48)
    at xsbt.boot.Locks$.apply0(Locks.scala:31)
    at xsbt.boot.Locks$.apply(Locks.scala:28)
    at sbt.IvySbt.withDefaultLogger(Ivy.scala:65)
    at sbt.IvySbt.withIvy(Ivy.scala:127)
    at sbt.IvySbt.withIvy(Ivy.scala:124)
    at sbt.IvySbt$Module.withModule(Ivy.scala:155)
    at sbt.IvyActions$.updateEither(IvyActions.scala:165)
    at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1369)
    at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1365)
    at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$87.apply(Defaults.scala:1399)
    at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$87.apply(Defaults.scala:1397)
    at sbt.Tracked$$anonfun$lastOutput$1.apply(Tracked.scala:37)
    at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1402)
    at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1396)
    at sbt.Tracked$$anonfun$inputChanged$1.apply(Tracked.scala:60)
    at sbt.Classpaths$.cachedUpdate(Defaults.scala:1419)
    at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1348)
    at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1310)
    at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
    at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
    at sbt.std.Transform$$anon$4.work(System.scala:63)
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:226)
    at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
    at sbt.Execute.work(Execute.scala:235)
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:226)
    at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
    at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
[error] (*:update) sbt.ResolveException: unresolved dependency: oro#oro;2.0.8: configuration not found in oro#oro;2.0.8: 'master(compile)'. Missing configuration: 'compile'. It was required from org.apache.spark#spark-core_2.11;1.5.2 compile
How can I solve this dependency problem?

Edit

I followed @mark91's advice:

  • changed the Scala version to 2.10.5
  • changed the Spark dependency to
    libraryDependencies += "org.apache.spark" % "spark-core_2.10" % "1.2.0-cdh5.3.2" % "provided"
    (see the sketch after this list)
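For reference, here is a sketch of the full build.sbt after those two changes (I kept the Cloudera resolver, since the cdh-suffixed artifact is only published in the Cloudera repository):

name := "Test"

version := "1.0"

scalaVersion := "2.10.5"

libraryDependencies += "org.apache.spark" % "spark-core_2.10" % "1.2.0-cdh5.3.2" % "provided"

resolvers += "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/"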
However, I still get an unresolved dependency on org.scala-lang#scala-library;2.10.4:

 [error] sbt.ResolveException: unresolved dependency: org.scala-lang#scala-library;2.10.4: configuration not found in org.scala-lang#scala-library;2.10.4: 'master(compile)'. Missing configuration: 'compile'. It was required from org.apache.spark#spark-core_2.10;1.2.0-cdh5.3.2 compile

Do you have any idea why I get this problem?

Your problem is that Spark is written in Scala 2.10, so you should use version 2.10 of Scala instead of 2.11.

For example:

scalaVersion := "2.10.5"

libraryDependencies += "org.apache.spark" % "spark-core_2.10" % "1.2.0-cdh5.3.2" % "provided"


resolvers += "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/"

I had a similar dependency problem and solved it by reloading the plugins and updating the dependencies. I think your dependency problem is caused by the Ivy cache: normally, if the dependency management configuration has not changed since the last successful resolution and the retrieved files are still present, sbt does not ask Ivy to perform resolution again.

Try running:

sbt reload plugins
sbt update
sbt reload

If that does not work, then the cause is the same one @mgaido points out: using a different version of Scala leads to this problem.
But sometimes, even after we later switch to the correct Scala version, the cache may still hold the old artifacts. In that case, we can delete the ~/.ivy2 folder.
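A minimal way to do that from a shell (blunt but safe; the next sbt update simply re-downloads everything):

# remove the Ivy cache so sbt re-resolves all artifacts from the remote repositories
# (removing only ~/.ivy2/cache is a gentler alternative that keeps locally published artifacts)
rm -rf ~/.ivy2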

When I used Scala 2.10 (2.10.4, to be exact), I got some errors as well, with two unresolved dependencies:
org.scala-lang#scala-library;2.10.4
org.scala-lang#scala-reflect;2.10.4
There is still a problem with your new solution:
[error] sbt.ResolveException: unresolved dependency: org.scala-lang#scala-library;2.10.4: configuration not found in org.scala-lang#scala-library;2.10.4: 'master(compile)'. Missing configuration: 'compile'. It was required from org.apache.spark#spark-core_2.10;1.2.0-cdh5.3.2 compile
That seems strange to me… I am using that very file and it compiles successfully… The documentation for the 1.x series can be found here: