Scala Spark 作业测试异常

Scala Spark 作业测试异常,scala,unit-testing,apache-spark,Scala,Unit Testing,Apache Spark,我是 Spark 新手。我试图为我的 Spark 作业编写一些单元测试,但当我试图在测试中创建一个新的 StreamingContext 时,在 ssc = new StreamingContext(conf, batchDuration) 这一行抛出了异常。代码如下所示: @RunWith(classOf[JUnitRunner]) class SparkJobTest extends FlatSpec with MockitoSugar with BeforeAndAfter { private val master = "

我是 Spark 新手。我试图为我的 Spark 作业编写一些单元测试,但当我试图在测试中创建一个新的 StreamingContext 时,在 ssc = new StreamingContext(conf, batchDuration) 这一行抛出了异常。代码如下所示:

@RunWith(classOf[JUnitRunner])
class SparkJobTest extends FlatSpec with MockitoSugar with BeforeAndAfter {
  private val master = "local[2]"
  private val appName = "TestingAppName"
  private val batchDuration = Seconds(1)
  private val loggerMock = mock[Logger]
  private var ssc: StreamingContext = _
  private var sc: SparkContext = _

  before {
    val conf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)
    ssc = new StreamingContext(conf, batchDuration)
  }

  after {
    if (ssc != null) {
      ssc.stop()
    }
  }


  "This" should "always work" in {
    assert(1==1)
  }
我得到的例外:

An exception or error caused a run to abort: Bad return type
Exception Details:
  Location:
    org/apache/spark/streaming/StreamingContext.fileStream(Ljava/lang/String;Lscala/Function1;ZLscala/reflect/ClassTag;Lscala/reflect/ClassTag;Lscala/reflect/ClassTag;)Lorg/apache/spark/streaming/dstream/InputDStream; @23: areturn
  Reason:
    Type 'org/apache/spark/streaming/dstream/FileInputDStream' (current frame, stack[0]) is not assignable to 'org/apache/spark/streaming/dstream/InputDStream' (from method signature)
  Current Frame:
    bci: @23
    flags: { }
    locals: { 'org/apache/spark/streaming/StreamingContext', 'java/lang/String', 'scala/Function1', integer, 'scala/reflect/ClassTag', 'scala/reflect/ClassTag', 'scala/reflect/ClassTag' }
    stack: { 'org/apache/spark/streaming/dstream/FileInputDStream' }
  Bytecode:
    0000000: bb01 9959 2a2b 2c1d b201 9eb6 01a6 1904
    0000010: 1905 1906 b701 a9b0 
java.lang.VerifyError: Bad return type
Exception Details:
  Location:
    org/apache/spark/streaming/StreamingContext.fileStream(Ljava/lang/String;Lscala/Function1;ZLscala/reflect/ClassTag;Lscala/reflect/ClassTag;Lscala/reflect/ClassTag;)Lorg/apache/spark/streaming/dstream/InputDStream; @23: areturn
  Reason:
    Type 'org/apache/spark/streaming/dstream/FileInputDStream' (current frame, stack[0]) is not assignable to 'org/apache/spark/streaming/dstream/InputDStream' (from method signature)
  Current Frame:
    bci: @23
    flags: { }
    locals: { 'org/apache/spark/streaming/StreamingContext', 'java/lang/String', 'scala/Function1', integer, 'scala/reflect/ClassTag', 'scala/reflect/ClassTag', 'scala/reflect/ClassTag' }
    stack: { 'org/apache/spark/streaming/dstream/FileInputDStream' }
  Bytecode:
    0000000: bb01 9959 2a2b 2c1d b201 9eb6 01a6 1904
    0000010: 1905 1906 b701 a9b0                    

    at com.appnexus.data.spark.job.SparkJobTest$$anonfun$1.apply$mcV$sp(SparkJobTest.scala:25)
    at com.appnexus.data.spark.job.SparkJobTest$$anonfun$1.apply(SparkJobTest.scala:21)
    at com.appnexus.data.spark.job.SparkJobTest$$anonfun$1.apply(SparkJobTest.scala:21)
    at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:195)
    at com.appnexus.data.spark.job.SparkJobTest.runTest(SparkJobTest.scala:13)
    at org.scalatest.FlatSpecLike$$anonfun$runTests$1.apply(FlatSpecLike.scala:1714)
    at org.scalatest.FlatSpecLike$$anonfun$runTests$1.apply(FlatSpecLike.scala:1714)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:390)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:427)
    at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
    at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
    at org.scalatest.FlatSpecLike$class.runTests(FlatSpecLike.scala:1714)
    at org.scalatest.FlatSpec.runTests(FlatSpec.scala:1683)
    at org.scalatest.Suite$class.run(Suite.scala:1424)
    at org.scalatest.FlatSpec.org$scalatest$FlatSpecLike$$super$run(FlatSpec.scala:1683)
    at org.scalatest.FlatSpecLike$$anonfun$run$1.apply(FlatSpecLike.scala:1760)
    at org.scalatest.FlatSpecLike$$anonfun$run$1.apply(FlatSpecLike.scala:1760)
    at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
    at org.scalatest.FlatSpecLike$class.run(FlatSpecLike.scala:1760)
    at com.appnexus.data.spark.job.SparkJobTest.org$scalatest$BeforeAndAfter$$super$run(SparkJobTest.scala:13)
    at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
    at com.appnexus.data.spark.job.SparkJobTest.run(SparkJobTest.scala:13)
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
    at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
    at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
    at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
    at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
    at org.scalatest.tools.Runner$.run(Runner.scala:883)
    at org.scalatest.tools.Runner.run(Runner.scala)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138)
    at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)


Process finished with exit code 0
我在谷歌上搜索了很多,有人说这是版本问题,我尝试了 Spark 1.2、1.3、1.4,但没有成功。我在 Mac OS X 上运行测试,使用 Java 1.8。

编辑:pom中的Spark dep

<!-- Quoted pom fragment from the question (properties + dependencies). -->
<properties>

    <!-- maven specific properties -->
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <!-- Scala binary version is 2.10 — all _2.10 artifacts below match this. -->
    <scala.version>2.10.4</scala.version>
    <kafka.version>0.8.2.0</kafka.version>
    <spark.version>1.3.1</spark.version>
    <hadoop.version>2.6.0-cdh5.4.0</hadoop.version>
    <samza.version>0.8.x-hadoop${hadoop.version}-kafka${kafka.version}</samza.version>
    <assembly.configuration>src/main/assembly/src.xml</assembly.configuration>
    <data.deps.scope>compile</data.deps.scope>
<spray.version>1.3.1</spray.version>
</properties>

<dependencies>
    <dependency>
        <groupId>org.scala-lang</groupId>
        <artifactId>scala-library</artifactId>
        <version>${scala.version}</version>
    </dependency>
    <dependency>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest_2.10</artifactId>
        <version>2.2.1</version>
        <scope>test</scope>
    </dependency>
    <!-- NOTE(review): _2.11 suffix conflicts with scala.version 2.10.4 and every
         other _2.10 artifact in this pom. Mixing Scala binary versions on one
         classpath is a likely cause of the java.lang.VerifyError reported above;
         this should presumably be scalamock-scalatest-support_2.10 — verify. -->
    <dependency>
        <groupId>org.scalamock</groupId>
        <artifactId>scalamock-scalatest-support_2.11</artifactId>
        <version>3.2</version>
        <scope>test</scope>
    </dependency>

    <!-- spark dependencies -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>${spark.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.10</artifactId>
        <version>${spark.version}</version>
        <scope>provided</scope>
    </dependency>
    <!-- NOTE(review): unlike its siblings, this artifact has no <scope>provided</scope>
         — confirm whether the compile scope is intentional. -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming-kafka_2.10</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.10</artifactId>
        <version>${spark.version}</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-hive_2.10</artifactId>
        <version>${spark.version}</version>
        <scope>provided</scope>
    </dependency>
</dependencies>

UTF-8
2.10.4
0.8.2.0
1.3.1
2.6.0-cdh5.4.0
0.8.x-hadoop${hadoop.version}-kafka${kafka.version}
src/main/assembly/src.xml
compile
1.3.1
org.scala-lang
scala-library
${scala.version}
org.scalatest
scalatest_2.10
2.2.1
test
org.scalamock
scalamock-scalatest-support_2.11
3.2
test
org.apache.spark
spark-core_2.10
${spark.version}
provided
org.apache.spark
spark-streaming_2.10
${spark.version}
provided
org.apache.spark
spark-streaming-kafka_2.10
${spark.version}
org.apache.spark
spark-sql_2.10
${spark.version}
provided
org.apache.spark
spark-hive_2.10
${spark.version}
provided

你能展示你的 build.sbt 吗?——这是一个 Maven 项目,我已经在 pom 中添加了 Spark 相关的依赖。