
Apache Spark: unable to run a Spark job with the Cassandra driver


build.gradle

plugins {
    id 'java'
    id 'com.github.johnrengelman.shadow' version '1.2.3'
}

group 'com.hello.aggregation'
version '1.0-SNAPSHOT'

apply plugin: 'java'
apply plugin: 'scala'
apply plugin: 'idea'

sourceCompatibility = 1.8
targetCompatibility = 1.8

configurations {
    provided
}

sourceSets {
    main {
        compileClasspath += configurations.provided
    }
}

repositories {
    mavenCentral()
}

dependencies {
    compile "org.scala-lang:scala-library:$scalaVersion"
    compile "org.scala-lang:scala-reflect:$scalaVersion"
    compile "org.scala-lang:scala-compiler:$scalaVersion"

    compile "org.apache.spark:spark-core_$scalaBase:$sparkVersion"
    compile "org.apache.spark:spark-sql_$scalaBase:$sparkVersion"

    compile "com.datastax.cassandra:cassandra-driver-core:$cassandraDriverVersion"
    compile "com.datastax.spark:spark-cassandra-connector_$scalaBase:$connectorVersion"

    compile "org.slf4j:slf4j-api:$slf4jVersion"

    compile "mysql:mysql-connector-java:$mySqlConnectorVersion"

    testCompile group: 'junit', name: 'junit', version: '4.12'
}

task run(type: JavaExec, dependsOn: classes) {
    main = mainClassFile
    classpath sourceSets.main.runtimeClasspath
    classpath configurations.runtime
}

jar {
    classifier = 'all'
    manifest {
        attributes 'Implementation-Title': title,
                'Implementation-Version': version,
                'Main-Class': mainClassFile
    }
    from sourceSets.main.output // package the compiled main classes
    zip64 true
}

shadowJar {
    classifier = 'shadow'
    append 'reference.conf'
    dependencies {

    }

    zip64 true
}

idea {
    module {
        // IntelliJ does not know about the standard idiom of provided as used in managing
        // uber/shaded jar dependencies. Make it so!
        scopes.PROVIDED.plus += [ configurations.provided ]
    }
}
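
A side note on the build: the custom provided configuration above is wired into the compile classpath, but no dependency is ever assigned to it, so the shadow jar also bundles Spark and the Scala library. A hypothetical adjustment (not in the original build) would move the cluster-supplied artifacts there:

dependencies {
    // Hypothetical: artifacts the cluster already supplies go into 'provided',
    // so shadowJar does not bundle them (including a second scala-library).
    provided "org.scala-lang:scala-library:$scalaVersion"
    provided "org.apache.spark:spark-core_$scalaBase:$sparkVersion"
    provided "org.apache.spark:spark-sql_$scalaBase:$sparkVersion"
}
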
gradle.properties

version=1.0.0

scalaBase=2.11
scalaVersion=2.11.4
slf4jVersion=1.7.25
sparkVersion=1.6.3
connectorVersion=1.6.7
cassandraDriverVersion=3.0.7
mySqlConnectorVersion=5.1.37
Exception:

17/06/19 16:03:54 INFO BlockManagerMaster: Registered BlockManager
Exception in thread "main" java.lang.NoSuchMethodError: scala.runtime.ObjectRef.zero()Lscala/runtime/ObjectRef;
        at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$7.apply(CassandraConnector.scala:150)
        at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$7.apply(CassandraConnector.scala:150)
        at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:32)
        at com.datastax.spark.connector.cql.RefCountedCache.syncAcquire(RefCountedCache.scala:69)
        at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:57)
        at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:80)
        at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:107)
        at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:118)
        at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:330)
        at com.datastax.spark.connector.cql.Schema$.tableFromCassandra(Schema.scala:350)
        at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:50)
        at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:60)
        at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:60)
        at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:137)
        at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:60)
        at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:230)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
        at org.apache.spark.rdd.RDD$$anonfun$distinct$2.apply(RDD.scala:359)
        at org.apache.spark.rdd.RDD$$anonfun$distinct$2.apply(RDD.scala:359)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
        at org.apache.spark.rdd.RDD.distinct(RDD.scala:358)
        at com.achoo.scala.streambright.SimpleDailyRun$.delayedEndpoint$com$achoo$scala$streambright$SimpleDailyRun$1(SimpleDailyRun.scala:30)
        at com.achoo.scala.streambright.SimpleDailyRun$delayedInit$body.apply(SimpleDailyRun.scala:14)
        at scala.Function0$class.apply$mcV$sp(Function0.scala:40)
        at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
        at scala.App$$anonfun$main$1.apply(App.scala:71)
        at scala.App$$anonfun$main$1.apply(App.scala:71)
        at scala.collection.immutable.List.foreach(List.scala:318)
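
A NoSuchMethodError like this usually means the bytecode was compiled against a different Scala binary version than the runtime provides. A minimal diagnostic sketch (the object name is illustrative, not part of the original job) that prints the Scala runtime the driver actually uses:

// VersionCheck.scala -- illustrative diagnostic, not part of the original job.
// scala.runtime.ObjectRef.zero() exists only in Scala 2.11+, so a _2.11 build
// running on a Scala 2.10 runtime fails exactly as in the trace above.
object VersionCheck extends App {
  println("Scala runtime: " + scala.util.Properties.versionString)
}
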
Code:

package com.streambright

import java.sql.DriverManager

import com.mysql.jdbc.Driver
import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}
import com.datastax.spark.connector.toSparkContextFunctions
import org.apache.spark.sql.cassandra.CassandraSQLContext


object SimpleDailyRun extends App {
  // Register the MySQL JDBC driver before any connections are requested.
  DriverManager.registerDriver(new Driver())

  val config = new SparkConf(true).setAppName("Simple Daily Run")
  val sc = SparkContext.getOrCreate(config)

  // Cassandra SQL context bound to the "achoo" keyspace.
  val cc = new CassandraSQLContext(sc)
  cc.setKeyspace("achoo")

  val mySqlUrl = "jdbc:mysql://10.175.190.95/db?useUnicode=yes&characterEncoding=UTF-8&user=user&password=pass"

  // (project_id, keyword) pairs read from MySQL. JdbcRDD calls the connection
  // factory on the executors, so the connection is created inside the closure
  // instead of being captured from the driver.
  val mySqlJdbcRDD = new JdbcRDD(sc, () => DriverManager.getConnection(mySqlUrl),
    "SELECT b.project_id,a.keyword FROM keyword a " +
      "JOIN project_keyword b ON a.id = b.keyword_id LIMIT ?, ?",
    0, 100000000, 1, r => (r.getInt("project_id"), r.getString("keyword")))

  // Distinct (keyword, relativepath) pairs from Cassandra; evaluating this
  // RDD is what triggers the NoSuchMethodError in the trace above.
  val cassandraRDD = sc.cassandraTable("hello", "instagram_keyword_analytic")
    .select("keyword", "relativepath")
    .as((_: String, _: String))
    .distinct()

  mySqlJdbcRDD.saveAsTextFile("/data/MySQL_projectid_keywords_" + System.currentTimeMillis() + ".txt")
  cassandraRDD.saveAsTextFile("/data/Cassandra_keyword_relativepath_" + System.currentTimeMillis() + ".txt")
}
Cassandra version: cassandra21-2.1.15-1 (DataStax); Spark version: 1.6.3.


Does anyone know how to fix this?

The fix was to build against Scala 2.10 instead of 2.11. The NoSuchMethodError on scala.runtime.ObjectRef.zero() signals a Scala binary-version mismatch: ObjectRef.zero() exists only in the Scala 2.11+ runtime, while the stock Spark 1.6.3 distribution ships with Scala 2.10, so classes compiled against 2.11 (the job and the _2.11 connector) fail at runtime. Switching the build to 2.10 artifacts, or deploying a Spark build compiled for 2.11, removes the mismatch.
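
Concretely, that means rebuilding with 2.10 artifact suffixes. A minimal sketch of the adjusted gradle.properties, assuming the cluster keeps the stock Scala 2.10 build of Spark 1.6.3 (2.10.6 is an assumed patch release; any 2.10.x works):

# gradle.properties -- build against Scala 2.10 to match the Spark runtime
version=1.0.0

scalaBase=2.10
scalaVersion=2.10.6
slf4jVersion=1.7.25
sparkVersion=1.6.3
connectorVersion=1.6.7
cassandraDriverVersion=3.0.7
mySqlConnectorVersion=5.1.37

With the existing $scalaBase interpolation in build.gradle, spark-core, spark-sql, and spark-cassandra-connector then resolve to their _2.10 binaries, which the 1.6.x lines of both Spark and the connector publish.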