Apache Spark WARN RandomBlockReplicationPolicy: Expecting 1 replicas with only 0 peer/s

Tags: apache-spark, twitter, apache-kafka, warnings

I am trying to combine Kafka and Spark Streaming with Twitter.

I am getting the warning messages below and would like to find a solution. If they were error messages I think it would be easier to track down, but I have not been able to find anything that addresses these warnings:

20/10/14 09:09:27 WARN RandomBlockReplicationPolicy: Expecting 1 replicas with only 0 peer/s.
20/10/14 09:09:27 WARN BlockManager: Block input-0-1602634166800 replicated to only 0 peer(s) instead of 1 peers
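For context on what the first warning appears to mean: the BlockManager prints it when a received block is stored with a replicated storage level but there is no second executor to copy it to, which is always the case with a single local worker. The spark-streaming-twitter receiver seems to default to MEMORY_AND_DISK_SER_2, where the trailing _2 stands for replication factor 2; this can be seen from the StorageLevel constants themselves:

import org.apache.spark.storage.StorageLevel

// "_2" storage levels keep one extra copy on a peer executor; with a single
// local worker there is no peer, hence "only 0 peer/s".
println(StorageLevel.MEMORY_AND_DISK_SER_2.replication)  // 2
println(StorageLevel.MEMORY_AND_DISK_SER.replication)    // 1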
I used the following code in Apache Spark (spark-shell):

import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.Seconds
import twitter4j.conf.ConfigurationBuilder
import twitter4j.auth.OAuthAuthorization
import twitter4j.Status
import org.apache.spark.streaming.twitter.TwitterUtils
import org.apache.spark.streaming._
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

val appName = "TwitterData"
val ssc = new StreamingContext(sc, Seconds(10))
val consumerKey = "consumerKey"
val consumerSecret = "consumerSecret"
val accessToken = "accessToken"
val accessTokenSecret = "accessTokenSecret"

val cb = new ConfigurationBuilder  
cb.setDebugEnabled(true).
  setOAuthConsumerKey(consumerKey).
  setOAuthConsumerSecret(consumerSecret).
  setOAuthAccessToken(accessToken).
  setOAuthAccessTokenSecret(accessTokenSecret)
val auth = new OAuthAuthorization(cb.build)
val tweets = TwitterUtils.createStream(ssc, Some(auth))
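// Note: twitter4j's Status.getLang() returns a detected language code such as "en" or "und",
// so comparing it with "XRP" will most likely never match; a keyword filter on the tweet text
// may be closer to what is intended here.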
val XRPTweets = tweets.filter(_.getLang() == "XRP")
val statuses = XRPTweets.map(status => (status.getText(),status.getUser.getName(),status.getUser.getScreenName(),status.getCreatedAt.toString))

statuses.foreachRDD { (rdd, time) =>
  rdd.foreachPartition { partitionIter =>
    // A new producer is created for every partition of every micro-batch;
    // each tuple is sent as a string record to the "XRP" topic.
    val props = new Properties()
    val bootstrap = "localhost:9092"
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("bootstrap.servers", bootstrap)
    val producer = new KafkaProducer[String, String](props)
    partitionIter.foreach { elem =>
      val dat = elem.toString()
      val data = new ProducerRecord[String, String]("XRP", null, dat)
      producer.send(data)
    }
    producer.flush()
    producer.close()
  }
}

ssc.start()
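One direction that might silence the warning (a sketch only, not verified against this exact setup): create the stream with a non-replicated storage level, so the BlockManager never tries to copy blocks to a peer. This assumes the TwitterUtils.createStream overload that takes a filters list and a StorageLevel, which the Bahir spark-streaming-twitter API provides; the rest of the pipeline (filter, map, foreachRDD) would stay the same.

import org.apache.spark.storage.StorageLevel

// Same stream as above, but stored with MEMORY_AND_DISK_SER (replication 1)
// instead of the default MEMORY_AND_DISK_SER_2, so no peer replica is requested.
val tweets = TwitterUtils.createStream(ssc, Some(auth), Nil, StorageLevel.MEMORY_AND_DISK_SER)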
How can I fix this warning message and actually get results? I have already tried searching on Google, but there is not much material about integrating Kafka and Spark Streaming with Twitter that mentions this warning.
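Independently of the warning, a quick way to check whether tweets are reaching the XRP topic at all is a short throwaway consumer. This is only a sketch, assuming the kafka-clients consumer API pulled in by spark-streaming-kafka-0-10 is on the classpath; the group id xrp-check is made up for this check:

import java.util.{Arrays, Properties}
import org.apache.kafka.clients.consumer.KafkaConsumer
import scala.collection.JavaConverters._

val cprops = new Properties()
cprops.put("bootstrap.servers", "localhost:9092")
cprops.put("group.id", "xrp-check")  // throwaway consumer group, only for this check
cprops.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
cprops.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
cprops.put("auto.offset.reset", "earliest")

val consumer = new KafkaConsumer[String, String](cprops)
consumer.subscribe(Arrays.asList("XRP"))
val records = consumer.poll(5000L)  // old-style poll(long), available in the 0.10.x clients
records.asScala.foreach(r => println(r.value()))
consumer.close()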

Thank you all.