
Java Spark custom receiver cannot process data

Tags: java, maven, apache-spark, spark-streaming

I have written a custom receiver to process a data stream. The code compiles and runs fine, and when I send data from the server it shows up in the console, but I cannot see the input being processed.

Custom server:

nc -lk 9999
Java code:

import com.google.common.io.Closeables;

import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.receiver.Receiver;
import scala.Tuple2;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.ConnectException;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.regex.Pattern;

/**
 * Custom Receiver that receives data over a socket. Received bytes are interpreted as
 * text, and \n-delimited lines are treated as records, which are then counted and printed.
 *
 * Usage: JavaCustomReceiver <master> <hostname> <port>
 *   <master> is the Spark master URL. In local mode, <master> should be 'local[n]' with n > 1.
 *   <hostname> and <port> of the TCP server that Spark Streaming would connect to receive data.
 *
 * To run this on your local machine, you need to first run a Netcat server
 *    `$ nc -lk 9999`
 * and then run the example
 *    `$ bin/run-example org.apache.spark.examples.streaming.JavaCustomReceiver localhost 9999`
 */

public class JavaCustomReceiver extends Receiver<String> {
  private static final Pattern SPACE = Pattern.compile(" ");

  public static void main(String[] args) throws Exception {
//    if (args.length < 2) {
//      System.err.println("Usage: JavaCustomReceiver <hostname> <port>");
//      System.exit(1);
//    }

//    StreamingExamples.setStreamingLogLevels();

    // Create the context with a 10 second batch interval (Duration is in milliseconds)
    SparkConf sparkConf = new SparkConf().setAppName("JavaCustomReceiver").setMaster("local[*]");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(10000));

    // Create an input stream with the custom receiver on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    JavaReceiverInputDStream<String> lines = ssc.receiverStream(
      new JavaCustomReceiver("localhost", 9999));    

//    JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(SPACE.split(""))).iterator();
    JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(SPACE.split(" ")).iterator());
    words.foreachRDD(x -> {
        x.collect().forEach(n -> System.out.println("item of list: " + n));
    });

    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(s -> new Tuple2<>(s, 1))
        .reduceByKey((i1, i2) -> i1 + i2);
    wordCounts.count();
    System.out.println("WordCounts == " + wordCounts);
    wordCounts.print();

    ssc.start();
    ssc.awaitTermination();
  }

  // ============= Receiver code that receives data over a socket ==============

  String host = null;
  int port = -1;

  public JavaCustomReceiver(String host_ , int port_) {
    super(StorageLevel.MEMORY_AND_DISK_2());
    host = host_;
    port = port_;
  }

  @Override
  public void onStart() {
    // Start the thread that receives data over a connection
    new Thread(this::receive).start();
  }

  @Override
  public void onStop() {
    // There is nothing much to do, as the thread calling receive()
    // is designed to stop by itself once isStopped() returns true
  }

  /** Create a socket connection and receive data until receiver is stopped */
  private void receive() {
    try {
      Socket socket = null;
      BufferedReader reader = null;
      try {
        // connect to the server
        socket = new Socket(host, port);
        reader = new BufferedReader(
            new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
        // Until stopped or connection broken continue reading
        String userInput;
        while (!isStopped() && (userInput = reader.readLine()) != null) {
          System.out.println("Received data '" + userInput + "'");
          store(userInput);
        }
      } finally {
        Closeables.close(reader, /* swallowIOException = */ true);
        Closeables.close(socket,  /* swallowIOException = */ true);
      }
      // Restart in an attempt to connect again when server is active again
      restart("Trying to connect again");
    } catch(ConnectException ce) {
      // restart if could not connect to server
      restart("Could not connect", ce);
    } catch(Throwable t) {
      restart("Error receiving data", t);
    }
  }
}
Why don't I see any input in the console from the print statements?

Here is my pom.xml:

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.0.2</version>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>com.microsoft.azure</groupId>
            <artifactId>azure-storage</artifactId>
            <version>5.5.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>2.0.2</version>
            <scope>provided</scope>
        </dependency>
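
One thing worth checking, assuming the job is launched from an IDE or with plain java rather than spark-submit: spark-streaming_2.11 is declared with provided scope, so it would not be on the runtime classpath in those setups. A common tweak for local experiments is to drop the scope, restoring it only when submitting to a cluster that ships Spark itself:

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>2.0.2</version>
            <!-- "provided" scope removed for local runs; restore it for cluster submits -->
        </dependency>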

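
As for why nothing is printed: the likely culprit is the flatMap. SPACE.split(" ") splits the literal one-character string " " rather than the received line x, and in Java Pattern.compile(" ").split(" ") returns an empty array (trailing empty strings are dropped), so every batch yields zero words and neither foreachRDD nor print() has anything to show. A minimal sketch of the corrected pipeline, assuming the intent was to split each line into words and keeping everything else as posted:

    // Pass the received line x to split(), not the literal " "
    JavaDStream<String> words = lines.flatMap(
        x -> Arrays.asList(SPACE.split(x)).iterator());

    JavaPairDStream<String, Integer> wordCounts = words
        .mapToPair(s -> new Tuple2<>(s, 1))
        .reduceByKey((i1, i2) -> i1 + i2);

    // print() is the per-batch output action. Note that
    // System.out.println("WordCounts == " + wordCounts) only prints the
    // DStream's object reference once, while the graph is being built,
    // and the bare wordCounts.count() creates a new DStream that is
    // never used.
    wordCounts.print();

With that one-character change, each 10 second batch should print the word counts for whatever is typed into the nc session.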