Netty (netty-all-4.1.12.Final.jar) java.io.IOException: An existing connection was forcibly closed by the remote host


Problem description:

My program creates a Netty server and a Netty client, and the client then makes 2^17 connections to that server. At some point the client starts receiving this exception:

java.io.IOException: Istniejące połączenie zostało gwałtownie zamknięte przez zdalnego hosta

The English equivalent is:

java.io.IOException: An existing connection was forcibly closed by the remote host

Obviously, the server is not expected to forcibly close existing connections.

Steps to reproduce:

To make it easy for anyone who wants to reproduce this, I put together the "single runnable Java file" program below. Its only dependency is netty-all-4.1.12.Final.jar. It starts a Netty server on a free port, creates a client, performs the connections, waits a while to give the server a chance to process the incoming requests, and then prints statistics: how many connections were attempted, how many the server received, how many were lost, and how many exceptions of what kind the server and the client each ran into.

package netty.exception.tst;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;

public class NettyException {

    public static void main(String[] args) throws InterruptedException {
        System.out.println("starting server");
        NettyServer server = new NettyServer(0);
        int port = server.getPort();
        System.out.println("server started at port: " + port);

        System.out.println("staring client");
        NettyClient client = new NettyClient();
        System.out.println("client started");

        int noOfConnectionsToPerform = 1 << 17;
        System.out.println("performing " + noOfConnectionsToPerform + " connections");
        for (int n = 0; n < noOfConnectionsToPerform; n++) {
            // send a connection request; note the returned ChannelFuture is not awaited or checked here
            ChannelFuture f = client.getBootstrap().connect("localhost", port);
        }
        System.out.println("client performed " + noOfConnectionsToPerform + " connections");

        System.out.println("wait a bit to give a chance for server to finish processing incoming requests");
        Thread.sleep(80000);

        System.out.println("shutting down server and client");
        server.stop();
        client.stop();

        System.out.println("stopped, server received: " + server.connectionsCount() + " connections");
        int numberOfLostConnections = noOfConnectionsToPerform - server.connectionsCount();
        if (numberOfLostConnections > 0) {
            System.out.println("Where do we lost " + numberOfLostConnections + " connections?");
        }

        System.out.println("srerver exceptions: ");
        printExceptions(server.getExceptions());
        System.out.println("client exceptions: ");
        printExceptions(client.getExceptions());
    }

    private static void printExceptions(Map<String, Integer> exceptions) {
        if (exceptions.isEmpty()) {
            System.out.println("There was no exceptions");
        }
        for (Entry<String, Integer> exception : exceptions.entrySet()) {
            System.out.println("There was " + exception.getValue() + " times this exception:");
            System.out.println(exception.getKey());
        }
    }

    public static class NettyServer {
        private ChannelFuture channelFuture;
        private EventLoopGroup bossGroup;
        private EventLoopGroup workerGroup;
        private AtomicInteger connections = new AtomicInteger(0);
        private ExceptionCounter exceptionCounter = new ExceptionCounter();

        public NettyServer(int port) throws InterruptedException {
            bossGroup = new NioEventLoopGroup();
            workerGroup = new NioEventLoopGroup();
            ServerBootstrap serverBootstrap = new ServerBootstrap();
            serverBootstrap.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        public void initChannel(SocketChannel ch) throws Exception {
                            ch.pipeline().addLast(new TimeServerHandler() {

                                @Override
                                public void channelActive(final ChannelHandlerContext ctx) {
                                    connections.incrementAndGet();
                                    super.channelActive(ctx);
                                }

                                @Override
                                public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                                    exceptionCounter.countException(cause);
                                    super.exceptionCaught(ctx, cause);
                                }

                            });
                        }
                    }).option(ChannelOption.SO_BACKLOG, 128).childOption(ChannelOption.SO_KEEPALIVE, true);
            channelFuture = serverBootstrap.bind(port).sync();
        }

        public int getPort() {
            return ((InetSocketAddress) channelFuture.channel().localAddress()).getPort();
        }

        public int connectionsCount() {
            return connections.get();
        }

        public Map<String, Integer> getExceptions() {
            return exceptionCounter.getExceptions();
        }

        public void stop() {
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
            try {
                bossGroup.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
                workerGroup.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    public static class NettyClient {
        private Bootstrap bootstrap;
        private EventLoopGroup workerGroup;
        private ExceptionCounter exceptionCounter = new ExceptionCounter();

        public NettyClient() {
            workerGroup = new NioEventLoopGroup();

            bootstrap = new Bootstrap();
            bootstrap.group(workerGroup);
            bootstrap.channel(NioSocketChannel.class);
            bootstrap.option(ChannelOption.SO_KEEPALIVE, true);
            bootstrap.handler(new ChannelInitializer<SocketChannel>() {
                @Override
                public void initChannel(SocketChannel ch) throws Exception {
                    ch.pipeline().addLast(new TimeClientHandler() {
                        @Override
                        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                            exceptionCounter.countException(cause);
                            super.exceptionCaught(ctx, cause);
                        }
                    });
                }
            });
        }

        public Bootstrap getBootstrap() {
            return bootstrap;
        }

        public void stop() {
            workerGroup.shutdownGracefully();
            try {
                workerGroup.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }

        public Map<String, Integer> getExceptions() {
            return exceptionCounter.getExceptions();
        }
    }

    public static class TimeServerHandler extends ChannelInboundHandlerAdapter {

        @Override
        public void channelActive(final ChannelHandlerContext ctx) {
            final ByteBuf time = ctx.alloc().buffer(4);
            time.writeInt((int) (System.currentTimeMillis() / 1000L + 2208988800L));

            final ChannelFuture f = ctx.writeAndFlush(time);
            f.addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) {
                    assert f == future;
                    ctx.close();
                }
            });
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            ctx.close();
        }
    }

    public static class TimeClientHandler extends ChannelInboundHandlerAdapter {
        private ThreadLocal<ByteBuf> buf = new ThreadLocal<ByteBuf>();

        @Override
        public void handlerAdded(ChannelHandlerContext ctx) {
            buf.set(ctx.alloc().buffer(4));
        }

        @Override
        public void handlerRemoved(ChannelHandlerContext ctx) {
            buf.get().release();
            buf.remove();
        }

        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) {
            ByteBuf m = (ByteBuf) msg;
            buf.get().writeBytes(m);
            m.release();

            if (buf.get().readableBytes() >= 4) {
                long currentTimeMillis = (buf.get().readUnsignedInt() - 2208988800L) * 1000L;
                ctx.close();
            }
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            ctx.close();
        }
    }

    public static class ExceptionCounter {
        private ConcurrentHashMap<String, AtomicInteger> exceptions = new ConcurrentHashMap<String, AtomicInteger>();

        private void countException(Throwable cause) {

            StringWriter writer = new StringWriter();
            cause.printStackTrace(new PrintWriter(writer));
            String stackTrace = writer.toString();

            AtomicInteger exceptionCount = exceptions.get(stackTrace);
            if (exceptionCount == null) {
                exceptionCount = new AtomicInteger(0);
                AtomicInteger prevCount = exceptions.putIfAbsent(stackTrace, exceptionCount);
                if (prevCount != null) {
                    exceptionCount = prevCount;
                }
            }
            exceptionCount.incrementAndGet();
        }

        public Map<String, Integer> getExceptions() {
            Map<String, Integer> newMap = exceptions.entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().get()));
            return Collections.unmodifiableMap(newMap);
        }
    }
}
Questions:

  • Why is this exception thrown?
  • Where did the lost connections go? Why is there no error for them?
  • How can this be avoided, and what is the correct way to program this kind of "high-throughput" application so that problems such as dropped or reset existing connections do not occur?
  • Off-topic, but perhaps someone with networking expertise will know: why, when I change the declaration of the private ThreadLocal buf field in TimeClientHandler so that it is also static, does a NullPointerException show up in TimeClientHandler.handler? That is strange: is this class copied somehow, or is there something odd about the threads from NioEventLoopGroup? (The change in question is sketched right after this list.)
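
For clarity, the change referred to in the last bullet is roughly the following; this is only a sketch of the modification being asked about, with the rest of TimeClientHandler left exactly as in the listing above:

public static class TimeClientHandler extends ChannelInboundHandlerAdapter {
    // The variant being asked about: one ThreadLocal shared by every
    // TimeClientHandler instance, so the buffer is keyed only by the
    // current event-loop thread and no longer by handler instance.
    private static ThreadLocal<ByteBuf> buf = new ThreadLocal<ByteBuf>();

    // handlerAdded, handlerRemoved, channelRead and exceptionCaught
    // remain unchanged.
}

With that declaration, several channels served by the same NioEventLoop thread share a single buffer slot, so handlerRemoved for one channel can release and remove the value that another channel on the same thread still expects, which is one plausible source of the NullPointerException.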
Environment:

  • Netty version: netty-all-4.1.12.Final.jar
  • JVM version: JDK 1.8.0_111, 64-bit
  • OS version: Windows 10, 64-bit

The number of ports per IP address is limited to 64k, so you cannot have 2^17 (= 131,072, roughly twice that limit) of them open at once. And since each socket uses a file handle, you may also be hitting the limit on the maximum number of open files per process. See.
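
The answer above points at resource limits but does not spell out a remedy. As a hedged sketch of one possible mitigation (my own assumption, not part of the original answer), the connect loop in the reproduction program could apply backpressure so that the number of simultaneously open client connections stays well below the ephemeral-port ceiling. The limit of 10000 is an arbitrary illustrative value; client, port and noOfConnectionsToPerform are the variables from the listing above, and Semaphore is java.util.concurrent.Semaphore (acquire() throws InterruptedException, which main already declares):

final Semaphore inFlight = new Semaphore(10000);
for (int n = 0; n < noOfConnectionsToPerform; n++) {
    inFlight.acquire();                             // block until a connection slot is free
    client.getBootstrap().connect("localhost", port)
            .addListener((ChannelFutureListener) future -> {
                if (future.isSuccess()) {
                    // free the slot only once this connection has fully closed
                    future.channel().closeFuture()
                            .addListener((ChannelFutureListener) closed -> inFlight.release());
                } else {
                    inFlight.release();             // connect failed, the slot is free again
                }
            });
}

With a cap like this the client never tries to hold more sockets than the OS can hand out, at the cost of the loop taking longer to finish.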

Sure, that may be part of the story, but why does it kill existing connections, and why is there no error for the connections that went over the limit? What I am trying to do here is understand what happens when we exceed those limits, and to learn how to program this kind of "high-traffic" application.
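
One hedged observation about the reproduction code itself (my own, not from the original discussion): the ChannelFuture returned by connect() in the client loop is never inspected, so connection attempts that fail to establish never surface as an error anywhere. A minimal sketch of how the loop could at least count failed connects, reusing client, port and noOfConnectionsToPerform from the listing above:

final AtomicInteger failedConnects = new AtomicInteger(0);
for (int n = 0; n < noOfConnectionsToPerform; n++) {
    client.getBootstrap().connect("localhost", port)
            .addListener((ChannelFutureListener) future -> {
                if (!future.isSuccess()) {
                    // the attempt was refused, timed out, or could not get a local port
                    failedConnects.incrementAndGet();
                }
            });
}
// ...after the 80 second sleep:
System.out.println("failed connect attempts: " + failedConnects.get());

For reference, the output of one run of the reproduction program: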
starting server
server started at port: 56069
starting client
client started
performing 131072 connections
client performed 131072 connections
wait a bit to give the server a chance to finish processing incoming requests
shutting down server and client
stopped, server received: 34735 connections
Where did we lose 96337 connections?
server exceptions: 
There were no exceptions
client exceptions: 
This exception occurred 258 times:
java.io.IOException: Istniejące połączenie zostało gwałtownie zamknięte przez zdalnego hosta
    at sun.nio.ch.SocketDispatcher.read0(Native Method)
    at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
    at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
    at sun.nio.ch.IOUtil.read(IOUtil.java:192)
    at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:380)
    at io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes(PooledUnsafeDirectByteBuf.java:288)
    at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1100)
    at io.netty.buffer.WrappedByteBuf.writeBytes(WrappedByteBuf.java:813)
    at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:372)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:123)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458)
    at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
    at java.lang.Thread.run(Thread.java:745)

This exception occurred 30312 times:
java.io.IOException: Istniejące połączenie zostało gwałtownie zamknięte przez zdalnego hosta
    at sun.nio.ch.SocketDispatcher.read0(Native Method)
    at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
    at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
    at sun.nio.ch.IOUtil.read(IOUtil.java:192)
    at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:380)
    at io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes(PooledUnsafeDirectByteBuf.java:288)
    at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:1100)
    at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:372)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:123)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:644)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:579)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:496)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:458)
    at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
    at java.lang.Thread.run(Thread.java:745)