Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/scala/17.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Scala 净值4:延迟ctx.write_Scala_Netty_Future - Fatal编程技术网

Scala 净值4:延迟ctx.write

Scala 净值4:延迟ctx.write,scala,netty,future,Scala,Netty,Future,基于“helloworld”HTTP示例(Netty 4.0.12.Final),我想以某种方式推迟HTTP响应的编写,即请求处理和响应创建应该在Netty的工作人员之外执行。在响应可用后,Netty应该从它离开的地方开始,并将响应写入通道 下面的代码应该演示我想要实现的目标。问题是,在handleDeferred()的情况下,Netty会在某个地方阻塞,而响应永远不会到达客户端。我不确定我是否完全偏离了轨道。因此,任何建议都将不胜感激 import io.netty.channel.{Chan

基于“helloworld”HTTP示例(Netty 4.0.12.Final),我想以某种方式推迟HTTP响应的编写,即请求处理和响应创建应该在Netty的工作人员之外执行。在响应可用后,Netty应该从它离开的地方开始,并将响应写入通道

下面的代码应该演示我想要实现的目标。问题是,在handleDeferred()的情况下,Netty会在某个地方阻塞,而响应永远不会到达客户端。我不确定我是否完全偏离了轨道。因此,任何建议都将不胜感激

import io.netty.channel.{ChannelFutureListener, ChannelHandlerContext, ChannelInboundHandlerAdapter}
import io.netty.handler.codec.http._
import io.netty.handler.codec.http.HttpHeaders.Values
import io.netty.buffer.Unpooled
import scala.concurrent._

/**
 * Netty inbound handler demonstrating both an immediate and a deferred
 * (off-event-loop) way of answering an HTTP request.
 *
 * Bug fixed: since Netty 4.0, `ctx.write()` only enqueues the message in the
 * outbound buffer — it does NOT flush. In the deferred path the response is
 * produced on a worker thread AFTER `channelReadComplete()` (and its flush)
 * has already run, so a plain `write()` leaves the response stuck in the
 * buffer and the client never receives it. `writeResponse` therefore uses
 * `ctx.writeAndFlush(...)`, which is safe to call from any thread (Netty
 * re-schedules it onto the channel's event loop).
 */
class DispatchingHandler extends ChannelInboundHandlerAdapter {

  /** Builds a canned "Hey there!" plain-text 200 response. */
  class StaticResponseFactory {
    def response = {
      val content = Unpooled.wrappedBuffer("Hey there!".getBytes)
      val response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, content)
      response.headers().set(HttpHeaders.Names.CONTENT_TYPE, "text/plain")
      // CONTENT_LENGTH must match the payload or keep-alive clients will stall.
      response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, response.content().readableBytes())
      response
    }
  }

  override def channelRead(ctx: ChannelHandlerContext, msg: Any) {
    val deferred = true // <= select how to respond.
    msg match {
      case request: HttpRequest if deferred => handleDeferred(ctx, request)
      case request: HttpRequest => handleImmediately(ctx, request)
      case lastHttpContent: LastHttpContent => /* ignore trailing headers */
      case other => println("unexpected message: " + other)
    }
  }

  override def channelReadComplete(ctx: ChannelHandlerContext) {
    println("[" + Thread.currentThread() + "] flushing...")
    ctx.flush()
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
    cause.printStackTrace()
  }

  /** Builds and writes the response synchronously on the event-loop thread. */
  def handleImmediately(ctx: ChannelHandlerContext, request: HttpRequest) {
    val response = new StaticResponseFactory().response
    writeResponse(ctx, request, response)
  }

  /**
   * Runs request processing on Scala's global pool and writes the response
   * once it is available. By then the event loop has already left
   * `channelReadComplete()`, so the write below must flush itself.
   */
  def handleDeferred(ctx: ChannelHandlerContext, request: HttpRequest) {
    // NOTE(review): the global pool is fine for a demo; production code should
    // accept an ExecutionContext from the caller instead.
    import scala.concurrent.ExecutionContext.Implicits.global
    // `Future(...)` / `foreach` replace the deprecated `future(...)` / `onSuccess`.
    Future(dispatchRequestAndCollectResponse(request)).foreach { factory =>
      writeResponse(ctx, request, factory.response)
    }
  }

  /**
   * Writes AND flushes the response; closes the channel unless the client
   * requested keep-alive. `writeAndFlush` is required because `write()` alone
   * never flushes in Netty 4, and no later flush is guaranteed to happen
   * (the deferred path completes after channelReadComplete's flush).
   */
  def writeResponse(ctx: ChannelHandlerContext, request: HttpRequest, response: FullHttpResponse) {
    if (!HttpHeaders.isKeepAlive(request)) {
      ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE)
    } else {
      response.headers().set(HttpHeaders.Names.CONNECTION, Values.KEEP_ALIVE)
      ctx.writeAndFlush(response)
    }
  }

  /** Simulates expensive request handling off the event loop. */
  def dispatchRequestAndCollectResponse(request: HttpRequest): StaticResponseFactory = {
    // do some heavy lifting...
    Thread.sleep(1000)
    new StaticResponseFactory
  }
}
import io.netty.channel.{ChannelFutureListener, ChannelHandlerContext, ChannelInboundHandlerAdapter}
import io.netty.handler.codec.http._
import io.netty.handler.codec.http.HttpHeaders.Values
import io.netty.buffer.Unpooled
import scala.concurrent._

class DispatchingHandler extends ChannelInboundHandlerAdapter {

  class StaticResponseFactory {
    def response = {
      val content = Unpooled.wrappedBuffer("Hey there!".getBytes)
      val response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, content)
      response.headers().set(HttpHeaders.Names.CONTENT_TYPE, "text/plain")
      response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, response.content().readableBytes())
      response
    }
  }

  override def channelRead(ctx: ChannelHandlerContext, msg: Any) {
    val deferred = true // <= 选择响应方式。
    msg match {
      case request: HttpRequest if deferred => handleDeferred(ctx, request)
      case request: HttpRequest => handleImmediately(ctx, request)
      case lastHttpContent: LastHttpContent => /* 忽略尾部标头 */
      case other => println("unexpected message: " + other)
    }
  }

  override def channelReadComplete(ctx: ChannelHandlerContext) {
    println("[" + Thread.currentThread() + "] flushing...")
    ctx.flush()
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
    cause.printStackTrace()
  }

  def handleImmediately(ctx: ChannelHandlerContext, request: HttpRequest) {
    val response = new StaticResponseFactory().response
    writeResponse(ctx, request, response)
  }

  def handleDeferred(ctx: ChannelHandlerContext, request: HttpRequest) {
    // 在 Scala 托管线程池（global）上执行分派和请求处理。
    import scala.concurrent.ExecutionContext.Implicits.global
    future(dispatchRequestAndCollectResponse(request)).onSuccess { case factory =>
      writeResponse(ctx, request, factory.response)
    }
  }

  def writeResponse(ctx: ChannelHandlerContext, request: HttpRequest, response: FullHttpResponse) {
    if (!HttpHeaders.isKeepAlive(request)) {
      val channelFuture = ctx.write(response)
      channelFuture.addListener(ChannelFutureListener.CLOSE)
    } else {
      response.headers().set(HttpHeaders.Names.CONNECTION, Values.KEEP_ALIVE)
      ctx.write(response)
    }
  }

  def dispatchRequestAndCollectResponse(request: HttpRequest): StaticResponseFactory = {
    // 做一些繁重的工作……
    Thread.sleep(1000)
    new StaticResponseFactory
  }
}

在 writeResponse(…) 中刷新缓冲区（即调用 ctx.flush()）解决了这个问题。没错：自 Netty 4.0 起，write() 不再自动刷新输出缓冲区。您必须手动调用 flush()，或者改用 writeAndFlush()。谢谢您的确认。