Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/118.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
使用OpenGL TextureCache生成电影的iOS无法正确显示帧_Ios_Opengl Es_Textures_Render_Movie - Fatal编程技术网

使用OpenGL TextureCache生成电影的iOS无法正确显示帧

使用OpenGL TextureCache生成电影的iOS无法正确显示帧,ios,opengl-es,textures,render,movie,Ios,Opengl Es,Textures,Render,Movie,我正在通过渲染OpenGL纹理生成一部电影。结果电影的某些帧看起来没有完全渲染,因为它们显示了前一帧的一部分。 如果我添加一个NSThread[NSThread sleepForTimeInterval:0.05];这个问题没有出现,但我不能依赖这个指令 我使用的代码是: dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^ {

我正在通过渲染OpenGL纹理生成一部电影。结果电影的某些帧看起来没有完全渲染,因为它们显示了前一帧的一部分。 如果我添加一个NSThread[NSThread sleepForTimeInterval:0.05];这个问题没有出现,但我不能依赖这个指令

我使用的代码是:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
             {
               // Movie writer that receives the OpenGL-rendered frames.
               AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                             [NSURL fileURLWithPath:tempVideoPath]
                                                                      fileType:AVFileTypeQuickTimeMovie
                                                                         error:&error];
               NSParameterAssert(videoWriter);

               // H.264 output at the movie's pixel dimensions.
               NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:MOVsize.width], AVVideoWidthKey,
                                              [NSNumber numberWithInt:MOVsize.height], AVVideoHeightKey,
                                              nil];
               AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                                       outputSettings:videoSettings];
               // Offline rendering: let the writer throttle us via readyForMoreMediaData.
               videoWriterInput.expectsMediaDataInRealTime=NO;

               NSParameterAssert(videoWriterInput);
               NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

               [videoWriter addInput:videoWriterInput];

               // Pixel-buffer pool attributes MUST match the writer's frame size;
               // previously this hard-coded 1024x768 while the writer used MOVsize,
               // which mismatches whenever MOVsize differs.
               NSDictionary* pixelAttributesDict;

                 pixelAttributesDict= [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                       [NSNumber numberWithInt:MOVsize.width], kCVPixelBufferWidthKey,
                                       [NSNumber numberWithInt:MOVsize.height], kCVPixelBufferHeightKey,
                                       nil];

               AVAssetWriterInputPixelBufferAdaptor* adaptor=[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:pixelAttributesDict];
               [videoWriter startWriting];
               [videoWriter startSessionAtSourceTime:kCMTimeZero];

               // GL commands below must run on the context that owns the texture cache.
               if([EAGLContext currentContext]!= glContext)
                 [EAGLContext setCurrentContext:glContext];

               [self createDataFBO:adaptor];

               for (int frame=0;frame<samplesNumber;frame++){

                 // Spin the run loop until the writer can accept another frame.
                 while (!videoWriterInput.readyForMoreMediaData)
                 {
                   NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
                   [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
                 }

                   //Render current frame with openGL into renderTarget's FBO.
                   [self renderFrameAt:frame];

                   // FIX: block until the GPU has finished rasterizing into the
                   // CVPixelBuffer-backed texture. Without this, the CPU-side
                   // append below races the GPU and frames contain remnants of
                   // the previous frame (the bug the sleepForTimeInterval hack
                   // was papering over).
                   glFinish();

                   CVReturn lockResult= CVPixelBufferLockBaseAddress(renderTarget, 0);
                   BOOL test =(lockResult==kCVReturnSuccess) && [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
                   if(!test) {
                     NSLog(@"append failed!");
                   }
                   // FIX: balance the lock — the original never unlocked, which
                   // leaves the buffer's base address pinned for the whole run.
                   if (lockResult==kCVReturnSuccess) {
                     CVPixelBufferUnlockBaseAddress(renderTarget, 0);
                   }
}

这是我用来创建纹理缓存的代码

    /// Creates the offscreen movie FBO whose color attachment is a texture
    /// backed by a CVPixelBuffer from the adaptor's pool, so rendered frames
    /// can be appended to the writer without a glReadPixels copy.
    /// @param adaptor The writer's pixel-buffer adaptor; its pool supplies
    ///                the shared render target (stored in the renderTarget ivar).
    - (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *) adaptor;
    {
      // Dedicated texture unit + framebuffer for the movie render pass.
      glActiveTexture(GL_TEXTURE1);
      glGenFramebuffers(1, &movieFramebuffer);
      glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    #if defined(__IPHONE_6_0)
      // iOS 6 SDK takes the EAGLContext directly.
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #else
      // Pre-iOS 6 SDK expects a void * context handle.
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #endif

      if (err)
      {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
      }

      // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
      // FIX: check the pool draw — a NULL pool (writer not started) or an
      // exhausted pool previously went unnoticed and crashed later in
      // CVPixelBufferGetWidth.
      err = CVPixelBufferPoolCreatePixelBuffer (NULL, [adaptor pixelBufferPool], &renderTarget);
      NSAssert(err == kCVReturnSuccess && renderTarget != NULL,
               @"CVPixelBufferPoolCreatePixelBuffer failed: %d", err);

      size_t frameWidth = CVPixelBufferGetWidth(renderTarget);
      size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

      // Wrap the pixel buffer in a GL texture; rendering into it writes the
      // pixel buffer directly.
      err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                          NULL, // texture attributes
                                                          GL_TEXTURE_2D,
                                                          GL_RGBA, // opengl format
                                                          (GLsizei)frameWidth,
                                                          (GLsizei)frameHeight,
                                                          GL_BGRA, // native iOS format
                                                          GL_UNSIGNED_BYTE,
                                                          0,
                                                          &renderTexture);
      NSAssert(err == kCVReturnSuccess && renderTexture != NULL,
               @"CVOpenGLESTextureCacheCreateTextureFromImage failed: %d", err);

      glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
      // FIX: NPOT textures in ES 2.0 require non-mipmapped filtering and
      // CLAMP_TO_EDGE wrapping; the default MIN filter is mipmapped, which
      // leaves the texture incomplete if it is ever sampled. Enum-valued
      // parameters are set with glTexParameteri, not glTexParameterf.
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

      // Attach the CVPixelBuffer-backed texture as the FBO's color target.
      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

      GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

      NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
    }

渲染后是否尝试过glFlush?

我在渲染结束时使用glFinish,尝试过添加glFlush,但仍有重叠帧。
    // NOTE(review): this is a verbatim second quotation of -createDataFBO:
    // from earlier in the page; kept byte-identical, annotated only.
    //
    // Builds an offscreen FBO whose color attachment is a texture created
    // from a CVPixelBuffer drawn out of the adaptor's buffer pool, so GL
    // rendering lands directly in the buffer handed to the asset writer.
    - (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *) adaptor;
    {
      // Dedicated texture unit and framebuffer for the movie render pass.
      glActiveTexture(GL_TEXTURE1);
      glGenFramebuffers(1, &movieFramebuffer);
      glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

    // iOS 6 SDK takes the EAGLContext directly; older SDKs expect a void *.
    #if defined(__IPHONE_6_0)
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #else
      CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
    #endif

      if (err)
      {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
      }

      // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
      // NOTE(review): return value unchecked — renderTarget may be NULL if the
      // pool is nil (writer not started yet) or exhausted.
      CVPixelBufferPoolCreatePixelBuffer (NULL, [adaptor pixelBufferPool], &renderTarget);

      size_t frameWidth = CVPixelBufferGetWidth(renderTarget);
      size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

      // Wrap the pixel buffer in a GL texture; rendering into the FBO writes
      // the pixel buffer directly (no glReadPixels needed).
      CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                    NULL, // texture attributes
                                                    GL_TEXTURE_2D,
                                                    GL_RGBA, // opengl format
                                                    frameWidth,
                                                    frameHeight,
                                                    GL_BGRA, // native iOS format
                                                    GL_UNSIGNED_BYTE,
                                                    0,
                                                    &renderTexture);

      glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
      // NOTE(review): MIN/MAG filters are never set; the ES 2.0 default MIN
      // filter is mipmapped, which makes this NPOT texture incomplete if sampled.
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

      // Attach the pixel-buffer-backed texture as the FBO color target.
      glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

      GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

      NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
    }