Objective-C: getting an AVPlayerLayer image into a UIImageView buffer


I tried [playerLayer renderInContext:UIGraphicsGetCurrentContext()]; but it only renders a black background. So instead I keep grabbing the current player item as a thumbnail image, but that does not look like video playback, just a continuous stream of still images. Is there any other option?
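If the video is being played through an AVPlayer, one alternative worth considering is AVPlayerItemVideoOutput, which hands back the frame currently on screen as a pixel buffer instead of rendering the layer. This is only a sketch and is not part of the answer below; playerItem and imageView are assumed to exist already.

    // Attach a video output to the existing player item (assumed to exist).
    NSDictionary *attributes = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
                                                            forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    AVPlayerItemVideoOutput *videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes];
    [playerItem addOutput:videoOutput];

    // Later, e.g. from a CADisplayLink callback, copy the frame currently being displayed.
    CMTime itemTime = [videoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([videoOutput hasNewPixelBufferForItemTime:itemTime]) {
        CVPixelBufferRef pixelBuffer = [videoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:NULL];
        if (pixelBuffer) {
            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            CIContext *ciContext = [CIContext contextWithOptions:nil]; // reuse a single context in real code
            CGImageRef cgImage = [ciContext createCGImage:ciImage fromRect:[ciImage extent]];
            imageView.image = [UIImage imageWithCGImage:cgImage];
            CGImageRelease(cgImage);
            CVBufferRelease(pixelBuffer);
        }
    }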

Try this code to set up the reader:

//setUp Reader
    AVURLAsset * asset = [AVURLAsset URLAssetWithURL:urlvalue options:nil]; 
    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{ dispatch_async(dispatch_get_main_queue(), ^{
        AVAssetTrack * videoTrack = nil; 
        NSArray * tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        if ([tracks count] == 1) {
            videoTrack = [tracks objectAtIndex:0];
            NSError * error = nil; 
            _movieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error]; 
            if (error)
                NSLog(@"%@", error.localizedDescription);
            // BGRA output matches the bitmap context used to convert frames below.
            NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
            NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
            NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
            [_movieReader addOutput:[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoSettings]];
            [_movieReader startReading];

        } 
    }); 
    }];
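In practice it also helps to confirm that the "tracks" key actually finished loading before creating the reader. A minimal sketch of that check, to sit at the top of the completion block (variable names follow the code above):

    NSError *loadError = nil;
    if ([asset statusOfValueForKey:@"tracks" error:&loadError] != AVKeyValueStatusLoaded) {
        NSLog(@"tracks did not load: %@", loadError);
        return;
    }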
Get the next movie frame:

- (void) readNextMovieFrame { 

    if (_movieReader.status == AVAssetReaderStatusReading) { 

        AVAssetReaderTrackOutput * output = [_movieReader.outputs objectAtIndex:0]; 
        CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
        if (sampleBuffer) {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
            // Lock the image buffer 
            CVPixelBufferLockBaseAddress(imageBuffer,0);
            // Get information of the image 
            uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
            size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
            size_t width = CVPixelBufferGetWidth(imageBuffer); 
            size_t height = CVPixelBufferGetHeight(imageBuffer); 

            /* Keep the buffer locked while its base address is in use;
               it is unlocked below, after the frame has been converted. */

            /*Create a CGImageRef from the CVImageBufferRef*/
             CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
            CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
            CGImageRef newImage = CGBitmapContextCreateImage(newContext); 

            /*We release some components*/
            CGContextRelease(newContext); 
            CGColorSpaceRelease(colorSpace);

            /*We display the result on the custom layer*/
            /*self.customLayer.contents = (id) newImage;*/

            /*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly)*/
            UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
            CGImageRelease(newImage); // the CGImage is no longer needed once wrapped in a UIImage
            UIGraphicsBeginImageContext(image.size);

            [image drawAtPoint:CGPointMake(0, 0)];

           // UIImage *img=UIGraphicsGetImageFromCurrentImageContext();
            videoImage=UIGraphicsGetImageFromCurrentImageContext();

            UIGraphicsEndImageContext();


//videoImage=image;

          //  if (frameCount < 40) {
            NSLog(@"readNextMovieFrame==%d", frameCount);
            NSString *filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
            NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
            [UIImagePNGRepresentation(videoImage) writeToFile:pngPath atomically:YES];
            frameCount++;
          //  }

            CVPixelBufferUnlockBaseAddress(imageBuffer,0); 
            CFRelease(sampleBuffer); 
        } 
    } 
}

This is taken straight from 7twenty7's blog.
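To actually get these frames into a UIImageView (the original goal of the question), readNextMovieFrame has to be called repeatedly. A hypothetical driver, not taken from that blog; self.imageView, videoImage, and the 30 fps rate are assumptions:

    - (void)startFrameTimer {
        // Pull roughly 30 frames per second; match this to the asset's real frame rate.
        [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                         target:self
                                       selector:@selector(displayNextFrame)
                                       userInfo:nil
                                        repeats:YES];
    }

    - (void)displayNextFrame {
        [self readNextMovieFrame];
        if (videoImage) {
            self.imageView.image = videoImage; // assumed UIImageView outlet
        }
    }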