iPhone: how to grab camera frames one by one in Xcode
Hi, I want to capture pictures frame by frame with the iPhone's back camera. What I have done so far:

I open the camera in full-screen mode:
- (IBAction)showCameraUI {
[self startCameraControllerFromViewController:self usingDelegate:self];
}
- (BOOL)startCameraControllerFromViewController:(UIViewController *)controller usingDelegate:(id)delegate {
// ...
}

Then I set up AVCapture to grab pictures frame by frame:
- (void)setupCaptureSession {
NSError *error = nil;
// Create the capture session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Configure the session to produce lower resolution video frames, if your
// processing algorithm can cope. Here we use the low-quality preset for the
// chosen device.
session.sessionPreset = AVCaptureSessionPresetLow;
// Find a suitable AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn]; // use AVCaptureTorchModeOff to turn off
[device unlockForConfiguration];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
error:&error];
if (!input) {
// Handling the error appropriately.
}
[session addInput:input];
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init] ;
// output.alwaysDiscardsLateVideoFrames = YES;
[session addOutput:output];
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Specify the pixel format
output.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
// output.minFrameDuration = CMTimeMake(1, 1);
}
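For reference, this code assumes the AVFoundation, CoreMedia and CoreVideo frameworks are linked, that the view controller adopts AVCaptureVideoDataOutputSampleBufferDelegate, and that the session is kept alive after setup returns. A minimal interface sketch (the class name CameraViewController is hypothetical; the session property matches the [self setSession:session] call below):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

@interface CameraViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
// Retain the session; if nothing holds it, capture stops as soon as it is deallocated
@property (nonatomic, retain) AVCaptureSession *session;
- (void)setupCaptureSession;
@end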
// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
NSLog(@"getting picture");
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// ...
}
// Delegate routine that is called when a sample buffer is written
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
// Create a UIImage from the sample buffer data
NSLog(@"getting picture");
UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
// [self.delegate cameraCaptureGotFrame:image];
}
Now the delegate method captureOutput: never gets called, and I can't see where I went wrong. Any help would be appreciated. Thanks in advance.
The capture session is never started, which is why the delegate never fires: the setup has to end with [session startRunning]. Here is the complete method:

- (void)setupCaptureSession {
NSError *error = nil;
// Create the capture session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Configure the session to produce lower resolution video frames, if your
// processing algorithm can cope. Here we use the low-quality preset for the
// chosen device.
session.sessionPreset = AVCaptureSessionPresetLow;
// Find a suitable AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn]; // use AVCaptureTorchModeOff to turn off
[device unlockForConfiguration];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device
error:&error];
if (!input) {
// Handling the error appropriately.
}
[session addInput:input];
// Create a VideoDataOutput and add it to the session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init] ;
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Specify the pixel format
output.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// If you wish to cap the frame rate to a known value, such as 15 fps, set
// minFrameDuration.
// Start the session running to start the flow of data
NSLog(@"session is going to start at here");
[session startRunning];
// Assign session to an ivar / property so it stays alive after this method returns.
[self setSession:session];
}
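One way to wire this up, as a minimal sketch (starting capture in viewDidLoad is just one reasonable choice):

- (void)viewDidLoad {
[super viewDidLoad];
// Builds the whole pipeline and starts the flow of frames
[self setupCaptureSession];
}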
And the complete imageFromSampleBuffer: implementation:

// Create a UIImage from sample buffer data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
if (!colorSpace) {
NSLog(@"CGColorSpaceCreateDeviceRGB failure");
// Unlock before bailing out so the buffer is not left locked
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
return nil;
}
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the data size for contiguous planes of the pixel buffer.
size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);
// Create a Quartz direct-access data provider that uses data we supply
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bufferSize,
NULL);
// Create a bitmap image from data supplied by our data provider
CGImageRef cgImage =
CGImageCreate(width,
height,
8,
32,
bytesPerRow,
colorSpace,
kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
provider,
NULL,
true,
kCGRenderingIntentDefault);
CGDataProviderRelease(provider);
CGColorSpaceRelease(colorSpace);
// Create and return an image object representing the specified Quartz image
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
return image;
}
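Finally, note that captureOutput:didOutputSampleBuffer:fromConnection: runs on the serial "MyQueue" dispatch queue, not the main thread, so any UIKit work with the frame has to be dispatched back to the main queue. A sketch, assuming a hypothetical previewView UIImageView property:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
dispatch_async(dispatch_get_main_queue(), ^{
// UIKit may only be touched on the main thread
self.previewView.image = image;
});
}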