iOS: How can I do real-time face detection through the iPhone camera?

Here is the Swift-language version. (Source:)

Here is my code. Can anyone help me find out why it is not working?

#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *captureSession;
    AVCaptureDevice *captureDevice;
    AVCaptureVideoPreviewLayer *previewLayer;
    UIImage *resultImage;
    BOOL isStart;
    UIImageView *previewIV; // assumed: the image view that shows the converted frames (an outlet in the original project)
}

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    isStart = NO;
    [self isStartTrue];
    captureSession = [[AVCaptureSession alloc] init];
    captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    captureSession.sessionPreset = AVCaptureSessionPresetLow;
    NSArray *devices = [AVCaptureDevice devices];
    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront) {
                captureDevice = device;
                if (captureDevice != nil) {
                    NSLog(@"Capture Device found");
                    [self beginSession];
                }
            }
        }
    }
    // Do any additional setup after loading the view, typically from a nib.
}


- (void)isStartTrue {
    isStart = YES;
}

- (void)beginSession {
    AVCaptureDeviceInput *captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:nil];
    [captureSession addInput:captureDeviceInput];
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];
    NSDictionary *videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    output.videoSettings = videoSettings;
    [captureSession addOutput:output];
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:previewLayer];
    [captureSession startRunning];
}


- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (isStart) {
        resultImage = [self sampleBufferToImage:sampleBuffer];

        CIContext *context = [CIContext contextWithOptions:@{ kCIContextUseSoftwareRenderer : @YES }];
        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh }];
        CIImage *ciImage = [CIImage imageWithCGImage:resultImage.CGImage];
        dispatch_async(dispatch_get_main_queue(), ^{
            previewIV.image = resultImage;
        });
        NSArray *results = [detector featuresInImage:ciImage options:@{ CIDetectorImageOrientation : @6 }];
        for (CIFaceFeature *face in results) {
            UIImage *faceImage = [UIImage imageWithCGImage:[context createCGImage:ciImage fromRect:face.bounds] scale:1.0 orientation:UIImageOrientationRight];
            NSLog(@"     ====%@", NSStringFromCGRect(face.bounds));
        }
    }
}


- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, bitsPerComponent, bytesPerRow, colorSpace, (kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst));
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    UIImage *result = [[UIImage alloc] initWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
    return result;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    [captureSession stopRunning];
    // Dispose of any resources that can be recreated.
}

@end
This is the complete code of the detection view controller.


You can copy this file into a project and link AVFoundation.framework and CoreMedia.framework.
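One caveat, assuming a deployment target of iOS 10 or later (the post does not say which SDK is used): the Info.plist must contain an NSCameraUsageDescription entry, or the system terminates the app as soon as the camera is accessed. It is also good practice to request camera access explicitly before starting the session; a minimal sketch:

    // A sketch, not part of the original post: request camera permission,
    // then start the session (via the -beginSession method above) only if
    // access was granted.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                [self beginSession]; // safe to configure and start now
            } else {
                NSLog(@"Camera access denied");
            }
        });
    }];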

If you look at the preview image, you will find that it is an empty image, most likely because your sampleBufferToImage method reads the pixel buffer without first calling CVPixelBufferLockBaseAddress, so the bitmap context is built from an invalid base address.

So I changed your sampleBufferToImage method to the one below, and it works:

- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {

    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Let Core Image wrap the pixel buffer directly; no manual
    // base-address handling (or locking) is needed this way.
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(imageBuffer),
                                                 CVPixelBufferGetHeight(imageBuffer))];
    UIImage *result = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage); // the UIImage retains it; release our reference
    return result;
}
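For reference, the original CGBitmapContext approach can also be made to work. The sketch below reflects my reading of the root cause rather than anything verified against your project: it locks the pixel buffer before touching its memory and releases every Quartz object it creates.

    // Alternative sketch: keep the CGBitmapContext approach, but lock the
    // pixel buffer first (the step the original version was missing).
    - (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        // 32BGRA frames, matching the videoSettings configured in -beginSession
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                     bytesPerRow, colorSpace,
                                                     kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef imageRef = CGBitmapContextCreateImage(context);

        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

        UIImage *result = [[UIImage alloc] initWithCGImage:imageRef
                                                     scale:1.0
                                               orientation:UIImageOrientationRight];
        CGImageRelease(imageRef);
        return result;
    }

Either way, note that creating a CIContext per frame (as the Core Image variant above does) is expensive; in a real app you would create one context once and reuse it across frames.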

In what way does it not work? Does it crash? Do you get an error? As Mr. Russell said, you need to be a bit more specific than just asking people to make your code "work".

I have added some more detailed information about the situation. It does not crash and there are no errors. It simply does not detect my face correctly or log the face information.

Thanks for your answer. As you said, after editing my code it can capture the camera image. I have accepted your answer. Thanks again!