Xcode: the captured image is dark

On OS X I am using AVFoundation to capture images from a USB camera. Everything works, but the image I get is darker than the live video.

Device capture configuration:

- (BOOL)prepareCapture {
    captureSession = [[AVCaptureSession alloc] init];
    NSError *error;

    imageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSNumber *pixelFormat = [NSNumber numberWithInt:k32BGRAPixelFormat];
    [imageOutput setOutputSettings:[NSDictionary dictionaryWithObject:pixelFormat
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

    videoOutput = [[AVCaptureMovieFileOutput alloc] init];

    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:MyVideoDevice error:&error];
    if (videoInput) {
        [captureSession beginConfiguration];
        [captureSession addInput:videoInput];
        [captureSession setSessionPreset:AVCaptureSessionPresetHigh];
        //[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
        [captureSession addOutput:imageOutput];
        [captureSession addOutput:videoOutput];
        [captureSession commitConfiguration];
    }
    else {
        // Handle the failure.
        return NO;
    }
    return YES;
}
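
The method above references MyVideoDevice, which the post does not show being set; presumably it is an AVCaptureDevice instance variable. A minimal sketch of how it might be obtained (the default-device lookup is standard AVFoundation; the ivar name is the questioner's own, not an AVFoundation symbol):

// Assumed setup for the MyVideoDevice ivar used in prepareCapture;
// the original post does not show how the device is selected.
MyVideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (!MyVideoDevice) {
    // No camera available
    return NO;
}
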
Adding the live preview view:

- (void)settingPreview:(NSView *)View {
    // Attach preview to session
    previewView = View;
    CALayer *previewViewLayer = [previewView layer];
    [previewViewLayer setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
    AVCaptureVideoPreviewLayer *newPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    [newPreviewLayer setFrame:[previewViewLayer bounds]];
    [newPreviewLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
    [previewViewLayer addSublayer:newPreviewLayer];
    //[self setPreviewLayer:newPreviewLayer];
    [captureSession startRunning];
}
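
One assumption this method makes: on OS X, [previewView layer] returns nil unless the view is layer-backed, so the caller presumably enables layer backing first (or the method could do so itself at the top):

// NSViews are not layer-backed by default; without this,
// [previewView layer] above would return nil.
[View setWantsLayer:YES];
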
Code to capture the image:

- (void)captureImage {
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in imageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
         CFDictionaryRef exifAttachments =
         CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
         if (exifAttachments) {
             // Do something with the attachments.
         }
         // Continue as appropriate.
         // IMG is a global NSImage
         IMG = [self imageFromSampleBuffer:imageSampleBuffer];
         [[self delegate] imageReady:IMG];
     }];
}
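
As an aside, the nested loop that hunts for the video connection can be collapsed: AVCaptureOutput provides connectionWithMediaType:, which performs the same lookup. A shorter equivalent:

// Equivalent one-line lookup for the still image output's video connection.
AVCaptureConnection *videoConnection = [imageOutput connectionWithMediaType:AVMediaTypeVideo];
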
Creating an NSImage from the sample buffer data; I think the problem is here:

- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get a CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    NSImage *image = [[NSImage alloc] initWithCGImage:quartzImage size:NSZeroSize];
    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}

Solution found

The problem was in imageFromSampleBuffer. Using the code below instead, the picture is perfect (presumably because the CIImage path respects the pixel buffer's color space attachments, whereas the CGBitmapContext version forces a device-dependent RGB color space):

// Continue as appropriate.
//IMG = [self imageFromSampleBuffer:imageSampleBuffer];

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);

if (imageBuffer) {
    CVBufferRetain(imageBuffer);

    NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:imageBuffer]];

    IMG = [[NSImage alloc] initWithSize:[imageRep size]];
    [IMG addRepresentation:imageRep];

    CVBufferRelease(imageBuffer);
}
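
A threading note (my addition, not from the original post): captureStillImageAsynchronouslyFromConnection: does not promise to invoke its completion handler on the main thread, so if imageReady: updates the UI it may be safer to deliver the image on the main queue:

// Hand the finished NSImage to the delegate on the main queue, since the
// completion handler may run on a background thread.
dispatch_async(dispatch_get_main_queue(), ^{
    [[self delegate] imageReady:IMG];
});
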

Code found here.

In my case, you still need to call captureStillImageAsynchronouslyFromConnection: multiple times to force the built-in camera to expose properly:

int primeCount = 8;  //YMMV
for (int i = 0; i < primeCount; i++) { 
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {}];
}

[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
    if (imageBuffer) {
        CVBufferRetain(imageBuffer);

        NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
        IMG = [[NSImage alloc] initWithSize: [imageRep size]];
        [IMG addRepresentation: imageRep];
    }
}];
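
If the priming loop feels wasteful, an alternative consistent with the same diagnosis (the camera needs time to settle its auto-exposure) would be to simply delay the real capture after the session starts. A sketch, assuming one second is enough for the camera in question:

// Hypothetical alternative to the priming loop: wait for auto-exposure to
// settle after startRunning, then take the real shot.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)),
               dispatch_get_main_queue(), ^{
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                             completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        // ...convert imageSampleBuffer as above...
    }];
});
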
From a quick look through your code nothing jumps out at me... but one reason AVCapture images can come out dark is that the camera needs a moment to auto-adjust focus, exposure, and so on. Are you calling the captureImage method immediately after the settingPreview method that starts the capture session running?

The code is mostly copied from the Apple samples, and settingPreview is called at program startup. The point is that the saved image is slightly darker; the difference from the live image is small, as if I had lowered the brightness a little. I think the problem happens in the conversion from CMSampleBufferRef to NSImage.

Hi, can you provide a Swift version?