Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/103.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
C++ 将原始IOS相机数据上载到纹理_C++_Ios_Opengl Es_Camera_Render To Texture - Fatal编程技术网

C++ 将原始IOS相机数据上载到纹理

C++ 将原始IOS相机数据上载到纹理,c++,ios,opengl-es,camera,render-to-texture,C++,Ios,Opengl Es,Camera,Render To Texture,我们正在使用IOS上的AVCaptureDevice扫描二维码。我们使用AVCaptureMetadataOutput将相机的输出传递给代码以识别QR代码,目前我们还将相机显示为打开GL视图上的单独视图。但是,我们现在希望其他图形显示在摄影机预览上,因此我们希望能够将摄影机数据加载到其中一个Open GL纹理上 那么,有没有办法从相机获取原始RGB数据 这是我们用来初始化捕获设备和视图的代码(如下) 我们如何修改它以访问RGB数据,从而将其加载到我们的GL纹理之一?我们正在使用C++/Objective

我们正在使用IOS上的
AVCaptureDevice
扫描二维码。我们使用
AVCaptureMetadataOutput
将相机的输出传递给代码以识别QR代码,目前我们还将相机显示为打开GL视图上的单独视图。但是,我们现在希望其他图形显示在摄影机预览上,因此我们希望能够将摄影机数据加载到其中一个Open GL纹理上

那么,有没有办法从相机获取原始RGB数据

这是我们用来初始化捕获设备和视图的代码(如下)

我们如何修改它以访问RGB数据,从而将其加载到我们的GL纹理之一?我们正在使用C++/Objective C

谢谢

肖恩南方酒店

// Create the capture session that will own the camera input and the
// metadata (QR-code) output.
self.captureSession = [[AVCaptureSession alloc] init];

NSError *error;
// Use the default video capture device (the camera).
AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Create the video capture input; if the camera cannot be initialised,
// `input` is nil and `error` describes the failure.
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];

if (!input) 
{
    NSLog(@"Error Getting Camera Input");
    return;
}
// Add the input source (the camera) to the capture session.
[self.captureSession addInput:input];

AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
// Attach the metadata output to the session; it is configured below.
[self.captureSession addOutput:captureMetadataOutput];

// Create a serial dispatch queue and set the delegate so scanned metadata
// objects are delivered off the main thread.
dispatch_queue_t dispatchQueue;
dispatchQueue = dispatch_queue_create("scanQueue", NULL);
[captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];

// The delegate must implement captureOutput:didOutputMetadataObjects:fromConnection:
// to receive callbacks for detected metadata. Registering every available
// type means all supported barcodes (including QR codes) are reported.
[captureMetadataOutput setMetadataObjectTypes:[captureMetadataOutput availableMetadataObjectTypes]];

// Preview layer that displays what the camera is capturing.

self.captureLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
[self.captureLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

// Host view for the preview layer. The explicit `retain` shows this file is
// compiled under MRC, not ARC; gCameraPreviewView appears to be a global
// owned by this code — confirm it is released elsewhere.
gCameraPreviewView= [[[UIView alloc] initWithFrame:CGRectMake(gCamX1, gCamY1, gCamX2-gCamX1, gCamY2-gCamY1)] retain];
[self.captureLayer setFrame:gCameraPreviewView.layer.bounds];
[gCameraPreviewView.layer addSublayer:self.captureLayer];

// Attach the camera preview on top of the application's root view.
UIViewController * lVC = [[[UIApplication sharedApplication] keyWindow] rootViewController];
[lVC.view addSubview:gCameraPreviewView];

您不需要直接访问 RGB 相机帧再手动上传为纹理,因为 iOS 提供了速度更快的纹理缓存(texture cache)机制

- (void) writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType pixel:(CVImageBufferRef)cameraFrame time:(CMTime)frameTime;
在回调方法中,可以使用以下参数和函数生成纹理

  CVOpenGLESTextureCacheCreate(...)
  CVOpenGLESTextureCacheCreateTextureFromImage(...)

最后,我们将CMSampleBuffer转换为原始数据(因此我们可以上传到GL纹理),如下所示。这需要一点时间,但对于我们的目的来说已经足够快了

如果有任何改进,我很高兴知道:)

肖恩

// Sample-buffer delegate callback: converts each camera frame into a raw
// 32-bit RGBA bitmap and hands it to CallbackWithData so it can be uploaded
// to an OpenGL texture. Compiled under MRC (this file uses retain/release).
// NOTE(review): the video-data delegate actually receives an AVCaptureOutput;
// the AVCaptureFileOutput type here is kept for source compatibility — confirm
// against the protocol declaration before tightening it.
// Fixes vs. original: the stray ';' after the signature is removed; videoImage
// and colorSpace are no longer leaked on the early-return error paths; the
// redundant UIImage wrapper (whose .CGImage was the same CGImageRef we already
// held) is gone.
- (void)captureOutput:(AVCaptureFileOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];

    // Lazily create the CIContext once; creating one per frame is expensive.
    if(!self.context)
    {
        self.context = [CIContext contextWithOptions:nil];  //only create this once     
    }

    int xsize= CVPixelBufferGetWidth(imageBuffer);
    int ysize= CVPixelBufferGetHeight(imageBuffer);

    // Render the CIImage to a CGImage covering the whole frame.
    CGImageRef videoImage = [self.context createCGImage:ciImage fromRect:CGRectMake(0, 0, xsize, ysize)];
    if(!videoImage)
    {
        return;     // rendering failed; nothing to release yet
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    if(!colorSpace) 
    {
        CGImageRelease(videoImage);     // was leaked on this path
        return;
    }

    // 32-bit RGBA, 8 bits per component, tightly packed rows.
    size_t bitsPerPixel = 32;
    size_t bitsPerComponent = 8;
    size_t bytesPerPixel = bitsPerPixel / bitsPerComponent;
    size_t bytesPerRow = xsize * bytesPerPixel;
    size_t bufferLength = bytesPerRow * ysize;
    uint32_t * tempbitmapData = (uint32_t *)malloc(bufferLength);

    if(!tempbitmapData) 
    {
        CGColorSpaceRelease(colorSpace);
        CGImageRelease(videoImage);     // was leaked on this path
        return;
    }

    CGContextRef cgcontext = CGBitmapContextCreate(tempbitmapData, xsize, ysize, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast);
    // The context retains what it needs from the color space, so release it
    // unconditionally here (the original leaked it when creation failed).
    CGColorSpaceRelease(colorSpace);
    if(!cgcontext) 
    {
        free(tempbitmapData);
        CGImageRelease(videoImage);     // was leaked on this path
        return;
    }

    // Draw the frame into our malloc'd buffer. No UIImage round-trip needed:
    // the original's image.CGImage was this very videoImage.
    CGRect rect = CGRectMake(0, 0, xsize, ysize);
    CGContextDrawImage(cgcontext, rect, videoImage);
    unsigned char *bitmapData = (unsigned char *)CGBitmapContextGetData(cgcontext);     // Points at tempbitmapData (we supplied the backing store)
    CGContextRelease(cgcontext);
    CGImageRelease(videoImage);

    CallbackWithData((unsigned int *)bitmapData,xsize,ysize);               //send raw RGBA pixels to the consumer (e.g. GL texture upload)

    free(bitmapData);
}