Objective-C: glGrab for screen capture on Mac OS 10.7.3 and Xcode 4.3.2

Tags: objective-c, macos, opengl, screen-capture

I am trying to integrate the glGrab code for Mac OS screen capture under the configuration mentioned above, and at the moment all that gets rendered in my window is a blue screen. I believe the problem is in how the image texture is created, but I can't tell what it is. I am only a few weeks into OpenGL, so please bear with me if I'm missing something obvious.

I use the glGrab code as-is, except for the CGLSetFullScreen method (not even CGLSetFullScreenOnDisplay), since these methods are now deprecated. So that one line of code is commented out for the time being.

I have been researching this topic for a while and found another thread on Stack Overflow which may not be the complete answer, but it still helped a great deal.


The direct reference for the glGrab code is here.

The answer to my question above follows. So no more OpenGL or glGrab; this uses what is best optimized for Mac OS X. It doesn't yet include code for capturing the mouse pointer, but I'm sure that if you've landed on this page you're smart enough to figure that out on your own. Or, if anyone reading this knows the solution, here's your chance to help the fraternity :) This code returns a CVPixelBufferRef. You could choose to send back the CGImageRef as-is, or even a byte stream; just tweak it to your liking.

#import <Foundation/Foundation.h>
#import <ApplicationServices/ApplicationServices.h>  // CoreGraphics: CGDisplayCreateImage, CGBitmapContext
#import <CoreVideo/CoreVideo.h>                      // CVPixelBuffer
#include <strings.h>                                 // bcopy

// Flips the bitmap vertically in place by swapping rows pairwise from the top
// and bottom edges until they meet in the middle.
void swizzleBitmap(void *data, int rowBytes, int height) {
    int top, bottom;
    void * buffer;
    void * topP;
    void * bottomP;
    void * base;

    top = 0;
    bottom = height - 1;
    base = data;
    buffer = malloc(rowBytes);

    while (top < bottom) {
        topP = (void *)((top * rowBytes) + (intptr_t)base);
        bottomP = (void *)((bottom * rowBytes) + (intptr_t)base);

        bcopy( topP, buffer, rowBytes );
        bcopy( bottomP, topP, rowBytes );
        bcopy( buffer, bottomP, rowBytes );

        ++top;
        --bottom;
    }   
    free(buffer);
}   

CVImageBufferRef grabViaOpenGL() {
    int bytewidth;

    CGImageRef image = CGDisplayCreateImage(kCGDirectMainDisplay);    // Main screenshot capture call

    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));    // Get screenshot bounds

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                            [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                            [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                            nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height,  kCVPixelFormatType_32ARGB, (CFDictionaryRef) options,
                                          &pxbuffer);
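    // `status` is not checked here; a more defensive version would verify that
    // it equals kCVReturnSuccess (and that pxbuffer is non-NULL) before use.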


    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipLast);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);

    bytewidth = frameSize.width * 4; // Assume 4 bytes/pixel for now
    bytewidth = (bytewidth + 3) & ~3; // Align to 4 bytes
    swizzleBitmap(pxdata, bytewidth, frameSize.height);     // Solution for ARGB madness

    CGColorSpaceRelease(rgbColorSpace);
    CGImageRelease(image);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
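
A minimal caller sketch is below; it is not part of the original answer, just an illustration of how the function above might be used. It grabs one frame, logs its dimensions, and releases the buffer, which comes back retained from CVPixelBufferCreate inside grabViaOpenGL().

CVPixelBufferRef frame = grabViaOpenGL();
if (frame != NULL) {
    size_t width  = CVPixelBufferGetWidth(frame);
    size_t height = CVPixelBufferGetHeight(frame);
    NSLog(@"Captured frame: %zu x %zu pixels", width, height);

    // The caller owns the returned buffer, so release it when done.
    CVPixelBufferRelease(frame);
}

If only a CGImageRef is needed, the result of CGDisplayCreateImage() inside grabViaOpenGL() could be returned directly instead of going through the Core Video conversion, as the answer notes.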