How to mirror an iOS screen over USB?

I'm trying to mirror an iOS device's screen to OS X over a USB connection. QuickTime does this well, and I read this article, which comes with a code sample:

However, the callback for CMIOStreamCopyBufferQueue is never called, and I'm wondering what I'm doing wrong.

Has anyone run into this problem and can provide a working example?


Thanks.

OK.. In the end I did what his blog post told me to do: discover the DAL devices and capture their output with an AVCaptureSession, like this:

#import <AVFoundation/AVFoundation.h>
#import <CoreMediaIO/CMIOHardware.h>

-(id) init {
    self = [super init];
    if (!self) return nil;

    // Allow iOS Devices Discovery
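    // (Screen-capture DAL devices stay hidden until this property is set,
    //  which would also explain CMIO callbacks never firing without it)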
    CMIOObjectPropertyAddress prop =
    { kCMIOHardwarePropertyAllowScreenCaptureDevices,
        kCMIOObjectPropertyScopeGlobal,
        kCMIOObjectPropertyElementMaster };
    UInt32 allow = 1;
    CMIOObjectSetPropertyData( kCMIOObjectSystemObject,
                              &prop, 0, NULL,
                              sizeof(allow), &allow );

    // Get devices
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
    BOOL deviceAttached = NO;
    for (int i = 0; i < [devices count]; i++) {
        AVCaptureDevice *device = devices[i];
        if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) {
            deviceAttached = YES;
            [self startSession:device];
            break;
        }
    }

    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];

    // Device not attached - subscribe to onConnect notifications
    if (!deviceAttached) {


        id deviceWasConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
                                                                        object:nil
                                                                         queue:[NSOperationQueue mainQueue]
                                                                    usingBlock:^(NSNotification *note) {
                                                                        AVCaptureDevice *device = note.object;
                                                                        [self deviceConnected:device];
                                                                    }];

        observers = [[NSArray alloc] initWithObjects:deviceWasConnectedObserver, nil];
    }

    return self;
}

- (void) deviceConnected:(AVCaptureDevice *)device {
    if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) {
        [self startSession:device];
    }
}

- (void) startSession:(AVCaptureDevice *)device {

    // Init capturing session
    session = [[AVCaptureSession alloc] init];

    // Start session configuration
    [session beginConfiguration];

    // Add session input
    NSError *error;
    newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (newVideoDeviceInput == nil) {
        dispatch_async(dispatch_get_main_queue(), ^(void) {
            NSLog(@"%@", error);
        });
    } else {
        [session addInput:newVideoDeviceInput];
    }  

    // Add session output
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (id)kCVPixelBufferPixelFormatTypeKey];

    dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);

    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    [session addOutput:videoDataOutput];

    // Finish session configuration
    [session commitConfiguration];

    // Start the session
    [session startRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSImage *resultNSImage = [self imageFromSampleBuffer:sampleBuffer];

    /*
     * Here you can do whatever you need with the frame (e.g. convert to JPG)
     */
}
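
The imageFromSampleBuffer: helper called above is not shown. Here is a minimal sketch of one way to write it, assuming the kCVPixelFormatType_32BGRA format configured on the output; the body below is illustrative, not part of the original answer:

- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {

    // Lock the pixel buffer for read-only CPU access
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // kCVPixelFormatType_32BGRA maps to a little-endian,
    // alpha-first (BGRA) bitmap context
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little |
                                                 kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    NSImage *image = [[NSImage alloc] initWithCGImage:cgImage
                                                 size:NSMakeSize(width, height)];
    CGImageRelease(cgImage);
    return image;
}

For the JPG conversion mentioned in the comment, a captured frame can then be encoded with NSBitmapImageRep, e.g. inside captureOutput:didOutputSampleBuffer:fromConnection: (hypothetical usage):

    // Hypothetical usage: encode the captured frame to JPEG data
    NSBitmapImageRep *rep = [[NSBitmapImageRep alloc]
        initWithData:[resultNSImage TIFFRepresentation]];
    NSData *jpegData = [rep representationUsingType:NSBitmapImageFileTypeJPEG
                                         properties:@{NSImageCompressionFactor: @0.8}];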