使用iOS广播扩展录制视频

使用 iOS 广播扩展（Broadcast Extension）录制视频（标签：ios、ios-app-extension、avassetwriter）

我正在尝试调整iOS广播扩展功能来录制视频,而不是直播。 这似乎是可能的,因为您可以在
processSampleBuffer:withType:
方法中获取像素缓冲区

然后我编写了如下代码,但在附加缓冲区时失败

我既不熟悉AVAssetWriter,也不熟悉应用程序扩展编程,所以我无法找出这里出了什么问题

我是不是在扩展（extension）里做了不该做的事？还是我对 AVAssetWriter 的用法有误？

任何想法都是有用的。谢谢

//
//  SampleHandler.m
//  The main class of the App Extension
//
#import "SampleHandler.h"
#import <AVFoundation/AVFoundation.h>

@implementation SampleHandler {
    VideoExporter *exporter;   // owns the AVAssetWriter pipeline
    NSDate *startDate;         // wall-clock time the broadcast started
}

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
    [self setupWriter];
}

/// Prepares the output movie file inside the shared app-group container and
/// starts the exporter so incoming frames can be appended immediately.
- (void)setupWriter {
    NSFileManager *fm = [NSFileManager defaultManager];

    // Write into the shared container so the host app can access the
    // finished recording.
    NSURL *containerURL = [fm containerURLForSecurityApplicationGroupIdentifier:@"group.com.mycompany"];
    NSURL *libraryURL = [containerURL URLByAppendingPathComponent:@"Library" isDirectory:YES];
    NSURL *cachesURL = [libraryURL URLByAppendingPathComponent:@"Caches" isDirectory:YES];
    // Library/Caches may not exist yet inside the group container; the asset
    // writer does not create intermediate directories itself.
    [fm createDirectoryAtURL:cachesURL withIntermediateDirectories:YES attributes:nil error:nil];

    NSURL *outVideoURL = [cachesURL URLByAppendingPathComponent:@"output.mov"];

    // AVAssetWriter refuses to write over an existing file.
    if ([fm fileExistsAtPath:[outVideoURL path]]) {
        [fm removeItemAtPath:[outVideoURL path] error:nil];
    }

    exporter = [[VideoExporter alloc] initWithOutputURL:outVideoURL size:CGSizeMake(1280, 720) frameRate:30];
    exporter.delegate = self;
    [exporter beginExport];

    startDate = [NSDate date];
}

- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
}

- (void)broadcastResumed {
    // User has requested to resume the broadcast. Samples delivery will resume.
}

- (void)broadcastFinished {
    // User has requested to finish the broadcast. finishWriting blocks until
    // the movie file is completely on disk, because the extension process may
    // be suspended as soon as this callback returns.
    NSLog(@"User requested finish writing");
    [exporter finishWriting];
}

/// ReplayKit delivers every captured sample here; only video frames are
/// recorded, app/mic audio is currently ignored.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {

    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Handle video sample buffer
            [exporter addCMSampleBuffer:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            break;
        default:
            break;
    }
}
@end


//
//  VideoExporter.m
//  Helper class to write the video
//
#import "VideoExporter.h"

@implementation VideoExporter {
    // Whether the writer session has been opened. ReplayKit sample buffers
    // carry host-clock presentation timestamps, so the session must start at
    // the first frame's PTS — not at kCMTimeZero.
    BOOL sessionStarted;
}

@synthesize width, height;
@synthesize framesPerSecond;
@synthesize outputURL;
@synthesize delegate;

/// Designated initializer.
/// @param aURL Destination movie file URL (must not already exist when
///             -beginExport runs).
/// @param size Output video dimensions in pixels.
/// @param fps  Nominal frame rate; kept for interface compatibility — actual
///             timing now comes from each sample buffer's own timestamp.
- (id)initWithOutputURL:(NSURL *)aURL size:(CGSize)size frameRate:(uint64_t)fps {
    if ((self = [super init])) {
        width = (int)round(size.width);
        height = (int)round(size.height);
        framesPerSecond = fps;
        outputURL = aURL;
    }
    return self;
}

/// Creates the AVAssetWriter, its video input and pixel-buffer adaptor, and
/// starts writing. The timing session itself is opened lazily on the first
/// appended frame (see -appendCMSampleBuffer:).
- (void)beginExport {
    NSError *error = nil;
    writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];
    NSAssert(writer != nil, @"Writer should not be nil");

    NSDictionary *outSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
                                  AVVideoWidthKey: @(width),
                                  AVVideoHeightKey: @(height)};
    writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:outSettings];
    // Screen capture is a real-time source; without this hint the input may
    // buffer aggressively and stop reporting ready-for-more-media-data.
    writerInput.expectsMediaDataInRealTime = YES;

    NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
                                          @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                                         sourcePixelBufferAttributes:pixelAttributes];

    [writer addInput:writerInput];

    BOOL started = [writer startWriting];
    // NOTE(review): inside a broadcast extension startWriting can fail with
    // AVFoundationErrorDomain -11800 / OSStatus -17508 (privilege issue) —
    // inspect writer.error if this assertion fires.
    NSAssert(started, @"Should start writing! error=%@", writer.error);

    sessionStarted = NO;
}

/// Appends one video sample buffer delivered by ReplayKit.
- (void)addCMSampleBuffer:(CMSampleBufferRef)buf {
    if (writer.status == AVAssetWriterStatusUnknown) {
        NSLog(@"Writer status unknown!!");
    }
    [self appendCMSampleBuffer:buf];
}

/// Finalizes the movie. Blocks the caller until the file is fully written,
/// since the extension may be suspended right after broadcastFinished.
- (void)finishWriting {
    [writerInput markAsFinished];
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);

    // No explicit endSessionAtSourceTime: — the original hard-coded 1000 s
    // endpoint arbitrarily truncated/extended the session. Finishing the
    // writer implicitly ends the session at the last appended sample.
    [writer finishWritingWithCompletionHandler:^{
        // Call delegate method here
        dispatch_semaphore_signal(semaphore);
    }];
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
}

#pragma mark - Private -

/// Extracts the CVPixelBuffer from the sample and appends it using the
/// sample's own presentation timestamp.
- (void)appendCMSampleBuffer:(CMSampleBufferRef)bufferRef {
    if (![writerInput isReadyForMoreMediaData]) {
        NSLog(@"WriterInput not ready! status = %ld, error=%@", (long)writer.status, writer.error);
        return;
    }

    // BUG FIX: a CMSampleBufferRef is NOT a CVPixelBufferRef; the old cast
    // handed the adaptor a wrong CF type, so every append failed. Pull the
    // image buffer out of the sample instead.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(bufferRef);
    if (pixelBuffer == NULL) {
        NSLog(@"Sample buffer carries no pixel buffer - dropping frame");
        return;
    }

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(bufferRef);
    // Open the session with the first frame's real PTS; starting at
    // kCMTimeZero would leave all frames far outside the session window.
    if (!sessionStarted) {
        [writer startSessionAtSourceTime:pts];
        sessionStarted = YES;
    }

    BOOL success = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pts];
    if (success == NO) {
        NSLog(@"Append buffer failed! status = %ld, error=%@", (long)writer.status, writer.error);
    }
}
@end
//
//  SampleHandler.m
//  应用程序扩展的主类
//
#import "SampleHandler.h"
#import <AVFoundation/AVFoundation.h>

@implementation SampleHandler {
    VideoExporter *exporter;
    NSDate *startDate;
}

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // 用户已请求启动广播。可以提供来自 UI 扩展的设置信息，但这是可选的。
    [self setupWriter];
}

- (void)setupWriter {
    NSFileManager *fm = [NSFileManager defaultManager];

    // 将视频路径设置到共享容器中
    NSURL *containerURL = [fm containerURLForSecurityApplicationGroupIdentifier:@"group.com.mycompany"];
    NSURL *libraryURL = [containerURL URLByAppendingPathComponent:@"Library" isDirectory:true];
    BOOL isDir = false;
    NSURL *cachesURL = [libraryURL URLByAppendingPathComponent:@"Caches" isDirectory:true];
    NSURL *outVideoURL = [cachesURL URLByAppendingPathComponent:@"output.mov"];

    if ([[NSFileManager defaultManager] fileExistsAtPath:[outVideoURL path]]) {
        [[NSFileManager defaultManager] removeItemAtPath:[outVideoURL path] error:nil];
    }

    exporter = [[VideoExporter alloc] initWithOutputURL:outVideoURL size:CGSizeMake(1280, 720) frameRate:30];
    exporter.delegate = self;
    [exporter beginExport];

    startDate = [NSDate date];
}

- (void)broadcastPaused {
    // 用户已请求暂停广播。样本将停止发送。
}

- (void)broadcastResumed {
    // 用户已请求恢复广播。样本将继续发送。
}

- (void)broadcastFinished {
    // 用户已请求结束广播。
    NSLog(@"User requested finish writing");
    [exporter finishWriting];
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {

    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // 处理视频采样缓冲区
            [exporter addCMSampleBuffer:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            // 处理应用程序音频的采样缓冲区
            break;
        case RPSampleBufferTypeAudioMic:
            // 处理麦克风音频的采样缓冲区
            break;
        default:
            break;
    }
}
@end

//
//  VideoExporter.m
//  用于写入视频的辅助类
//
#import "VideoExporter.h"

@implementation VideoExporter

@synthesize width, height;
@synthesize framesPerSecond;
@synthesize outputURL;
@synthesize delegate;

- (id)initWithOutputURL:(NSURL *)aURL size:(CGSize)size frameRate:(uint64_t)fps {
    if ((self = [super init])) {
        width = (int)round(size.width);
        height = (int)round(size.height);
        framesPerSecond = fps;
        outputURL = aURL;
    }
    return self;
}

- (void)beginExport {
    NSError *error = nil;
    writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];
    NSAssert(writer != nil, @"Writer should not be nil");

    NSDictionary *outSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecTypeH264, AVVideoCodecKey,
                                 [NSNumber numberWithInt:width], AVVideoWidthKey,
                                 [NSNumber numberWithInt:height], AVVideoHeightKey, nil];
    writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:outSettings];

    NSDictionary *pixelAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil];
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:pixelAttributes];

    [writer addInput:writerInput];

    BOOL started = [writer startWriting];
    NSAssert(started, @"Should start writing!");

    [writer startSessionAtSourceTime:kCMTimeZero];
}

- (void)addCMSampleBuffer:(CMSampleBufferRef)buf {
    if (writer.status == 0) {
        NSLog(@"Writer status unknown!!");
    }
    [self appendCMSampleBuffer:buf];
}

- (void)finishWriting {
    [writerInput markAsFinished];
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);

    int64_t longDuration = 1000;
    CMTime cmTime = CMTimeMake(longDuration, 1);
    [writer endSessionAtSourceTime:cmTime];

    [writer finishWritingWithCompletionHandler:^{
        // 在此调用委托方法
        dispatch_semaphore_signal(semaphore);
    }];
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
}

#pragma mark - Private -

- (void)appendCMSampleBuffer:(CMSampleBufferRef)bufferRef {
    if (![writerInput isReadyForMoreMediaData]) {
        NSLog(@"WriterInput not ready! status = %ld, error=%@", (long)writer.status, writer.error);
        return;
    }
    BOOL success = [adaptor appendPixelBuffer:(CVPixelBufferRef)bufferRef withPresentationTime:CMTimeMake(frameCount++, (int32_t)framesPerSecond)];
    if (success == NO) {
        NSLog(@"Append buffer failed! status = %ld, error=%@", (long)writer.status, writer.error);  // 总是执行到这里
    }
}
@end

我在iOS 13.7(17H35)上测试了这种行为

似乎 AVAssetWriter 需要前台权限，而广播扩展（extension）并没有这种权限。

startWriting 调用失败，遇到了如下错误：
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={
    NSLocalizedFailureReason=An unknown error occurred (-17508),
    NSLocalizedDescription=The operation could not be completed,
    NSUnderlyingError=0x282a80120 {
        Error Domain=NSOSStatusErrorDomain Code=-17508 "(null)"
    }
}