Audio lost when getting sample buffers via AVAssetReaderTrackOutput in iOS?


Here I am using AVAssetReader to get the sample buffers and then process each frame for custom effects. But the audio is missing from the final video saved in the Documents directory. I know there is another way to process each frame, applyingCIFiltersWithHandler, but I need access to each sample buffer so I can render an image or a filter on top of it. Can you give me a solution?
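
For comparison, here is a minimal sketch of the applyingCIFiltersWithHandler route mentioned above; this is an illustrative example rather than code from the question, but the handler still hands you every frame as a CIImage, and exporting through AVAssetExportSession carries the audio track over automatically (asset and outputURL are assumed to be set up as in the code below):

    AVMutableVideoComposition *filterComposition =
        [AVMutableVideoComposition videoCompositionWithAsset:asset
                                applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
            // Apply the same sepia filter used below to each incoming frame.
            CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
            [sepia setValue:request.sourceImage forKey:kCIInputImageKey];
            [sepia setValue:@(1) forKey:kCIInputIntensityKey];
            [request finishWithImage:sepia.outputImage context:nil];
        }];

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:asset
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.videoComposition = filterComposition;
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Export finished with status: %ld", (long)exportSession.status);
    }];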

NSError *error;
NSString *path = [[NSBundle mainBundle] pathForResource:@"recordmovie" ofType:@"mov"];
NSURL *videoURL = [NSURL fileURLWithPath:path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:nil];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
// add audio track here
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];

NSDictionary *readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], kCVPixelBufferPixelFormatTypeKey, nil];

    CGSize renderSize = [videoTrack naturalSize];
    /*
NSDictionary *readerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         AVVideoCodecH264 , AVVideoCodecKey,
                                         renderSize.width , AVVideoWidthKey,
                                         renderSize.height , AVVideoHeightKey,
                                         AVVideoScalingModeResizeAspectFill,AVVideoScalingModeKey, nil];
     */

AVAssetReaderTrackOutput* readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                    outputSettings:readerOutputSettings];

    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioOutputSettings  = [NSDictionary dictionaryWithObjectsAndKeys:
                                          [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                                          [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                                          [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                                          [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                                          [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                                          nil];

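    // Decode the audio track to Linear PCM so the reader delivers raw, appendable samples.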
    NSDictionary  *settings = @{ AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatLinearPCM] };

AVAssetReaderTrackOutput *audioTrackOutput  = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:settings];

[reader addOutput:readerOutput];
[reader addOutput:audioTrackOutput];
[reader startReading];


NSMutableArray *samples = [[NSMutableArray alloc] init];

CMSampleBufferRef sample;
while((sample = [readerOutput copyNextSampleBuffer])) {
    [samples addObject:(__bridge id)sample];
    CFRelease(sample);
}



NSString *outputPath = [self getDocumentsUrlForFilterMovie];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];

AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                  fileType:AVFileTypeQuickTimeMovie
                                                     error:&error];


NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                       @(videoTrack.estimatedDataRate), AVVideoAverageBitRateKey,
                                       nil];

NSDictionary *writerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                      AVVideoCodecH264, AVVideoCodecKey,
                                      [NSNumber numberWithInt:videoTrack.naturalSize.width], AVVideoWidthKey,
                                      [NSNumber numberWithInt:videoTrack.naturalSize.height], AVVideoHeightKey,
                                      videoCompressionProps, AVVideoCompressionPropertiesKey,
                                      nil];

AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                 outputSettings:writerOutputSettings
                                                               sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];

[writerInput setExpectsMediaDataInRealTime:NO];
[writer addInput:writerInput];

AVAssetWriterInput *WriterAudioInput  = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
WriterAudioInput.expectsMediaDataInRealTime = YES;
if([writer canAddInput:WriterAudioInput]) {
    [writer addInput:WriterAudioInput];
}

AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];

[writer startWriting];
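// Begin the writer session at the presentation time of the first buffered video sample.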
[writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];


    //NSMutableArray *audioSamples = [[NSMutableArray alloc] init];
    while((sample = [audioTrackOutput copyNextSampleBuffer])) {
        //[audioSamples addObject:(__bridge id)sample];
        [WriterAudioInput appendSampleBuffer:sample];
        while (!WriterAudioInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }
        CFRelease(sample);
    }



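// Create the sepia CIFilter once and reuse it for every buffered frame.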
CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
[filter setDefaults];
[filter setValue:@(1) forKey:kCIInputIntensityKey];

//CIImage *outputImage =  filter.outputImage;

for(NSInteger i = 0; i < samples.count; i++) {

    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);

    //CVPixelBufferRef videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);

    CVPixelBufferRef videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[i]);


    CIImage *frameImage = [CIImage imageWithCVPixelBuffer:videoFrameBuffer];
    [filter setValue:frameImage forKey:kCIInputImageKey];
    CIImage *outputImage = filter.outputImage;
    [self->ciContext render:outputImage toCVPixelBuffer:videoFrameBuffer bounds:outputImage.extent colorSpace:self->colorSpace];
    while (!writerInput.readyForMoreMediaData) {
        [NSThread sleepForTimeInterval:0.1];
    }

   // [writerInput appendSampleBuffer:videoFrameBuffer];

    [pixelBufferAdaptor appendPixelBuffer:videoFrameBuffer withPresentationTime:presentationTime];

}


[writerInput markAsFinished];
[writer finishWritingWithCompletionHandler:^(){
    //[self.delegate didFinishReverse:YES andVideoURL:outputURL withError:error];
    NSLog(@"Finish video rendering");

}];

A working revision follows: it reads both the video and the audio track, appends the decoded audio samples through their own AVAssetWriterInput, and only then processes and appends the video frames, so the audio survives in the output file:

    NSError *error;
    NSString *path = [[NSBundle mainBundle] pathForResource:@"recordmovie" ofType:@"mov"];
    NSURL *videoURL = [NSURL fileURLWithPath:path];

    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
    AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&error];
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

    NSDictionary *videoReaderOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                               [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetReaderTrackOutput* assetReaderVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:videoReaderOutputSettings];

    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary* audioOutputSettings  = [NSDictionary dictionaryWithObjectsAndKeys:
                                          [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                                          [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                                          [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                                          [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                                          [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                                          nil];

    NSDictionary  *audioDecodesettings = @{ AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatLinearPCM] };

    AVAssetReaderTrackOutput *assetReaderAudioTrackOutput  = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:audioDecodesettings];

    [assetReader addOutput:assetReaderVideoTrackOutput];
    [assetReader addOutput:assetReaderAudioTrackOutput];
    [assetReader startReading];


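    // Buffer every video sample up front so frames can later be appended in normal or reverse order.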
    NSMutableArray *samples = [[NSMutableArray alloc] init];

    CMSampleBufferRef sample;
    while((sample = [assetReaderVideoTrackOutput copyNextSampleBuffer])) {
        [samples addObject:(__bridge id)sample];
        CFRelease(sample);
    }

    NSString *outputPath = [self getDocumentsUrlForFilterMovie];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];

    AVAssetWriter *assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];


    NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           @(videoTrack.estimatedDataRate), AVVideoAverageBitRateKey,
                                           nil];

    NSDictionary *writerOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                          AVVideoCodecH264, AVVideoCodecKey,
                                          [NSNumber numberWithInt:videoTrack.naturalSize.width], AVVideoWidthKey,
                                          [NSNumber numberWithInt:videoTrack.naturalSize.height], AVVideoHeightKey,
                                          videoCompressionProps, AVVideoCompressionPropertiesKey,
                                          nil];

    AVAssetWriterInput *videoWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:writerOutputSettings
                                                                        sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];

    [videoWriterInput setExpectsMediaDataInRealTime:NO];
    [assetWriter addInput:videoWriterInput];

    AVAssetWriterInput *audioWriterInput  = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;
    if([assetWriter canAddInput:audioWriterInput]) {
        [assetWriter addInput:audioWriterInput];
    }

    AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];

    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];

    // Drain the audio track first, waiting for the writer input to be ready before each append.
    while((sample = [assetReaderAudioTrackOutput copyNextSampleBuffer])) {
        while (!audioWriterInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }
        [audioWriterInput appendSampleBuffer:sample];
        CFRelease(sample);
    }


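    // Replay the buffered frames through the filter chain, in forward or reverse order;
    // presentation times are always taken in forward order so they stay monotonically increasing.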
    for(NSInteger i = 0; i < samples.count; i++) {

        CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);

        CVPixelBufferRef videoFrameBuffer = nil;

        if(frameRenderType == KVideoNormal) {
            videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[i]);
        } else if (frameRenderType == KVideoReverse) {
            videoFrameBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);
        }
        if(self.filters.count > 0) {
            CIImage *frameImage = [CIImage imageWithCVPixelBuffer:videoFrameBuffer];

            for(CIFilter *filter in self.filters) {

                [filter setValue:frameImage forKey:kCIInputImageKey];
                frameImage = filter.outputImage;

            }

            [self->ciContext render:frameImage toCVPixelBuffer:videoFrameBuffer bounds:frameImage.extent colorSpace:self->colorSpace];
        }
        while (!videoWriterInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.1];
        }

        [pixelBufferAdaptor appendPixelBuffer:videoFrameBuffer withPresentationTime:presentationTime];

    }

    [audioWriterInput markAsFinished];
    [videoWriterInput markAsFinished];
    [assetWriter finishWritingWithCompletionHandler:^(){
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"Finished video processing");
        });
    }];
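
As a side note, the sleepForTimeInterval polling above works but is wasteful; AVAssetWriterInput also offers a pull model via requestMediaDataWhenReadyOnQueue:usingBlock:. A minimal sketch for the audio track, assuming the same audioWriterInput and assetReaderAudioTrackOutput as above:

    dispatch_queue_t audioQueue = dispatch_queue_create("audio.writer.queue", DISPATCH_QUEUE_SERIAL);
    [audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
        // The writer invokes this block whenever the input can accept more samples.
        while (audioWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextSample = [assetReaderAudioTrackOutput copyNextSampleBuffer];
            if (nextSample) {
                [audioWriterInput appendSampleBuffer:nextSample];
                CFRelease(nextSample);
            } else {
                // No more audio: finish this input so the writer can complete.
                [audioWriterInput markAsFinished];
                break;
            }
        }
    }];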