如何在 iOS 中使用比特率压缩视频?

如何在 iOS 中使用比特率压缩视频?,ios,objective-c,iphone,video,Ios,Objective C,Iphone,Video,如何使用比特率压缩视频 我试着用下面的代码来压缩视频,但它不起作用,因为它给了我一个错误,比如 ****由于未捕获的异常“NSInvalidArgumentException”而终止应用程序,原因:“*-[AVAssetReader startReading] 在读取已开始后无法再次调用”***** - (void) imagePickerController: (UIImagePickerController *) picker didFinishPickingMed

如何使用比特率压缩视频

我试着用下面的代码来压缩视频,但它不起作用,因为它给了我一个错误,比如

****由于未捕获的异常“NSInvalidArgumentException”而终止应用程序,原因:“*-[AVAssetReader startReading] 在读取已开始后无法再次调用”*****

     // UIImagePickerController delegate callback, invoked when the user
     // finishes recording/choosing a movie. Copies the movie into the
     // Documents directory, then starts bitrate-based compression into a
     // temp-directory .mp4.
     - (void) imagePickerController: (UIImagePickerController *) picker
          didFinishPickingMediaWithInfo: (NSDictionary *) info
     {
        // Handle movie capture
        NSURL *movieURL = [info objectForKey:
                           UIImagePickerControllerMediaURL];

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths firstObject];
        NSString *tempPath = [documentsDirectory stringByAppendingPathComponent:@"vid1.mp4"];

        // Copy the file on disk instead of the original
        // [NSData dataWithContentsOfURL:] round-trip, which loaded the whole
        // movie into memory and could be hundreds of MB for long recordings.
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:tempPath])
        {
            [fileManager removeItemAtPath:tempPath error:NULL];
        }

        NSError *copyError = nil;
        if ([fileManager copyItemAtURL:movieURL toURL:[NSURL fileURLWithPath:tempPath] error:&copyError])
        {
            NSLog(@"Video successfully written");
        }
        else
        {
            NSLog(@"Video writing failed: %@", copyError);
        }

        NSURL *uploadURL = [NSURL fileURLWithPath:[[NSTemporaryDirectory() stringByAppendingPathComponent:@"1234"] stringByAppendingString:@".mp4"]];

        // AVAssetWriter refuses to write over an existing file, so clear any
        // leftover output from a previous run.
        [fileManager removeItemAtURL:uploadURL error:NULL];

        // Compress movie first
        [self convertVideoToLowQuailtyWithInputURL:movieURL outputURL:uploadURL];
    }




 // Re-encodes the movie at inputURL into outputURL using H.264 at a fixed
 // average bitrate (1,250,000 bps); the audio track is passed through
 // unchanged (nil reader/writer settings).
 //
 // The video track is pumped first; when the video reader reports Completed,
 // the audio pass is started exactly once. Guarding the audio reader's
 // status is the fix for the reported crash "-[AVAssetReader startReading]
 // cannot be called again after reading has already started": AVFoundation
 // re-invokes the requestMediaDataWhenReadyOnQueue: block after the video
 // input is finished, so without the guard -startReading ran twice.
 - (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL
                                       outputURL:(NSURL*)outputURL
    {
        //setup video writer
        AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];

        NSArray *videoTracks = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
        if (videoTracks.count == 0)
        {
            NSLog(@"convertVideo: no video track in %@", inputURL);
            return;
        }
        AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];

        CGSize videoSize = videoTrack.naturalSize;

        // The average bitrate is the knob that actually shrinks the file.
        NSDictionary *videoWriterCompressionSettings = @{AVVideoAverageBitRateKey : @1250000};

        NSDictionary *videoWriterSettings = @{AVVideoCodecKey : AVVideoCodecH264,
                                              AVVideoCompressionPropertiesKey : videoWriterCompressionSettings,
                                              AVVideoWidthKey : @(videoSize.width),
                                              AVVideoHeightKey : @(videoSize.height)};

        AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                                assetWriterInputWithMediaType:AVMediaTypeVideo
                                                outputSettings:videoWriterSettings];
        videoWriterInput.expectsMediaDataInRealTime = YES;
        videoWriterInput.transform = videoTrack.preferredTransform;  // keep recorded orientation

        NSError *writerError = nil;
        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
        if (!videoWriter)
        {
            NSLog(@"convertVideo: could not create asset writer: %@", writerError);
            return;
        }
        [videoWriter addInput:videoWriterInput];

        //setup video reader
        NSDictionary *videoReaderSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};

        AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];

        NSError *readerError = nil;
        AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:&readerError];
        if (!videoReader)
        {
            NSLog(@"convertVideo: could not create asset reader: %@", readerError);
            return;
        }
        [videoReader addOutput:videoReaderOutput];

        //setup audio writer + reader, but only when the asset actually has
        //an audio track (nil outputSettings = pass-through).
        NSArray *audioTracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
        AVAssetWriterInput *audioWriterInput = nil;
        AVAssetReaderOutput *audioReaderOutput = nil;
        AVAssetReader *audioReader = nil;
        if (audioTracks.count > 0)
        {
            audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                  outputSettings:nil];
            audioWriterInput.expectsMediaDataInRealTime = NO;
            [videoWriter addInput:audioWriterInput];

            audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[audioTracks objectAtIndex:0] outputSettings:nil];
            audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];
            [audioReader addOutput:audioReaderOutput];
        }

        [videoWriter startWriting];

        //start writing from video reader
        [videoReader startReading];
        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);

        [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{

            while ([videoWriterInput isReadyForMoreMediaData])
            {
                CMSampleBufferRef sampleBuffer;

                if ([videoReader status] == AVAssetReaderStatusReading &&
                    (sampleBuffer = [videoReaderOutput copyNextSampleBuffer]))
                {
                    [videoWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);  // copyNextSampleBuffer returns +1
                }
                else
                {
                    [videoWriterInput markAsFinished];

                    if ([videoReader status] == AVAssetReaderStatusCompleted)
                    {
                        if (audioReader == nil)
                        {
                            // No audio track: finish as soon as video is done.
                            [videoWriter finishWritingWithCompletionHandler:^{
                                NSLog(@"Output URL : %@", outputURL);
                            }];
                            break;
                        }

                        // FIX: this block can run again after the video input
                        // is finished. Only start the audio pass the first
                        // time through; a second -startReading throws
                        // NSInvalidArgumentException.
                        if ([audioReader status] != AVAssetReaderStatusUnknown)
                        {
                            break;
                        }

                        //start writing from audio reader. The writer session
                        //was already started above, so it is NOT restarted here
                        //(the original code's second startSessionAtSourceTime:
                        //was redundant).
                        [audioReader startReading];

                        dispatch_queue_t audioQueue = dispatch_queue_create("processingQueue2", NULL);

                        [audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{

                            while (audioWriterInput.readyForMoreMediaData)
                            {
                                CMSampleBufferRef audioSampleBuffer;

                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (audioSampleBuffer = [audioReaderOutput copyNextSampleBuffer]))
                                {
                                    [audioWriterInput appendSampleBuffer:audioSampleBuffer];
                                    CFRelease(audioSampleBuffer);
                                }
                                else
                                {
                                    [audioWriterInput markAsFinished];

                                    if ([audioReader status] == AVAssetReaderStatusCompleted)
                                    {
                                        [videoWriter finishWritingWithCompletionHandler:^{
                                            NSLog(@"Output URL : %@", outputURL);
                                        }];
                                    }
                                    break;
                                }
                            }
                        }];
                    }
                    break;
                }
            }
        }];
    }

我也遇到了同样的崩溃问题,但在做了一些更改后,这种方法对我很有效。只需用下面的方法替换上面的方法即可。

// Re-encodes the video track of the asset at inputURL into outputURL using
// H.264 at a fixed average bitrate (1,250,000 bps); the audio track is
// passed through unchanged (nil reader/writer output settings).
//
// Flow: the video track is pumped to the writer first. Once the video
// reader reports Completed, the audio pass is started — and it is started
// exactly once, thanks to the status guard below, which is what fixes the
// "-[AVAssetReader startReading] cannot be called again after reading has
// already started" crash from the question's version.
- (void)convertVideoToLowQuailtyWithInputURL:(NSURL*)inputURL
                                   outputURL:(NSURL*)outputURL
{
    //setup video writer
    AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];

    // NOTE(review): assumes the asset has at least one video track; this
    // throws NSRangeException for audio-only assets — confirm callers.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    CGSize videoSize = videoTrack.naturalSize;

    // The average bitrate (bits/second) is the knob that actually controls
    // how much the video is compressed.
    NSDictionary *videoWriterCompressionSettings =  [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];

    NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:videoSize.width], AVVideoWidthKey, [NSNumber numberWithFloat:videoSize.height], AVVideoHeightKey, nil];

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoWriterSettings];

    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Carry the source track's orientation transform across the re-encode.
    videoWriterInput.transform = videoTrack.preferredTransform;

    // NOTE(review): init errors are discarded (error:nil) — a failed writer
    // here would surface later as a silent no-op.
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:nil];

    [videoWriter addInput:videoWriterInput];

    //setup video reader — decode to a bi-planar 4:2:0 pixel buffer format
    //that the H.264 encoder accepts.
    NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];

    AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];

    [videoReader addOutput:videoReaderOutput];

    //setup audio writer — nil outputSettings = pass the source audio through
    //without re-encoding.
    AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeAudio
                                            outputSettings:nil];

    audioWriterInput.expectsMediaDataInRealTime = NO;

    [videoWriter addInput:audioWriterInput];

    //setup audio reader
    // NOTE(review): also assumes an audio track exists (objectAtIndex:0).
    AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];

    [audioReader addOutput:audioReaderOutput];

    [videoWriter startWriting];

    //start writing from video reader
    [videoReader startReading];

    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", NULL);

    // Pull-model pump: AVFoundation invokes this block (possibly repeatedly)
    // whenever videoWriterInput can accept more data.
    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
     ^{

         while ([videoWriterInput isReadyForMoreMediaData]) {

             CMSampleBufferRef sampleBuffer;

             if ([videoReader status] == AVAssetReaderStatusReading &&
                 (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {

                 [videoWriterInput appendSampleBuffer:sampleBuffer];
                 CFRelease(sampleBuffer);
             }

             else {

                 [videoWriterInput markAsFinished];

                 if ([videoReader status] == AVAssetReaderStatusCompleted) {
                     // THE FIX: this outer block fires again after the video
                     // input is finished. If the audio pass is already reading
                     // (or done), do nothing — calling -startReading a second
                     // time is what raised NSInvalidArgumentException.
                     if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) {

                     }
                     else{
                         //start writing from audio reader
                         [audioReader startReading];

                         // NOTE(review): the writer session was already started
                         // above; this second startSessionAtSourceTime: looks
                         // redundant — confirm against AVAssetWriter docs.
                         [videoWriter startSessionAtSourceTime:kCMTimeZero];

                         dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);

                         [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{

                             while (audioWriterInput.readyForMoreMediaData) {

                                 CMSampleBufferRef sampleBuffer;

                                 if ([audioReader status] == AVAssetReaderStatusReading &&
                                     (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {

                                     [audioWriterInput appendSampleBuffer:sampleBuffer];
                                     CFRelease(sampleBuffer);
                                 }
                                 else {

                                     [audioWriterInput markAsFinished];

                                     if ([audioReader status] == AVAssetReaderStatusCompleted) {

                                         // Both tracks drained: finalize the
                                         // output file, then save it to the
                                         // photo album if compatible.
                                         [videoWriter finishWritingWithCompletionHandler:^(){
                                             //                                         [self sendMovieFileAtURL:outputURL];
                                             NSString *moviePath = [outputURL path];
                                             if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(moviePath)) {
                                                 UISaveVideoAtPathToSavedPhotosAlbum(moviePath, self,
                                                                                     @selector(video:didFinishSavingWithError:contextInfo:), nil);

                                             }
                                         }];
                                         // Stop pumping once writing is finalized.
                                         break;
                                     }
                                 }
                             }

                         }
                          ];
                     }
                 }

             }
         }
     }
     ];
}

您可以使用下面的 AVAssetExportSession 导出预设(preset)来控制压缩后视频的质量:

  • AVAssetExportPresetLowQuality(低质量)
  • AVAssetExportPresetMediumQuality(中等质量)
  • AVAssetExportPresetHighestQuality(最高质量)
代码:


请尽快帮助我解决此问题。勾选此答案:您的代码可以运行,但它生成了一个损坏的视频——无法在任何媒体播放器上播放,并且一段时间后出现内存警告,应用程序被终止。出于某种原因,我第一次调用这段代码时工作正常;如果再调用一次,它会在 [audioReader startReading] 处崩溃,提示“读取已经开始后不能再次调用”。仍然存在崩溃问题。下面这段代码运行完全正常,它还会把视频转换为较低的质量。
// Compresses the instance's `firstAsset` by rebuilding it into an
// AVMutableComposition (video + optional audio), normalizing orientation
// via a video-composition layer instruction, and exporting it with the
// medium-quality preset to Documents/CompressedVideo.mov.
// Calls -exportDidFinish: on the main queue when the export completes.
- (void)CompressVideo
{
    if(firstAsset !=nil)
    {
        //Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTrack.
        AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

        //        http://stackoverflow.com/questions/22715881/merge-video-files-with-their-original-audio-in-ios

        //VIDEO TRACK
        AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        NSError *videoInsertError = nil;
        if (![firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:&videoInsertError])
        {
            // Without a video track there is nothing to export.
            NSLog(@"CompressVideo: could not insert video track: %@", videoInsertError);
            return;
        }

        //AUDIO TRACK — only add it when the source asset actually has audio,
        //otherwise [arr lastObject] is nil and the insert silently misbehaves.
        NSArray *arr = [firstAsset tracksWithMediaType:AVMediaTypeAudio];
        if (arr.count > 0)
        {
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            NSError *audioInsertError = nil;
            if (![audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[arr lastObject] atTime:kCMTimeZero error:&audioInsertError])
            {
                // Keep going: a video-only export is still useful.
                NSLog(@"CompressVideo: could not insert audio track: %@", audioInsertError);
            }
        }

        AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

        //FIXING ORIENTATION//
        //The preferredTransform matrix encodes the capture orientation; the
        //four comparisons below map it back to a UIImageOrientation.
        AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
        AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
        BOOL  isFirstAssetPortrait_  = NO;

        CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
        if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)  {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
        if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)  {FirstAssetOrientation_ =  UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
        if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)   {FirstAssetOrientation_ =  UIImageOrientationUp;}
        if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;
        }

        //Scale the track to fit VideoWidth; portrait assets scale against
        //their natural height because naturalSize is pre-rotation.
        CGFloat FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.width;

        if(isFirstAssetPortrait_)
        {
            FirstAssetScaleToFitRatio = VideoWidth/FirstAssetTrack.naturalSize.height;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        }
        else
        {
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            // NOTE(review): the fixed 160pt vertical offset centers landscape
            // video in the render frame — tied to the renderSize below.
            [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
        }
        [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];

        MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction,nil];

        AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
        MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
        MainCompositionInst.frameDuration = CMTimeMake(1, 30);  // 30 fps
        MainCompositionInst.renderSize = CGSizeMake(VideoWidth, [UIScreen mainScreen].bounds.size.height);

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:@"CompressedVideo.mov"];

        NSLog(@"myPath Docs : %@",myPathDocs);

        NSURL *url = [NSURL fileURLWithPath:myPathDocs];

        //The export session fails if the output file already exists.
        if ([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
        {
            NSError *removeError = nil;
            if (![[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:&removeError])
            {
                NSLog(@"CompressVideo: could not remove previous output: %@", removeError);
            }
        }

        //Movie Quality
        //==================================================
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
        //==================================================

        exporter.outputURL=url;

        //Movie Type
        //==================================================
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        //==================================================
        exporter.videoComposition = MainCompositionInst;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^
         {
             // Hop back to the main queue: the completion handler is invoked
             // on an arbitrary background queue.
             dispatch_async(dispatch_get_main_queue(), ^
                            {
                                videoUrToUload = url;
                                [self exportDidFinish:exporter];
                            });
         }];
    }
}

// Export completion callback (invoked on the main queue from -CompressVideo).
// Previously failure and cancellation were silently swallowed; now they are
// logged so a broken export is diagnosable.
- (void)exportDidFinish:(AVAssetExportSession *)session
{
    if(session.status == AVAssetExportSessionStatusCompleted)
    {
         //Store URL Somewhere using session.outputURL
    }
    else if(session.status == AVAssetExportSessionStatusFailed)
    {
        NSLog(@"Export failed: %@", session.error);
    }
    else if(session.status == AVAssetExportSessionStatusCancelled)
    {
        NSLog(@"Export cancelled");
    }
}