iPhone: merging audio and video when their lengths differ

I am making a video by merging a movie file built from a single image (just one frame) with a few seconds of audio.

On iPhone devices the video duration matches the audio duration, and I see the image for the whole video.

But when I share it to an Android device (via WhatsApp) and press play, the playback time is the duration of the "movie from image" (one frame). As a test, if I create the movie file from the image repeated 100 times (10 fps, 10 seconds), playback on the Android device lasts 10 seconds.

I think Android devices play only the shortest track of the video, but if I change the time range of the video track added with addMutableTrackWithMediaType: to the audio duration, nothing happens.

Any suggestions?

Thanks for your support.

I put all my code here:

-(void) writeImagesToMovieAtPath:(NSString *)path withSize:(CGSize) size {

    NSMutableArray *m_PictArray = [NSMutableArray arrayWithCapacity:1];
    [m_PictArray addObject:[UIImage imageNamed:@"prueba.jpg"]];

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"]) 
        {
            [[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];

        }
    }

    NSLog(@"Write Started");

    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                              error:&error];    
    NSParameterAssert(videoWriter);

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:128000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:15],AVVideoMaxKeyFrameIntervalKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];    

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings,AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];    

    AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeVideo
                                             outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);

    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = NO; // offline encoding: let the writer pace its own input
    [videoWriter addInput:videoWriterInput];
    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];


    //Video encoding

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.

    int frameCount = 0;

    for(int i = 0; i<[m_PictArray count]; i++)
    {
        buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) 
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData) 
            {
                printf("appending %d attemp %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount, (int32_t)10); // frame N is shown at N/10 s (10 fps)
                /*
                Float64 seconds = 1; 
                int32_t preferredTimeScale = 10;
                CMTime frameTime = CMTimeMakeWithSeconds(seconds, preferredTimeScale);
                */
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);

                [NSThread sleepForTimeInterval:0.05];
            } 
            else 
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
    }

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];

    [videoWriterInput release];
    [videoWriter release];

    [m_PictArray removeAllObjects];

    NSLog(@"Write Ended");

    [self saveVideoToAlbum:path]; 
}


-(void)CompileFilesToMakeMovie {

    NSLog(@"CompileFilesToMakeMovie");

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];    

    //Audio file in AAC
    NSString* audio_inputFileName = @"zApY4o8QY.m4a";

    NSString* audio_inputFilePath = [NSString stringWithFormat:@"%@/%@",[[NSBundle mainBundle] resourcePath],audio_inputFileName];
    NSURL*    audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    NSString* video_inputFileName = @"essai.mp4";
    NSString* video_inputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,video_inputFileName];
    NSURL*    video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];

    NSString* outputFileName = @"outputFile.mov";
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName];

    NSURL*    outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];


    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];


    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];



    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality];   
    _assetExport.shouldOptimizeForNetworkUse = YES;
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    _assetExport.timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);

    // The composition keeps its own references, so the source assets can be released (MRC).
    [videoAsset release];
    [audioAsset release];

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        // Save only if the export actually completed.
        if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
            [self saveVideoToAlbum:outputFilePath];
        }
        [_assetExport release]; // balance the alloc above (MRC)
    }];

    NSLog(@"CompileFilesToMakeMovie Finish");
}

- (void) saveVideoToAlbum:(NSString*)path {

    NSLog(@"saveVideoToAlbum");

    if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){
        UISaveVideoAtPathToSavedPhotosAlbum (path, self, @selector(video:didFinishSavingWithError: contextInfo:), nil);
    }
}

-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if(error)
        NSLog(@"Exported with error: %@", error);
    else
        NSLog(@"Exported OK");
} 

- (CVPixelBufferRef) newPixelBufferFromCGImage: (CGImageRef)image andSize:(CGSize)frameSize {

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, 
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace, 
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    // Draw at the image's native size; assumes the image already matches frameSize.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
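
For clarity, the intended flow is to write essai.mp4 into Documents first and then mux it with the bundled audio. A minimal driver might look like this (the call site and the 320x480 output size are assumptions, not part of the original post):

    // Hypothetical driver: build the one-image movie, then merge it with the audio.
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *moviePath = [documentsDirectoryPath stringByAppendingPathComponent:@"essai.mp4"];
    [self writeImagesToMovieAtPath:moviePath withSize:CGSizeMake(320, 480)];
    [self CompileFilesToMakeMovie]; // safe to call immediately: writeImagesToMovieAtPath blocks until finishWriting returns
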
Just fixed it.

I create a movie file with the image repeated X times, and then during composition I scale it to audioAsset.duration:

CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[a_compositionVideoTrack scaleTimeRange:video_timeRange toDuration:audioAsset.duration];

You only need to repeat the image once for the track to become scalable, but if the movie has only 2 frames it plays for just 8 seconds on Android, so I made a movie file with the image repeated 10 times, which lets me get past the 45-second limit of WhatsApp video sharing. In the CompileFilesToMakeMovie method, use the video timeRange instead of the audio timeRange where needed. A sketch of the modified write loop follows.
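
Below is a minimal sketch of that modified write loop inside writeImagesToMovieAtPath:withSize:, assuming the same adaptor, m_PictArray, and newPixelBufferFromCGImage:andSize: helper as in the question's code; the repetition count of 10 comes from the answer, the rest is illustrative rather than a drop-in implementation:

    // Append the single image as 10 frames (10 fps => 1 second of source video),
    // reusing the question's pixel-buffer adaptor.
    CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:[[m_PictArray objectAtIndex:0] CGImage] andSize:size];
    for (int frameCount = 0; frameCount < 10; frameCount++) {
        // Block until the writer input can take another sample.
        while (!adaptor.assetWriterInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.05];
        }
        CMTime frameTime = CMTimeMake(frameCount, 10); // frame N is displayed at N/10 s
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]) {
            NSLog(@"Failed to append frame %d", frameCount);
        }
    }
    CVBufferRelease(buffer);

With 10 source frames in place, the scaleTimeRange:toDuration: call shown above stretches the video track to the audio length, and Android players then report the full duration.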