iOS: AudioUnitRender returns null in the AudioBufferList on iPad


I'm creating an audio file from the AVAudioEngine output via AudioUnitRender. On iPhone this implementation works fine, but on iPad I get a silent audio file with the correct duration. Why does this happen?

Main method

    NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
    NSUInteger lengthInFrames = (NSUInteger)(duration * audioDescription->mSampleRate);

    const NSUInteger kBufferLength = 1024; //3756;

    AudioBufferList *bufferList = AEAllocateAndInitAudioBufferList(*audioDescription, kBufferLength);
    AudioTimeStamp timeStamp;
    memset(&timeStamp, 0, sizeof(timeStamp));
    timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
    OSStatus status = noErr;
    // Render the file in kBufferLength-frame chunks
    for (NSUInteger i = kBufferLength; i < lengthInFrames; i += kBufferLength) {
        status = [self renderToBufferList:bufferList writeToFile:audioFile bufferLength:kBufferLength timeStamp:&timeStamp];
        if (status != noErr)
            break;
    }
    // Render whatever remains after the last full chunk
    if (status == noErr && timeStamp.mSampleTime < lengthInFrames) {
        NSUInteger restBufferLength = (NSUInteger)(lengthInFrames - timeStamp.mSampleTime);
        AudioBufferList *restBufferList = AEAllocateAndInitAudioBufferList(*audioDescription, (int)restBufferLength);
        status = [self renderToBufferList:restBufferList writeToFile:audioFile bufferLength:restBufferLength timeStamp:&timeStamp];
        AEFreeAudioBufferList(restBufferList);
    }

    SInt64 fileLengthInFrames;
    UInt32 size = sizeof(SInt64);
    ExtAudioFileGetProperty(audioFile, kExtAudioFileProperty_FileLengthFrames, &size, &fileLengthInFrames);
    AEFreeAudioBufferList(bufferList);
    ExtAudioFileDispose(audioFile);
    if (status != noErr)
        [self showAlertWithTitle:@"Error" message:@"See logs for details"];
    else {
        NSLog(@"Finished writing to file at path: %@ \n File size must be %f Mb", path, (tmpData.length/1024.0)/1024.0);
        [self showAlertWithTitle:@"Success!" message:@"Now you can play a result file"];
    }
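
For reference, audioFile (an ExtAudioFileRef) and audioDescription are created elsewhere in the project and are not shown in the question. A minimal sketch of what that setup might look like, assuming a non-interleaved float PCM format; the names desc, url, and err are hypothetical:

    // Hypothetical setup sketch; the question's actual file/format code is not shown.
    AudioStreamBasicDescription desc = {0};
    desc.mSampleRate       = 44100.0;
    desc.mFormatID         = kAudioFormatLinearPCM;
    desc.mFormatFlags      = kAudioFormatFlagIsFloat | kAudioFormatFlagIsNonInterleaved;
    desc.mChannelsPerFrame = 2;
    desc.mBitsPerChannel   = 32;
    desc.mBytesPerFrame    = sizeof(float);   // one channel per buffer when non-interleaved
    desc.mBytesPerPacket   = sizeof(float);
    desc.mFramesPerPacket  = 1;
    AudioStreamBasicDescription *audioDescription = &desc;

    ExtAudioFileRef audioFile = NULL;
    NSURL *url = [NSURL fileURLWithPath:path];
    OSStatus err = ExtAudioFileCreateWithURL((__bridge CFURLRef)url,
                                             kAudioFileCAFType,
                                             audioDescription,
                                             NULL,
                                             kAudioFileFlags_EraseFile,
                                             &audioFile);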
Buffer allocation

AudioBufferList *AEAllocateAndInitAudioBufferList(AudioStreamBasicDescription audioFormat, int frameCount) {
    // Non-interleaved formats use one AudioBuffer per channel;
    // interleaved formats pack all channels into a single buffer.
    int numberOfBuffers = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? audioFormat.mChannelsPerFrame : 1;
    int channelsPerBuffer = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? 1 : audioFormat.mChannelsPerFrame;
    int bytesPerBuffer = audioFormat.mBytesPerFrame * frameCount;
    // AudioBufferList already contains one AudioBuffer, hence (numberOfBuffers - 1)
    AudioBufferList *audio = malloc(sizeof(AudioBufferList) + (numberOfBuffers - 1) * sizeof(AudioBuffer));
    if (!audio) {
        return NULL;
    }
    audio->mNumberBuffers = numberOfBuffers;
    for (int i = 0; i < numberOfBuffers; i++) {
        if (bytesPerBuffer > 0) {
            audio->mBuffers[i].mData = calloc(bytesPerBuffer, 1);
            if (!audio->mBuffers[i].mData) {
                for (int j = 0; j < i; j++) free(audio->mBuffers[j].mData);
                free(audio);
                return NULL;
            }
        } else {
            audio->mBuffers[i].mData = NULL;
        }
        audio->mBuffers[i].mDataByteSize = bytesPerBuffer;
        audio->mBuffers[i].mNumberChannels = channelsPerBuffer;
    }
    return audio;
}
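
AEFreeAudioBufferList is called above but never shown. Based on the allocator's layout, a matching deallocator would presumably look like this (an assumption, not code from the question):

    // Assumed counterpart to AEAllocateAndInitAudioBufferList: free each
    // calloc'd channel buffer, then the list structure itself.
    void AEFreeAudioBufferList(AudioBufferList *bufferList) {
        if (!bufferList) return;
        for (UInt32 i = 0; i < bufferList->mNumberBuffers; i++) {
            if (bufferList->mBuffers[i].mData) free(bufferList->mBuffers[i].mData);
        }
        free(bufferList);
    }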
Render method

- (OSStatus)renderToBufferList:(AudioBufferList *)bufferList
                   writeToFile:(ExtAudioFileRef)audioFile
                  bufferLength:(NSUInteger)bufferLength
                     timeStamp:(AudioTimeStamp *)timeStamp {
    [self clearBufferList:bufferList];
    AudioUnit outputUnit = self.engine.outputNode.audioUnit;
    OSStatus status = AudioUnitRender(outputUnit, 0, timeStamp, 0, (UInt32)bufferLength, bufferList);

    // Debug only: tmpData is used to inspect what was rendered.
    // Note that bufferLength is a frame count, not a byte count.
    [tmpData appendBytes:bufferList->mBuffers[0].mData length:bufferLength];

    float *data1 = bufferList->mBuffers[0].mData;
    float *data2 = bufferList->mBuffers[1].mData;

    for (int i = 0; i < bufferLength / 4; i++) {
        // On iPad data1[i] == 0 and data2[i] == 0
        if (data1[i] != 0 || data2[i] != 0)
            NSLog(@"%f - %f", data1[i], data2[i]);
    }

    if (status != noErr) {
        NSLog(@"Can not render audio unit");
        return status;
    }
    timeStamp->mSampleTime += bufferLength;
    status = ExtAudioFileWrite(audioFile, (UInt32)bufferLength, bufferList);
    if (status != noErr)
        NSLog(@"Can not write audio to file");
    return status;
}
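
Since the render call apparently succeeds while the buffers stay silent on iPad, one thing worth checking (a diagnostic sketch, not from the original thread) is whether the output unit's stream format actually matches the audioDescription used to allocate bufferList; a mismatch in sample rate or interleaving is one plausible cause of all-zero data:

    // Diagnostic sketch: read back the output unit's format on the output
    // scope of bus 0 and compare it with audioDescription.
    AudioStreamBasicDescription unitFormat;
    UInt32 propSize = sizeof(unitFormat);
    OSStatus err = AudioUnitGetProperty(self.engine.outputNode.audioUnit,
                                        kAudioUnitProperty_StreamFormat,
                                        kAudioUnitScope_Output,
                                        0,
                                        &unitFormat,
                                        &propSize);
    if (err == noErr) {
        NSLog(@"unit: %.0f Hz, %u ch, flags 0x%x",
              unitFormat.mSampleRate,
              (unsigned int)unitFormat.mChannelsPerFrame,
              (unsigned int)unitFormat.mFormatFlags);
    }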

The problem occurs in the render method.

Is audioFile null when you write the bufferList to it?

I write the outputUnit into the bufferList, and then the bufferList into the audio file. The signal coming from the outputUnit is not null (I can hear it when listening via the stream), but the data in the bufferList is null. Oh, and that NSData object is used only for testing; I'm not sure it reflects the current situation correctly. But at this point, in the loop for (int i = 0; i < bufferLength / 4; i++), data1 and data2 contain only zeros. If it helps, I took the code from this GitHub thread.

Try (Float32 *)data1 = (Float32 *)bufferList->mBuffers[0].mData. Maybe it's null because it was cast incorrectly? Just a guess for now.
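
Not part of the original exchange, but for anyone hitting the same problem on iOS 11 or later: AVAudioEngine's manual rendering mode renders offline without pulling on the live output unit at all, which sidesteps this class of device-specific behavior. A minimal sketch, assuming engine and lengthInFrames from the question exist and an AVAudioFile has been opened for writing; error handling abbreviated:

    NSError *error = nil;
    [engine stop];
    [engine enableManualRenderingMode:AVAudioEngineManualRenderingModeOffline
                               format:[engine.outputNode outputFormatForBus:0]
                    maximumFrameCount:4096
                                error:&error];
    [engine startAndReturnError:&error];

    AVAudioPCMBuffer *buffer =
        [[AVAudioPCMBuffer alloc] initWithPCMFormat:engine.manualRenderingFormat
                                      frameCapacity:engine.manualRenderingMaximumFrameCount];

    while (engine.manualRenderingSampleTime < (AVAudioFramePosition)lengthInFrames) {
        AVAudioFramePosition remaining =
            (AVAudioFramePosition)lengthInFrames - engine.manualRenderingSampleTime;
        AVAudioFrameCount frames =
            (AVAudioFrameCount)MIN(remaining, (AVAudioFramePosition)buffer.frameCapacity);
        AVAudioEngineManualRenderingStatus renderStatus =
            [engine renderOffline:frames toBuffer:buffer error:&error];
        if (renderStatus != AVAudioEngineManualRenderingStatusSuccess)
            break;
        // [outputFile writeFromBuffer:buffer error:&error]; // AVAudioFile opened for writing
    }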