
Real-time AAC encoding with Audio Units on iOS


I'm trying to stream audio using Audio Unit Services and Audio Converter Services, but I'm a bit stuck. My goal is to capture microphone input (PCM), convert it to AAC in real time, and send the packets over the network. It seems to work, but the audio sounds terrible and the app crashes after about 4 seconds.
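For the input callback to fire at all, the audio session must be initialized, given a record-capable category, and activated before the unit starts; that part is not shown in the question. A minimal sketch of the missing setup, using the same C Audio Session API the code below already calls (this is an assumption, not the question's code):

// Assumed setup (not shown in the question): initialize and activate
// the audio session with a category that allows recording.
AudioSessionInitialize(NULL, NULL, NULL, NULL);
UInt32 category = kAudioSessionCategory_PlayAndRecord;
AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,
                        sizeof(category), &category);
AudioSessionSetActive(true);
// ...followed by AudioOutputUnitStart(_audioUnit) once the unit is initialized.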

// Initialize the audio unit

OSStatus status;
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    AudioComponent component = AudioComponentFindNext(NULL, &desc);
    status = AudioComponentInstanceNew(component, &_audioUnit);
    NSLog(@"status instance new: %d", (int)status); // OSStatus is SInt32, so %d rather than %lu

    // Enable input on bus 1 (the microphone side of the Remote I/O unit)
    UInt32 flag = 1;
    status = AudioUnitSetProperty(_audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  1,
                                  &flag,
                                  sizeof(flag));
    NSLog(@"status AudioUnitSetProperty input: %d", (int)status);

    // Packed 16-bit signed integer mono PCM at 44.1 kHz
    AudioStreamBasicDescription audioFormat;
    memset(&audioFormat, 0, sizeof(AudioStreamBasicDescription));
    audioFormat.mSampleRate         = 44100;
    audioFormat.mFormatID           = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket    = 1;
    audioFormat.mChannelsPerFrame   = 1;
    audioFormat.mBitsPerChannel     = 16;
    audioFormat.mBytesPerPacket     =
    audioFormat.mBytesPerFrame      = audioFormat.mChannelsPerFrame * sizeof(SInt16);

    // Note: for recording, this format is normally also applied to the
    // output scope of bus 1, which is where the mic data actually comes out.
    status = AudioUnitSetProperty(_audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  0,
                                  &audioFormat,
                                  sizeof(audioFormat));
    NSLog(@"status AudioUnitSetProperty audioFormat: %d", (int)status);

    AURenderCallbackStruct renderCallbackInfo;
    renderCallbackInfo.inputProc       = recordingCallback;
    renderCallbackInfo.inputProcRefCon = NULL;
    AudioUnitSetProperty(_audioUnit,
                         kAudioOutputUnitProperty_SetInputCallback,
                         kAudioUnitScope_Global,
                         1,
                         &renderCallbackInfo,
                         sizeof(renderCallbackInfo));

    float aBufferLength = 0.005; // preferred I/O buffer duration, in seconds
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration,
                            sizeof(aBufferLength), &aBufferLength);

    _converter = [[Converter alloc] initWithFormat];
    status = AudioUnitInitialize(_audioUnit);
    NSLog(@"status AudioUnit initialize: %d", (int)status);
// The recording callback:

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {

    // Passing mData = NULL lets AudioUnitRender supply its own buffer
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mData = NULL;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);
    OSStatus status = AudioUnitRender(_audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, &bufferList);
    AudioBuffer aac;

    // pcm is a file-scope AudioBuffer (its declaration is not shown).
    // Both of these mallocs happen on every render cycle and are never freed.
    pcm.mData = malloc(inNumberFrames * sizeof(SInt16));
    unsigned char * p = malloc(inNumberFrames * sizeof(SInt16));
    memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
    memcpy(pcm.mData, p, bufferList.mBuffers[0].mDataByteSize); // dunno why i can't memcpy bufferlist data directly to pcm struct
    pcm.mDataByteSize = inNumberFrames * sizeof(SInt16);
    pcm.mNumberChannels = 1;
    int osstatus = [_converter convertAudioBuffer:&pcm EncodedAudioBuffer:&aac];

    // send to the network
    NSData* data = [[NSData alloc] initWithBytes:aac.mData length:aac.mDataByteSize];
    dispatch_async(myQueue, ^{ _pts += 1024; [sender sendBuf2:data withTime:_pts]; });
    return noErr;
}
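As flagged in the comments above, the two per-callback allocations leak on every render cycle, which happens hundreds of times per second. Since initWithBytes:length: copies the bytes, both buffers can be released before the callback returns; a minimal patch along these lines (assuming the surrounding code stays as written):

// Sketch: free the per-callback allocations once the encoded data has
// been copied into the NSData object. initWithBytes:length: makes its
// own copy, so neither buffer is needed after this point.
NSData *data = [[NSData alloc] initWithBytes:aac.mData length:aac.mDataByteSize];
free(p);
free(pcm.mData);
pcm.mData = NULL;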
OSStatus MyAudioConverterCallback(AudioConverterRef inAudioConverter,
                                  UInt32 *ioDataPacketCount,
                                  AudioBufferList *ioData,
                                  AudioStreamPacketDescription **outDataPacketDescription,
                                  void *inUserData)
{
    // PCM input has no per-packet descriptions
    if (outDataPacketDescription)
    {
        *outDataPacketDescription = NULL;
    }
    // Hand the converter the PCM buffer stashed in the settings struct.
    // Note that *ioDataPacketCount is never updated here.
    Settings *audioConverterSettings = (Settings *)inUserData;
    ioData->mBuffers[0].mData = audioConverterSettings->buffer.mData;
    ioData->mBuffers[0].mDataByteSize = audioConverterSettings->buffer.mDataByteSize;
    ioData->mBuffers[0].mNumberChannels = audioConverterSettings->buffer.mNumberChannels;

    return noErr;
}
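An AudioConverterComplexInputDataProc is expected to report through *ioDataPacketCount how many packets it actually delivered, and to return a non-zero status once it has no more input; otherwise the converter may keep calling back into a buffer that has already been consumed. A hedged sketch of that bookkeeping, reusing the bytesToEncode field that appears (commented out) in convertAudioBuffer: below:

// Sketch of the missing bookkeeping inside MyAudioConverterCallback.
if (audioConverterSettings->bytesToEncode == 0)
{
    *ioDataPacketCount = 0;
    return -1; // any non-zero status tells the converter we're out of data
}
// For packed 16-bit mono PCM, one packet == one frame == 2 bytes.
*ioDataPacketCount = audioConverterSettings->buffer.mDataByteSize / 2;
audioConverterSettings->bytesToEncode = 0; // everything handed over in one shot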
// On the converter side:

-(int) convertAudioBuffer:(AudioBuffer*)inSamples EncodedAudioBuffer:(AudioBuffer*)outData {
    memset(_buffer, 0, _converterSettings.maxPacketSize);

    _converterSettings.buffer = *inSamples;
    //_converterSettings.bytesToEncode = inSamples->mDataByteSize;

    // Ask for one AAC packet (1024 source frames) per call
    UInt32 ioOutputDataPackets = 1;
    AudioStreamPacketDescription outPacketDesc[1];

    AudioBufferList convertedData;
    convertedData.mNumberBuffers = 1;
    convertedData.mBuffers[0].mNumberChannels = 1;
    convertedData.mBuffers[0].mDataByteSize = _converterSettings.maxPacketSize;
    convertedData.mBuffers[0].mData = _buffer;

    OSStatus error = AudioConverterFillComplexBuffer(_audioConverter,
                                                     MyAudioConverterCallback,
                                                     &_converterSettings,
                                                     &ioOutputDataPackets,
                                                     &convertedData,
                                                     outPacketDesc);
    if (error != noErr)
    {
        NSError *err = [NSError errorWithDomain:NSOSStatusErrorDomain code:error userInfo:nil];
        NSLog(@"AudioConverterFillComplexBuffer failed: %@", err);
    }
    NSLog(@"%ld bytes encoded", (long)convertedData.mBuffers[0].mDataByteSize);
    /* Set the output data */
    outData->mNumberChannels    = convertedData.mBuffers[0].mNumberChannels;
    outData->mDataByteSize      = convertedData.mBuffers[0].mDataByteSize;
    outData->mData              = convertedData.mBuffers[0].mData;
    return 0;
}
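One point worth noting about the "send over the network" step: AudioConverterFillComplexBuffer emits raw AAC access units with no framing, so a receiver that expects a self-describing stream usually needs a 7-byte ADTS header prepended to each packet. A sketch for the AAC-LC, 44.1 kHz, mono case assumed throughout this question (the function name is illustrative):

// Sketch: fill a 7-byte ADTS header for one raw AAC packet.
// Assumes AAC-LC (audio object type 2), 44.1 kHz (index 4), mono (1).
static void addADTSHeader(uint8_t *header, int aacPacketLength)
{
    int profile     = 2;                   // AAC LC
    int freqIdx     = 4;                   // 44100 Hz
    int chanCfg     = 1;                   // mono
    int frameLength = aacPacketLength + 7; // payload plus this header

    header[0] = 0xFF;                      // syncword, high 8 bits
    header[1] = 0xF1;                      // syncword + MPEG-4, layer 0, no CRC
    header[2] = ((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2);
    header[3] = ((chanCfg & 3) << 6) + (frameLength >> 11);
    header[4] = (frameLength & 0x7FF) >> 3;
    header[5] = ((frameLength & 7) << 5) + 0x1F; // buffer fullness, high bits
    header[6] = 0xFC;                            // buffer fullness low bits, 1 raw block
}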
And finally, the output:

2013-07-16 16:58:57.192 AudioUnitAAC[84656:c07] status instance new: 0
2013-07-16 16:58:57.195 AudioUnitAAC[84656:c07] status AudioUnitSetProperty input: 0
2013-07-16 16:58:57.197 AudioUnitAAC[84656:c07] status AudioUnitSetProperty audioFormat: 0
2013-07-16 16:58:57.235 AudioUnitAAC[84656:c07] status AudioUnit initialize: 0
2013-07-16 16:58:58.182 AudioUnitAAC[84656:c07] start : 0
2013-07-16 16:58:58.200 AudioUnitAAC[84656:6e07] 4 bytes encoded
2013-07-16 16:58:58.211 AudioUnitAAC[84656:6e07] 152 bytes encoded
2013-07-16 16:58:58.223 AudioUnitAAC[84656:6e07] 169 bytes encoded
2013-07-16 16:58:58.235 AudioUnitAAC[84656:6e07] 157 bytes encoded
2013-07-16 16:58:58.246 AudioUnitAAC[84656:6e07] 160 bytes encoded
2013-07-16 16:58:58.258 AudioUnitAAC[84656:6e07] 164 bytes encoded
....
until it crashes for some seemingly random reason (a bad access inside AudioConverterFillComplexBuffer, an illegal instruction on the NSLog(@"%ld", convertedData.mBuffers[0].mDataByteSize) line, ...).


I'm new to Apple's Core Audio, so any help would be greatly appreciated :)

Thanks to this post, I got it working! I changed:

pcm.mData = malloc(inNumberFrames * sizeof(SInt16));
unsigned char * p = malloc(inNumberFrames * sizeof(SInt16));
memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
memcpy(pcm.mData,p,bufferList.mBuffers[0].mDataByteSize); // dunno why i can't memcpy bufferlist data directly to pcm struct
pcm.mDataByteSize = inNumberFrames * sizeof(SInt16);
pcm.mNumberChannels = 1;
to:

And in the converter callback, I do the following:

ioData->mBuffers[0].mData = audioBuffer.mBuffers[0].mData;
ioData->mBuffers[0].mDataByteSize = audioBuffer.mBuffers[0].mDataByteSize;
ioData->mBuffers[0].mNumberChannels = audioBuffer.mBuffers[0].mNumberChannels;
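The replacement snippet itself isn't shown above, but the converter callback makes the shape of the fix clear: the rendered samples are kept in a persistent AudioBufferList member named audioBuffer, so the converter no longer reads from a transient buffer that may be gone by the time it runs. A hedged reconstruction (inferred, not the answer's verbatim code):

// Inferred sketch: a persistent AudioBufferList whose data outlives the
// recording callback, with storage allocated once rather than per call.
static AudioBufferList audioBuffer;            // assumed file-scope

// one-time setup, e.g. next to AudioUnitInitialize:
audioBuffer.mNumberBuffers = 1;
audioBuffer.mBuffers[0].mNumberChannels = 1;
audioBuffer.mBuffers[0].mData = malloc(4096);  // large enough for one render slice

// inside recordingCallback, replacing the pcm/malloc block:
audioBuffer.mBuffers[0].mDataByteSize = bufferList.mBuffers[0].mDataByteSize;
memcpy(audioBuffer.mBuffers[0].mData,
       bufferList.mBuffers[0].mData,
       bufferList.mBuffers[0].mDataByteSize);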

Did you ever get this working? I'm having the same problem on iOS 7.1.
Nope, I had to go back to Audio Queues.