Warning: file_get_contents(/data/phpspider/zhask/data//catemap/0/backbone.js/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
如何在iphone sdk中使用audiounit和augraph播放音乐_Iphone_Core Audio - Fatal编程技术网

如何在iphone sdk中使用audiounit和augraph播放音乐

如何在iphone sdk中使用audiounit和augraph播放音乐,iphone,core-audio,Iphone,Core Audio,我不熟悉AudioUnit和AUGraph服务。 我需要使用这些服务播放单个声音文件。 我从苹果的开发者网站下载了MixeHost项目,并修改了一些代码。 但是代码现在崩溃了 我新更改的代码是: #import "JKAudioPlayer.h" #pragma mark Mixer input bus render callback // // Declared as AURenderCallback in AudioUnit/AUComponent.h. See Audio Unit

我不熟悉AudioUnit和AUGraph服务。 我需要使用这些服务播放单个声音文件。 我从苹果的开发者网站下载了MixeHost项目,并修改了一些代码。 但是代码现在崩溃了

我新更改的代码是:

#import "JKAudioPlayer.h"
#pragma mark Mixer input bus render callback

//
//    Declared as AURenderCallback in AudioUnit/AUComponent.h. See Audio Unit Component Services Reference.
static OSStatus inputRenderCallback (

                                     void                        *inRefCon,  
                                     AudioUnitRenderActionFlags  *ioActionFlags, 
                                     const AudioTimeStamp        *inTimeStamp,  
                                     UInt32                      inBusNumber,  
                                     UInt32                      inNumberFrames,
                                     AudioBufferList             *ioData   
                                     ) {

    soundStructPtr    soundStructPointerArray   = (soundStructPtr) inRefCon;
    UInt32            frameTotalForSound        = soundStructPointerArray[inBusNumber].frameCount;
    BOOL              isStereo                  = soundStructPointerArray[inBusNumber].isStereo;

    AudioUnitSampleType *dataInLeft;
    AudioUnitSampleType *dataInRight;

    dataInLeft                 = soundStructPointerArray[inBusNumber].audioDataLeft;
    if (isStereo) dataInRight  = soundStructPointerArray[inBusNumber].audioDataRight;

    AudioUnitSampleType *outSamplesChannelLeft;
    AudioUnitSampleType *outSamplesChannelRight;

    outSamplesChannelLeft                 = (AudioUnitSampleType *) ioData->mBuffers[0].mData;
    if (isStereo) outSamplesChannelRight  = (AudioUnitSampleType *) ioData->mBuffers[1].mData;

    UInt32 sampleNumber = soundStructPointerArray[0].sampleNumber;

    for (UInt32 frameNumber = 0; frameNumber < inNumberFrames; ++frameNumber) {
        outSamplesChannelLeft[frameNumber]                 = dataInLeft[sampleNumber];
        if (isStereo) outSamplesChannelRight[frameNumber]  = dataInRight[sampleNumber];
        sampleNumber++;
        if (sampleNumber >= frameTotalForSound) sampleNumber = 0;
    }
    soundStructPointerArray[inBusNumber].sampleNumber = sampleNumber;
    return noErr;
}

#pragma mark -
#pragma mark Audio route change listener callback
static void audioRouteChangeListenerCallback (
                                              void                      *inUserData,
                                              AudioSessionPropertyID    inPropertyID,
                                              UInt32                    inPropertyValueSize,
                                              const void                *inPropertyValue
                                              ) {
    if (inPropertyID != kAudioSessionProperty_AudioRouteChange) return;
    JKAudioPlayer *audioObject = (__bridge_transfer JKAudioPlayer *) inUserData;  
    if (NO == audioObject.isPlaying) {
        NSLog (@"Audio route change while application audio is stopped.");
        return;
    } else {
        CFDictionaryRef routeChangeDictionary = inPropertyValue;
        CFNumberRef routeChangeReasonRef =
        CFDictionaryGetValue (
                              routeChangeDictionary,
                              CFSTR (kAudioSession_AudioRouteChangeKey_Reason)
                              );

        SInt32 routeChangeReason;

        CFNumberGetValue (
                          routeChangeReasonRef,
                          kCFNumberSInt32Type,
                          &routeChangeReason
                          );        
        if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable) {

            NSLog (@"Audio output device was removed; stopping audio playback.");
            NSString *MixerHostAudioObjectPlaybackStateDidChangeNotification = @"MixerHostAudioObjectPlaybackStateDidChangeNotification";
            [[NSNotificationCenter defaultCenter] postNotificationName: MixerHostAudioObjectPlaybackStateDidChangeNotification object: audioObject]; 

        } else {

            NSLog (@"A route change occurred that does not require stopping application audio.");
        }
    }
}

@implementation JKAudioPlayer
@synthesize monoStreamFormat;           // mono format for use in buffer and mixer input for "beats" sound
@synthesize graphSampleRate;            // sample rate to use throughout audio processing chain
@synthesize mixerUnit;                  // the Multichannel Mixer unit
@synthesize playing;                    // Boolean flag to indicate whether audio is playing or not
@synthesize interruptedDuringPlayback;  // Boolean flag to indicate whether audio was playing when an interruption arrived
- (id) init {

    self = [super init];

    if (!self) return nil;
    NSURL *beatsLoop   = [[NSBundle mainBundle] URLForResource: @"beatsMono"
                                                 withExtension: @"caf"];
    sourceURL   = (__bridge_retained  CFURLRef) beatsLoop;
    self.interruptedDuringPlayback = NO;
    [self setupAudioSession];    
    [self setupMonoStreamFormat];
    [self readAudioFilesIntoMemory];
    [self configureAndInitializeAudioProcessingGraph];
    [self enableMixerInput: 0 isOn: YES];
    [self setMixerOutputGain:1];
    [self setMixerInput: 0 gain:YES];
    return self;
}
- (void) setupAudioSession {
    AVAudioSession *mySession = [AVAudioSession sharedInstance];
    [mySession setDelegate: self];
    NSError *audioSessionError = nil;
    [mySession setCategory: AVAudioSessionCategoryPlayback
                     error: &audioSessionError];

    if (audioSessionError != nil) {

        NSLog (@"Error setting audio session category.");
        return;
    }
    self.graphSampleRate = 44100.0;    // Hertz
    [mySession setPreferredHardwareSampleRate: graphSampleRate
                                        error: &audioSessionError];
    if (audioSessionError != nil) {

        NSLog (@"Error setting preferred hardware sample rate.");
        return;
    }
    [mySession setActive: YES
                   error: &audioSessionError];

    if (audioSessionError != nil) {

        NSLog (@"Error activating audio session during initial setup.");
        return;
    }    
    self.graphSampleRate = [mySession currentHardwareSampleRate];
    AudioSessionAddPropertyListener (
                                     kAudioSessionProperty_AudioRouteChange,
                                     audioRouteChangeListenerCallback,
                                     (__bridge void*)self
                                     );
}
- (void) setupMonoStreamFormat {
    size_t bytesPerSample = sizeof (AudioUnitSampleType);
    monoStreamFormat.mFormatID          = kAudioFormatLinearPCM;
    monoStreamFormat.mFormatFlags       = kAudioFormatFlagsAudioUnitCanonical;
    monoStreamFormat.mBytesPerPacket    = bytesPerSample;
    monoStreamFormat.mFramesPerPacket   = 1;
    monoStreamFormat.mBytesPerFrame     = bytesPerSample;
    monoStreamFormat.mChannelsPerFrame  = 1;                  // 1 indicates mono
    monoStreamFormat.mBitsPerChannel    = 8 * bytesPerSample;
    monoStreamFormat.mSampleRate        = graphSampleRate;
}


- (void) readAudioFilesIntoMemory {
    ExtAudioFileRef audioFileObject = 0;
    OSStatus result = ExtAudioFileOpenURL (sourceURL, &audioFileObject);
    if (noErr != result || NULL == audioFileObject) {NSLog(@"error ext audiofile open url %ld",result); return;}
    UInt64 totalFramesInFile = 0;
    UInt32 frameLengthPropertySize = sizeof (totalFramesInFile);
    result =    ExtAudioFileGetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_FileLengthFrames,
                                         &frameLengthPropertySize,
                                         &totalFramesInFile
                                         );

    if (noErr != result) {NSLog(@"ExtAudioFileGetProperty (audio file length in frames %ld",result); return;}
    soundStruct.frameCount = totalFramesInFile;
    AudioStreamBasicDescription fileAudioFormat = {0};
    UInt32 formatPropertySize = sizeof (fileAudioFormat);
    result =    ExtAudioFileGetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_FileDataFormat,
                                         &formatPropertySize,
                                         &fileAudioFormat
                                         );

    if (noErr != result) {NSLog( @"ExtAudioFileGetProperty (file audio format)%ld",result); return;}

    UInt32 channelCount = fileAudioFormat.mChannelsPerFrame;
    soundStruct.audioDataLeft =
    (AudioUnitSampleType *) calloc (totalFramesInFile, sizeof (AudioUnitSampleType));

    AudioStreamBasicDescription importFormat = {0};
    if (1 == channelCount) {

        soundStruct.isStereo = NO;
        importFormat = monoStreamFormat;

    } else {

        NSLog (@"*** WARNING: File format not supported - wrong number of channels");
        ExtAudioFileDispose (audioFileObject);
        return;
    }
    result =    ExtAudioFileSetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_ClientDataFormat,
                                         sizeof (importFormat),
                                         &importFormat
                                         );

    if (noErr != result) {NSLog( @"ExtAudioFileSetProperty (client data format %ld", result); return;}
    AudioBufferList *bufferList;
    bufferList = (AudioBufferList *) malloc (
                                             sizeof (AudioBufferList) + sizeof (AudioBuffer) * (channelCount - 1)
                                             );

    if (NULL == bufferList) {NSLog (@"*** malloc failure for allocating bufferList memory"); return;}

    bufferList->mNumberBuffers = channelCount;
    AudioBuffer emptyBuffer = {0};
    size_t arrayIndex;
    for (arrayIndex = 0; arrayIndex < channelCount; arrayIndex++) {
        bufferList->mBuffers[arrayIndex] = emptyBuffer;
    }
    bufferList->mBuffers[0].mNumberChannels  = 1;
    bufferList->mBuffers[0].mDataByteSize    = totalFramesInFile * sizeof (AudioUnitSampleType);
    bufferList->mBuffers[0].mData            = soundStruct.audioDataLeft;

    if (2 == channelCount) {
        bufferList->mBuffers[1].mNumberChannels  = 1;
        bufferList->mBuffers[1].mDataByteSize    = totalFramesInFile * sizeof (AudioUnitSampleType);
        bufferList->mBuffers[1].mData            = soundStruct.audioDataRight;
    }

    UInt32 numberOfPacketsToRead = (UInt32) totalFramesInFile;

    result = ExtAudioFileRead (
                               audioFileObject,
                               &numberOfPacketsToRead,
                               bufferList
                               );

    free (bufferList);

    if (noErr != result) {

        NSLog( @"ExtAudioFileRead failure - %ld " , result);
        free (soundStruct.audioDataLeft);
        soundStruct.audioDataLeft = 0;

        if (2 == channelCount) {
            free (soundStruct.audioDataRight);
            soundStruct.audioDataRight = 0;
        }

        ExtAudioFileDispose (audioFileObject);            
        return;
    }

    NSLog (@"Finished reading file  into memory");
    soundStruct.sampleNumber = 0;
    ExtAudioFileDispose (audioFileObject);
}

- (void) configureAndInitializeAudioProcessingGraph {

    NSLog (@"Configuring and then initializing audio processing graph");
    OSStatus result = noErr;
    result = NewAUGraph (&processingGraph);

    if (noErr != result) {[self printErrorMessage: @"NewAUGraph" withStatus: result]; return;}

    // I/O unit
    AudioComponentDescription iOUnitDescription;
    iOUnitDescription.componentType          = kAudioUnitType_Output;
    iOUnitDescription.componentSubType       = kAudioUnitSubType_RemoteIO;
    iOUnitDescription.componentManufacturer  = kAudioUnitManufacturer_Apple;
    iOUnitDescription.componentFlags         = 0;
    iOUnitDescription.componentFlagsMask     = 0;

    // Multichannel mixer unit
    AudioComponentDescription MixerUnitDescription;
    MixerUnitDescription.componentType          = kAudioUnitType_Mixer;
    MixerUnitDescription.componentSubType       = kAudioUnitSubType_MultiChannelMixer;
    MixerUnitDescription.componentManufacturer  = kAudioUnitManufacturer_Apple;
    MixerUnitDescription.componentFlags         = 0;
    MixerUnitDescription.componentFlagsMask     = 0;


    //............................................................................
    // Add nodes to the audio processing graph.
    NSLog (@"Adding nodes to audio processing graph");

    AUNode   iONode;         // node for I/O unit
    AUNode   mixerNode;      // node for Multichannel Mixer unit

    // Add the nodes to the audio processing graph
    result =    AUGraphAddNode (
                                processingGraph,
                                &iOUnitDescription,
                                &iONode);

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for I/O unit" withStatus: result]; return;}


    result =    AUGraphAddNode (
                                processingGraph,
                                &MixerUnitDescription,
                                &mixerNode
                                );

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for Mixer unit" withStatus: result]; return;}
    result = AUGraphOpen (processingGraph);

    if (noErr != result) {[self printErrorMessage: @"AUGraphOpen" withStatus: result]; return;}
     result =    AUGraphNodeInfo (
                                 processingGraph,
                                 mixerNode,
                                 NULL,
                                 &mixerUnit
                                 );

    if (noErr != result) {[self printErrorMessage: @"AUGraphNodeInfo" withStatus: result]; return;}
    UInt32 busCount   = 2;    // bus count for mixer unit input

    UInt32 beatsBus   = 1;    // mixer unit bus 1 will be mono and will take the beats sound

    NSLog (@"Setting mixer unit input bus count to: %lu", busCount);
    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_ElementCount,
                                   kAudioUnitScope_Input,
                                   0,
                                   &busCount,
                                   sizeof (busCount)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit bus count)" withStatus: result]; return;}
   UInt32 maximumFramesPerSlice = 4096;

    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_MaximumFramesPerSlice,
                                   kAudioUnitScope_Global,
                                   0,
                                   &maximumFramesPerSlice,
                                   sizeof (maximumFramesPerSlice)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit input stream format)" withStatus: result]; return;}
    for (UInt16 busNumber = 0; busNumber < busCount; ++busNumber) {

        // Setup the struture that contains the input render callback 
        AURenderCallbackStruct inputCallbackStruct;
        inputCallbackStruct.inputProc        = &inputRenderCallback;
        inputCallbackStruct.inputProcRefCon  = &soundStruct;

        NSLog (@"Registering the render callback with mixer unit input bus %u", busNumber);
        // Set a callback for the specified node's specified input
        result = AUGraphSetNodeInputCallback (
                                              processingGraph,
                                              mixerNode,
                                              busNumber,
                                              &inputCallbackStruct
                                              );

        if (noErr != result) {[self printErrorMessage: @"AUGraphSetNodeInputCallback" withStatus: result]; return;}
    }


    NSLog (@"Setting mono stream format for mixer unit \"beats\" input bus");
    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Input,
                                   beatsBus,
                                   &monoStreamFormat,
                                   sizeof (monoStreamFormat)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit beats input bus stream format)" withStatus: result];return;}


    NSLog (@"Setting sample rate for mixer unit output scope");
     result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_SampleRate,
                                   kAudioUnitScope_Output,
                                   0,
                                   &graphSampleRate,
                                   sizeof (graphSampleRate)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit output stream format)" withStatus: result]; return;}
     NSLog (@"Connecting the mixer output to the input of the I/O unit output element");

    result = AUGraphConnectNodeInput (
                                      processingGraph,
                                      mixerNode,         // source node
                                      0,                 // source node output bus number
                                      iONode,            // destination node
                                      0                  // desintation node input bus number
                                      );

    if (noErr != result) {[self printErrorMessage: @"AUGraphConnectNodeInput" withStatus: result]; return;}
    CAShow (processingGraph);
    result = AUGraphInitialize (processingGraph);

    if (noErr != result) {[self printErrorMessage: @"AUGraphInitialize" withStatus: result]; return;}
}

// Start playback
- (void) startAUGraph  {
     OSStatus result = AUGraphStart (processingGraph);
    if (noErr != result) {[self printErrorMessage: @"AUGraphStart" withStatus: result]; return;}

    self.playing = YES;
}

// Stop playback
- (void) stopAUGraph {
    Boolean isRunning = false;
    OSStatus result = AUGraphIsRunning (processingGraph, &isRunning);
    if (noErr != result) {[self printErrorMessage: @"AUGraphIsRunning" withStatus: result]; return;}

    if (isRunning) {

        result = AUGraphStop (processingGraph);
        if (noErr != result) {[self printErrorMessage: @"AUGraphStop" withStatus: result]; return;}
        self.playing = NO;
    }
}

- (void) enableMixerInput: (UInt32) inputBus isOn: (AudioUnitParameterValue) isOnValue {
   OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Enable,
                                             kAudioUnitScope_Input,
                                             inputBus,
                                             isOnValue,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (enable the mixer unit)" withStatus: result]; return;}


}

- (void) setMixerInput: (UInt32) inputBus gain: (AudioUnitParameterValue) newGain {
    NSLog(@"mixer input %lu gain %f",inputBus,newGain);

    OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Volume,
                                             kAudioUnitScope_Input,
                                             inputBus,
                                             newGain,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit input volume)" withStatus: result]; return;}

}
- (void) setMixerOutputGain: (AudioUnitParameterValue) newGain {
    NSLog(@"mixer output  gain %f",newGain);
    OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Volume,
                                             kAudioUnitScope_Output,
                                             0,
                                             newGain,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit output volume)" withStatus: result]; return;}

}
- (void) printErrorMessage: (NSString *) errorString withStatus: (OSStatus) result {

    char resultString[5];
    UInt32 swappedResult = CFSwapInt32HostToBig (result);
    bcopy (&swappedResult, resultString, 4);
    resultString[4] = '\0';

    NSLog (
           @"*** %@ error: %@ %@s",
           errorString,
           (char*) &resultString
           );
}

@end
#导入“jkadioplayer.h”
#pragma标记混合器输入总线渲染回调
//
//在AudioUnit/AUComponent.h中声明为AURenderCallback。请参阅音频单元组件服务参考。
静态OSStatus输入端回拨(
在refcon中无效*,
AudioUnitRenderActionFlags*ioActionFlags,
常量音频时间戳*inTimeStamp,
UInt32 InBunsNumber,
UInt32数字帧,
音频缓冲列表*ioData
) {
soundStructPtr soundStructPointerArray=(soundStructPtr)在refcon中;
UInt32 frameTotalForSound=soundStructPointerArray[inBusNumber].frameCount;
BOOL isStereo=soundStructPointerArray[inBusNumber].isStereo;
AudioUnitSampleType*dataInLeft;
AudioUnitSampleType*dataInRight;
dataInLeft=soundStructPointerArray[inBusNumber].audioDataLeft;
如果(isStereo)dataInRight=soundStructPointerArray[inBusNumber].audioDataRight;
AudioUnitSampleType*外采样通道左;
AudioUnitSampleType*outSamplesChannelRight;
outSamplesChannelLeft=(AudioUnitSampleType*)ioData->mBuffers[0].mData;
if(IsTereo)outSamplesChannelRight=(AudioUnitSampleType*)ioData->mBuffers[1].mData;
UInt32 sampleNumber=soundStructPointerArray[0]。sampleNumber;
对于(UInt32 frameNumber=0;frameNumber=frameTotalForSound)sampleNumber=0;
}
soundStructPointerArray[inBusNumber].SampNumber=SampNumber;
返回noErr;
}
#布拉格标记-
#pragma标记音频路由更改侦听器回调
静态void AudioRouteChangelisterCallback(
void*inUserData,
AudioSessionPropertyID inPropertyID,
UInt32 InProperty值大小,
常量void*inPropertyValue
) {
if(inPropertyID!=kAudioSessionProperty\u AudioRouteChange)返回;
JKAudioPlayer*audioObject=(uu桥u传输JKAudioPlayer*)inUserData;
if(NO==audioObject.isPlaying){
NSLog(@“应用程序音频停止时音频路由更改”);
回来
}否则{
CFDictionaryRef routeChangeDictionary=inPropertyValue;
CFNumberRef routeChangeReasonRef=
CfyGetValue(
路特ChangeDictionary,
CFSTR(音频路由更改原因)
);
改变原因;
CfNumberTargetValue(
RouteChangeRef,
kCFNumberSInt32Type,
&路线改变原因
);        
如果(routeChangeReason==kAudioSessionRouteChangeReason\u旧设备不可用){
NSLog(@“音频输出设备已删除;正在停止音频播放”);
NSString*MixerHostAudioObjectPlaybackStateDidChangeNotification=@“MixerHostAudioObjectPlaybackStateDidChangeNotification”;
[[NSNotificationCenter defaultCenter]postNotificationName:MixerHostAudioObject播放BackstateDidChangeNotification对象:audioObject];
}否则{
NSLog(@“发生不需要停止应用程序音频的路由更改”);
}
}
}
@音频播放器的实现
@合成单流格式;//用于缓冲区和混音器输入的“节拍”声音的单声道格式
@合成葡萄酸盐;//在整个音频处理链中使用的采样率
@合成mixerUnit;//多通道混频器
@综合播放;//用于指示音频是否正在播放的布尔标志
@合成中断播放;//布尔标志,用于指示中断到达时是否正在播放音频
-(id)init{
self=[super init];
如果(!self)返回nil;
NSURL*BeatsLop=[[NSBundle mainBundle]URLForResource:@“BeatsNo”
带扩展名:@“caf”];
sourceURL=(uuuu桥u保留的CFURLRef)beatsLoop;
self.interruptedDuringPlayback=否;
[自我设置音频会话];
[自设置格式];
[自读音频文件存储];
[self-ConfigureandInitializeeAudioProcessingGraph];
[self-enableMixerInput:0:是];
[自设置混合器输出增益:1];
[self-setMixerInput:0增益:是];
回归自我;
}
-(void)会话{
AVAudioSession*mySession=[AVAudioSession sharedInstance];
[mySession setDelegate:self];
N错误*音频会话错误=无;
[mySession setCategory:AVAudioSessionCategoryPlayback
错误:&audioSessionError];
如果(audioSessionError!=无){
NSLog(@“设置音频会话类别时出错”);
回来
}
self.graphSampleRate=44100.0;//赫兹
[mySession SetPreferredHardwares示例:graphSampleRate