CaptureSessionController.mm in AVCaptureToAudioUnit won't record wav, aac, or mp3 files


I am using the AVCaptureToAudioUnit sample code to record speech with the iPhone as the microphone. I am using this example as a starting point because I have had more success with this sample project than with the others.

The demo app works correctly when the file is .aif or .caf, but when I create a .wav, .aac, or .mp3 file, the following messages appear in the debug console:

AudioStreamBasicDescription:  1 ch,  44100 Hz, 'lpcm' (0x0000000E) 16-bit big-endian signed integer

2013-07-26 19:52:06.653 AVCaptureToAudioUnit[2514:907] Failed to setup audio file! (29759)
To change the file format, I made two changes:

[a] the file extension in the NSString statement (see the change in init below), and

[b] the matching file type constant defined in the Audio File Services Reference (in startRecording).

Do other properties have to be changed when using these other file formats? Has anyone else run into this problem? (Some guesses of my own are sketched after the startRecording code below.)

Here is the code for [a]:

@implementation CaptureSessionController

#pragma mark ======== Setup and teardown methods =========

- (id)init
{
    self = [super init];

    if (self) {

        NSArray  *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        //NSString *destinationFilePath = [NSString stringWithFormat: @"%@/AudioRecording.aac", documentsDirectory];
        //NSString *destinationFilePath = [NSString stringWithFormat: @"%@/AudioRecording.caf", documentsDirectory];
        //NSString *destinationFilePath = [NSString stringWithFormat: @"%@/AudioRecording.wav", documentsDirectory];
        //NSString *destinationFilePath = [NSString stringWithFormat: @"%@/AudioRecording.mp3", documentsDirectory];

        // and the following statement is the line of code found in the original example

        NSString *destinationFilePath = [NSString stringWithFormat: @"%@/AudioRecording.aif", documentsDirectory];
        _outputFile = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);

        [self registerForNotifications];
    }

    return self;
}
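For reference, these are the extension / file type pairs I am switching between in [a] and [b]; the pairing itself is just my assumption about what should match, and only the constants come from the Audio File Services Reference:

// sketch of the pairs I toggle between (not the actual project code)
NSString        *extension = @"aif";              // "caf" also works; "wav", "aac", "mp3" fail
AudioFileTypeID  fileType  = kAudioFileAIFFType;  // kAudioFileCAFType, kAudioFileWAVEType,
                                                  // kAudioFileAAC_ADTSType, kAudioFileMP3Type
NSString *destinationFilePath =
    [documentsDirectory stringByAppendingPathComponent:
        [@"AudioRecording" stringByAppendingPathExtension:extension]];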
Here is the code for [b]:

- (void)startRecording
{
    if (!self.isRecording) {
        OSErr err = kAudioFileUnspecifiedError;
        @synchronized(self) {
            if (!extAudioFile) {
                /*
                    Start recording by creating an ExtAudioFile and configuring it with the same
                    sample rate and channel layout as those of the current sample buffer.
                */
                // recording format is the format of the audio file itself
                CAStreamBasicDescription recordingFormat(currentInputASBD.mSampleRate, currentInputASBD.mChannelsPerFrame, CAStreamBasicDescription::kPCMFormatInt16, true);
                recordingFormat.mFormatFlags |= kAudioFormatFlagIsBigEndian;

                NSLog(@"Recording Audio Format:");
                recordingFormat.Print();

                err = ExtAudioFileCreateWithURL(_outputFile,

                                                //kAudioFileAAC_ADTSType,   // won't restart recording "Failed to setup audio file"
                                                //kAudioFileCAFType,        // starts and stops correctly
                                                //kAudioFileWAVEType,       // won't restart recording "Failed to setup audio file"
                                                //kAudioFileMP3Type,        // won't restart recording "Failed to setup audio file"
                                                kAudioFileAIFFType,         // starts and stops correctly

                                                &recordingFormat,
                                                currentRecordingChannelLayout,
                                                kAudioFileFlags_EraseFile,
                                                &extAudioFile);

                if (noErr == err)
                    // client format is the output format from the delay unit
                    err = ExtAudioFileSetProperty(extAudioFile, kExtAudioFileProperty_ClientDataFormat, sizeof(graphOutputASBD), &graphOutputASBD);

                if (noErr != err) {
                    if (extAudioFile) ExtAudioFileDispose(extAudioFile);
                    extAudioFile = NULL;
                }
            }
        } // @synchronized

        if (noErr == err) {
            self.recording = YES;
            NSLog(@"Recording Started");
        } else {
            NSLog(@"Failed to setup audio file! (%ld)", (long)err);
        }
    }
}
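
In case it helps to narrow this down, here is my current guess at what else might have to change, as a sketch rather than working code. As far as I can tell, WAVE files store little-endian PCM, so the kAudioFormatFlagIsBigEndian line probably has to go for .wav; and .aac/.mp3 files hold compressed data, so the recording format passed to ExtAudioFileCreateWithURL presumably cannot be 'lpcm' at all. The kAudioFormatMPEG4AAC setup below is my assumption, and for .mp3 I am not even sure iOS ships an encoder, so that case may not be possible through ExtAudioFile regardless of the format I pass.

// sketch only: my guesses, not verified

// .wav (kAudioFileWAVEType): keep Int16 PCM but drop the big-endian flag,
// since WAVE data is little-endian
CAStreamBasicDescription wavRecordingFormat(currentInputASBD.mSampleRate,
                                            currentInputASBD.mChannelsPerFrame,
                                            CAStreamBasicDescription::kPCMFormatInt16,
                                            true);
// (no "mFormatFlags |= kAudioFormatFlagIsBigEndian" here)

// .aac (kAudioFileAAC_ADTSType): the file's own data format would be AAC,
// with ExtAudioFile converting from the PCM client format set afterwards
AudioStreamBasicDescription aacRecordingFormat = {0};
aacRecordingFormat.mFormatID         = kAudioFormatMPEG4AAC;
aacRecordingFormat.mSampleRate       = currentInputASBD.mSampleRate;
aacRecordingFormat.mChannelsPerFrame = currentInputASBD.mChannelsPerFrame;
UInt32 propSize = sizeof(aacRecordingFormat);
AudioFormatGetProperty(kAudioFormatProperty_FormatInfo, 0, NULL,
                       &propSize, &aacRecordingFormat);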