Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/118.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Iphone 如何使用NSInputStream和NSOutputStream读取和写入音频文件_Iphone_Ios_Avaudiorecorder - Fatal编程技术网

Iphone 如何使用NSInputStream和NSOutputStream读取和写入音频文件

Iphone 如何使用NSInputStream和NSOutputStream读取和写入音频文件,iphone,ios,avaudiorecorder,Iphone,Ios,Avaudiorecorder,我使用AVAudioRecorder录制并保存caf音频文件。当我将(300 mb).caf格式转换为.wav格式时,应用程序因错误而崩溃(接收到内存警告.Level=1和接收到内存警告.Level=2)。如何使用NSInputStream读取音频文件,使用NSOutputStream写入音频文件 -(void) convertToWav:(NSNumber *) numIndex { // set up an AVAssetReader to read from the iPod Librar

我使用AVAudioRecorder录制并保存caf音频文件。当我将(300 mb).caf格式转换为.wav格式时,应用程序因错误而崩溃(接收到内存警告.Level=1和接收到内存警告.Level=2)。如何使用NSInputStream读取音频文件,使用NSOutputStream写入音频文件

-(void) convertToWav:(NSNumber *) numIndex
{
// set up an AVAssetReader to read from the iPod Library
int index = [numIndex integerValue];

NSString *strName;
NSString *strFilePath1 =[delegate.strCassettePathSide stringByAppendingPathComponent:@"audio_list.plist"];

bool bTapeInfoFileExists = [[NSFileManager defaultManager] fileExistsAtPath:strFilePath1];

if (bTapeInfoFileExists)
{
    NSMutableDictionary *dictInfo = [[NSMutableDictionary alloc] initWithContentsOfFile:strFilePath1];

    if ([dictInfo valueForKey:@"lastName"])
        strName =[dictInfo valueForKey:@"lastName"];
    else
        strName= [delegate.arrNameList objectAtIndex:0];
}
else
{
    strName = [delegate.arrNameList objectAtIndex:0];
}

NSString *cafFilePath =[[delegate.arrSessionList objectAtIndex:index] valueForKey:@"path"];
NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                           error:&assetError]
;
if (assetError) {
    NSLog (@"error: %@", assetError);
    return;
}

AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
                                          assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                          audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
    NSLog (@"can't add reader output... die!");
    return;
}
[assetReader addOutput: assetReaderOutput];

NSString *strWavFileName = [NSString stringWithFormat:@"%@.wav",[[cafFilePath lastPathComponent] stringByDeletingPathExtension]];
NSString *wavFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strWavFileName];

if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
    [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                      fileType:AVFileTypeWAVE
                                                         error:&assetError];
if (assetError)
{
    NSLog (@"error: %@", assetError);
    return;
}

AppDelegate *appDelegate =[[UIApplication sharedApplication]delegate];
int nSampleRate=[[appDelegate.dictWAVQuality valueForKey:@"samplerate"] integerValue];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                [NSNumber numberWithFloat:nSampleRate], AVSampleRateKey,
                                [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                          outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
    [assetWriter addInput:assetWriterInput];
}
else
{
    NSLog(@"can't add asset writer input... die!");
    return;
}

assetWriterInput.expectsMediaDataInRealTime = NO;

[assetWriter startWriting];
[assetReader startReading];

AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);

[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
                                        usingBlock: ^
 {
     while (assetWriterInput.readyForMoreMediaData)
     {
         CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
         if (nextBuffer)
         {
             // append buffer
             [assetWriterInput appendSampleBuffer: nextBuffer];
             convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
         }
         else
         {
             [assetWriterInput markAsFinished];
             //              [assetWriter finishWriting];
             [assetReader cancelReading];

             [dictTemp setValue:wavFilePath forKey:@"path"];
             [dictTemp setValue:nil forKey:@"progress"];
             [delegate.arrSessionList replaceObjectAtIndex:index withObject:dictTemp];

             NSString *strListFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:@"audiolist.plist"];
             [delegate.arrSessionList writeToFile:strListFilePath atomically:YES];
             break;
         }
     }
 }];}

我假设您使用的是ARC,您需要释放缓冲区

核心基础不按照《核心基础内存管理程序指南》中的“创建规则”进行ARC发布。您必须释放从-(CMSampleBufferRef)copyNextSampleBuffer获得的CMSampleBufferRef,否则将导致内存泄漏

https://developer.apple.com/library/mac/documentation/CoreFoundation/Conceptual/CFMemoryMgmt/Concepts/Ownership.html#//apple_ref/doc/uid/20001148-103029


if (nextBuffer)
         {
             // append buffer
             [assetWriterInput appendSampleBuffer: nextBuffer];
             convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
             CFRelease(nextBuffer);
         }

向我们展示您的一些代码可能会很有用。neilco我用我的代码更新了