iOS Swift - Merge .wav files and convert them to .mp3

I want to merge two or more .wav files into one and then convert the result to .mp3, and I would like to do this in Swift (or at least have the option to include it in a Swift project).

Merging two .wav files in Swift is not the problem. What I don't know is how to add the LAME library to a Swift project and how to use it (how to change the Objective-C LAME sample code so it can be used from Swift).

Since I was stuck on the Swift side, I tried the LAME library with Objective-C. I found sample code that converts .caf to .mp3, so I tried that. Here is what I tried:

- (void) toMp3
{
    NSString *cafFilePath = [[NSBundle mainBundle] pathForResource:@"sound" ofType:@"caf"];

    NSString *mp3FileName = @"Mp3File";
    mp3FileName = [mp3FileName stringByAppendingString:@".mp3"];
    NSString *mp3FilePath = [[NSHomeDirectory() stringByAppendingFormat:@"/Documents/"] stringByAppendingPathComponent:mp3FileName];

    NSLog(@"%@", mp3FilePath);

    @try {
        int read, write;

        FILE *pcm = fopen([cafFilePath cStringUsingEncoding:1], "rb");  //source
        fseek(pcm, 4*1024, SEEK_CUR);                                   //skip file header (fixed 4 KB skip; assumes the samples start there)
        FILE *mp3 = fopen([mp3FilePath cStringUsingEncoding:1], "wb");  //output

        const int PCM_SIZE = 8192;
        const int MP3_SIZE = 8192;
        short int pcm_buffer[PCM_SIZE*2];
        unsigned char mp3_buffer[MP3_SIZE];

        lame_t lame = lame_init();
        lame_set_in_samplerate(lame, 44100);
        lame_set_VBR(lame, vbr_default);
        lame_init_params(lame);

        do {
            read = fread(pcm_buffer, 2*sizeof(short int), PCM_SIZE, pcm);
            if (read == 0)
                write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
            else
                write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);

            fwrite(mp3_buffer, write, 1, mp3);

        } while (read != 0);

        lame_close(lame);
        fclose(mp3);
        fclose(pcm);
    }
    @catch (NSException *exception) {
        NSLog(@"%@",[exception description]);
    }
    @finally {
        [self performSelectorOnMainThread:@selector(convertMp3Finish)
                               withObject:nil
                            waitUntilDone:YES];
    }
}

- (void) convertMp3Finish
{
}
But the result of this is just an .mp3 full of noise.

So there are three problems I need to solve:

  • creating a correct mp3 from a .caf file in Objective-C
  • changing the code so it works with .wav files
  • and changing it so it can be used from Swift

I know there are many questions about encoding and converting mp3 on iOS, but I could not find one with a Swift example, nor one with working Objective-C code (beyond the code above). Thanks for any help.

There are dedicated classes for reading and writing media from/to files: AVAssetReader and AVAssetWriter, and with the help of AVAssetExportSession you can export it as an mp3 file. Or you can use

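For context, a minimal sketch of the export route this answer describes (hypothetical code, not from the original answer). Note that, as the comments further down confirm, AVAssetExportSession rejects mp3 as an output file type, so this sketch exports to .m4a instead:

    import AVFoundation

    // Hypothetical sketch (Swift 1.x-era APIs, matching the code below):
    // export an audio asset to .m4a; mp3 is not a valid export target.
    func exportAsM4A(sourceUrl: NSURL, destinationUrl: NSURL) {
        let asset = AVURLAsset(URL: sourceUrl, options: nil)
        let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A)
        session.outputFileType = AVFileTypeAppleM4A   // AVFileTypeMPEGLayer3 raises "Invalid output file type"
        session.outputURL = destinationUrl
        session.exportAsynchronouslyWithCompletionHandler {
            println("export finished with status: \(session.status.rawValue)")
        }
    }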
I want to post my working solution, because I got so many upvotes here and Naresh's answer did not help me much.

  • I have generated the lame.framework library from this project
  • I have added the library to my Swift project (Build Phases -> Link Binary With Libraries)
  • I have created a wrapper for using the C functions in Objective-C, and I use it from Swift through the bridging header
  • For concatenating the .wav files I use AVAssetExportSession in Swift
  • Now the source code. First the wrapper. It is a class that converts a .wav file to .mp3. Many things could be changed (maybe parameters for the output file and other options), but I think anyone can adapt it. I guess this could be rewritten in Swift, but I did not know how (a rough sketch follows after the Objective-C class below). So this is the Objective-C class:

    #import "AudioWrapper.h"
    #import "lame/lame.h"
    
    @implementation AudioWrapper
    
    + (void)convertFromWavToMp3:(NSString *)filePath {
    
    
        NSString *mp3FileName = @"Mp3File";
        mp3FileName = [mp3FileName stringByAppendingString:@".mp3"];
        NSString *mp3FilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:mp3FileName];
    
        NSLog(@"%@", mp3FilePath);
    
        @try {
            int read, write;
    
            FILE *pcm = fopen([filePath cStringUsingEncoding:1], "rb");  //source
            fseek(pcm, 4*1024, SEEK_CUR);                                   //skip file header (fixed 4 KB skip)
            FILE *mp3 = fopen([mp3FilePath cStringUsingEncoding:1], "wb");  //output
    
            const int PCM_SIZE = 8192;
            const int MP3_SIZE = 8192;
            short int pcm_buffer[PCM_SIZE*2];
            unsigned char mp3_buffer[MP3_SIZE];
    
            lame_t lame = lame_init();
            lame_set_in_samplerate(lame, 44100);
            lame_set_VBR(lame, vbr_default);
            lame_init_params(lame);
    
            do {
                read = fread(pcm_buffer, 2*sizeof(short int), PCM_SIZE, pcm);
                if (read == 0)
                    write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
                else
                    write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
    
                fwrite(mp3_buffer, write, 1, mp3);
    
            } while (read != 0);
    
            lame_close(lame);
            fclose(mp3);
            fclose(pcm);
        }
        @catch (NSException *exception) {
            NSLog(@"%@",[exception description]);
        }
        @finally {
            // notify on the main thread that the conversion has finished;
            // convertMp3Finish is expected to exist on the receiving class
            [self performSelectorOnMainThread:@selector(convertMp3Finish)
                                   withObject:nil
                                waitUntilDone:YES];
        }
    }

    @end
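As mentioned above, the same loop could also be written directly in Swift once lame.h is visible through the bridging header. A rough, hypothetical sketch (in current Swift syntax, unlike the Swift 1.x code below), using the same C file APIs and LAME calls as the wrapper above:

    import Foundation

    // Hypothetical Swift translation of the wrapper's LAME loop; assumes
    // lame.h is exposed through the bridging header.
    func convertWavToMp3(wavPath: String, mp3Path: String) {
        guard let pcm = fopen(wavPath, "rb") else { return }   // source file
        defer { fclose(pcm) }
        fseek(pcm, 4 * 1024, SEEK_CUR)                         // same fixed header skip as above
        guard let mp3 = fopen(mp3Path, "wb") else { return }   // output file
        defer { fclose(mp3) }

        let pcmBufferSize = 8192
        let mp3BufferSize = 8192
        var pcmBuffer = [Int16](repeating: 0, count: pcmBufferSize * 2)   // interleaved stereo samples
        var mp3Buffer = [UInt8](repeating: 0, count: mp3BufferSize)

        let lame = lame_init()
        lame_set_in_samplerate(lame, 44100)
        lame_set_VBR(lame, vbr_default)
        lame_init_params(lame)

        var read = 0
        repeat {
            // one read = up to pcmBufferSize stereo frames (2 x 16-bit samples each)
            read = fread(&pcmBuffer, 2 * MemoryLayout<Int16>.size, pcmBufferSize, pcm)
            let write: Int32
            if read == 0 {
                write = lame_encode_flush(lame, &mp3Buffer, Int32(mp3BufferSize))
            } else {
                write = lame_encode_buffer_interleaved(lame, &pcmBuffer, Int32(read), &mp3Buffer, Int32(mp3BufferSize))
            }
            fwrite(&mp3Buffer, Int(write), 1, mp3)
        } while read != 0

        lame_close(lame)
    }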
    
And the Swift AudioHelper class, which concatenates the audio files and calls the method that converts the merged .wav file to .mp3 (written against the Swift 1.x-era APIs):

    import UIKit
    import AVFoundation
    
    
    protocol AudioHelperDelegate {
        func assetExportSessionDidFinishExport(session: AVAssetExportSession, outputUrl: NSURL)
    }
    
    class AudioHelper: NSObject {
    
        var delegate: AudioHelperDelegate?
    
        func concatenate(audioUrls: [NSURL]) {
    
            //Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
            var composition = AVMutableComposition()
            var compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
    
            //create a destination URL in the temporary directory for the merged file
            var fileDestinationUrl = NSURL(fileURLWithPath: NSTemporaryDirectory().stringByAppendingPathComponent("resultmerge.wav"))
            println(fileDestinationUrl)
    
            StorageManager.sharedInstance.deleteFileAtPath(NSTemporaryDirectory().stringByAppendingPathComponent("resultmerge.wav"))
    
            var avAssets: [AVURLAsset] = []
            var assetTracks: [AVAssetTrack] = []
            var durations: [CMTime] = []
            var timeRanges: [CMTimeRange] = []
    
            var insertTime = kCMTimeZero
    
            for audioUrl in audioUrls {
                let avAsset = AVURLAsset(URL: audioUrl, options: nil)
                avAssets.append(avAsset)
    
                let assetTrack = avAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack
                assetTracks.append(assetTrack)
    
                let duration = assetTrack.timeRange.duration
                durations.append(duration)
    
                let timeRange = CMTimeRangeMake(kCMTimeZero, duration)
                timeRanges.append(timeRange)
    
                compositionAudioTrack.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: insertTime, error: nil)
                insertTime = CMTimeAdd(insertTime, duration)
            }
    
            //AVAssetExportPresetPassthrough => concatenation
            var assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
            assetExport.outputFileType = AVFileTypeWAVE
            assetExport.outputURL = fileDestinationUrl
            assetExport.exportAsynchronouslyWithCompletionHandler({
                self.delegate?.assetExportSessionDidFinishExport(assetExport, outputUrl: fileDestinationUrl!)
            })
        }
    
        func exportTempWavAsMp3() {
    
            let wavFilePath = NSTemporaryDirectory().stringByAppendingPathComponent("resultmerge.wav")
            AudioWrapper.convertFromWavToMp3(wavFilePath)
        }
    }
    
The bridging header contains:

    #import "lame/lame.h"
    #import "AudioWrapper.h"
    

Comments:

  • The problem with your answer is that when I use AVAssetExportSession and try AVFileTypeMPEGLayer3 as the outputFileType, I get the exception "Invalid output file type". I think I would already need an mp3 file to begin with in order to export to mp3. The second solution, with the library, either does not export to mp3 or I did not find any information about that; it converts the audio file to .m4a.
  • Where is the convertMp3Finish() method that is called inside AudioWrapper?
  • It is called in the Objective-C bridging file, because the library is written in Objective-C.
  • When importing it into the bridging header I cannot find "AudioWrapper.h", can you help?
  • Regarding the bounty on this code: with larger, memory-heavy .wav files the code crashes... it does not convert the .wav to an .mp3 file.
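
A closing note on the noise and crash reports above: both conversion loops skip a fixed 4*1024 bytes as the "file header", but a canonical WAV header is only 44 bytes and chunk layouts vary, so a fixed skip can drop audio or misalign the 16-bit samples; the encoder settings (44.1 kHz, interleaved stereo) must also match the actual file. A minimal, hypothetical sketch that locates the start of the samples by walking the RIFF chunks instead:

    import Foundation

    // Hypothetical helper: find the byte offset of the PCM samples in a .wav
    // file by walking its RIFF chunks instead of skipping a fixed 4*1024 bytes.
    // Minimal sketch; no handling of malformed files.
    func wavDataOffset(path: String) -> Int? {
        guard let data = FileManager.default.contents(atPath: path), data.count > 12 else { return nil }
        var offset = 12   // skip "RIFF" + 4-byte size + "WAVE"
        while offset + 8 <= data.count {
            let id = String(bytes: data[offset..<offset + 4], encoding: .ascii) ?? ""
            // the chunk size is a 4-byte little-endian integer
            let size = data[offset + 4..<offset + 8].reversed().reduce(0) { ($0 << 8) | Int($1) }
            if id == "data" {
                return offset + 8             // samples start right after the chunk header
            }
            offset += 8 + size + (size % 2)   // chunks are padded to even byte boundaries
        }
        return nil
    }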