Iphone iOS:降低包含iPod音乐的MPMediaItem的比特率
我正在制作一个将主题音乐添加到视频的应用程序。一些用户抱怨说,如果他们的音乐是苹果的无损格式,生成的视频就会太大。我发现这是因为我使用的
AVMutableComposition
只是将原始音乐格式放入我生成的视频中
有没有办法降低MPMediaItem中音乐的比特率,或者改变它的编码格式
这是我用来将音乐添加到视频中的代码片段
// Compose the source audio and video tracks into one movie and export it.
// NOTE: AVAssetExportPresetPassthrough copies the audio in its original
// format (e.g. Apple Lossless), which is why output files get huge; the
// audio should be re-encoded first (see AudioUtil below).
AVMutableComposition *mixComposition = [AVMutableComposition composition];

// Audio track: lay the music under the full duration of the video.
AVMutableCompositionTrack *compositionAudioTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];
// firstObject is nil-safe; objectAtIndex:0 throws on assets with no audio track.
AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
NSError *trackError = nil;
if (sourceAudioTrack) {
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:sourceAudioTrack
                                    atTime:kCMTimeZero
                                     error:&trackError];
}

// Video track: copy the source video verbatim.
AVMutableCompositionTrack *compositionVideoTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (sourceVideoTrack) {
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:sourceVideoTrack
                                    atTime:kCMTimeZero
                                     error:&trackError];
}

// Export: overwrite any previous output file, then write a QuickTime movie.
AVAssetExportSession *_assetExport =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetPassthrough];
NSURL *exportUrl = [NSURL fileURLWithPath:_videoOutputPath];
if ([[NSFileManager defaultManager] fileExistsAtPath:_videoOutputPath]) {
    [[NSFileManager defaultManager] removeItemAtPath:_videoOutputPath error:nil];
}
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;
// Original snippet had a malformed handler (`^(void ) {}`) and never closed
// the message send with `];` — fixed here.
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // TODO: inspect _assetExport.status / _assetExport.error and notify the caller.
}];
我终于明白了,这是我使用的代码:
// KVO key path observed on AVAssetWriterInput so we know when it can accept
// more sample buffers (readyForMoreMediaData).
static NSString * const kWriterInputIsReadyForMoreData = @"readyForMoreMediaData";
#import <AVFoundation/AVFoundation.h>
// Re-encodes an audio track (e.g. lossless iPod music) into a smaller,
// compressed file: an AVAssetReader decodes the source to linear PCM and an
// AVAssetWriter re-encodes it with the channel/sample-rate/bitrate settings
// supplied by QLVideoFormatProvider.
@implementation AudioUtil
{
AVAssetReader *_assetReader;             // decodes source audio to LPCM
AVAssetWriter *_assetWriter;             // encodes LPCM into the target format
AVAssetWriterInput *_assetWriterInput;   // the writer's single audio input
AVAssetReaderTrackOutput *_readerOutput; // the reader's audio-track output
void (^_callback)(BOOL);                 // completion callback; YES on success
CMSampleBufferRef _sampleBufferToAppend; // next decoded buffer awaiting append
}
// Transcodes the audio of sourceURL into destURL using the format settings
// from QLVideoFormatProvider, restricted to timeRange, then invokes callback
// with YES on success. Appending is driven by -appendBufferToAppend and
// resumed via KVO on the writer input (see -observeValueForKeyPath:...).
-(void)downSamplingAudioWithSourceURL:(NSURL *)sourceURL destinationURL:(NSURL *)destURL timeRange:(CMTimeRange)timeRange callBack:(void (^)(BOOL))callback
{
NSError *error = nil;
_callback = callback;
// Best-effort delete of a stale output file; AVAssetWriter fails if it exists.
[[NSFileManager defaultManager] removeItemAtURL:destURL error:nil];

// --- Reader: decode the source audio track to linear PCM. ---
AVURLAsset *inputAsset = [AVURLAsset assetWithURL:sourceURL];
_assetReader = [[AVAssetReader alloc] initWithAsset:inputAsset error:&error];
if (!_assetReader) { // was: error silently discarded
    NSLog(@"failed to create asset reader: %@", error);
    if (callback) callback(NO);
    return;
}
_assetReader.timeRange = timeRange;
// firstObject is nil-safe; objectAtIndex:0 threw on assets without audio.
AVAssetTrack *track = [[inputAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (!track) {
    if (callback) callback(NO);
    return;
}
NSMutableDictionary *audioReadSettings = [NSMutableDictionary dictionary];
audioReadSettings[AVFormatIDKey] = @(kAudioFormatLinearPCM);
audioReadSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
_readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
NSAssert([_assetReader canAddOutput:_readerOutput], @"reader can't add output");
[_assetReader addOutput:_readerOutput];

// --- Writer: encode the PCM into the compressed target format. ---
_assetWriter = [[AVAssetWriter alloc] initWithURL:destURL fileType:[QLVideoFormatProvider audioFileType] error:&error];
if (!_assetWriter) { // was: error:nil, failure went unnoticed
    NSLog(@"failed to create asset writer: %@", error);
    if (callback) callback(NO);
    return;
}
NSMutableDictionary *audioOutputSettings = [NSMutableDictionary dictionary];
audioOutputSettings[AVFormatIDKey] = [QLVideoFormatProvider audioFormatKeyForEncoder];
audioOutputSettings[AVNumberOfChannelsKey] = @([QLVideoFormatProvider audioChannelCount]);
audioOutputSettings[AVSampleRateKey] = @([QLVideoFormatProvider audioSampleRate]);
audioOutputSettings[AVEncoderBitRateKey] = @([QLVideoFormatProvider audioBitrate]);
_assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
NSAssert([_assetWriter canAddInput:_assetWriterInput], @"writer can't add input"); // was: unchecked
[_assetWriter addInput:_assetWriterInput];

// --- Start the transcode. ---
if (![_assetWriter startWriting]) { // was: return value ignored
    NSLog(@"writer failed to start: %@", _assetWriter.error);
    if (callback) callback(NO);
    return;
}
[_assetWriter startSessionAtSourceTime:kCMTimeZero];
BOOL canStartReading = [_assetReader startReading];
NSLog(@"can start reading %d",canStartReading);
if (!canStartReading) {
    callback(NO);
    return;
}
// Observe readiness so appending resumes whenever the input can take more data.
[_assetWriterInput addObserver:self forKeyPath:kWriterInputIsReadyForMoreData options:NSKeyValueObservingOptionOld|NSKeyValueObservingOptionNew context:NULL];
_sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
[self appendBufferToAppend];
}
// Appends decoded sample buffers to the writer input until either the input
// stops accepting data (we are resumed later by KVO) or the source track is
// exhausted, at which point the writer is finalized and _callback fired.
-(void)appendBufferToAppend
{
// Iterate instead of recursing: the original called itself once per sample
// buffer, which can grow the stack unboundedly on long tracks.
while ([_assetWriterInput isReadyForMoreMediaData]) {
    if (_sampleBufferToAppend) {
        if (![_assetWriterInput appendSampleBuffer:_sampleBufferToAppend]) {
            // was: return value ignored — a failed append is now at least logged.
            NSLog(@"appendSampleBuffer failed: %@", _assetWriter.error);
        }
        CFRelease(_sampleBufferToAppend);
        _sampleBufferToAppend = NULL; // was: dangling pointer until reassigned
    }
    _sampleBufferToAppend = [_readerOutput copyNextSampleBuffer];
    if (!_sampleBufferToAppend) {
        // Source exhausted: finish the file and report the final status.
        [_assetWriter finishWritingWithCompletionHandler:^{
            if (_callback) {
                _callback(_assetWriter.status == AVAssetWriterStatusCompleted);
            }
        }];
        return;
    }
}
// Input not ready: KVO on readyForMoreMediaData re-enters this method.
}
// KVO callback: when the writer input becomes ready for more media data,
// resume draining the reader via -appendBufferToAppend.
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if ([keyPath isEqualToString:kWriterInputIsReadyForMoreData]) {
    // Test truthiness directly; comparing a BOOL against YES is unreliable
    // (BOOL is a signed char — any nonzero value is true).
    if ([change[NSKeyValueChangeNewKey] boolValue]) {
        [self appendBufferToAppend];
    }
}
}
静态NSString*常量kWriterInputIsReadyForMoreData=@“readyForMoreMediaData”;
#进口
@实现AudioUtil
{
Avassetrader*_assetrader;
AVAssetWriter*_assetWriter;
AVAssetWriterInput*_assetWriterInput;
AvassetraderTrackOutput*\u读取器输出;
无效(BOOL);;
CMSampleBufferRef_sampleBufferToAppend;
}
-(void)downSamplingAudioWithSourceURL:(NSURL*)sourceURL destinationURL:(NSURL*)destURL timeRange:(CMTimeRange)timeRange回调:(void(^)(BOOL))回调
{
n错误*错误=nil;
_回调=回调;
[[NSFileManager defaultManager]删除属性:destURL错误:nil];
//初始化读取器
AVURLAsset*inputAsset=[AvurlAssetWithUrl:sourceURL];
_AssetTreader=[[AvAssetTreader alloc]initWithAsset:inputAsset错误:&错误];
_AssetTreader.timeRange=时间范围;
AVAssetTrack*track=[[inputAsset tracksWithMediaType:AVMediaTypeAudio]对象索引:0];
NSMutableDictionary*audioReadSettings=[NSMutableDictionary];
audioReadSettings[AVFormatIDKey]=@(kAudioFormatLinearPCM);
audioReadSettings[AVNumberOfChannelsKey]=@([QLVeoFormatProvider audioChannelCount]);
_readerOutput=[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track outputSettings:audioReadSettings];
NSAssert([[u assetReader CanadOutput:[u readerOutput],@“reader无法添加输出”);
[\u assetReader addOutput:\u readerOutput];
//初始化写入程序
_assetWriter=[[AVAssetWriter alloc]initWithURL:destURL文件类型:[QLVideoFormatProvider audioFileType]错误:nil];
NSMutableDictionary*音频输出设置=[NSMutableDictionary];
audioOutputSettings[AVFormatIDKey]=[QLVideoFormatProvider audioFormatKeyForEncoder];
audioOutputSettings[AVNumberOfChannelsKey]=@([QLVideoFormatProvider audioChannelCount]);
audioOutputSettings[AVSampleRateKey]=@([QLVideoFormatProvider audioSampleRate]);
audioOutputSettings[AvencoderBiteKey]=@([QLVideoFormatProvider audioBitrate]);
_assetWriterInput=[AVAssetWriterInput assetWriterInputWithMediaType:AvMediaType音频输出设置:音频输出设置];
[_assetWriter附加输入:_assetWriterInput];
//开始
[_AssetWriterStartWriting];
[_AssetWriterStartSessionatSourceTime:kCMTimeZero];
BOOL canStartReading=[u assetrader startReading];
NSLog(@“可以开始读取%d”,可以开始读取);
如果(!canstartreding){
回调(否);
返回;
}
[_AssetWriterInputAddObserver:self-forKeyPath:KwriterInputIsReadyForMore数据选项:NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew-context:NULL];
_sampleBufferToAppend=[\u readerOutput copyNextSampleBuffer];
[自行出现];
}
-(无效)附录
{
如果([\u assetWriterInput已准备好接收更多MediaData]){
如果(_sampleBufferToAppend){
[\u assetWriterInput appendSampleBuffer:\u sampleBufferToAppend];
CFRelease(_sampleBufferToAppend);
}
_sampleBufferToAppend=[\u readerOutput copyNextSampleBuffer];
如果(_sampleBufferToAppend){
[自行出现];
}
否则{
[\u assetWriter使用CompletionHandler完成写入:^(){
如果(_回调){
_回调(_assetWriter.status==AVAssetWriterStatusCompleted);
};
}];
}
}
否则{
}
}
-(void)observeValueForKeyPath:(NSString*)对象的键路径:(id)对象更改:(NSDictionary*)更改上下文:(void*)上下文
{
if([keyPath isEqualToString:kWriterInputIsReadyForMoreData]){
如果([change[NSKeyValueChangeNewKey]布尔值]==是){
[自行出现];
}
}
}