
Objective-C: How to create a waveform song visualization with MPMusicPlayerController

Tags: objective-c, ios, ios5, core-audio

For the past few days I have been writing code to create a song visualization as a waveform. This is code I took from an answer about drawing a waveform with AVAssetReader. Can anyone help me with which method to call while a song is playing? I want to display the visualization. Here is my code.

First, a generic rendering method that takes a pointer to averaged sample data and returns a UIImage. Note: these samples are not playable audio samples.

-(UIImage *) audioImageGraph:(SInt16 *) samples
            normalizeMax:(SInt16) normalizeMax
             sampleCount:(NSInteger) sampleCount 
            channelCount:(NSInteger) channelCount
             imageHeight:(float) imageHeight {

CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
UIGraphicsBeginImageContext(imageSize);
CGContextRef context = UIGraphicsGetCurrentContext();

CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
CGContextSetAlpha(context,1.0);
CGRect rect;
rect.size = imageSize;
rect.origin.x = 0;
rect.origin.y = 0;

CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
CGColorRef rightcolor = [[UIColor redColor] CGColor];

CGContextFillRect(context, rect);

CGContextSetLineWidth(context, 1.0);

float halfGraphHeight = (imageHeight / 2) / (float) channelCount ;
float centerLeft = halfGraphHeight;
float centerRight = (halfGraphHeight*3) ; 
float sampleAdjustmentFactor = (imageHeight/ (float) channelCount) / (float) normalizeMax;

for (NSInteger intSample = 0 ; intSample < sampleCount ; intSample ++ ) {
    SInt16 left = *samples++;
    float pixels = (float) left;
    pixels *= sampleAdjustmentFactor;
    CGContextMoveToPoint(context, intSample, centerLeft-pixels);
    CGContextAddLineToPoint(context, intSample, centerLeft+pixels);
    CGContextSetStrokeColorWithColor(context, leftcolor);
    CGContextStrokePath(context);

    if (channelCount==2) {
        SInt16 right = *samples++;
        float pixels = (float) right;
        pixels *= sampleAdjustmentFactor;
        CGContextMoveToPoint(context, intSample, centerRight - pixels);
        CGContextAddLineToPoint(context, intSample, centerRight + pixels);
        CGContextSetStrokeColorWithColor(context, rightcolor);
        CGContextStrokePath(context); 
    }
}

// Create new image
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();

// Tidy up
UIGraphicsEndImageContext();   

return newImage;
}

An example of calling this method:

  -(void) importMediaItem 
{

}


I have added all the class methods, libraries, and the NSThread category to my project. Everything compiles, but I can't figure out which method to call. This is really giving me trouble.

Create a view controller and add the importMediaItem method.
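For reference, a hypothetical interface for that view controller, matching the ivars (url, waveFormImage) and methods that the code in this post assumes. The class name is illustrative, not from the original answer:

// Hypothetical view controller; names chosen to match the usage below.
@interface WaveformViewController : UIViewController {
    NSURL *url;             // cached audio URL, retained for playback
    UIImage *waveFormImage; // rendered waveform image
}
- (MPMediaItem *) mediaItem;   // the track to visualize
- (void) importMediaItem;      // kicks off import and rendering
- (void) displayWaveFormImage; // shows the finished image
@end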

Make sure the media item is not nil. You can get the currently playing track from the iPod player as follows:

MPMusicPlayerController* player = [MPMusicPlayerController iPodMusicPlayer];
MPMediaItem *item = [player nowPlayingItem];
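A minimal guard along those lines (a sketch, not part of the original answer):

if (item == nil) {
    // Nothing is playing; start a track in the iPod player first.
    NSLog(@"No track is playing; cannot render a waveform.");
    return;
}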
In the method where your view is displayed (viewDidAppear:, for example), call importMediaItem.
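For example, assuming the hypothetical view controller sketched above:

- (void) viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    [self importMediaItem]; // begins import and waveform rendering
}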

Add a displayWaveFormImage method as the callback, and create a UIImageView from the returned image.
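A minimal sketch of that callback, under manual reference counting like the rest of this code:

- (void) displayWaveFormImage {
    // Wrap the rendered waveform in an image view and show it.
    UIImageView *waveView = [[UIImageView alloc] initWithImage:waveFormImage];
    waveView.frame = self.view.bounds;
    [self.view addSubview:waveView];
    [waveView release];
}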

Make sure the iPod player is playing a track before you run the app.


Then everything should kick off.

You might want to post this on our sister site; it's really too involved to handle here.
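Note: the reader method below relies on a noiseFloor constant, decibel/minMaxX helpers, an imgExt/imageToData pair, and an audioImageLogGraph: method, none of which are reproduced in this excerpt. The following definitions are a sketch consistent with how they are used here (the log variant is adapted from the linear audioImageGraph: above); treat them as assumptions, not the answer's exact code.

// Assumed helpers: clamp, noise floor at -50 dB, dB conversion for 16-bit samples,
// and PNG encoding for the finished image.
#define absX(x) ((x) < 0 ? 0 - (x) : (x))
#define minMaxX(x,mn,mx) ((x) <= (mn) ? (mn) : ((x) >= (mx) ? (mx) : (x)))
#define noiseFloor (-50.0)
#define decibel(amplitude) (20.0 * log10(absX(amplitude) / 32767.0))
#define imgExt @"png"
#define imageToData(x) UIImagePNGRepresentation(x)

// Log-scaled counterpart of audioImageGraph: above (a sketch). Samples here are
// averaged decibel values in [noiseFloor, 0], so each value is shifted up by
// -noiseFloor before being scaled to pixels.
-(UIImage *) audioImageLogGraph:(Float32 *) samples
                   normalizeMax:(Float32) normalizeMax
                    sampleCount:(NSInteger) sampleCount
                   channelCount:(NSInteger) channelCount
                    imageHeight:(float) imageHeight {

    CGSize imageSize = CGSizeMake(sampleCount, imageHeight);
    UIGraphicsBeginImageContext(imageSize);
    CGContextRef context = UIGraphicsGetCurrentContext();

    CGContextSetFillColorWithColor(context, [UIColor blackColor].CGColor);
    CGContextSetAlpha(context, 1.0);
    CGContextFillRect(context, CGRectMake(0, 0, imageSize.width, imageSize.height));
    CGContextSetLineWidth(context, 1.0);

    CGColorRef leftcolor = [[UIColor whiteColor] CGColor];
    CGColorRef rightcolor = [[UIColor redColor] CGColor];

    float halfGraphHeight = (imageHeight / 2) / (float) channelCount;
    float centerLeft = halfGraphHeight;
    float centerRight = (halfGraphHeight * 3);
    float sampleAdjustmentFactor = (imageHeight / (float) channelCount) / (normalizeMax - noiseFloor);

    for (NSInteger intSample = 0; intSample < sampleCount; intSample++) {
        Float32 left = *samples++;
        float pixels = (left - noiseFloor) * sampleAdjustmentFactor;
        CGContextMoveToPoint(context, intSample, centerLeft - pixels);
        CGContextAddLineToPoint(context, intSample, centerLeft + pixels);
        CGContextSetStrokeColorWithColor(context, leftcolor);
        CGContextStrokePath(context);

        if (channelCount == 2) {
            Float32 right = *samples++;
            pixels = (right - noiseFloor) * sampleAdjustmentFactor;
            CGContextMoveToPoint(context, intSample, centerRight - pixels);
            CGContextAddLineToPoint(context, intSample, centerRight + pixels);
            CGContextSetStrokeColorWithColor(context, rightcolor);
            CGContextStrokePath(context);
        }
    }

    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}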
 - (NSData *) renderPNGAudioPictogramLogForAssett:(AVURLAsset *)songAsset {

NSError * error = nil;


AVAssetReader * reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

AVAssetTrack * songTrack = [songAsset.tracks objectAtIndex:0];

NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:

                                    [NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
                                    //     [NSNumber numberWithInt:44100.0],AVSampleRateKey, /*Not Supported*/
                                    //     [NSNumber numberWithInt: 2],AVNumberOfChannelsKey,    /*Not Supported*/

                                    [NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                    [NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,

                                    nil];


AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];

[reader addOutput:output];
[output release];
[outputSettingsDict release];

UInt32 sampleRate = 0, channelCount = 0;

NSArray* formatDesc = songTrack.formatDescriptions;
for(unsigned int i = 0; i < [formatDesc count]; ++i) {
    CMAudioFormatDescriptionRef item = (CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
    const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
    if(fmtDesc ) {

        sampleRate = fmtDesc->mSampleRate;
        channelCount = fmtDesc->mChannelsPerFrame;

        //    NSLog(@"channels:%u, bytes/packet: %u, sampleRate %f",fmtDesc->mChannelsPerFrame, fmtDesc->mBytesPerPacket,fmtDesc->mSampleRate);
    }
}


UInt32 bytesPerSample = 2 * channelCount;
Float32 normalizeMax = noiseFloor;
NSLog(@"normalizeMax = %f",normalizeMax);
NSMutableData * fullSongData = [[NSMutableData alloc] init];
[reader startReading];


UInt64 totalBytes = 0; 


Float64 totalLeft = 0;
Float64 totalRight = 0;
Float32 sampleTally = 0;

NSInteger samplesPerPixel = sampleRate / 50;


while (reader.status == AVAssetReaderStatusReading){

    AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
    CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

    if (sampleBufferRef){
        CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);

        size_t length = CMBlockBufferGetDataLength(blockBufferRef);
        totalBytes += length;


        NSAutoreleasePool *wader = [[NSAutoreleasePool alloc] init];

        NSMutableData * data = [NSMutableData dataWithLength:length];
        CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);


        SInt16 * samples = (SInt16 *) data.mutableBytes;
        int sampleCount = length / bytesPerSample;
        for (int i = 0; i < sampleCount ; i ++) {

            Float32 left = (Float32) *samples++;
            left = decibel(left);
            left = minMaxX(left,noiseFloor,0);

            totalLeft  += left;



            Float32 right;
            if (channelCount==2) {
                right = (Float32) *samples++;
                right = decibel(right);
                right = minMaxX(right,noiseFloor,0);

                totalRight += right;
            }

            sampleTally++;

            if (sampleTally > samplesPerPixel) {

                left  = totalLeft / sampleTally; 
                if (left > normalizeMax) {
                    normalizeMax = left;
                }

                // NSLog(@"left average = %f, normalizeMax = %f",left,normalizeMax);


                [fullSongData appendBytes:&left length:sizeof(left)];

                if (channelCount==2) {
                    right = totalRight / sampleTally; 


                    if (right > normalizeMax) {
                        normalizeMax = right;
                    }


                    [fullSongData appendBytes:&right length:sizeof(right)];
                }

                totalLeft   = 0;
                totalRight  = 0;
                sampleTally = 0;

            }
        }



        [wader drain];


        CMSampleBufferInvalidate(sampleBufferRef);

        CFRelease(sampleBufferRef);
    }
}


NSData * finalData = nil;

if (reader.status == AVAssetReaderStatusFailed || reader.status == AVAssetReaderStatusUnknown){
    // Something went wrong. Handle it.
}

if (reader.status == AVAssetReaderStatusCompleted){
    // You're done. It worked.

    NSLog(@"rendering output graphics using normalizeMax %f",normalizeMax);

    UIImage *test = [self audioImageLogGraph:(Float32 *) fullSongData.bytes 
                                normalizeMax:normalizeMax 
                                 sampleCount:fullSongData.length / (sizeof(Float32) * 2) 
                                channelCount:2
                                 imageHeight:100];

    finalData = imageToData(test);
}




[fullSongData release];
[reader release];

 return finalData;
}

**Now the init method that does "the business":**


 - (id) initWithMPMediaItem:(MPMediaItem*) item 
       completionBlock:(void (^)(UIImage* delayedImagePreparation))completionBlock 
{

NSFileManager *fman = [NSFileManager defaultManager];
NSString *assetPictogramFilepath = [[self class] cachedAudioPictogramPathForMPMediaItem:item];

if ([fman fileExistsAtPath:assetPictogramFilepath]) {

    NSLog(@"Returning cached waveform pictogram: %@",[assetPictogramFilepath lastPathComponent]);

    self = [self initWithContentsOfFile:assetPictogramFilepath];
    return self;

}


NSString *assetFilepath = [[self class] cachedAudioFilepathForMPMediaItem:item];

NSURL *assetFileURL = [NSURL fileURLWithPath:assetFilepath];

if ([fman fileExistsAtPath:assetFilepath]) {

    NSLog(@"scanning cached audio data to create UIImage file: %@",[assetFilepath lastPathComponent]);

    [assetFileURL retain];
    [assetPictogramFilepath retain];

    [NSThread MCSM_performBlockInBackground: ^{


        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
        // use the log-scaled renderer defined above
        NSData *waveFormData = [self renderPNGAudioPictogramLogForAssett:asset];
        [asset release];

        [waveFormData writeToFile:assetPictogramFilepath atomically:YES];

        [assetFileURL release];

        [assetPictogramFilepath release];

        if (completionBlock) {

            [waveFormData retain];
            [NSThread MCSM_performBlockOnMainThread:^{


                 UIImage *result = [UIImage imageWithData:waveFormData];

                  NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data           in UIImage %0.0f x %0.0f pixels)",waveFormData.length, [imgExtuppercaseString],result.size.width,result.size.height);

                completionBlock(result);

                [waveFormData release];

            }];

        }


    }];

    return nil;

} else {



   NSString *assetFolder = [[self class] assetCacheFolder];

    [fman createDirectoryAtPath:assetFolder withIntermediateDirectories:YES 
   attributes:nil error:nil];

    NSLog(@"Preparing to import audio asset data %@",[assetFilepath lastPathComponent]);

    [assetPictogramFilepath retain];
    [assetFileURL retain];

    TSLibraryImport* import = [[TSLibraryImport alloc] init];
    NSURL    * assetURL = [item valueForProperty:MPMediaItemPropertyAssetURL];

    [import importAsset:assetURL toURL:assetFileURL completionBlock:^(TSLibraryImport* import) {
        //check the status and error properties of
        //TSLibraryImport


        if (import.error) {

            NSLog (@"audio data import failed:%@",import.error);


        } else{
            NSLog (@"Creating waveform pictogram file: %@", [assetPictogramFilepath lastPathComponent]);
            AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:assetFileURL options:nil];
            // use the log-scaled renderer defined above
            NSData *waveFormData = [self renderPNGAudioPictogramLogForAssett:asset];
            [asset release];

            [waveFormData writeToFile:assetPictogramFilepath atomically:YES];


            if (completionBlock) {
                [waveFormData retain];
                [NSThread MCSM_performBlockOnMainThread:^
                {

                    UIImage *result = [UIImage imageWithData:waveFormData];
                    NSLog(@"returning rendered pictogram on main thread (%d bytes %@ data in UIImage %0.0f x %0.0f pixels)",waveFormData.length,[imgExt uppercaseString],result.size.width,result.size.height);

                    completionBlock(result);

                    [waveFormData release];

                }];

            }
        }

        [assetPictogramFilepath release];
        [assetFileURL release];


    }];

    return nil;
}
-(void) importMediaItem 
{
MPMediaItem* item = [self mediaItem];

// since we will be needing this for playback, save the url to the cached audio.
[url release];
url = [[UIImage cachedAudioURLForMPMediaItem:item] retain];


[waveFormImage release];

waveFormImage = [[UIImage alloc] initWithMPMediaItem:item completionBlock:^(UIImage* delayedImagePreparation){

    waveFormImage = [delayedImagePreparation retain];
    [self displayWaveFormImage];

}];

if (waveFormImage) {
    [waveFormImage retain];
    [self displayWaveFormImage];
}
}