How can I continuously draw a sound wave in a view while recording or playing audio on iOS?


In my ViewController's viewDidLoad I have instantiated the SoundWaveView:

- (void)viewDidLoad
{
    [super viewDidLoad];

    _soundWaveView = [[SoundWaveView alloc] initWithFrame:CGRectMake(10, 50, 300, 200)];
    [self.view addSubview:_soundWaveView];
}
In SoundWaveView I have written the following code:

- (void) layoutSubviews {

    [super layoutSubviews];

    if (_waveImageView == nil)
    {
        // One image view holds the rendered waveform; a tinted copy sits on top
        // and is revealed left-to-right to show playback progress.
        _waveImageView = [[UIImageView alloc] initWithFrame:self.bounds];
        _progressImageView = [[UIImageView alloc] initWithFrame:self.bounds];

        _waveImageView.contentMode = UIViewContentModeLeft;
        _progressImageView.contentMode = UIViewContentModeLeft;
        _waveImageView.clipsToBounds = YES;
        _progressImageView.clipsToBounds = YES;

        [self addSubview:_waveImageView];
        [self addSubview:_progressImageView];
    }
}
When a sound is played, playSoundFromUrl: is called:

- (void)playSoundFromUrl:(NSURL *)soundURL {

    _soundURL = soundURL;

    [self render];
}

- (void)render {

    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_soundURL options:nil];
    UIImage *renderedImage = [self renderWaveImageFromAudioAsset:asset];

    _waveImageView.image = renderedImage;
    _progressImageView.image = [renderedImage tintedImageWithColor:_progressColor];

    _waveImageView.width = renderedImage.size.width;
    _waveImageView.left = (self.width - renderedImage.size.width);
    _progressImageView.left = _waveImageView.left;
    _progressImageView.width = 0;
}
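
The post never shows how the progress overlay is advanced during playback, so here is a minimal sketch of one way to drive it. It is not from the original code: _player (an AVAudioPlayer) and _displayLink (a CADisplayLink) are hypothetical ivars, and it reuses the .width frame-accessor category already used above.

- (void)startProgressUpdates {
    // Fire once per screen refresh while the sound plays.
    _displayLink = [CADisplayLink displayLinkWithTarget:self
                                               selector:@selector(updateProgress)];
    [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}

- (void)updateProgress {
    if (_player.duration <= 0) return;
    // Reveal the tinted copy in proportion to the playback position.
    _progressImageView.width = _waveImageView.width * (_player.currentTime / _player.duration);
}

(Remember to call [_displayLink invalidate] when playback stops.)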






- (UIImage *)renderWaveImageFromAudioAsset:(AVURLAsset *)songAsset {

    NSError* error = nil;

    AVAssetReader* reader = [[AVAssetReader alloc] initWithAsset:songAsset error:&error];

    AVAssetTrack* songTrack = [songAsset.tracks objectAtIndex:0];
    // Decode to interleaved 16-bit linear PCM. The bit depth must be 16 (not 8),
    // because the code below reads the buffers as SInt16 samples. No
    // AVNumberOfChannelsKey is set, so the source channel count is preserved
    // and the stereo-averaging branch below stays meaningful.
    NSDictionary* outputSettingsDict = [[NSDictionary alloc] initWithObjectsAndKeys:
                                        [NSNumber numberWithInt:kAudioFormatLinearPCM],AVFormatIDKey,
                                        [NSNumber numberWithInt:16],AVLinearPCMBitDepthKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsBigEndianKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                        [NSNumber numberWithBool:NO],AVLinearPCMIsNonInterleaved,
                                        nil];

    AVAssetReaderTrackOutput* output = [[AVAssetReaderTrackOutput alloc] initWithTrack:songTrack outputSettings:outputSettingsDict];

    [reader addOutput:output];

    UInt32 sampleRate = 0, channelCount = 0; // defaults in case the track has no format description

    NSArray* formatDesc = songTrack.formatDescriptions;

    for (int i = 0; i < [formatDesc count]; ++i)
    {
        CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)[formatDesc objectAtIndex:i];
        const AudioStreamBasicDescription* fmtDesc = CMAudioFormatDescriptionGetStreamBasicDescription (item);
        if (fmtDesc)
        {
            sampleRate = fmtDesc->mSampleRate;
            channelCount = fmtDesc->mChannelsPerFrame;
        }
    }

    UInt32 bytesPerSample = 2 *  channelCount;
    SInt16 maxValue = 0;

    NSMutableData *fullSongData = [[NSMutableData alloc] init];

    [reader startReading];

    UInt64 totalBytes = 0;
    SInt64 totalLeft = 0;
    SInt64 totalRight = 0;
    NSInteger sampleTally = 0;

    NSInteger samplesPerPixel = 100; // coarse enough to be fast, fine enough for most UI

    int buffersCount = 0;
    while (reader.status == AVAssetReaderStatusReading)
    {
        AVAssetReaderTrackOutput * trackOutput = (AVAssetReaderTrackOutput *)[reader.outputs objectAtIndex:0];
        CMSampleBufferRef sampleBufferRef = [trackOutput copyNextSampleBuffer];

        if (sampleBufferRef)
        {
            CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBufferRef);

            size_t length = CMBlockBufferGetDataLength(blockBufferRef);
            totalBytes += length;

            @autoreleasepool
            {
                NSMutableData *data = [NSMutableData dataWithLength:length];
                CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, data.mutableBytes);

                SInt16 * samples = (SInt16*) data.mutableBytes;
                int sampleCount = length / bytesPerSample;

                for (int i = 0; i < sampleCount; i++)
                {
                    SInt16 left = *samples++;

                    totalLeft += left;

                    SInt16 right = 0; // stays 0 for mono input

                    if (channelCount == 2)
                    {
                        right = *samples++;

                        totalRight += right;
                    }

                    sampleTally++;

                    if (sampleTally > samplesPerPixel)
                    {
                        left = (totalLeft / sampleTally);

                        if (channelCount == 2)
                        {
                            right = (totalRight / sampleTally);
                        }

                        SInt16 val = (channelCount == 2) ? ((right + left) / 2) : left;

                        [fullSongData appendBytes:&val length:sizeof(val)];

                        totalLeft = 0;
                        totalRight = 0;
                        sampleTally = 0;
                    }
                }
                CMSampleBufferInvalidate(sampleBufferRef);

                CFRelease(sampleBufferRef);
            }
        }

        buffersCount++;
    }

    NSMutableData *adjustedSongData = [[NSMutableData alloc] init];

    int sampleCount =  fullSongData.length / 2; // sizeof(SInt16)

    int adjustFactor = ceilf((float)sampleCount / (self.width / (_drawSpaces ? 2.0 : 1.0)));

    SInt16* samples = (SInt16*) fullSongData.mutableBytes;

    int i = 0;

    while (i < sampleCount)
    {
        SInt32 sum = 0; // wider than SInt16 so summing a chunk cannot overflow
        int chunk = 0;

        for (int j = 0; j < adjustFactor && i + j < sampleCount; j++) // stop at the buffer's end
        {
            sum += samples[i + j];
            chunk++;
        }

        SInt16 val = sum / chunk;

        if (ABS(val) > maxValue)
        {
            maxValue = ABS(val);
        }
        [adjustedSongData appendBytes:&val length:sizeof(val)];
        i += adjustFactor;
    }

    sampleCount = adjustedSongData.length / 2;

    if (reader.status == AVAssetReaderStatusCompleted)
    {
        UIImage *image = [self drawImageFromSamples:(SInt16 *)adjustedSongData.bytes
                                           maxValue:maxValue
                                        sampleCount:sampleCount];
        return image;
    }
    return nil;
}
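
To make the two-stage downsampling concrete (the figures are illustrative, not from the post): a 10-second mono clip at 44.1 kHz produces 441,000 samples. The first stage averages every samplesPerPixel = 100 samples into one value, leaving 4,410 points. The second stage then fits those to the view: for a 300-point-wide view with _drawSpaces enabled, adjustFactor = ceil(4410 / (300 / 2)) = 30, so about 147 values survive and are handed to drawImageFromSamples:maxValue:sampleCount:.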
And then:
- (UIImage *)drawImageFromSamples:(SInt16 *)samples
                         maxValue:(SInt16)maxValue
                      sampleCount:(NSInteger)sampleCount {

    // Horizontal step per sample (note: both branches are currently 6).
    CGFloat step = _drawSpaces ? 6 : 6;

    CGSize imageSize = CGSizeMake(sampleCount * step, self.height);
    UIGraphicsBeginImageContextWithOptions(imageSize, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();

    // Fill the background.
    CGContextSetFillColorWithColor(context, self.backgroundColor.CGColor);
    CGContextSetAlpha(context, 1.0);
    CGContextFillRect(context, CGRectMake(0, 0, imageSize.width, imageSize.height));

    CGContextSetLineWidth(context, 2.0);
    CGContextSetLineCap(context, kCGLineCapRound);
    CGContextSetStrokeColorWithColor(context, self.waveColor.CGColor);

    float channelCenterY = imageSize.height / 2;
    float sampleAdjustmentFactor = imageSize.height / (float)(maxValue ?: 1); // guard against division by zero on silence

    for (NSInteger i = 0; i < sampleCount; i++)
    {
        float val = *samples++ * sampleAdjustmentFactor;
        if ((int)val == 0) val = 1.0; // always draw at least a sliver

        // One vertical line per downsampled value, centered vertically.
        CGContextMoveToPoint(context, i * step, channelCenterY - val / 2.0);
        CGContextAddLineToPoint(context, i * step, channelCenterY + val / 2.0);
        CGContextStrokePath(context);
    }

    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return newImage;
}
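
One caveat the code above does not address: renderWaveImageFromAudioAsset: decodes the entire file synchronously, so calling render on the main thread will stall the UI on longer clips. Below is a minimal sketch of pushing the work onto a background queue (renderAsync is a hypothetical name; UIGraphicsBeginImageContextWithOptions has been safe off the main thread since iOS 4, but the image views must still be updated on the main queue).

- (void)renderAsync {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Heavy part: read and downsample the asset off the main thread.
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_soundURL options:nil];
        UIImage *renderedImage = [self renderWaveImageFromAudioAsset:asset];

        dispatch_async(dispatch_get_main_queue(), ^{
            // UIKit part: back on the main thread.
            _waveImageView.image = renderedImage;
            _progressImageView.image = [renderedImage tintedImageWithColor:_progressColor];
            _waveImageView.width = renderedImage.size.width;
            _waveImageView.left = self.width - renderedImage.size.width;
            _progressImageView.left = _waveImageView.left;
            _progressImageView.width = 0;
        });
    });
}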