C# real-time (RTP) audio streaming in a Windows Phone 8 application

Tags: c#, audio, windows-phone-8, rtp, pcm

I am streaming RTP audio via a Barix Instream 100 in the following format:

PCM, 16-bit, 8 kHz, mono (little-endian)

I am trying to play this stream "live" through a MediaElement that uses a custom MediaStreamSource. The problem is that playback is delayed by about 2 seconds. With VLC on my PC there is "no" delay.
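
For context, the wiring between the MediaElement and the custom source looks roughly like this (a minimal sketch; the element name AudioPlayer, the class name RtpMediaStreamSource, and its constructor argument are placeholders, not taken from the original code):

    // Hypothetical wiring: "AudioPlayer" is a MediaElement declared in XAML and
    // RtpMediaStreamSource is the custom MediaStreamSource whose overrides are shown below.
    var source = new RtpMediaStreamSource(rtpStream);  // rtpStream: the incoming PCM data (placeholder)
    AudioPlayer.SetSource(source);                      // MediaElement.SetSource(MediaStreamSource)
    AudioPlayer.Play();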

I found that MediaStreamSource has a property "AudioBufferLength" that can be set to a value between 15 ms and 1000 ms. But when the value is too small, I get an exception in GetSampleAsync -> ReportGetSampleCompleted.
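
Lowering the buffer is done from inside the MediaStreamSource subclass, typically in its constructor, before ReportOpenMediaCompleted is called. The fragment below is only a sketch (the class name is a placeholder, and the remaining overrides are the ones shown further down):

    // Sketch: set a smaller audio buffer on the custom source.
    // Base class: System.Windows.Media.MediaStreamSource
    public class RtpMediaStreamSource : MediaStreamSource
    {
        public RtpMediaStreamSource()
        {
            // Valid range is 15..1000 ms. Smaller values reduce the playout delay,
            // but, as described above, values that are too small lead to the
            // exception in GetSampleAsync -> ReportGetSampleCompleted.
            AudioBufferLength = 50;
        }

        // OpenMediaAsync / GetSampleAsync overrides as shown below.
    }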

Here is my code:

    protected override void GetSampleAsync(MediaStreamType mediaStreamType)
    {
        Debug.WriteLine("GetSampleAsync called.");

        // Start with one second of data, rounded up to the nearest block.
        var blocksize = (uint)AlignUp(_wavParser.WaveFormatEx.AvgBytesPerSec, _wavParser.WaveFormatEx.BlockAlign);

        var chunkSize = Math.Min(blocksize, (uint)_stream.Length - _currentPosition);
        var chunkDuration = _currentPosition * 10000000L / 8000;  //// _wavParser.WaveFormatEx.AudioDurationFromBufferSize((uint)chunkSize);

        // Send out the next sample
        var chunkSample = new MediaStreamSample(_mediaStreamDescription, _stream, _currentPosition, chunkSize, _currentTimeStamp, chunkDuration, _emptySampleDict);

        // Move our timestamp and position forward

        _currentPosition += chunkSize;
        _currentTimeStamp += chunkDuration;

        ReportGetSampleCompleted(chunkSample);  // <-- Here I get a NullReferenceException when the chunk is too small
    }

    protected override void OpenMediaAsync()
    {
        try
        {
            // Create a parser
            _wavParser = new WavParser(_stream);

            // Parse the header
            _wavParser.ParseWaveHeader();
            _wavParser.WaveFormatEx.ValidateWaveFormat();
            _startPosition = _currentPosition = _wavParser.DataPosition;

            // Init
            _streamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            _sourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            var availableStreams = new List<MediaStreamDescription>();

            // Stream Description
            _streamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = _wavParser.WaveFormatEx.ToHexString();
            _mediaStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, _streamAttributes);
            availableStreams.Add(_mediaStreamDescription);

            _sourceAttributes[MediaSourceAttributesKeys.Duration] = _wavParser.Duration.ToString();
            ReportOpenMediaCompleted(_sourceAttributes, availableStreams);
        }
        catch (Exception exception)
        {
            Debug.WriteLine("Error while opening media source." + exception.Message);
        }
    }
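
For reference, the CodecPrivateData that _wavParser.WaveFormatEx.ToHexString() produces for this stream can also be built by hand, since the format is plain PCM. The helper below is hypothetical (not part of the original code) and simply packs a little-endian WAVEFORMATEX for 16-bit 8 kHz mono PCM:

    using System;
    using System.IO;

    static class PcmFormatHelper
    {
        // Builds the hex string expected in MediaStreamAttributeKeys.CodecPrivateData
        // for uncompressed PCM, following the WAVEFORMATEX field layout.
        public static string BuildPcmCodecPrivateData(int sampleRate = 8000, short channels = 1, short bitsPerSample = 16)
        {
            short blockAlign = (short)(channels * bitsPerSample / 8);   // 2 bytes per sample frame
            int avgBytesPerSec = sampleRate * blockAlign;               // 16,000 bytes/s for 8 kHz mono

            using (var ms = new MemoryStream())
            using (var writer = new BinaryWriter(ms))
            {
                writer.Write((short)1);         // wFormatTag = WAVE_FORMAT_PCM
                writer.Write(channels);         // nChannels
                writer.Write(sampleRate);       // nSamplesPerSec
                writer.Write(avgBytesPerSec);   // nAvgBytesPerSec
                writer.Write(blockAlign);       // nBlockAlign
                writer.Write(bitsPerSample);    // wBitsPerSample
                writer.Write((short)0);         // cbSize (no extra bytes for plain PCM)
                return BitConverter.ToString(ms.ToArray()).Replace("-", string.Empty);
            }
        }
    }

For 8 kHz / mono / 16-bit this yields "01000100401F0000803E0000020010000000", which should match what the WAV parser reports for the stream described above.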