如何使用Xamarin for Android将数据从MediaCodec传输到AudioTrack
我正在尝试解码一个 mp3 文件并将其流式传输到 AudioTrack。这一切都能正常工作,但在 Java 端会引发大量 GC。我已确保在播放/流式循环中自己不分配内存,并怀疑是 ByteBuffer.Get(byte[], int, int) 的绑定在分配临时 Java 数组。有人能确认这一点,和/或展示一种把数据从 MediaCodec 传送到 AudioTrack 的更好方式吗?(我知道 API 21 引入了 AudioTrack.write(ByteBuffer, …)。)谢谢。以下是我目前的做法:
byte[] audioBuffer = new byte[...];
...
ByteBuffer codecOutputBuffer = codecOutputBuffers[outputIndex];
// The next line seems to be the source of a lot of GC during playback
codecOutputBuffer.Get(audioBuffer, 0, bufInfo.Size);
audioTrack.Write(audioBuffer, 0, bufInfo.Size);
更新1:我尝试使用分配跟踪器确认分配站点。我发现分配的对象是8kb大字节数组。不幸的是,Allocation Tracker没有为他们显示分配站点stacktrace:
1 32 org.apache.harmony.dalvik.ddmc.Chunk 6 org.apache.harmony.dalvik.ddmc.DdmServer dispatch
2 16 java.lang.Integer 6 java.lang.Integer valueOf
3 16 byte[] 6
4 8192 byte[] 20
5 8192 byte[] 20
6 8192 byte[] 20
为了确认分配这些数组的确实是 ByteBuffer.Get(byte[], int, int),我改用下面这段纯 Java 代码重新运行了应用:
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
public class AudioPlayer {
public void play(Context aContext, final int resourceId){
final Context context = aContext;
new Thread()
{
@Override
public void run() {
try {
AssetFileDescriptor fd = context.getResources().openRawResourceFd(resourceId);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
extractor.selectTrack(0);
MediaFormat trackFormat = extractor.getTrackFormat(0);
MediaCodec decoder = MediaCodec.createDecoderByType(trackFormat.getString(MediaFormat.KEY_MIME));
decoder.configure(trackFormat, null, null, 0);
decoder.start();
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
int inputIndex = decoder.dequeueInputBuffer(-1);
ByteBuffer inputBuffer = decoderInputBuffers[inputIndex];
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
byte[] audioBuffer = null;
AudioTrack audioTrack = null;
int read = extractor.readSampleData(inputBuffer, 0);
while (read > 0) {
decoder.queueInputBuffer(inputIndex, 0, read, extractor.getSampleTime(), 0);
extractor.advance();
int outputIndex = decoder.dequeueOutputBuffer(bufferInfo, -1);
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
trackFormat = decoder.getOutputFormat();
} else if (outputIndex >= 0) {
if (bufferInfo.size > 0) {
ByteBuffer outputBuffer = decoderOutputBuffers[outputIndex];
if (audioBuffer == null || audioBuffer.length < bufferInfo.size) {
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;

import java.nio.ByteBuffer;
public class AudioPlayer {
public void play(Context aContext, final int resourceId){
final Context context = aContext;
new Thread()
{
@Override
public void run() {
try {
AssetFileDescriptor fd = context.getResources().openRawResourceFd(resourceId);
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
extractor.selectTrack(0);
MediaFormat trackFormat = extractor.getTrackFormat(0);
MediaCodec decoder = MediaCodec.createDecoderByType(trackFormat.getString(MediaFormat.KEY_MIME));
decoder.configure(trackFormat, null, null, 0);
decoder.start();
ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
int inputIndex = decoder.dequeueInputBuffer(-1);
ByteBuffer inputBuffer = decoderInputBuffers[inputIndex];
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
byte[] audioBuffer = null;
AudioTrack audioTrack = null;
int read = extractor.readSampleData(inputBuffer, 0);
while (read > 0) {
decoder.queueInputBuffer(inputIndex, 0, read, extractor.getSampleTime(), 0);
extractor.advance();
int outputIndex = decoder.dequeueOutputBuffer(bufferInfo, -1);
if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
trackFormat = decoder.getOutputFormat();
} else if (outputIndex >= 0) {
if (bufferInfo.size > 0) {
ByteBuffer outputBuffer = decoderOutputBuffers[outputIndex];
if (audioBuffer == null || audioBuffer.length < bufferInfo.size) {
audioBuffer = new byte[bufferInfo.size];
}
outputBuffer.rewind();
outputBuffer.get(audioBuffer, 0, bufferInfo.size);
decoder.releaseOutputBuffer(outputIndex, false);
if (audioTrack == null) {
int sampleRateInHz = trackFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int channelCount = trackFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int channelConfig = channelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
audioTrack = new AudioTrack(
AudioManager.STREAM_MUSIC,
sampleRateInHz,
channelConfig,
AudioFormat.ENCODING_PCM_16BIT,
AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT) * 2,
AudioTrack.MODE_STREAM);
audioTrack.play();
}
audioTrack.write(audioBuffer, 0, bufferInfo.size);
}
}
inputIndex = decoder.dequeueInputBuffer(-1);
inputBuffer = decoderInputBuffers[inputIndex];
read = extractor.readSampleData(inputBuffer, 0);
}
} catch (Exception e) {
}
}
}.start();
}
}
[Activity(Label = "AndroidAudioTest", MainLauncher = true, Icon = "@drawable/icon")]
public class MainActivity : Activity
{
protected override void OnCreate(Bundle bundle)
{
base.OnCreate(bundle);
SetContentView(Resource.Layout.Main);
var play = FindViewById<Button>(Resource.Id.Play);
play.Click += (s, e) =>
{
new AudioPlayer().Play(this, Resource.Raw.PianoInsideMics);
};
}
}