Android AudioTrack not working, no sound

Tags: android, codec, android-mediacodec, audiotrack, input-buffer

Can you check why my AudioTrack isn't working? I have a buffer feeding the AudioTrack, and as far as I can tell it should work:

public class MainActivity extends AppCompatActivity {

private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
private byte[] b;
AudioManager audioManager;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    Button button = (Button)findViewById(R.id.button);
    button.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            run();
        }
    });
}

public void run() {
    extractor = new MediaExtractor();
    AssetFileDescriptor sampleFD = getResources().openRawResourceFd(R.raw.pinkfloyd);
    try {
        extractor.setDataSource(sampleFD.getFileDescriptor(), sampleFD.getStartOffset(), sampleFD.getLength());
    } catch (IOException e) {
        e.printStackTrace();
    }

    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("audio/")) {
            extractor.selectTrack(i);
            try {
                decoder = MediaCodec.createDecoderByType(mime);
            } catch (IOException e) {
                e.printStackTrace();
            }
            decoder.configure(format, surface, null, 0);
            break;
        }
    }

    if (decoder == null) {
        Log.e("DecodeActivity", "Can't find video info!");
        return;
    }

    decoder.start();

    ByteBuffer inputBuffers [] = decoder.getInputBuffers();
    ByteBuffer outputBuffers [] = decoder.getOutputBuffers();

    audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    audioManager.setMode(AudioManager.MODE_CURRENT);
    audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
    int lengthOfAudioClip = outputBuffers.length;
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, lengthOfAudioClip, AudioTrack.MODE_STREAM);

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    audioTrack.play();
    boolean isEOS = false;

    while (!Thread.interrupted()) {
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                decoder.getInputBuffer(inIndex);
                int sampleSize = extractor.readSampleData(buffer, 0);

                if (sampleSize < 0) {
                    Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                    decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }

        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        switch (outIndex)
        {
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                b = new byte[info.size-info.offset];

                Log.d("LOGGING FOR B", b + "");
                audioTrack.write(b, 0, outputBuffers.length);
                decoder.releaseOutputBuffer(outIndex, true);

                Log.d("LOGGING FOREST KEEP OUT", outIndex + "");
                Log.d("LOG STATE", audioTrack.getState() + "");
                Log.d("LOG STREAMTYPE", audioTrack.getStreamType() + "");
                break;
        }

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");

            audioTrack.flush();
            audioTrack.release();

            break;
        }
    }

    Log.d("LOGGING FOR INPUT", inputBuffers + "");
    Log.d("LOGGING FOR OUTPUT", outputBuffers + "");
    Log.d("OUTLENGTH", outputBuffers.length + "");
    Log.d("SIZE OF B", b.length + "");

//        AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, 44100, AudioTrack.MODE_STREAM);
//        audioTrack.getSampleRate();

    decoder.stop();
    decoder.release();
    extractor.release();
}
}

You can simply try:

MediaPlayer mPlayer = MediaPlayer.create(ThisActivity.this, R.raw.mysoundfile);
mPlayer.start();
Don't forget to stop it when the activity is destroyed:

@Override
protected void onDestroy() {
    mPlayer.stop();
    super.onDestroy();
}
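
As a side note beyond what this answer shows, MediaPlayer also holds native resources, so a fuller cleanup would typically release the player as well (this addition is my own, not part of the answer; mPlayer is assumed to be the field from the snippet above):

@Override
protected void onDestroy() {
    if (mPlayer != null) {
        mPlayer.stop();     // stop playback if it is still running
        mPlayer.release();  // free the underlying native player resources
        mPlayer = null;
    }
    super.onDestroy();
}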
For more parameters, see this older post:


When you write byte[] b to the AudioTrack, it looks like it is still empty. You can fill byte[] b like this:

buffer.get(b, 0, info.size-info.offset);

before writing it to the AudioTrack.
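
For context, here is a sketch of how the default branch of the output loop in the question could look with that fix applied. The variable names (outputBuffers, outIndex, info, b, audioTrack, decoder) come from the question's code; writing info.size - info.offset bytes instead of outputBuffers.length, and releasing the buffer with render = false, are my own additional suggestions, not part of this answer:

default:
    ByteBuffer buffer = outputBuffers[outIndex];
    b = new byte[info.size - info.offset];

    // Copy the decoded PCM samples out of the codec's output buffer into b
    buffer.position(info.offset);
    buffer.get(b, 0, info.size - info.offset);
    buffer.clear();

    // Write the number of decoded bytes, not the number of output buffers
    audioTrack.write(b, 0, info.size - info.offset);

    // No Surface is involved for audio, so do not ask the codec to render
    decoder.releaseOutputBuffer(outIndex, false);
    break;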
