Android obtainBuffer timed out (is the CPU pegged?)


I am playing and capturing audio data at the same time from my own application.

I play back a music file while capturing microphone data to measure the render latency on Android, but I get log lines like the following:

obtainBuffer timed out (is the CPU pegged?)

My files are:

MainActivity.java

public class MainActivity extends Activity implements OnClickListener {



@SuppressLint("NewApi")
class AudioTask implements Runnable {
    /**
     * Queue on which audio blocks are placed.
     */

    LinkedBlockingQueue<byte[]> mQueue;
    VoiceAudioRecord mAudioRecord;
    int block_size;
    boolean done;

    static final int DEFAULT_BLOCK_SIZE = 1024;

    AudioTask(LinkedBlockingQueue<byte[]> q, int block_size) {
        this.done = false;
        mQueue = q;
        this.block_size = block_size;
        int intSize = android.media.AudioTrack.getMinBufferSize(8000, channelConfig,
                audioFormat);
        mAudioRecord = new VoiceAudioRecord(8000, intSize, true, true, true);
        boolean isAvailable = AcousticEchoCanceler.isAvailable();

        Log.i(LOG_TAG, "AEC available : "+isAvailable);
        if(mAecToggle.isChecked()) {
            Log.i("TOGGLE_BUTTON_AEC", "STARTED AEC PROCESS");

            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    int audioId = mAudioRecord.getAudioSessionId();
                    mAec = AcousticEchoCanceler.create(audioId);
                    Log.i(LOG_TAG, "AEC created " + mAec);

                    try {
                        mAec.setEnabled(true);
                        Log.i(LOG_TAG, "AEC getEnabled  : " + mAec.getEnabled());
                    } catch (IllegalStateException e) {
                        Log.i(LOG_TAG, "setEnabled() in wrong state");
                        e.printStackTrace();
                    }
                }
            });
        }
    }


    public void stop() {
        this.done = true;
    }

    public void run() {
        if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
            mAudioRecord.startRecording();
            while (!this.done) {
                int nshorts = this.readBlock();
                if (nshorts == AudioRecord.ERROR_INVALID_OPERATION) {
                    Log.i("AUDIO_REC","read: ERROR_INVALID_OPERATION");
                    break;
                } else if (nshorts == AudioRecord.ERROR_BAD_VALUE) {
                    Log.i("AUDIO_REC","read: ERROR_BAD_VALUE");
                    break;
                } else if (nshorts <= 0) {
                    Log.i("AUDIO_REC","read: " + nshorts);
                    break;
                }
            }
            mAudioRecord.stop();
        } else {
            Log.i("AUDIO_REC","AudioRecord not initialized");
        }
        mAudioRecord.release();
    }

    private int readBlock() {
        byte[] buffer = new byte[this.block_size];
        int nshorts = mAudioRecord.read(buffer, 0, buffer.length);
        if (nshorts > 0) {
            Log.i("AUDIO_REC","Posting " + nshorts + " samples to queue");
            mQueue.add(buffer);

        }
        return nshorts;
    }
}

/**
 * Audio recording task.
 */
AudioTask mAudioTask;
/**
 * Thread associated with recording task.
 */
Thread audio_thread;
/**
 * Queue of audio buffers.
 */
LinkedBlockingQueue<byte[]> mAudioQ;


short[] audioShorts, recvShorts, recordedShorts, filteredShorts;
byte[] audioBytes, recvBytes;
int shortsRead;
byte[][] fullTrack;

AudioRecord recorder;
AudioTrack player;
Boolean isInterrupted = true;
int indexOfArray = 0;

// ---------------------------------
@SuppressWarnings("deprecation")
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
private Button mRecordButton, mPlayButton;
private boolean isRecording = false;
PlayAudioFile audioFile;
private RecordedAudioFile mRecordedFile;
private MediaPlayer mMediaPlayer;
private AcousticEchoCanceler mAec;
private ToggleButton mAecToggle;
private boolean isPlaying = false;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    setContentView(R.layout.activity_main);
    mRecordButton = (Button)findViewById(R.id.button1);
    mRecordButton.setOnClickListener(this);

    //findViewById(R.id.button2).setOnClickListener(this);
    mPlayButton = (Button)findViewById(R.id.button3);
    mPlayButton.setOnClickListener(this);
    mPlayButton.setVisibility(View.INVISIBLE);

    mAecToggle = (ToggleButton) findViewById(R.id.aecButton);

    audioFile = new PlayAudioFile(getApplicationContext());
    mAudioQ = new LinkedBlockingQueue<byte[]>();
    mRecordedFile = new RecordedAudioFile();

    mMediaPlayer = null;

}

@Override
public void onClick(View v) {
    switch (v.getId()) {
        case R.id.button1: {
            if (isRecording) {
                stopRecording();
                mRecordButton.setText("Record");
                mPlayButton.setVisibility(View.VISIBLE);
                mAecToggle.setVisibility(View.VISIBLE);

            } else {
                new RecordAudio().execute();
                if(mMediaPlayer != null) {
                    mMediaPlayer.stop();
                    mMediaPlayer.release();
                    mMediaPlayer = null;
                }
                mRecordButton.setText("Stop");
                mPlayButton.setVisibility(View.INVISIBLE);
                mAecToggle.setVisibility(View.INVISIBLE);
                playAudio();
            }
            break;
        }
        case R.id.button3:
            if(!isPlaying) {
                playRecordedAudio();
            }
            break;
        default:
            break;
    }
}

private void playAudio() {
    audioFile.playAudio();

}

private void stopRecording() {
    assert mAudioTask != null;
    mAudioTask.stop();
    try {
        this.audio_thread.join();
    }
    catch (InterruptedException e) {
        Log.e("AUDIO_REC","Interrupted waiting for audio thread, shutting down");
    }
    if(mAecToggle.isChecked()) {
        if (mAec != null) {
            Log.i("AUDIO_REC", "Releasing AEC");
            mAec.release();
        }
        Log.i("TOGGLE_BUTTON_AEC", "COMPLETE AEC PROCESS");
    }


    isRecording = false;
    audioFile.stopAudio();

    mRecordedFile.processAndSaveAudioQueue(mAudioQ);
}

private void playRecordedAudio() {

    final Thread filePlayThread = new Thread(new Runnable() {

        @Override
        public void run() {

            try {
                playAudioTrack(new File(mRecordedFile.getFilename()).getPath());
            } catch (IOException e) {
                e.printStackTrace();
            }


        }
    },"File Play Thread");

    filePlayThread.start();
}

private void playAudioTrack(String filePath) throws IOException
{
    // filePath is kept around globally for now, since we only have two sample sounds.
    if (filePath==null)
        return;

    int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);

    AudioTrack mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);


    if (mAudioTrack ==null){
        Log.d("TCAudio", "audio track is not initialised ");
        return;
    }

    int count = 1024 * 1024; // 1 MB read chunk

    byte[] byteData = null;
    File file = null;
    file = new File(filePath);

    byteData = new byte[(int)count];
    FileInputStream in = null;
    try {
        in = new FileInputStream( file );
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }

    int bytesread = 0, ret = 0;
    int size = (int) file.length();
    mAudioTrack.play();
    isPlaying = true;
    while (bytesread < size && isPlaying) {
        ret = in.read( byteData,0, count);
        if (ret != -1 && mAudioTrack != null) { // Write the byte array to the track
            mAudioTrack.write(byteData,0, ret);
            bytesread += ret;
        } else
            break;
    }
    in.close();

    mAudioTrack.release();
}

public class RecordAudio extends AsyncTask<Void, Void, Void> {

    @Override
    protected Void doInBackground(Void... params) {
        startRecording();
        return null;
    }

}

void startRecording() {

    mAudioTask = new AudioTask(mAudioQ, 1024);
    this.audio_thread = new Thread(mAudioTask);
    this.audio_thread.start();
    isRecording = true;
}


}
The log I get looks like this:

W/AudioRecord( 2118): obtainBuffer timed out (is the CPU pegged?) user=00027e84, server=00027e84
W/AudioRecord( 2118): obtainBuffer timed out (is the CPU pegged?) user=00027e84, server=00027e84
W/AudioTrack( 2118): obtainBuffer timed out (is the CPU pegged?) 0x5c2d4d68 name=0x1user=00077730, server=000765c6
W/AudioTrack( 2118): obtainBuffer timed out (is the CPU pegged?) 0x5c2d4d68 name=0x1user=00077730, server=000765c6
W/AudioRecord( 2118): obtainBuffer timed out (is the CPU pegged?) user=00027e84, server=00027e84
W/AudioFlinger( 1371): write blocked for 1817 msecs, 2 delayed writes, thread 0x40ae5008
After that, my capture and playback are not as good as I expected.

Even when I force the CPU speed to its highest setting, the log keeps repeating.


What could be the problem?

I was using a low-end device whose performance was not good enough for this, and that is why I got this error.
Once I upgraded the device to the latest Android version, the problem was solved.
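
Besides upgrading the device, two changes are commonly suggested for this warning (not taken from the code above, just a hedged sketch): size the capture buffer with AudioRecord.getMinBufferSize instead of AudioTrack.getMinBufferSize, and run the read loop at audio thread priority so it keeps draining the buffer while AudioTrack playback runs in parallel. A minimal sketch, assuming an 8000 Hz mono PCM-16 capture like the question's; the PrioritizedRecordLoop class name and the 4x over-allocation are illustrative choices, not part of the original code:

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Process;

class PrioritizedRecordLoop implements Runnable {
    private static final int SAMPLE_RATE = 8000; // same rate as the question's recorder
    private volatile boolean done = false;

    @Override
    public void run() {
        // Match the priority the platform gives its own audio threads so this
        // loop is not starved while playback runs at the same time.
        Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);

        // Use AudioRecord.getMinBufferSize (not AudioTrack's) and over-allocate,
        // so one late read does not immediately overrun the capture buffer.
        int minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf * 4);

        byte[] block = new byte[1024];
        recorder.startRecording();
        while (!done) {
            int n = recorder.read(block, 0, block.length);
            if (n <= 0) break;   // hand the block to the queue / file writer here
        }
        recorder.stop();
        recorder.release();
    }

    public void stop() { done = true; }
}

The larger buffer only buys headroom; the read loop still has to keep up, which is why the thread priority tends to matter more on slow devices.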

Can anyone help me solve this? Please at least suggest something I can try. The remaining files are below.
RecordedAudioFile.java

public class RecordedAudioFile {


//variables are declared and defined here 
public RecordedAudioFile() {
    bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}

public void processAndSaveAudioQueue(LinkedBlockingQueue<byte[]> audioQ) {
    this.mAudioQ = audioQ;
    fileWriteThread = new Thread(new Runnable() {

        @Override
        public void run() {
            writeAudioDataToFile();
        }
    },"File Write Thread");


    fileWriteThread.start();
}


private void writeAudioDataToFile(){
    byte data[] = new byte[bufferSize];
    String filename = getTempFilename();
    FileOutputStream os = null;

    try {
        os = new FileOutputStream(filename);
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    int read = 0;

    if(null != os){

        byte[] buf;
        while ((buf = mAudioQ.poll()) != null) {
                try {
                    os.write(buf);
                } catch (IOException e) {
                    e.printStackTrace();
                }

        }

        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    copyWaveFile(getTempFilename(),getFilename());
    deleteTempFile();
}

private String getTempFilename(){
//tempfile for storing temporary data        
}

private void copyWaveFile(String inFilename,String outFilename){
 //we will copy the captured data into a file here
}

private void WriteWaveFileHeader(
        FileOutputStream out, long totalAudioLen,
        long totalDataLen, long longSampleRate, int channels,
        long byteRate) throws IOException {

    //writing wave header here to the file
}

public String getFilename(){
    //getting the file name for copying the captured data into it
}

private void deleteTempFile() {
    File file = new File(getTempFilename());

    file.delete();
}

}
VoiceAudioRecord.java

public class VoiceAudioRecord extends AudioRecord {

public VoiceAudioRecord(int sampleRateInHz, int bufferSizeInBytes,
                        boolean noise, boolean gain, boolean echo)
        throws IllegalArgumentException {

    this(MediaRecorder.AudioSource.MIC, sampleRateInHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes, noise, gain, echo);
}
......

public VoiceAudioRecord(int audioSource, int sampleRateInHz,
                        int channelConfig, int audioFormat, int bufferSizeInBytes,
                        boolean noise, boolean gain, boolean echo)
        throws IllegalArgumentException {

    super(audioSource, sampleRateInHz, channelConfig, audioFormat,
            bufferSizeInBytes);
}
}