Android app freezes after a few seconds of successfully recording and processing audio


After roughly 10 seconds of expected behavior, my app freezes, and I am having a hard time figuring out why:

  • Is there a memory problem?
  • Is there a problem with the audio buffers?

The app is supposed to capture audio from the microphone, process it to detect the peak frequency, and display the result on screen. I have a main activity in which a thread is defined to update the views. There are two further, separately implemented threads: one that records audio via the AudioRecord class, and one that processes the audio signal and detects the peak frequency. Recording and processing appear to work and produce the expected results. After a while, however (the exact time varies from one run to the next), the app freezes, and the log then shows these messages repeatedly:

W/art: Suspending all threads took: x.xxxms

I/art: Background sticky concurrent mark sweep GC freed ...

The number of objects freed by the GC can be quite high (in one case: 62119 (3MB)), and I would like to know where I am creating that many objects and how to avoid it. I suspect a problem with the audio buffers, but I am not sure how best to diagnose this.
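One way to narrow this down (a minimal diagnostic sketch using only the standard Runtime and Log APIs, not code that is already in the app) would be to log the used heap from inside each thread's loop and see which loop correlates with the growth that the GC messages report:

// diagnostic sketch: sample the used heap from inside a loop so the log
// shows which thread is allocating heavily before each GC burst
Runtime rt = Runtime.getRuntime();
long usedKb = (rt.totalMemory() - rt.freeMemory()) / 1024;
Log.d("StringTuner", Thread.currentThread().getName() + " heap used: " + usedKb + " kB");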

Here is the relevant code:

MainActivity.java

public class MainActivity extends AppCompatActivity {

    private static final String TAG = "StringTuner";

    VerticalLineDrawingView drawView;       // view to draw frequency indicator
    TextView textView;                      // view for text output

    private AudioRecorderThread recorder;   // thread for recording audio
    private ProcessingThread processor;     // thread for processing audio signal
    private Thread viewUpdater;             // thread for updating views

    @Override
    public void onDestroy() {
        super.onDestroy();
        recorder.releaseAudioRecord();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {

        Log.d(TAG, "Creating the main activity");
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        drawView = (VerticalLineDrawingView) this.findViewById(R.id.drawView_NoteIndicator);
        textView = (TextView) this.findViewById(R.id.textView_TextOutput);


        Log.d(TAG, "Setting up AudioRecord and processing thread");
        recorder = new AudioRecorderThread();
        recorder.start();

        processor = new ProcessingThread(recorder);
        processor.start();

        // thread for updating view
        if (viewUpdater == null){
            viewUpdater = new Thread() {
                public void run() {
                try {
                    while (recorder != null && processor != null) {
                        runOnUiThread(new Runnable() {
                            public void run() {
                            if (processor != null){
                                textView.setText(String.valueOf(processor.getPeakFrequency()) + " Hz\n" + processor.getProcessingTime() + " ms");
                                drawView.reposition((int) (processor.getPosition() * drawView.getViewWidth()));
                            }
                            }
                        });
                        sleep(1);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    viewUpdater = null;
                }
                }
            };
            viewUpdater.start();
        }

    }

}
AudioRecorderThread.java

public class AudioRecorderThread extends Thread {

    private AudioRecord audioRecord;
    private boolean isRecording;
    private int nSample;
    private int frameByteSize;
    private byte[] frame;
    private byte[] buffer;

    public AudioRecorderThread(){

        int channel     = AudioFormat.CHANNEL_IN_MONO;          // channel configuration
        int encoding    = AudioFormat.ENCODING_PCM_16BIT;       // audio encoding
        int fS          = getMinimumSampleRate();               // sample rate [Hz]

        // get minimum buffer size for AudioRecord session
        int minBuffer   = AudioRecord.getMinBufferSize(fS, channel, encoding);

        int src         = MediaRecorder.AudioSource.MIC;        // audio source

        // instantiate AudioRecord session
        audioRecord     = new AudioRecord(src, fS, channel, encoding, minBuffer);
        if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
            throw new RuntimeException("AudioRecord session could not be initialized.");
        }

        nSample     = getNextPowerOf2(fS);                  // frequency resolution below 1 Hz
        frameByteSize   = 2 * nSample;                          // 16bit -> 1 frame = 2* sample size
        frame = new byte[frameByteSize];

        buffer          = new byte[frameByteSize];              // buffer for reading data

    }

    public AudioRecord getAudioRecord(){
        return audioRecord;
    }

    public boolean isRecording(){
        return this.isAlive() && isRecording;
    }

    public void startRecording(){
        try{
            audioRecord.startRecording();
            isRecording = true;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording(){
        try{
            audioRecord.stop();
            isRecording = false;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void releaseAudioRecord(){
        try{
            audioRecord.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public byte[] getFrame(){
        audioRecord.read(buffer, 0, 800);                     // one sample = 2 bytes (16 bits)
        System.arraycopy(frame,800,frame,0,800);
        System.arraycopy(buffer,0,frame,frameByteSize-800,800);
        return frame;
    }

    public int getFrameByteSize(){
        return frameByteSize;
    }

    public double getFrequencySteps(){
        return ((double) audioRecord.getSampleRate()) / ((double) nSample);
    }

    public void run() {
        startRecording();

        // fill buffer
        audioRecord.read(buffer, 0, frameByteSize);             // read 1 frame of data into buffer
        System.arraycopy(buffer, 0, frame, 0, frameByteSize);

    }
}
ProcessingThread.java

public class ProcessingThread extends Thread{

    private AudioRecorderThread recorder;
    private volatile Thread _thread;
    private int bytesPerSample;

    private long t_process;
    private double f_peak;
    private double position;

    private double[] stringFrequenciesLog = new double[] {Math.log(82.4), Math.log(110), Math.log(146.8), Math.log(196), Math.log(246.9), Math.log(329.6)};
    private double[] stringPositions = new double[] {2.0/9.0, 3.0/9.0, 4.0/9.0, 5.0/9.0, 6.0/9.0, 7.0/9.0};

    public ProcessingThread(AudioRecorderThread recorder){

        this.recorder = recorder;
        AudioRecord audioRecord = recorder.getAudioRecord();

        if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT){
            bytesPerSample = 2;
        }
        else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT){
            bytesPerSample = 1;
        }

    }

    public void start() {
        _thread = new Thread(this);
        _thread.start();
    }

    public void stopDetection(){
        _thread = null;
    }

    public void run() {
        try {
            double maxValue;
            long t0;
            byte[] frame;
            short[] sample = new short[recorder.getFrameByteSize()/2];
            double[] sampleAsDouble = new double[sample.length];
            double[] magnitudes;
            FastFourierTransform fft = new com.dsp.FastFourierTransform();

            Thread thisThread = Thread.currentThread();
            while (_thread == thisThread) {

                t0 = System.nanoTime();

                // read recorded audio data
                frame = recorder.getFrame();

                // processing
                if (frame != null) {
                    // sound detected
                    sample = decodeSample(frame);

                    for (int j=0;j<sample.length;j++) {
                        sampleAsDouble[j] = (double)sample[j];
                    }

                    // todo: bandpass filter the audio signal

                    // todo: fft the audio signal
                    magnitudes = fft.getMagnitudes(sampleAsDouble);

                    // todo: identify peak frequency
                    maxValue = magnitudes[0];
                    f_peak   = 0.0;
                    for (int i=1; i<magnitudes.length; i++) {
                        if (magnitudes[i]>maxValue) {
                            maxValue = magnitudes[i];
                            f_peak = (double) i * recorder.getFrequencySteps();
                        }
                    }

                    frequency2position(f_peak);

                    t_process = System.nanoTime() - t0;

                }
                else{
                    // no sound detected
                    f_peak = -1;
                    position = -5;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private short[] decodeSample(byte[] buffer) {

        short[] sample;

        if (bytesPerSample==2) {

            sample = new short[buffer.length/2];

            for (int i = 0; i < buffer.length; i += 2) {
                sample[i/2] = (short) ((buffer[i]) | buffer[i + 1] << 8);
            }

        } else {

            sample = new short[buffer.length];

            for (int i = 0; i < buffer.length; i++) {
                sample[i] = (short) buffer[i];
            }

        }

        return sample;

    }

    private void frequency2position(double f){

        // check f is in valid frequency range
        double f_min = 30;
        double f_max = 3000;

        if (f<f_min) {
            position = -1;
            return;
        }

        if (f>f_max) {
            position = -2;
            return;
        }

        double f_log = Math.log(f);

        // find closest string frequency
        int closestIndex = 0;
        double distance = Math.abs(f_log - stringFrequenciesLog[closestIndex]);
        while ((closestIndex < stringFrequenciesLog.length-1) && (Math.abs(f_log - stringFrequenciesLog[closestIndex+1])) < distance) {
            distance = Math.abs(f_log - stringFrequenciesLog[closestIndex+1]);
            closestIndex++;
        }

        double alpha = f_log/stringFrequenciesLog[closestIndex];
        position = alpha * stringPositions[closestIndex];

        if (position<0) {
            position = -3;
            return;
        }

        if (position>1) {
            position = -4;
        }

    }

    public double getPosition(){
        return position;
    }

    public double getPeakFrequency(){
        return f_peak;
    }

    public long getProcessingTime(){
        return t_process/1000000; // in ms
    }

}

You are performing a lot of work on the main (UI) thread, which is why it hangs. To avoid this, use an AsyncTask.
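A minimal sketch of that suggestion, assuming the fields from the question's MainActivity (processor, textView, drawView) and a hypothetical ViewUpdateTask name: instead of posting a new Runnable to the UI thread every millisecond, an AsyncTask can publish progress at a throttled rate, and onProgressUpdate (which runs on the UI thread) updates the views:

private class ViewUpdateTask extends AsyncTask<Void, Double, Void> {

    @Override
    protected Void doInBackground(Void... params) {
        // runs off the UI thread
        while (!isCancelled()) {
            // hand the latest results to the UI thread
            publishProgress(processor.getPeakFrequency(), processor.getPosition());
            try {
                Thread.sleep(33);           // ~30 updates per second is plenty for a tuner display
            } catch (InterruptedException e) {
                return null;
            }
        }
        return null;
    }

    @Override
    protected void onProgressUpdate(Double... values) {
        // runs on the UI thread
        textView.setText(values[0] + " Hz\n" + processor.getProcessingTime() + " ms");
        drawView.reposition((int) (values[1] * drawView.getViewWidth()));
    }
}

It would be started from onCreate with new ViewUpdateTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR) and cancelled in onDestroy. Independent of where the updates run, two allocation sources in the posted code line up with the GC log: the viewUpdater thread allocates a new Runnable roughly every millisecond, and decodeSample() allocates a new short[] for every frame; throttling the update rate and reusing one preallocated array should remove most of that churn.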