Java 使用TarsosDSP在Android上评估.wav文件的基音

Java 使用TarsosDSP在Android上评估.wav文件的基音,java,android,wav,tarsosdsp,pitch-detection,Java,Android,Wav,Tarsosdsp,Pitch Detection,我正在尝试编写一个简单的Android应用程序,在.wav文件播放时显示它的不同音高。我正在使用TarsosDSP进行音高处理和音频跟踪来播放文件 在我深入研究代码之前,我正在使用JRE 1.8.0运行Android Studio 3.4.2 我的版本是23 根据我对TarsosDSP工作原理的理解,我将wav流连接到AudioDispatcher对象,连接处理器(播放器本身和音高计算器),然后将dispatcher分配到一个线程并启动它以启动一切。从我的理解来看,我也可能在做一些愚蠢的事情(某

我正在尝试编写一个简单的Android应用程序,在.wav文件播放时显示它的不同音高。我正在使用TarsosDSP进行音高处理和音频跟踪来播放文件

在我深入研究代码之前,我正在使用JRE 1.8.0运行Android Studio 3.4.2 我的版本是23

根据我对TarsosDSP工作原理的理解,我将wav流连接到AudioDispatcher对象,连接处理器(播放器本身和音高计算器),然后将dispatcher分配到一个线程并启动它以启动一切。从我的理解来看,我也可能在做一些愚蠢的事情(某处…)

我在使用AudioTrack Builder时遇到了问题,因为我发现很多示例都使用了现在不推荐使用的使用AudioManager.STREAM\u MUSIC的构造函数

更新:我找到了一个或多或少做了我想做的事情的人(只是需要谷歌从韩语翻译过来):

重构后,我能够将AudioPlayer的内容转移到AudioMethods类中

更新 所以在更新代码之后,我得到了正确播放的文件,音高评估似乎在工作,但我只得到了两个样本,它们在我真正听到音频播放之前在logcat中注册。有人能解释一下吗?另外,如果有人能告诉我如何获取两个以上的样本,我想知道这是在哪里设置/计算的

D/EBB Inside Run: Pitch:372.05637
D/EBB Inside Run: Pitch:412.30508
主要活动

public class MainActivity extends AppCompatActivity {
private TextView local_NoteText;
private TextView local_PitchText;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    local_PitchText = findViewById(R.id.pitchText);
    local_NoteText = findViewById(R.id.noteText);

}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    int id = item.getItemId();

    //noinspection SimplifiableIfStatement
    if (id == R.id.action_settings) {
        return true;
    }

    return super.onOptionsItemSelected(item);
}

public void ProcessTone(View view) throws IOException {
    //get the test file
    final AssetFileDescriptor afd = getResources().openRawResourceFd(R.raw.avery_test);
    AudioMethods audioMethods = new AudioMethods();

    TarsosDSPAudioFormat tarsosDSPAudioFormat = new TarsosDSPAudioFormat(TarsosDSPAudioFormat.Encoding.PCM_SIGNED,
            16000,
            16,
            1,
            2,
            16000,
            ByteOrder.BIG_ENDIAN.equals(ByteOrder.nativeOrder()));

    audioMethods.getPitchFromFile(afd, MainActivity.this, tarsosDSPAudioFormat, local_NoteText,local_PitchText);
  }

}
音频方法

public class AudioMethods {
public static AudioDispatcher dispatcher;
public float pitchInHz;
public int millSecond;

 public void getPitchFromFile(final AssetFileDescriptor afd, final Activity activity, TarsosDSPAudioFormat tarsosDSPAudioFormat,final TextView pitchText,final TextView noteText) {
    try {
        releaseDispatcher(dispatcher);

        FileInputStream fileInputStream = new FileInputStream(afd.getFileDescriptor());
        fileInputStream.skip(afd.getStartOffset());

       // I only need this to get the number of elapsed seconds if the dispatcher doesn't detect when the audio file is finished.
       MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
        mediaMetadataRetriever.setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength());
        String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
        millSecond = Integer.parseInt(durationStr);

        dispatcher = new AudioDispatcher(new UniversalAudioInputStream(fileInputStream, tarsosDSPAudioFormat), 2048, 0);
        final AudioProcessor playerProcessor = new AndroidAudioPlayer(tarsosDSPAudioFormat, 16000, 0);

        dispatcher.addAudioProcessor(playerProcessor);

        PitchDetectionHandler pitchDetectionHandler = new PitchDetectionHandler() {

           public void handlePitch(final PitchDetectionResult res, AudioEvent e) {
               pitchInHz  = res.getPitch();
               //if(pitchInHz > 0){Log.d("EBB Outside Run","Pitch:" + pitchInHz);}
                activity.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                                if(pitchInHz > 0){Log.d("EBB Inside Run","Pitch:" + pitchInHz);}
                                pitchText.setText(pitchInHz + "");
                                processPitch(pitchInHz);
                    }
                });
            }

            public void processPitch(float pitchInHz) {

                if(pitchInHz >= 110 && pitchInHz < 123.47) {
                    //A
                    noteText.setText("A");
                }
                else if(pitchInHz >= 123.47 && pitchInHz < 130.81) {
                    //B
                    noteText.setText("B");
                }
                else if(pitchInHz >= 130.81 && pitchInHz < 146.83) {
                    //C
                    noteText.setText("C");
                }
                else if(pitchInHz >= 146.83 && pitchInHz < 164.81) {
                    //D
                    noteText.setText("D");
                }
                else if(pitchInHz >= 164.81 && pitchInHz <= 174.61) {
                    //E
                    noteText.setText("E");
                }
                else if(pitchInHz >= 174.61 && pitchInHz < 185) {
                    //F
                    noteText.setText("F");
                }
                else if(pitchInHz >= 185 && pitchInHz < 196) {
                    //G
                    noteText.setText("G");
                }
            }
        };

        AudioProcessor pitchProcessor = new PitchProcessor(PitchProcessor.PitchEstimationAlgorithm.FFT_YIN, 44100, 2048, pitchDetectionHandler);
        dispatcher.addAudioProcessor(pitchProcessor);
        dispatcher.run();

        Thread audioThread = new Thread(dispatcher, "Audio Thread");
        audioThread.start();

    } catch (Exception e) {
        e.printStackTrace();
    }

}

public void releaseDispatcher(AudioDispatcher dispatcher)
{
    if(dispatcher != null)
    {
        if(!dispatcher.isStopped())
            dispatcher.stop();

        dispatcher = null;
    }
}

protected void onStop(AudioDispatcher dispatcher) {
    //super.onStop();
    releaseDispatcher(dispatcher);
}

 //I don't need these guys yet
 /*public void stopRecording()
{
    releaseDispatcher();
}


@Override
protected void onStop() {
    super.onStop();
    releaseDispatcher();
}*/

}
公共类音频方法{
公共静态音频调度器;
公共浮球;
第二次公开辩论;
public void getPitchFromFile(最终资产文件描述符afd、最终活动活动活动、TARSOSSDSPAUDIOFORMAT TARSOSSDSPAUDIOFORMAT、最终文本视图pitchText、最终文本视图noteText){
试一试{
释放调度员(调度员);
FileInputStream FileInputStream=新的FileInputStream(afd.getFileDescriptor());
fileInputStream.skip(afd.getStartOffset());
//如果dispatcher没有检测到音频文件何时完成,我只需要这个来获取经过的秒数。
MediaMetadataRetriever MediaMetadataRetriever=新的MediaMetadataRetriever();
setDataSource(afd.getFileDescriptor(),afd.getStartOffset(),afd.getLength());
String durationStr=mediaMetadataRetriever.extractMetadata(mediaMetadataRetriever.METADATA\u KEY\u DURATION);
millSecond=Integer.parseInt(durationStr);
dispatcher=new AudioDispatcher(新的UniversalAudioInputStream(fileInputStream,tarsosDSPAudioFormat),2048,0);
最终音频处理器playerProcessor=新的AndroidAudioPlayer(tarsosDSPAudioFormat,16000,0);
dispatcher.addAudioProcessor(播放处理器);
PitchDetectionHandler PitchDetectionHandler=新PitchDetectionHandler(){
公共无效handlePitch(最终PitchDetectionResult res,AudioEvent e){
pitchInHz=res.getPitch();
//如果(pitchInHz>0){Log.d(“运行外的衰退”,“俯仰:+pitchInHz”)}
activity.runOnUiThread(新的Runnable(){
@凌驾
公开募捐{
如果(pitchInHz>0){Log.d(“运行中的衰退”,“俯仰:+pitchInHz”)}
pitchText.setText(pitchInHz+“”);
过程螺距(PITCINHZ);
}
});
}
公共变桨(浮动变桨){
如果(pitchInHz>=110&&pitchInHz<123.47){
//A
noteText.setText(“A”);
}
否则如果(pitchInHz>=123.47和&pitchInHz<130.81){
//B
noteText.setText(“B”);
}
否则如果(pitchInHz>=130.81和&pitchInHz<146.83){
//C
noteText.setText(“C”);
}
否则如果(pitchInHz>=146.83和&pitchInHz<164.81){
//D
noteText.setText(“D”);
}
否则,如果(pitchInHz>=164.81&&pitchInHz=174.61&&pitchInHz<185){
//F
noteText.setText(“F”);
}
否则如果(pitchInHz>=185&&pitchInHz<196){
//G
noteText.setText(“G”);
}
}
};
音频处理器pitchProcessor=新的pitchProcessor(pitchProcessor.PitchStimationAlgorithm.FFT_-YIN,441002048,pitchDetectionHandler);
dispatcher.addAudioProcessor(音频处理器);
dispatcher.run();
线程audioThread=新线程(dispatcher,“音频线程”);
audioThread.start();
}捕获(例外e){
e、 printStackTrace();
}
}
公共无效释放调度器(音频调度器)
{
if(调度程序!=null)
{
如果(!dispatcher.isStopped())
dispatcher.stop();
dispatcher=null;
}
}
顶部受保护的void(AudioDispatcher){
//super.onStop();
释放调度员(调度员);
}
//我还不需要这些人
/*公开作废停止录制()
{
releasedpatcher();
}
@凌驾
受保护的void onStop(){
super.onStop();
releasedpatcher();
}*/
}