Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/363.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/3/wix/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java 记谱记录仪的调试与解决_Java_Android_Signal Processing_Fft - Fatal编程技术网

Java 记谱记录仪的调试与解决

Java 记谱记录仪的调试与解决,java,android,signal-processing,fft。我目前正在尝试创建一个Android应用程序:用户按下按钮后,应用程序收听音乐,找到节奏,并按以下方式从节奏中收集每个四分音符的音符信息。完整步骤与代码见下文。

我目前正在尝试创建一个Android应用程序,其中用户按下按钮,应用程序收听音乐,找到节奏,通过以下方式从节奏中收集每个季度音符的音符信息:

  • 接收音频字节数组
  • FFT(查找频率)
  • 使用频率确定该四分之一音符的音符
  • 将该音符指定给以后将显示为乐谱的数组
这是我目前所做的代码:

package com.tentmaker.musicnotationrecorder;

import java.io.IOException;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.MediaRecorder.AudioSource;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.Toast;

import com.badlogic.gdx.audio.analysis.FFT;

/**
 * Activity that records audio from the microphone, runs an FFT over one
 * captured buffer, and (eventually) maps frequencies to musical notes.
 *
 * <p>A single button toggles recording: first press starts a MediaRecorder
 * session and captures one PCM buffer for analysis; second press stops it.
 */
public class Record extends Activity implements OnClickListener {

    /** Capture sample rate in Hz; must match both AudioRecord and the FFT bin math. */
    private static final int SAMPLE_RATE = 8000;

    Button btnRecord;
    private MediaRecorder mRecorder = null;
    private static String mFileName = null;
    float[] fftArray;

    // True while a recording session is in progress; toggled by buttonEvent().
    private boolean recording;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_record);

        // BUG FIX: mFileName was never assigned, so setOutputFile(null) would
        // fail on the first recording. Record into app-private storage.
        mFileName = getFilesDir().getAbsolutePath() + "/record.3gp";

        btnRecord = (Button) findViewById(R.id.recordButton);
        btnRecord.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        try {
            switch (v.getId()) {
                case R.id.recordButton:
                    buttonEvent();
                    return;
                default:
                    return;
            }
        } catch (Exception e) {
            Toast.makeText(getApplicationContext(), e.toString(), Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Toggles between starting and stopping a recording session.
     *
     * <p>BUG FIX: the original tested the flag backwards (it "started" when
     * already recording) and never toggled it, so the alternate branch called
     * stop() on a recorder that was never created.
     */
    public void buttonEvent() {
        if (!recording) {
            recording = true;
            startRecording();
            frequencyCollection();
            process(fft());
        } else {
            recording = false;
            stopRecording();
        }
    }

    /** Configures and starts a MediaRecorder writing AMR-NB audio to mFileName. */
    public void startRecording() {
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mRecorder.setOutputFile(mFileName);
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        Toast.makeText(getApplicationContext(), "recording...", Toast.LENGTH_LONG).show();
        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Toast.makeText(getApplicationContext(), "prepare() failed", Toast.LENGTH_LONG).show();
            // BUG FIX: do not call start() on a recorder whose prepare() failed.
            mRecorder.release();
            mRecorder = null;
            return;
        }
        mRecorder.start();
    }

    /** Stops and releases the recorder; safe to call when nothing is recording. */
    private void stopRecording() {
        // BUG FIX: guard against a stop without a matching successful start.
        if (mRecorder == null) {
            return;
        }
        mRecorder.stop();
        mRecorder.release();
        mRecorder = null;
        Toast.makeText(getApplicationContext(), "done recording", Toast.LENGTH_LONG).show();
    }

    /**
     * Runs a forward FFT over fftArray, reports the dominant frequency once,
     * and returns the inverse transform of the magnitude/phase-reconstructed
     * spectrum (length N/2, matching libgdx's FFT.inverse contract).
     *
     * @return the reconstructed time-domain signal
     */
    private float[] fft() {
        // BUG FIX: the original used fs = 8374 (and built the FFT with 8373)
        // while the audio was captured at 8000 Hz, so every bin->frequency
        // conversion was wrong. Use the single shared sample rate.
        final int fs = SAMPLE_RATE;

        // BUG FIX: zero-pad to the next power of two — libgdx's FFT requires a
        // power-of-two timeSize, and the original "Zero Pad" loop was a no-op
        // because N was set to fftArray.length itself.
        int n = 1;
        while (n < fftArray.length) {
            n <<= 1;
        }
        final int N = n;

        float[] padded = new float[N];
        System.arraycopy(fftArray, 0, padded, 0, fftArray.length);

        float[] realMod = new float[N];
        float[] imagMod = new float[N];
        float[] res = new float[N / 2];

        FFT fft = new FFT(N, fs);
        fft.forward(padded);
        float[] tmpi = fft.getImaginaryPart();
        float[] tmpr = fft.getRealPart();

        double peakMag = -1.0;
        double peakFreq = 0.0;
        for (int i = 0; i < N; i++) {
            double re = tmpr[i];
            double im = tmpi[i];
            double mag = Math.sqrt(re * re + im * im);
            double phase = Math.atan2(im, re);

            /**** Reconstruction ****/
            realMod[i] = (float) (mag * Math.cos(phase));
            imagMod[i] = (float) (mag * Math.sin(phase));

            // BUG FIX: the original raised a Toast for EVERY bin, queueing
            // thousands of toasts. Track the dominant bin (below Nyquist,
            // skipping the DC bin) and report it once after the loop.
            if (i > 0 && i < N / 2 && mag > peakMag) {
                peakMag = mag;
                peakFreq = (double) i * (double) fs / (double) N;
            }
        }

        Toast.makeText(getApplicationContext(),
                       "Frequency: " + Double.toString(peakFreq) +
                       "Magnitude: " + Double.toString(peakMag),
                       Toast.LENGTH_LONG).show();

        fft.inverse(realMod, imagMod, res);
        return res;
    }

    /** TODO: map FFT output to note names and append them to the score array. */
    private void process(float[] fft) {

    }

    /**
     * Captures one buffer of 16-bit mono PCM from the microphone and stores it
     * as float samples in fftArray.
     */
    private void frequencyCollection() {
        // BUG FIX: CHANNEL_CONFIGURATION_MONO is deprecated; CHANNEL_IN_MONO is
        // the supported constant for AudioRecord input.
        int channelConfig = AudioFormat.CHANNEL_IN_MONO;
        int format = AudioFormat.ENCODING_PCM_16BIT;
        int sampleRate = SAMPLE_RATE;
        int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, format);
        AudioRecord audioInput = new AudioRecord(AudioSource.MIC, sampleRate, channelConfig, format, bufferSize);

        byte[] audioBuffer = new byte[bufferSize];
        audioInput.startRecording();
        audioInput.read(audioBuffer, 0, bufferSize);
        // BUG FIX: release the AudioRecord; the original leaked the mic session.
        audioInput.stop();
        audioInput.release();

        // BUG FIX: the stream is 16-bit PCM, so each sample spans two bytes
        // (little-endian). The original copied raw bytes one-per-sample, which
        // mangles the waveform and doubles the apparent sample count.
        int sampleCount = bufferSize / 2;
        float[] samples = new float[sampleCount];
        for (int i = 0; i < sampleCount; i++) {
            short s = (short) ((audioBuffer[2 * i + 1] << 8) | (audioBuffer[2 * i] & 0xFF));
            samples[i] = s;
        }
        fftArray = samples;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.record, menu);
        return true;
    }

}
package com.tentmaker.MusicStationRecorder;
导入java.io.IOException;
导入android.app.Activity;
导入android.media.AudioFormat;
导入android.media.AudioManager;
导入android.media.AudioRecord;
导入android.media.AudioTrack;
导入android.media.MediaRecorder;
导入android.media.MediaRecorder.AudioSource;
导入android.os.Bundle;
导入android.view.Menu;
导入android.view.view;
导入android.view.view.OnClickListener;
导入android.widget.Button;
导入android.widget.Toast;
导入com.badlogic.gdx.audio.analysis.FFT;
公共类记录扩展活动实现OnClickListener{
按钮BTN记录;
专用媒体记录器mRecorder=null;
私有静态字符串mFileName=null;
漂浮的焦油;
私有布尔记录;
@凌驾
创建时受保护的void(Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_记录);
btnRecord=(按钮)findViewById(R.id.recordButton);
btnRecord.setOnClickListener(此);
}
@凌驾
公共void onClick(视图v){
试一试{
开关(v.getId()){
案例R.id.recordButton:
buttonEvent();
返回;
违约:
返回;
}
}
捕获(例外e){
Toast.makeText(getApplicationContext(),e.toString(),Toast.LENGTH_LONG).show();
}
}
公共无效按钮(){
如果(录音){
startRecording();
频率采集();
过程(fft());
}
如果(!录制),则为else{
停止录制();
}
}
公共无效开始记录(){
mRecorder=新的MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.ThreeGPP);
mRecorder.setOutputFile(mFileName);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
Toast.makeText(getApplicationContext(),“录制…”,Toast.LENGTH_LONG.show();
试一试{
mRecorder.prepare();
}捕获(IOE异常){
Toast.makeText(getApplicationContext(),“prepare()失败”,Toast.LENGTH_LONG.show();
}
mRecorder.start();
}
@抑制警告(“未使用”)
私有void stopRecording(){
mRecorder.stop();
mRecorder.release();
mRecorder=null;
Toast.makeText(getApplicationContext(),“录制完毕”,Toast.LENGTH_LONG.show();
}
@抑制警告(“未使用”)
私有浮点[]fft(){
int fs=8374;
int N=fftArray.length;
浮点[]fftu cpx、tmpr、tmpi;
浮动[]res=新浮动[N/2];
//float[]mod_spec=新的float[array.length/2];
浮点[]实模=新浮点[N];
浮动[]imag_mod=新浮动[N];
double[]实=新的双精度[N];
double[]imag=新的double[N];
double[]mag=新的double[N];
双[]相位=新双[N];
float[]新数组=新浮点[N];
//零位信号
这些步骤不适用于大多数实际的现场音乐,因为大多数音乐音高由非常复杂的FFT频率序列表示(远远超过每个“四分音符”一个最大频率)。请查阅相关的研究生研究论文,了解如何在有限的情况下解决您的问题。

现在我想看看我是否可以用一个由单个音符组成的简单旋律来做这样的事情。使用这些步骤是否可行,或者即使是这样简单的事情也需要一个研究生级别的项目?如果你的旋律是由一个由严格节奏计时器驱动的正弦波发生器产生的,那么这是可能的——在这种情况下你会建议我怎么做?