如何在Java中同步TargetDataLine和SourceDataLine(同步音频录制和播放)

如何在Java中同步TargetDataLine和SourceDataLine(同步音频录制和播放),java,audio,synchronization,audio-streaming,audio-recording,Java,Audio,Synchronization,Audio Streaming,Audio Recording,我正在尝试创建一个Java应用程序,它能够播放音频回放,记录用户的声音,并告诉用户是否在正确的时间唱歌 目前,我只关注录制和播放音频(曲调识别超出范围) 为此,我使用了Java audio API中的TargetDataLine和SourceDataLine。首先,我启动音频录制,然后启动音频播放。因为我想确保用户在正确的时间唱歌,所以我需要在录制的音频和播放的音频之间保持同步 例如,如果音频播放在音频录制后1秒开始,我知道我将忽略录制缓冲区中的第一秒数据 我使用下面的代码进行测试(代码远非完美

我正在尝试创建一个Java应用程序,它能够播放音频回放,记录用户的声音,并告诉用户是否在正确的时间唱歌

目前,我只关注录制和播放音频(曲调识别超出范围)

为此,我使用了Java audio API中的TargetDataLine和SourceDataLine。首先,我启动音频录制,然后启动音频播放。因为我想确保用户在正确的时间唱歌,所以我需要在录制的音频和播放的音频之间保持同步

例如,如果音频播放在音频录制后1秒开始,我知道我将忽略录制缓冲区中的第一秒数据

我使用下面的代码进行测试(代码远非完美,但它只是用于测试目的)

import javax.sound.sampled.*;
import java.io.File;
import java.io.IOException;

class AudioSynchro {

    private TargetDataLine targetDataLine;
    private SourceDataLine sourceDataLine;
    private AudioInputStream ais;
    private AudioFormat recordAudioFormat;
    private AudioFormat playAudioFormat;

    public AudioSynchro(String sourceFile) throws IOException, UnsupportedAudioFileException {
        ais = AudioSystem.getAudioInputStream(new File(sourceFile));
        recordAudioFormat = new AudioFormat(44100f, 16, 1, true, false);
        playAudioFormat = ais.getFormat();
    }

    //列举混合器
    public void enumerate() {
        try {
            Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
            System.out.println("Available mixers:");
            for (int cnt = 0; cnt < mixerInfo.length; cnt++) {
                System.out.println(mixerInfo[cnt].getName());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    //初始化数据线
    public void initDataLines() throws LineUnavailableException {
        Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();

        DataLine.Info targetDataLineInfo = new DataLine.Info(TargetDataLine.class, recordAudioFormat);
        Mixer targetMixer = AudioSystem.getMixer(mixerInfo[5]);
        targetDataLine = (TargetDataLine) targetMixer.getLine(targetDataLineInfo);

        DataLine.Info sourceDataLineInfo = new DataLine.Info(SourceDataLine.class, playAudioFormat);
        Mixer sourceMixer = AudioSystem.getMixer(mixerInfo[3]);
        sourceDataLine = (SourceDataLine) sourceMixer.getLine(sourceDataLineInfo);
    }

    public void startRecord() throws LineUnavailableException {
        AudioInputStream stream = new AudioInputStream(targetDataLine);

        targetDataLine.open(recordAudioFormat);

        byte currentByteBuffer[] = new byte[512];

        Runnable readAudioStream = new Runnable() {
            @Override
            public void run() {
                int count = 0;
                try {
                    targetDataLine.start();
                    while ((count = stream.read(currentByteBuffer)) != -1) {
                        //Do something
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        Thread thread = new Thread(readAudioStream);
        thread.start();
    }

    public void startPlay() throws LineUnavailableException {
        sourceDataLine.open(playAudioFormat);
        sourceDataLine.start();

        Runnable playAudio = new Runnable() {
            @Override
            public void run() {
                try {
                    int nBytesRead = 0;
                    byte[] abData = new byte[8192];
                    while (nBytesRead != -1) {
                        nBytesRead = ais.read(abData, 0, abData.length);
                        if (nBytesRead >= 0) {
                            int nBytesWritten = sourceDataLine.write(abData, 0, nBytesRead);
                        }
                    }
                    sourceDataLine.drain();
                    sourceDataLine.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        Thread thread = new Thread(playAudio);
        thread.start();
    }

    public void printStats() {
        Runnable stats = new Runnable() {
            @Override
            public void run() {
                while (true) {
                    long targetDataLinePosition = targetDataLine.getMicrosecondPosition();
                    long sourceDataLinePosition = sourceDataLine.getMicrosecondPosition();
                    long delay = targetDataLinePosition - sourceDataLinePosition;
                    System.out.println(targetDataLinePosition + "\t" + sourceDataLinePosition + "\t" + delay);

                    try {
                        Thread.sleep(20);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
        Thread thread = new Thread(stats);
        thread.start();
    }

    public static void main(String[] args) {
        try {
            AudioSynchro audio = new AudioSynchro("C:\\dev\\intellij-ws\\guitar-challenge\\src\\main\\resources\\com\\ouestdev\\guitarchallenge\\al_adagi.mid");
            audio.enumerate();
            audio.initDataLines();
            audio.startRecord();
            audio.startPlay();
            audio.printStats();
        } catch (IOException | LineUnavailableException | UnsupportedAudioFileException e) {
            e.printStackTrace();
        }
    }
}

该代码初始化2条数据线，开始音频录制，开始音频播放并显示统计信息。enumerate()方法用于显示系统上可用的混合器。您必须根据您的系统来更改initDataLines()方法中使用的混合器，以执行您自己的测试。printStats()方法启动一个线程，以微秒为单位询问2条数据线的位置。这是我试图用来跟踪同步的数据。我观察到的是，这两条数据线并非始终保持同步。以下是我的输出控制台的简短摘录：

130000 130000

15000
import javax.sound.sampled.*;
import java.io.File;
import java.io.IOException;

class AudioSynchro {

private TargetDataLine targetDataLine;
private SourceDataLine sourceDataLine;
private AudioInputStream ais;
private AudioFormat recordAudioFormat;
private AudioFormat playAudioFormat;

/**
 * Builds the synchronizer for the given playback file.
 *
 * Opens an AudioInputStream on the file and remembers two formats: the
 * file's own format for playback, and a fixed 44.1 kHz / 16-bit / mono /
 * signed / little-endian format for capture.
 *
 * @param sourceFile path of the audio file to play back
 * @throws IOException                   if the file cannot be read
 * @throws UnsupportedAudioFileException if the file format is not recognized
 */
public AudioSynchro(String sourceFile) throws IOException, UnsupportedAudioFileException {
    File source = new File(sourceFile);
    ais = AudioSystem.getAudioInputStream(source);
    playAudioFormat = ais.getFormat();
    // Capture format: 44100 Hz, 16-bit samples, 1 channel, signed, little-endian.
    recordAudioFormat = new AudioFormat(44100f, 16, 1, true, false);
}

//Enumerate the mixers
//Enumerate the mixers
/**
 * Prints the name of every mixer installed on the system, so the user can
 * pick the indices used by {@code initDataLines()}.
 *
 * Note: the original blanket {@code catch (Exception)} was removed —
 * {@code AudioSystem.getMixerInfo()} declares no checked exceptions, so the
 * catch could only hide programming errors.
 */
public void enumerate() {
    System.out.println("Available mixers:");
    for (Mixer.Info info : AudioSystem.getMixerInfo()) {
        System.out.println(info.getName());
    }
}

//Init datalines
//Init datalines
/**
 * Obtains the capture (TargetDataLine) and playback (SourceDataLine) lines.
 *
 * The original code indexed mixers 5 and 3 unconditionally, which throws
 * ArrayIndexOutOfBoundsException (or an unsupported-line error) on any other
 * machine. The preferred indices are still tried first, but the code now
 * falls back to the system default line when the index is out of range or
 * the mixer does not support the requested format.
 *
 * @throws LineUnavailableException if no matching line can be obtained
 */
public void initDataLines() throws LineUnavailableException {
    Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();

    DataLine.Info targetDataLineInfo = new DataLine.Info(TargetDataLine.class, recordAudioFormat);
    DataLine.Info sourceDataLineInfo = new DataLine.Info(SourceDataLine.class, playAudioFormat);

    // Preferred mixer indices (machine-specific; see enumerate()).
    targetDataLine = (TargetDataLine) obtainLine(mixerInfo, 5, targetDataLineInfo);
    sourceDataLine = (SourceDataLine) obtainLine(mixerInfo, 3, sourceDataLineInfo);
}

//Try the preferred mixer index, fall back to the system default line.
private static Line obtainLine(Mixer.Info[] mixers, int preferredIndex, DataLine.Info lineInfo)
        throws LineUnavailableException {
    if (preferredIndex >= 0 && preferredIndex < mixers.length) {
        Mixer mixer = AudioSystem.getMixer(mixers[preferredIndex]);
        if (mixer.isLineSupported(lineInfo)) {
            return mixer.getLine(lineInfo);
        }
    }
    return AudioSystem.getLine(lineInfo);
}

/**
 * Opens the capture line and starts a background thread that drains it.
 *
 * Fixes over the original: the capture line is stopped and closed when the
 * read loop ends (it was leaked before), and the catch is narrowed from
 * {@code Exception} to {@code IOException} — the only checked exception
 * {@code stream.read} can throw.
 *
 * @throws LineUnavailableException if the capture line cannot be opened
 */
public void startRecord() throws LineUnavailableException {
    final AudioInputStream stream = new AudioInputStream(targetDataLine);

    targetDataLine.open(recordAudioFormat);

    final byte[] currentByteBuffer = new byte[512];

    Runnable readAudioStream = new Runnable() {
        @Override
        public void run() {
            try {
                targetDataLine.start();
                while (stream.read(currentByteBuffer) != -1) {
                    //Do something with the captured samples
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Release the capture line; it was never closed before.
                targetDataLine.stop();
                targetDataLine.close();
            }
        }
    };
    new Thread(readAudioStream).start();
}

/**
 * Opens the playback line and starts a background thread that streams the
 * audio file into it.
 *
 * Fixes over the original: {@code sourceDataLine} is now closed in a
 * {@code finally} block (before, an IOException mid-stream leaked the line),
 * the input stream {@code ais} is closed when playback ends, and the catch
 * is narrowed to {@code IOException}.
 *
 * @throws LineUnavailableException if the playback line cannot be opened
 */
public void startPlay() throws LineUnavailableException {
    sourceDataLine.open(playAudioFormat);
    sourceDataLine.start();

    Runnable playAudio = new Runnable() {
        @Override
        public void run() {
            byte[] abData = new byte[8192];
            try {
                int nBytesRead;
                while ((nBytesRead = ais.read(abData, 0, abData.length)) != -1) {
                    sourceDataLine.write(abData, 0, nBytesRead);
                }
                // Let the line play out its internal buffer before closing.
                sourceDataLine.drain();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                sourceDataLine.close();
                try {
                    ais.close();
                } catch (IOException ignored) {
                    // best-effort close of the source stream
                }
            }
        }
    };
    new Thread(playAudio).start();
}

/**
 * Starts a thread that prints, every 20 ms, the microsecond position of both
 * lines and their difference (capture minus playback) — the data used to
 * track synchronization.
 *
 * Fixes over the original: {@code InterruptedException} was swallowed and
 * the {@code while (true)} loop could never be stopped. The loop now checks
 * the interrupt flag and restores it when the sleep is interrupted, so the
 * thread terminates cleanly on {@code thread.interrupt()}.
 */
public void printStats() {
    Runnable stats = new Runnable() {

        @Override
        public void run() {
            while (!Thread.currentThread().isInterrupted()) {
                long targetDataLinePosition = targetDataLine.getMicrosecondPosition();
                long sourceDataLinePosition = sourceDataLine.getMicrosecondPosition();
                long delay = targetDataLinePosition - sourceDataLinePosition;
                System.out.println(targetDataLinePosition + "\t" + sourceDataLinePosition + "\t" + delay);

                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    // Restore the interrupt status so the loop condition sees it.
                    Thread.currentThread().interrupt();
                }
            }
        }
    };

    new Thread(stats).start();
}

/**
 * Test entry point: enumerates mixers, initializes both lines, starts
 * capture then playback, and launches the stats-printing thread.
 *
 * NOTE(review): the source path is machine-specific — adjust before running.
 */
public static void main(String[] args) {
    final String sourceFile =
            "C:\\dev\\intellij-ws\\guitar-challenge\\src\\main\\resources\\com\\ouestdev\\guitarchallenge\\al_adagi.mid";
    try {
        AudioSynchro synchro = new AudioSynchro(sourceFile);
        synchro.enumerate();
        synchro.initDataLines();
        synchro.startRecord();
        synchro.startPlay();
        synchro.printStats();
    } catch (IOException | LineUnavailableException | UnsupportedAudioFileException e) {
        e.printStackTrace();
    }
}
/**
 * Single-thread variant: reads capture data and feeds playback data from the
 * same loop, printing both line positions plus byte totals on every pass.
 *
 * Bug fixed: the original loop tested {@code playCount != -1} only at the
 * top, then unconditionally called
 * {@code sourceDataLine.write(playByteBuffer, 0, playCount)} — so when
 * {@code ais.read} returned -1 at end of stream, write() was invoked with a
 * negative length and threw. The loop now breaks before writing. Also fixed:
 * -1 is no longer added to {@code totalReccordCount}, and both lines are
 * drained/closed when the loop ends.
 *
 * @throws LineUnavailableException if either line cannot be opened
 * @throws IOException              declared for interface compatibility
 */
public void startAll() throws LineUnavailableException, IOException {
    final AudioInputStream stream = new AudioInputStream(targetDataLine);

    targetDataLine.open(recordAudioFormat);

    final byte[] reccordByteBuffer = new byte[512];
    final byte[] playByteBuffer = new byte[1024];

    sourceDataLine.open(playAudioFormat);
    targetDataLine.start();
    sourceDataLine.start();

    Runnable audio = new Runnable() {
        @Override
        public void run() {
            int totalReccordCount = 0;
            int totalPlayCount = 0;
            int totalWritePlayCount = 0;
            try {
                while (true) {
                    int reccordCount = stream.read(reccordByteBuffer);
                    if (reccordCount > 0) {
                        totalReccordCount += reccordCount;
                    }
                    long targetDataLinePosition = targetDataLine.getLongFramePosition();

                    int playCount = ais.read(playByteBuffer, 0, playByteBuffer.length);
                    if (playCount == -1) {
                        break; // end of playback stream; never pass -1 to write()
                    }
                    int playWriteCount = sourceDataLine.write(playByteBuffer, 0, playCount);
                    totalPlayCount += playCount;
                    totalWritePlayCount += playWriteCount;
                    long sourceDataLinePosition = sourceDataLine.getLongFramePosition();

                    long delay = targetDataLinePosition - sourceDataLinePosition;
                    System.out.println(targetDataLinePosition + "\t" + sourceDataLinePosition + "\t" + delay + "\t" + totalReccordCount + "\t" + totalPlayCount + "\t" + totalWritePlayCount + "\t" + System.nanoTime());
                }
                sourceDataLine.drain();
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                // Release both lines; the original leaked them.
                sourceDataLine.close();
                targetDataLine.stop();
                targetDataLine.close();
            }
        }
    };

    new Thread(audio).start();
}