java 使用 AudioWaveformCreator 绘制 2（多个）波形
这个问题是关于曾在一个线程中用于回答的代码。我使用的是 Nicholas DiPiazza 发布的代码，以及后来 Andrew Thompson 发布的变体。我在这段代码中添加了第二个 AudioWaveformCreator，但两个 AWC 的结果完全相同，我不明白为什么。我想做的是在一个 JOptionPane 中显示两个不同的波形（来自不同的文件）。
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.font.FontRenderContext;
import java.awt.font.LineBreakMeasurer;
import java.awt.font.TextAttribute;
import java.awt.font.TextLayout;
import java.awt.geom.Line2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.text.AttributedCharacterIterator;
import java.text.AttributedString;
import java.util.Vector;
import javax.imageio.ImageIO;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;
import javax.sound.sampled.UnsupportedAudioFileException;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
public class AudioWaveformCreator2 {
AudioInputStream audioInputStream;
Vector<Line2D.Double> lines = new Vector<Line2D.Double>();
String errStr;
Capture capture = new Capture();
double duration, seconds;
//File file;
String fileName = "out.png";
SamplingGraph samplingGraph;
String waveformFilename;
Color imageBackgroundColor = new Color(20,20,20);
Object result = null;
public AudioWaveformCreator2(File url, String waveformFilename) throws Exception {
if (url != null) {
try {
errStr = null;
audioInputStream = AudioSystem.getAudioInputStream(url);
long milliseconds = (long)((audioInputStream.getFrameLength() * 1000) / audioInputStream.getFormat().getFrameRate());
duration = milliseconds / 1000.0;
samplingGraph = new SamplingGraph();
samplingGraph.createWaveForm(null);
} catch (Exception ex) {
reportStatus(ex.toString());
throw ex;
}
} else {
reportStatus("Audio file required.");
}
}
/**
* Render a WaveForm.
*/
class SamplingGraph implements Runnable {
private Thread thread;
private Font font10 = new Font("serif", Font.PLAIN, 10);
private Font font12 = new Font("serif", Font.PLAIN, 12);
Color jfcBlue = new Color(000, 000, 255);
Color pink = new Color(255, 175, 175);
public SamplingGraph() {
}
public void createWaveForm(byte[] audioBytes) {
lines.removeAllElements(); // clear the old vector
AudioFormat format = audioInputStream.getFormat();
if (audioBytes == null) {
try {
audioBytes = new byte[
(int) (audioInputStream.getFrameLength()
* format.getFrameSize())];
audioInputStream.read(audioBytes);
} catch (Exception ex) {
reportStatus(ex.getMessage());
return;
}
}
int w = 500;
int h = 200;
int[] audioData = null;
if (format.getSampleSizeInBits() == 16) {
int nlengthInSamples = audioBytes.length / 2;
audioData = new int[nlengthInSamples];
if (format.isBigEndian()) {
for (int i = 0; i < nlengthInSamples; i++) {
/* First byte is MSB (high order) */
int MSB = (int) audioBytes[2*i];
/* Second byte is LSB (low order) */
int LSB = (int) audioBytes[2*i+1];
audioData[i] = MSB << 8 | (255 & LSB);
}
} else {
for (int i = 0; i < nlengthInSamples; i++) {
/* First byte is LSB (low order) */
int LSB = (int) audioBytes[2*i];
/* Second byte is MSB (high order) */
int MSB = (int) audioBytes[2*i+1];
audioData[i] = MSB << 8 | (255 & LSB);
}
}
} else if (format.getSampleSizeInBits() == 8) {
int nlengthInSamples = audioBytes.length;
audioData = new int[nlengthInSamples];
if (format.getEncoding().toString().startsWith("PCM_SIGN")) {
for (int i = 0; i < audioBytes.length; i++) {
audioData[i] = audioBytes[i];
}
} else {
for (int i = 0; i < audioBytes.length; i++) {
audioData[i] = audioBytes[i] - 128;
}
}
}
int frames_per_pixel = audioBytes.length / format.getFrameSize()/w;
byte my_byte = 0;
double y_last = 0;
int numChannels = format.getChannels();
for (double x = 0; x < w && audioData != null; x++) {
int idx = (int) (frames_per_pixel * numChannels * x);
if (format.getSampleSizeInBits() == 8) {
my_byte = (byte) audioData[idx];
} else {
my_byte = (byte) (128 * audioData[idx] / 32768 );
}
double y_new = (double) (h * (128 - my_byte) / 256);
lines.add(new Line2D.Double(x, y_last, x, y_new));
y_last = y_new;
}
saveToFile();
}
public void saveToFile() {
int w = 500;
int h = 200;
int INFOPAD = 0;
BufferedImage bufferedImage = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
Graphics2D g2 = bufferedImage.createGraphics();
createSampleOnGraphicsContext(w, h, INFOPAD, g2);
g2.dispose();
// Write generated image to a file
try {
// Save as PNG
File file = new File(fileName);
System.out.println(file.getAbsolutePath());
ImageIO.write(bufferedImage, "png", file);
result = new ImageIcon(fileName);
} catch (IOException e) {
}
}
private void createSampleOnGraphicsContext(int w, int h, int INFOPAD, Graphics2D g2) {
g2.setBackground(imageBackgroundColor);
g2.clearRect(0, 0, w, h);
g2.setColor(Color.white);
g2.fillRect(0, h-INFOPAD, w, INFOPAD);
if (errStr != null) {
g2.setColor(jfcBlue);
g2.setFont(new Font("serif", Font.BOLD, 18));
g2.drawString("ERROR", 5, 20);
AttributedString as = new AttributedString(errStr);
as.addAttribute(TextAttribute.FONT, font12, 0, errStr.length());
AttributedCharacterIterator aci = as.getIterator();
FontRenderContext frc = g2.getFontRenderContext();
LineBreakMeasurer lbm = new LineBreakMeasurer(aci, frc);
float x = 5, y = 25;
lbm.setPosition(0);
while (lbm.getPosition() < errStr.length()) {
TextLayout tl = lbm.nextLayout(w-x-5);
if (!tl.isLeftToRight()) {
x = w - tl.getAdvance();
}
tl.draw(g2, x, y += tl.getAscent());
y += tl.getDescent() + tl.getLeading();
}
} else if (capture.thread != null) {
g2.setColor(Color.black);
g2.setFont(font12);
//g2.drawString("Length: " + String.valueOf(seconds), 3, h-4);
} else {
g2.setColor(Color.black);
g2.setFont(font12);
//g2.drawString("File: " + fileName + " Length: " + String.valueOf(duration) + " Position: " + String.valueOf(seconds), 3, h-4);
if (audioInputStream != null) {
// .. render sampling graph ..
g2.setColor(jfcBlue);
for (int i = 1; i < lines.size(); i++) {
g2.draw((Line2D) lines.get(i));
}
// .. draw current position ..
if (seconds != 0) {
double loc = seconds/duration*w;
g2.setColor(pink);
g2.setStroke(new BasicStroke(3));
g2.draw(new Line2D.Double(loc, 0, loc, h-INFOPAD-2));
}
}
}
}
public void start() {
thread = new Thread(this);
thread.setName("SamplingGraph");
thread.start();
seconds = 0;
}
public void stop() {
if (thread != null) {
thread.interrupt();
}
thread = null;
}
public void run() {
seconds = 0;
while (thread != null) {
if ( (capture.line != null) && (capture.line.isActive()) ) {
long milliseconds = (long)(capture.line.getMicrosecondPosition() / 1000);
seconds = milliseconds / 1000.0;
}
try { thread.sleep(100); } catch (Exception e) { break; }
while ((capture.line != null && !capture.line.isActive()))
{
try { thread.sleep(10); } catch (Exception e) { break; }
}
}
seconds = 0;
}
} // End class SamplingGraph
/**
* Reads data from the input channel and writes to the output stream
*/
class Capture implements Runnable {
TargetDataLine line;
Thread thread;
public void start() {
errStr = null;
thread = new Thread(this);
thread.setName("Capture");
thread.start();
}
public void stop() {
thread = null;
}
private void shutDown(String message) {
if ((errStr = message) != null && thread != null) {
thread = null;
samplingGraph.stop();
System.err.println(errStr);
}
}
public void run() {
duration = 0;
audioInputStream = null;
// define the required attributes for our line,
// and make sure a compatible line is supported.
AudioFormat format = audioInputStream.getFormat();
DataLine.Info info = new DataLine.Info(TargetDataLine.class,
format);
if (!AudioSystem.isLineSupported(info)) {
shutDown("Line matching " + info + " not supported.");
return;
}
// get and open the target data line for capture.
try {
line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format, line.getBufferSize());
} catch (LineUnavailableException ex) {
shutDown("Unable to open the line: " + ex);
return;
} catch (SecurityException ex) {
shutDown(ex.toString());
//JavaSound.showInfoDialog();
return;
} catch (Exception ex) {
shutDown(ex.toString());
return;
}
// play back the captured audio data
ByteArrayOutputStream out = new ByteArrayOutputStream();
int frameSizeInBytes = format.getFrameSize();
int bufferLengthInFrames = line.getBufferSize() / 8;
int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes;
byte[] data = new byte[bufferLengthInBytes];
int numBytesRead;
line.start();
while (thread != null) {
if((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) {
break;
}
out.write(data, 0, numBytesRead);
}
// we reached the end of the stream. stop and close the line.
line.stop();
line.close();
line = null;
// stop and close the output stream
try {
out.flush();
out.close();
} catch (IOException ex) {
ex.printStackTrace();
}
// load bytes into the audio input stream for playback
byte audioBytes[] = out.toByteArray();
ByteArrayInputStream bais = new ByteArrayInputStream(audioBytes);
audioInputStream = new AudioInputStream(bais, format, audioBytes.length / frameSizeInBytes);
long milliseconds = (long)((audioInputStream.getFrameLength() * 1000) / format.getFrameRate());
duration = milliseconds / 1000.0;
try {
audioInputStream.reset();
} catch (Exception ex) {
ex.printStackTrace();
return;
}
samplingGraph.createWaveForm(audioBytes);
}
} // End class Capture
public static void main(String [] args) throws Exception {
AudioWaveformCreator2 awc = new AudioWaveformCreator2(new File("E:/PRODI ILKOM/Semester VIII/TA/wave/cars062.wav"), "cars062.png");
AudioWaveformCreator2 awc2 = new AudioWaveformCreator2(new File("E:/PRODI ILKOM/Semester VIII/TA/wave/plain wav.wav"), "plain wav.png");
Object[] fields = {
"Plain", awc.result
,"Stego", awc2.result
};
JOptionPane.showConfirmDialog(null, fields, "Wave Form", JOptionPane.PLAIN_MESSAGE);
}
private void reportStatus(String msg) {
if ((errStr = msg) != null) {
System.out.println(errStr);
}
}
private static void printUsage() {
System.out.println("AudioWaveformCreator usage: java AudioWaveformCreator.class [path to audio file for generating the image] [path to save waveform image to]");
}
}
import java.awt.BasicStroke;
导入java.awt.Color;
导入java.awt.Font;
导入java.awt.Graphics2D;
导入java.awt.font.FontRenderContext;
导入java.awt.font.LineBreakMeasurer;
导入java.awt.font.TextAttribute;
导入java.awt.font.TextLayout;
导入java.awt.geom.Line2D;
导入java.awt.image.BufferedImage;
导入java.io.ByteArrayInputStream;
导入java.io.ByteArrayOutputStream;
导入java.io.File;
导入java.io.IOException;
导入java.net.URL;
导入java.text.AttributedCharacterIterator;
导入java.text.AttributedString;
导入java.util.Vector;
导入javax.imageio.ImageIO;
导入javax.sound.sampled.AudioFormat;
导入javax.sound.sampled.AudioInputStream;
导入javax.sound.sampled.AudioSystem;
导入javax.sound.sampled.DataLine;
导入javax.sound.sampled.LineUnavailableException;
导入javax.sound.sampled.TargetDataLine;
导入javax.sound.sampled.UnsupportedAudioFileException;
导入javax.swing.ImageIcon;
导入javax.swing.JLabel;
导入javax.swing.JOptionPane;
公共类音频波形发生器2{
音频输入流音频输入流;
向量线=新向量();
字符串errStr;
捕获=新捕获();
双倍持续时间,秒;
//文件;
字符串fileName=“out.png”;
采样仪采样仪;
字符串波形文件名;
彩色图像背景颜色=新颜色(20,20,20);
对象结果=空;
公共AudioWaveformCreator2(文件url,字符串waveformFilename)引发异常{
如果(url!=null){
试一试{
errStr=null;
audioInputStream=AudioSystem.getAudioInputStream(url);
长毫秒=(长)((audioInputStream.getFrameLength()*1000)/audioInputStream.getFormat().getFrameRate());
持续时间=毫秒/1000.0;
samplingGraph=新的samplingGraph();
samplingGraph.createWaveForm(空);
}捕获(例外情况除外){
报告状态(例如toString());
掷骰子;
}
}否则{
报告状态(“需要音频文件”);
}
}
/**
*渲染波形。
*/
类SamplingGraph实现了Runnable{
私有线程;
私有字体font10=新字体(“衬线”,Font.PLAIN,10);
私有字体font12=新字体(“衬线”,Font.PLAIN,12);
颜色jfcBlue=新颜色(000000255);
粉色=新颜色(255、175、175);
公共采样图(){
}
公共波形(字节[]音频字节){
lines.removeAllElements();//清除旧向量
AudioFormat格式=audioInputStream.getFormat();
if(audioBytes==null){
试一试{
audioBytes=新字节[
(int)(audioInputStream.getFrameLength()
*format.getFrameSize())];
audioInputStream.read(音频字节);
}捕获(例外情况除外){
reportStatus(例如getMessage());
返回;
}
}
int w=500;
int h=200;
int[]audioData=null;
if(format.getSampleSizeInBits()==16){
int nlengthInSamples=audioBytes.length/2;
audioData=新整数[nlengthInSamples];
if(格式为.isBigEndian()){
创建 AudioWaveformCreator2 实例时，随后会执行 SamplingGraph#saveToFile 方法。该方法把刚生成的波形保存到文件 fileName 中，而 fileName 是一个 AudioWaveformCreator2 字段，用固定值 "out.png" 初始化。因此，当创建多个 AudioWaveformCreator2 实例时，两个实例都把数据写入同一个文件 out.png，第二个实例会覆盖第一个实例写出的文件。

实例保存文件之后，代码用 ImageIcon(String filename) 构造函数创建一个新的 ImageIcon。查看 ImageIcon 的源代码可以发现，该构造函数随后会调用 Toolkit.getDefaultToolkit().getImage(filename) 方法。该方法的文档说明其中存在一种缓存机制：对相同文件名的多次请求会返回同一个图像（参见文档）：

    返回一个从指定文件获取像素数据的图像，该文件的格式可以是 GIF、JPEG 或 PNG。
    使用相同文件名的多次请求会被解析为同一个返回的 Image 对象。
    由于实现这种 Image 对象共享所需的机制可能会无限期地持有已不再使用的图像，
    因此鼓励开发者在可用时改用 createImage 变体。
    如果指定文件的内容发生了变化，通过该方法返回的 Image 对象也可能不会反映这些变化。
public AudioWaveformCreator2(File url, String waveformFilename, String fileName) throws Exception {
if (url != null) {
try {
this.fileName = fileName;
...
AudioWaveformCreator2 awc = new AudioWaveformCreator2(new File("E:/PRODI ILKOM/Semester VIII/TA/wave/cars062.wav"), "cars062.png", "out.png");
AudioWaveformCreator2 awc2 = new AudioWaveformCreator2(new File("E:/PRODI ILKOM/Semester VIII/TA/wave/plain wav.wav"), "plain wav.png", "out2.png");
...
result = new ImageIcon(fileName);
result = new ImageIcon(Toolkit.getDefaultToolkit().createImage(fileName));
result = new ImageIcon(fileName);
result = new ImageIcon(bufferedImage);