Java 从android使用外部Jar调用web服务方法时出错
我试图从 Android 设备调用一些 Java 方法,这些方法通过 Axis2 Web 服务访问。下面是我完整的 Java 类,其中包含两个供 Android 设备调用的方法。调用时在 run(Thread.java:662) 处抛出异常,原因是:java.lang.NoClassDefFoundError: edu/cmu/sphinx/util/props/ConfigurationManager。该类位于我的外部 jar 库中,我在 recognize_wave(String wavePath) 方法中使用它。我已经确认该 jar 文件中包含 edu/cmu/sphinx/util/props/ConfigurationManager,但仍然报这个错误。JAVA 方法:
package edu.cmu.sphinx.demo.transcriber;
import edu.cmu.sphinx.frontend.util.AudioFileDataSource;
import edu.cmu.sphinx.recognizer.Recognizer;
import edu.cmu.sphinx.result.Result;
//import edu.cmu.sphinx.util.props.ConfigurationManager;
import edu.cmu.sphinx.util.props.ConfigurationManager;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import javax.sound.sampled.UnsupportedAudioFileException;
/** A simple example that shows how to transcribe a continuous audio file that has multiple utterances in it. */
/**
 * Transcribes a continuous audio file that may contain multiple utterances,
 * using a CMU Sphinx recognizer configured via {@code config.xml} on the
 * classpath (looked up relative to this class).
 */
public class Transcriber {

    /**
     * Recognizes speech in the audio file at the given URL and returns the
     * transcription of every utterance in it.
     *
     * @param wavePath URL of the audio file (e.g. {@code file:///D:/Sound/x.wav})
     * @return all best hypotheses joined with single spaces; empty string if
     *         the recognizer produced no result
     * @throws MalformedURLException if {@code wavePath} is not a valid URL
     */
    public static String recognize_wave(String wavePath) throws MalformedURLException {
        URL audioURL = new URL(wavePath);
        URL configURL = Transcriber.class.getResource("config.xml");
        ConfigurationManager cm = new ConfigurationManager(configURL);
        Recognizer recognizer = (Recognizer) cm.lookup("recognizer");
        // Allocate the resources (models etc.) the recognizer needs.
        recognizer.allocate();
        // Point the configured audio source at the file to transcribe.
        AudioFileDataSource dataSource = (AudioFileDataSource) cm.lookup("audioFileDataSource");
        dataSource.setAudioFile(audioURL, null);
        // Decode until the recognizer returns null (end of the audio file).
        // BUG FIX: the original overwrote resultText on each iteration and so
        // returned only the FINAL utterance; we now accumulate all of them.
        StringBuilder resultText = new StringBuilder();
        Result result;
        while ((result = recognizer.recognize()) != null) {
            String best = result.getBestResultNoFiller();
            System.out.println(best);
            if (resultText.length() > 0) {
                resultText.append(' ');
            }
            resultText.append(best);
        }
        return resultText.toString();
    }

    /**
     * Writes the uploaded WAV bytes to {@code path} on the server and runs
     * speech recognition on the resulting file.
     *
     * @param wavbite raw bytes of the WAV file sent by the client
     * @param path    destination file path on the server
     * @return the recognized text, or the string {@code "null"} on failure
     * @throws IOException declared for API compatibility with existing callers
     */
    public String get_wav_byte(byte[] wavbite, String path) throws IOException {
        String result1 = "null";
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(new File(path));
            out.write(wavbite, 0, wavbite.length);
        } catch (IOException e) {
            System.out.println("IOException : " + e);
        } finally {
            // BUG FIX: the original leaked the stream when write() threw;
            // always close it so the file handle is released.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // best effort: nothing useful to do on close failure
                }
            }
        }
        try {
            result1 = recognize_wave(path);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        return result1;
    }
}
我用KSOAP2调用该方法的ANDROID代码如下:
package com.varma.samples.audiorecorder;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.ksoap2.SoapEnvelope;
import org.ksoap2.SoapFault;
import org.ksoap2.serialization.MarshalBase64;
import org.ksoap2.serialization.SoapObject;
import org.ksoap2.serialization.SoapSerializationEnvelope;
import org.ksoap2.transport.HttpTransportSE;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Base64;
import android.util.Log;
import android.view.View;
import android.view.ViewDebug.FlagToString;
import android.widget.Button;
import android.widget.TextView;
/**
 * Records 16 kHz, mono, 16-bit PCM audio from the microphone, wraps the raw
 * capture in a WAV container, and uploads the bytes via ksoap2 to an Axis2
 * web service ({@code get_wav_byte}) for speech recognition.
 */
public class RecorderActivity extends Activity {
    private static final int RECORDER_BPP = 16; // bits per sample
    private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
    private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
    private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
    private static String AUDIO_WAV_FILE = ""; // last WAV produced by copyWaveFile()
    private static final int RECORDER_SAMPLERATE = 16000;
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;

    private AudioRecord recorder = null;
    private int bufferSize = 0;
    private Thread recordingThread = null;
    // BUG FIX: volatile so the recording thread reliably observes the UI
    // thread's stop request; without it the write loop may never terminate.
    private volatile boolean isRecording = false;

    @SuppressLint("NewApi")
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        setButtonHandlers();
        enableButtons(false);
        bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
                RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
    }

    /** Wires the start/stop buttons to the shared click listener. */
    private void setButtonHandlers() {
        ((Button) findViewById(R.id.btnStart)).setOnClickListener(btnClick);
        ((Button) findViewById(R.id.btnStop)).setOnClickListener(btnClick);
    }

    private void enableButton(int id, boolean isEnable) {
        ((Button) findViewById(id)).setEnabled(isEnable);
    }

    /** Start is enabled while idle; stop is enabled while recording. */
    private void enableButtons(boolean isRecording) {
        enableButton(R.id.btnStart, !isRecording);
        enableButton(R.id.btnStop, isRecording);
    }

    /** @return a unique timestamped .wav path under external-storage/AudioRecorder. */
    private String getFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);
        if (!file.exists()) {
            file.mkdirs();
        }
        return (file.getAbsolutePath() + "/" + System.currentTimeMillis()
                + AUDIO_RECORDER_FILE_EXT_WAV);
    }

    /** @return the temp raw-PCM path, deleting any stale file from a prior run. */
    private String getTempFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);
        if (!file.exists()) {
            file.mkdirs();
        }
        // BUG FIX: the stale-file check must look inside the recorder folder;
        // the original checked external-storage root, which never matched the
        // path this method actually returns.
        File tempFile = new File(file, AUDIO_RECORDER_TEMP_FILE);
        if (tempFile.exists()) {
            tempFile.delete();
        }
        return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
    }

    /** Begins capturing PCM from the microphone on a background thread. */
    @SuppressLint({ "NewApi", "NewApi" })
    private void startRecording() {
        recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize);
        recorder.startRecording();
        isRecording = true;
        recordingThread = new Thread(new Runnable() {
            @Override
            public void run() {
                writeAudioDataToFile();
            }
        }, "AudioRecorder Thread");
        recordingThread.start();
    }

    /** Streams raw PCM from the AudioRecord into the temp file until stopped. */
    @SuppressLint({ "NewApi", "NewApi", "NewApi" })
    private void writeAudioDataToFile() {
        byte data[] = new byte[bufferSize];
        String filename = getTempFilename();
        FileOutputStream os = null;
        try {
            os = new FileOutputStream(filename);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        int read = 0;
        if (null != os) {
            while (isRecording) {
                read = recorder.read(data, 0, bufferSize);
                if (AudioRecord.ERROR_INVALID_OPERATION != read && read > 0) {
                    try {
                        // BUG FIX: write only the bytes actually read; the
                        // original wrote the whole buffer even on short reads,
                        // appending stale bytes to the capture.
                        os.write(data, 0, read);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            try {
                os.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Stops capture, releases the recorder, and produces the final WAV file. */
    @SuppressLint({ "NewApi", "NewApi" })
    private void stopRecording() {
        if (null != recorder) {
            isRecording = false;
            recorder.stop();
            recorder.release();
            recorder = null;
            recordingThread = null;
        }
        copyWaveFile(getTempFilename(), getFilename());
        deleteTempFile();
    }

    private void deleteTempFile() {
        File file = new File(getTempFilename());
        file.delete();
    }

    /**
     * Prepends a 44-byte WAV header to the raw PCM capture and records the
     * output path in AUDIO_WAV_FILE for the subsequent upload.
     */
    @SuppressLint("NewApi")
    private void copyWaveFile(String inFilename, String outFilename) {
        FileInputStream in = null;
        FileOutputStream out = null;
        long totalAudioLen = 0;
        long totalDataLen = totalAudioLen + 36;
        long longSampleRate = 16000;
        int channels = 1;
        long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
        byte[] data = new byte[bufferSize];
        try {
            in = new FileInputStream(inFilename);
            out = new FileOutputStream(outFilename);
            totalAudioLen = in.getChannel().size();
            totalDataLen = totalAudioLen + 36;
            AppLog.logString("File size: " + totalDataLen);
            WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
                    longSampleRate, channels, byteRate);
            // BUG FIX: copy only the bytes actually read; the original wrote
            // the full buffer every iteration, corrupting the final chunk.
            int read;
            while ((read = in.read(data)) != -1) {
                out.write(data, 0, read);
            }
            AUDIO_WAV_FILE = outFilename;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: close both streams in finally so the descriptors are
            // released even when an exception interrupts the copy.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best effort on cleanup
                }
            }
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // best effort on cleanup
                }
            }
        }
    }

    /**
     * Reads the whole file into a byte array.
     *
     * @param file file to read
     * @return the file's contents
     * @throws IOException if reading fails
     */
    public static byte[] getBytesFromFile(File file) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(file));
        try {
            int read;
            byte[] buff = new byte[1024];
            while ((read = in.read(buff)) > 0) {
                out.write(buff, 0, read);
            }
        } finally {
            // BUG FIX: the original never closed the stream (descriptor leak).
            in.close();
        }
        out.flush();
        return out.toByteArray();
    }

    /**
     * Writes the canonical 44-byte RIFF/WAVE header for PCM data.
     * All multi-byte fields are little-endian.
     */
    private void WriteWaveFileHeader(
            FileOutputStream out, long totalAudioLen,
            long totalDataLen, long longSampleRate, int channels,
            long byteRate) throws IOException {
        byte[] header = new byte[44];
        header[0] = 'R'; // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f'; // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16; // size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1; // audio format = 1 (PCM)
        header[21] = 0;
        header[22] = (byte) channels;
        header[23] = 0;
        header[24] = (byte) (longSampleRate & 0xff);
        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        // BUG FIX: block align = channels * bitsPerSample / 8 (2 for 16-bit
        // mono); the original hard-coded the stereo value 2*16/8 = 4.
        header[32] = (byte) (channels * RECORDER_BPP / 8);
        header[33] = 0;
        header[34] = RECORDER_BPP; // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        out.write(header, 0, 44);
    }

    private View.OnClickListener btnClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.btnStart: {
                    AppLog.logString("Start Recording");
                    enableButtons(true);
                    startRecording();
                    break;
                }
                case R.id.btnStop: {
                    // BUG FIX: this case previously logged "Start Recording".
                    AppLog.logString("Stop Recording");
                    enableButtons(false);
                    stopRecording();
                    File sourceForByte = new File(AUDIO_WAV_FILE);
                    byte[] bytes = new byte[0];
                    try {
                        bytes = getBytesFromFile(sourceForByte);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    final byte[] wavBytes = bytes;
                    // BUG FIX: run the SOAP upload off the UI thread; a
                    // synchronous network call here throws
                    // NetworkOnMainThreadException on Android 3.0+.
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            callRecognitionService(wavBytes);
                        }
                    }, "SoapUpload Thread").start();
                    break;
                }
            }
        }
    };

    /**
     * Uploads the WAV bytes to the Axis2 VoiceService via ksoap2 and shows
     * the recognized text (or the error) in the result TextView. Runs on a
     * worker thread; UI updates are posted back with runOnUiThread().
     */
    private void callRecognitionService(byte[] wavBytes) {
        final String NAMESPACE = "http://test.com";
        final String METHOD_NAME = "get_wav_byte"; // web-service method name
        final String SOAP_ACTION = NAMESPACE + METHOD_NAME;
        final String URL = "http://192.168.3.106:8080/axis2/services/VoiceService?wsdl";
        try {
            SoapObject request = new SoapObject(NAMESPACE, METHOD_NAME);
            request.addProperty("wavbite", wavBytes);
            request.addProperty("path", "D:\\sound\\latest_recognizer.wav");
            SoapSerializationEnvelope envelope = new SoapSerializationEnvelope(
                    SoapEnvelope.VER11);
            new MarshalBase64().register(envelope); // serialize byte[] as base64
            envelope.encodingStyle = SoapEnvelope.ENC;
            // BUG FIX: the target is an Axis2 (Java) service, not .NET;
            // dotNet=true alters parameter encoding and breaks Axis2 endpoints.
            envelope.dotNet = false;
            envelope.setOutputSoapObject(request);
            HttpTransportSE androidHttpTransport = new HttpTransportSE(URL);
            androidHttpTransport.call(SOAP_ACTION, envelope);
            final Object result = envelope.getResponse();
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ((TextView) findViewById(R.id.gettext1)).setText("NUMBER IS :-> "
                            + result.toString());
                }
            });
        } catch (final Exception E) {
            E.printStackTrace();
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ((TextView) findViewById(R.id.gettext1)).setText("ERROR:"
                            + E.getClass().getName() + ":" + E.getMessage());
                }
            });
        }
    }
}
(注:此处原有一份上面 RecorderActivity 代码的机器翻译副本,标识符和关键字均被翻译成中文,已无法编译且在中途截断;完整、可用的代码请见上文 com.varma.samples.audiorecorder.RecorderActivity 类。)