Android: NullPointerException


This is the error I am getting in Logcat:

04-23 08:00:07.524: E/AndroidRuntime(1384): FATAL EXCEPTION: main 
04-23 08:00:07.524: E/AndroidRuntime(1384): java.lang.RuntimeException: Unable to start activity ComponentInfo{com.datumdroid.android.ocr.simple/com.datumdroid.android.ocr.simple.SimpleAndroidOCRActivity}: java.lang.NullPointerException
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2180)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2230)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread.access$600(ActivityThread.java:141)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1234)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.os.Handler.dispatchMessage(Handler.java:99)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.os.Looper.loop(Looper.java:137)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread.main(ActivityThread.java:5039)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at java.lang.reflect.Method.invokeNative(Native Method)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at java.lang.reflect.Method.invoke(Method.java:511)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:793)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:560)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at dalvik.system.NativeStart.main(Native Method)
04-23 08:00:07.524: E/AndroidRuntime(1384): Caused by: java.lang.NullPointerException
04-23 08:00:07.524: E/AndroidRuntime(1384):     at com.datumdroid.android.ocr.simple.SimpleAndroidOCRActivity.onCreate(SimpleAndroidOCRActivity.java:68)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.Activity.performCreate(Activity.java:5104)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1080)
04-23 08:00:07.524: E/AndroidRuntime(1384):     at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2144)
Splash screen activity

public class SplashAct extends Activity {
    /** Called when the activity is first created. */
    private final Handler mHandler = new Handler();
    private static final int duration = 1500;




    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.splash);

        mHandler.postDelayed(mPendingLauncherRunnable,
                SplashAct.duration);
    }

    @Override
    protected void onPause() {
        super.onPause();
        mHandler.removeCallbacks(mPendingLauncherRunnable);
    }

    private final Runnable mPendingLauncherRunnable = new Runnable() {

        public void run() {
            final Intent intent = new Intent(SplashAct.this,
                    SimpleAndroidOCRActivity.class);

            startActivity(intent);
            finish();
        }
    };



}
Main activity

public class SimpleAndroidOCRActivity extends Activity implements OnInitListener {
    public static final String PACKAGE_NAME = "com.datumdroid.android.ocr.simple";
    public static final String DATA_PATH = Environment
            .getExternalStorageDirectory().toString() + "/SimpleAndroidOCR/";

    // You should have the trained data file in assets folder
    // You can get them at:
    // http://code.google.com/p/tesseract-ocr/downloads/list
    public static final String lang = "eng";

    private static final String TAG = "SimpleAndroidOCR.java";

    protected Button _button, btnSpeak;
    // protected ImageView _image;
    protected EditText _field;
    protected String _path;
    protected boolean _taken;

    protected static final String PHOTO_TAKEN = "photo_taken";

    //tts
    private TextToSpeech tts;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        //tts
         tts = new TextToSpeech(this, this);

         btnSpeak = (Button) findViewById(R.id.button1);
         btnSpeak.setOnClickListener(new View.OnClickListener() {

                @Override
                public void onClick(View arg0) {
                    speakOut();
                }

            });
        //ocr
        String[] paths = new String[] { DATA_PATH, DATA_PATH + "tessdata/" };

        for (String path : paths) {
            File dir = new File(path);
            if (!dir.exists()) {
                if (!dir.mkdirs()) {
                    Log.v(TAG, "ERROR: Creation of directory " + path + " on sdcard failed");
                    return;
                } else {
                    Log.v(TAG, "Created directory " + path + " on sdcard");
                }
            }

        }

        // lang.traineddata file with the app (in assets folder)
        // You can get them at:
        // http://code.google.com/p/tesseract-ocr/downloads/list
        // This area needs work and optimization
        if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
            try {

                AssetManager assetManager = getAssets();
                InputStream in = assetManager.open("tessdata/eng.traineddata");
                //GZIPInputStream gin = new GZIPInputStream(in);
                OutputStream out = new FileOutputStream(DATA_PATH
                        + "tessdata/eng.traineddata");

                // Transfer bytes from in to out
                byte[] buf = new byte[1024];
                int len;
                //while ((lenf = gin.read(buff)) > 0) {
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
                in.close();
                //gin.close();
                out.close();

                Log.v(TAG, "Copied " + lang + " traineddata");
            } catch (IOException e) {
                Log.e(TAG, "Was unable to copy " + lang + " traineddata " + e.toString());
            }
        }

        super.onCreate(savedInstanceState);

        setContentView(R.layout.main);

        // _image = (ImageView) findViewById(R.id.image);
        _field = (EditText) findViewById(R.id.field);
        _button = (Button) findViewById(R.id.button);
        _button.setOnClickListener(new ButtonClickHandler());

        _path = DATA_PATH + "/ocr.jpg";
    }

    public class ButtonClickHandler implements View.OnClickListener {
        public void onClick(View view) {
            Log.v(TAG, "Starting Camera app");
            startCameraActivity();
        }
    }

    // Simple android photo capture:
    // http://labs.makemachine.net/2010/03/simple-android-photo-capture/

    protected void startCameraActivity() {
        File file = new File(_path);
        Uri outputFileUri = Uri.fromFile(file);

        final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);

        startActivityForResult(intent, 0);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {

        Log.i(TAG, "resultCode: " + resultCode);

        if (resultCode == -1) {
            onPhotoTaken();
        } else {
            Log.v(TAG, "User cancelled");
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN, _taken);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        Log.i(TAG, "onRestoreInstanceState()");
        if (savedInstanceState.getBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN)) {
            onPhotoTaken();
        }
    }

    protected void onPhotoTaken() {
        _taken = true;

        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inSampleSize = 4;

        Bitmap bitmap = BitmapFactory.decodeFile(_path, options);

        try {
            ExifInterface exif = new ExifInterface(_path);
            int exifOrientation = exif.getAttributeInt(
                    ExifInterface.TAG_ORIENTATION,
                    ExifInterface.ORIENTATION_NORMAL);

            Log.v(TAG, "Orient: " + exifOrientation);

            int rotate = 0;

            switch (exifOrientation) {
            case ExifInterface.ORIENTATION_ROTATE_90:
                rotate = 90;
                break;
            case ExifInterface.ORIENTATION_ROTATE_180:
                rotate = 180;
                break;
            case ExifInterface.ORIENTATION_ROTATE_270:
                rotate = 270;
                break;
            }

            Log.v(TAG, "Rotation: " + rotate);

            if (rotate != 0) {

                // Getting width & height of the given image.
                int w = bitmap.getWidth();
                int h = bitmap.getHeight();

                // Setting pre rotate
                Matrix mtx = new Matrix();
                mtx.preRotate(rotate);

                // Rotating Bitmap
                bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
            }

            // Convert to ARGB_8888, required by tess
            bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);

        } catch (IOException e) {
            Log.e(TAG, "Couldn't correct orientation: " + e.toString());
        }

        // _image.setImageBitmap( bitmap );

        Log.v(TAG, "Before baseApi");

        TessBaseAPI baseApi = new TessBaseAPI();
        baseApi.setDebug(true);
        baseApi.init(DATA_PATH, lang);
        baseApi.setImage(bitmap);

        String recognizedText = baseApi.getUTF8Text();

        baseApi.end();

        // You now have the text in recognizedText var, you can do anything with it.
        // We will display a stripped out trimmed alpha-numeric version of it (if lang is eng)
        // so that garbage doesn't make it to the display.

        Log.v(TAG, "OCRED TEXT: " + recognizedText);

        if ( lang.equalsIgnoreCase("eng") ) {
            recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
        }

        recognizedText = recognizedText.trim();

        if ( recognizedText.length() != 0 ) {
            _field.setText(_field.getText().toString().length() == 0 ? recognizedText : _field.getText() + " " + recognizedText);
            _field.setSelection(_field.getText().toString().length());
        }

        // Cycle done.
    }

    //tts
    @Override
    public void onDestroy() {
        // Don't forget to shutdown tts!
        if (tts != null) {
            tts.stop();
            tts.shutdown();
        }
        super.onDestroy();
    }

    @Override
    public void onInit(int status) {

        if (status == TextToSpeech.SUCCESS) {

            int result = tts.setLanguage(Locale.US);

            if (result == TextToSpeech.LANG_MISSING_DATA
                    || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                Log.e("TTS", "This Language is not supported");
            } else {
                btnSpeak.setEnabled(true);
                speakOut();
            }

        } else {
            Log.e("TTS", "Initilization Failed!");
        }

    }

    private void speakOut() {

        String text = _field.getText().toString();

        tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
    }   
}
These are my class files. Technically the OCR part of the code comes from the sample project, so where am I going wrong? I get through the splash screen activity fine, and then I get a
NullPointerException.
If I remove the btnSpeak lines for the
View.OnClickListener,
I get a super.onCreate error, and adding super.onCreate does not help either.

I don't get it.

By the way, button, button1 and field are the IDs of a Button, a Button and an EditText respectively.

You are missing setting the layout in
SimpleAndroidOCRActivity's
onCreate.

As the exception says, it happens at
SimpleAndroidOCRActivity.onCreate(SimpleAndroidOCRActivity.java:68)

Do it like this:

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // tts  <<<<<----------- set a layout here first, like setContentView(R.layout.main);
    tts = new TextToSpeech(this, this);

    btnSpeak = (Button) findViewById(R.id.button1); // then get the button or text id
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // tts // You have to set the layout before accessing its children. Therefore:

    setContentView(R.layout.main);
    btnSpeak = (Button) findViewById(R.id.button1);

You have not set a content view for the activity
SimpleAndroidOCRActivity.

Add the line
setContentView(layoutID);
below
super.onCreate(savedInstanceState)
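
For example, here is a minimal sketch of how the start of onCreate could look once the layout is set before any findViewById call. It reuses the layout and IDs from the posted code (R.layout.main, button1, button, field); only the ordering matters here, it is not a drop-in replacement for the whole method:

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Inflate the layout first, so that findViewById() can find the views in it.
    setContentView(R.layout.main);

    // Only now look the views up; before setContentView() these calls return null.
    btnSpeak = (Button) findViewById(R.id.button1);
    _field = (EditText) findViewById(R.id.field);
    _button = (Button) findViewById(R.id.button);

    // The TTS engine can be created once the views exist.
    tts = new TextToSpeech(this, this);

    btnSpeak.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            speakOut();
        }
    });

    // ... rest of the original onCreate (tessdata copy, OCR setup) goes here ...
}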

Basically, what everyone is trying to say is that when you reference views from XML, you need to make sure you do it after setting the layout and before using the views, in the onCreate method itself and not in a child method. Put the findViewById calls after the layout line and you're good:

setContentView(R.layout.main);
btnSpeak = (Button) findViewById(R.id.button1);
Otherwise, check and manage your app as per Er.Madhur dhamande's suggestion.

setContentView(R.layout.activity_main);


After this, assign IDs to all the components used in the layout.

You have to insert setContentView(R.layout.main); before looking up the views, i.e. put setContentView(R.layout.main) before the findViewById calls, not after them.

Please note: don't use the word "urgent" - every question is urgent.

My bad. Anyway, I'm preparing one of my projects for an "aid"/"upgrade"; the project is in Visual Studio and the submission date is tomorrow. Everything except TTS was working; I added the TTS code and it messed everything up, so this was out of desperation. :)

We already have an accepted answer, and what you're saying is no different, so why post a new answer?