I'm trying to use getPerspectiveTransform on the Android NDK to warp a perspective image

So I'm trying to detect a square object with OpenCV through the NDK on Android and crop it into a flat 2D image. I can detect the square's corner points, but when I call getPerspectiveTransform(src, dst) I get the following error:

OpenCV Error: Assertion failed (src.checkVector(2, CV_32F) == 4 && dst.checkVector(2, CV_32F)  == 4) in cv::Mat cv::getPerspectiveTransform(cv::InputArray, cv::InputArray), file /home/reports/ci/slave/50-SDK/opencv/modules/imgproc/src/imgwarp.cpp, line 3607
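From the assertion text, both src and dst apparently have to be exactly four 2-channel CV_32F entries, i.e. four Point2f each (CV_32FC2 data). For reference, here is a minimal sketch of a call shape that satisfies the check, with made-up coordinates rather than my real ones:

#include <opencv2/imgproc/imgproc.hpp>
#include <vector>

using namespace cv;

Mat exampleTransform() {
    // Exactly four source points: the detected corners of the square.
    std::vector<Point2f> srcPts(4);
    srcPts[0] = Point2f(10, 10);    // top-left (made-up values)
    srcPts[1] = Point2f(200, 15);   // top-right
    srcPts[2] = Point2f(210, 300);  // bottom-right
    srcPts[3] = Point2f(5, 290);    // bottom-left

    // Exactly four destination points: the corners of the output rectangle.
    std::vector<Point2f> dstPts(4);
    dstPts[0] = Point2f(0, 0);
    dstPts[1] = Point2f(220, 0);
    dstPts[2] = Point2f(220, 300);
    dstPts[3] = Point2f(0, 300);

    // A vector<Point2f> is seen by InputArray as a 4x1 CV_32FC2 Mat,
    // so checkVector(2, CV_32F) returns 4 for both arguments.
    return getPerspectiveTransform(srcPts, dstPts);
}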
Here is my Activity on the Android side:

package org.opencv.samples.tutorial1;

import java.io.ByteArrayOutputStream;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;

import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.widget.Toast;

public class Tutorial1Activity extends Activity implements
        CvCameraViewListener2 {
    private static final String TAG = "OCVSample::Activity";

    private Mat mRgba;
    private Mat mGrayMat;
    private Mat imageTaken;

    private CameraBridgeViewBase mOpenCvCameraView;
    private boolean mIsJavaCamera = true;
    private MenuItem mItemSwitchCamera = null;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i(TAG, "OpenCV loaded successfully");

                System.loadLibrary("native_sample");

                mOpenCvCameraView.enableView();
            }
                break;
            default: {
                super.onManagerConnected(status);
            }
                break;
            }
        }
    };

    public Tutorial1Activity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.tutorial1_surface_view);

        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);

        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);

        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
                mLoaderCallback);
    }

    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        Log.i(TAG, "called onCreateOptionsMenu");
        // mItemSwitchCamera = menu.add("Toggle Native/Java camera");
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        String toastMesage = new String();
        Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);

        if (item == mItemSwitchCamera) {
            mOpenCvCameraView.setVisibility(SurfaceView.GONE);
            mIsJavaCamera = !mIsJavaCamera;

            if (mIsJavaCamera) {
                mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
                toastMesage = "Java Camera";
            } else {
                mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_native_surface_view);
                toastMesage = "Native Camera";
            }

            mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
            mOpenCvCameraView.setCvCameraViewListener(this);
            mOpenCvCameraView.enableView();

            mOpenCvCameraView.setOnClickListener(new OnClickListener() {

                @Override
                public void onClick(View v) {
                    takePicture();
                }
            });

            Toast toast = Toast.makeText(this, toastMesage, Toast.LENGTH_LONG);
            toast.show();
        }

        return true;
    }

    public void takePicture() {

        if (imageTaken != null) {

            Bitmap resultBitmap = null;

            try {
                // Imgproc.cvtColor(imageTaken, imageTaken,
                // Imgproc.COLOR_BGR2GRAY);
                // Imgproc.cvtColor(imageTaken, imageTaken,
                // Imgproc.COLOR_GRAY2RGBA, 4);

                /*
                 * Mat test =
                 * Imgproc.getPerspectiveTransform(ImageSrc,ImageDst);
                 * Imgproc.warpPerspective(ImageSrc, ImageDst, test,
                 * ImageDst.size());
                 */

                resultBitmap = Bitmap.createBitmap(imageTaken.cols(),
                        imageTaken.rows(), Bitmap.Config.ARGB_8888);
                //
                Utils.matToBitmap(imageTaken, resultBitmap);

                byte[] sendData = codec(resultBitmap,
                        Bitmap.CompressFormat.JPEG, 50);

                Intent i = new Intent(getApplicationContext(),
                        ShowImageActivity.class);
                i.putExtra("data", sendData);
                startActivity(i);

            } catch (CvException e) {
                // TODO: handle exception
                e.printStackTrace();
            }

        }

    }

    private byte[] codec(Bitmap src, Bitmap.CompressFormat format, int quality) {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        src.compress(format, quality, os);

        byte[] array = os.toByteArray();
        System.out.println(array.length);

//      return BitmapFactory.decodeByteArray(array, 0, array.length);
        return array;

    }

    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat();
        mGrayMat = new Mat();
        imageTaken = new Mat();
    }

    public void onCameraViewStopped() {
        mRgba.release();
        mGrayMat.release();
        imageTaken.release();
    }

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        /*long start = System.currentTimeMillis();

        Size originalSize = inputFrame.rgba().size();

        Imgproc.resize(inputFrame.rgba(), mRgba, new Size(800, 480));

        */
//      FindSquares(inputFrame.rgba().getNativeObjAddr(), 1);
        // imageTaken = inputFrame.clone();

//      System.out.println(inputFrame.rgba().type());

        findSquare(inputFrame.rgba().getNativeObjAddr(), imageTaken.getNativeObjAddr(),  1);

        // if (mDraw == 1) {
        /*Imgproc.resize(mRgba, inputFrame.rgba(), originalSize);
        // }

        long end = System.currentTimeMillis();
        Log.d("Frame time", "" + (end - start) + " ms");
*/
        return inputFrame.rgba();

    }

    public native void FindFeatures(long matAddrGr, long matAddrRgba);

    public native int FindSquares(long matAddrRgba, int draw);

    public native void findSquare(long matAddrRgba, long matAddrDescriptor, int draw);
}
And here is my JNI code:

JNIEXPORT jint JNICALL Java_com_gconsent_opencv_MainActivity_findSquare(JNIEnv*,
        jobject, jlong addrRgba, jlong addrDescriptor, jlong addrSrc, jlong addrDst, jint draw){

    Mat& image = *(Mat*) addrRgba;

    Mat& imageCropped = *(Mat*) addrDescriptor;

    Mat& imageSrc = *(Mat*) addrSrc;
    Mat& imageDst = *(Mat*) addrDst;


    Mat newSrc = image.clone();
    imageCropped = image.clone();
    Mat testImage = image.clone();

    // blur will enhance edge detection
    Mat blurred(testImage);
    medianBlur(testImage, blurred, 9);

    Mat gray0(blurred.size(), CV_8U), gray;
    vector<vector<Point> > contours;

    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
        int ch[] = { c, 0 };
        mixChannels(&blurred, 1, &gray0, 1, ch, 1);

        // try several threshold levels
        const int threshold_level = 2;
        for (int l = 0; l < threshold_level; l++) {
            // Use Canny instead of zero threshold level!
            // Canny helps to catch squares with gradient shading
            if (l == 0) {
                Canny(gray0, gray, 10, 20, 3); //

                // Dilate helps to remove potential holes between edge segments
                dilate(gray, gray, Mat(), Point(-1, -1));
            } else {
                gray = gray0 >= (l + 1) * 255 / threshold_level;
            }

            // Find contours and store them in a list
            findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

            // Test contours
            vector<Point> approx;
            for (size_t i = 0; i < contours.size(); i++) {
                // approximate contour with accuracy proportional
                // to the contour perimeter
                approxPolyDP(Mat(contours[i]), approx,
                        arcLength(Mat(contours[i]), true) * 0.02, true);

                // Note: absolute value of an area is used because
                // area may be positive or negative - in accordance with the
                // contour orientation
                if (approx.size() == 4 && fabs(contourArea(Mat(approx))) > 1000
                        && isContourConvex(Mat(approx))) {
                    double maxCosine = 0;

                    for (int j = 2; j < 5; j++) {
                        double cosine = fabs(
                                angle(approx[j % 4], approx[j - 2],
                                        approx[j - 1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    if (maxCosine < 0.3) {

                        line(image, approx[0], approx[1],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[1], approx[2],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[2], approx[3],
                                Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[3], approx[0],
                                Scalar(0, 255, 0, 255), 2, 4, 0);



                        vector<Point2f> src(4);

//                      src.push_back(approx[0]);
//                      src.push_back(approx[1]);
//                      src.push_back(approx[2]);
//                      src.push_back(approx[3]);


                        src[0] = approx[0];
                        src[1] = approx[1];
                        src[2] = approx[2];
                        src[3] = approx[3];

                        cv::Mat quad = cv::Mat::zeros(300, 220, CV_8U);

                        // transformed quadrangle
                        vector<Point2f> quad_pts(4);

//                      Point2f quad_pts[4];

                        quad_pts.push_back(Point(0, 0));
                        quad_pts.push_back(Point(quad.cols, 0));
                        quad_pts.push_back(Point(quad.cols, quad.rows));
                        quad_pts.push_back(Point(0, quad.rows));
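                        // NOTE: quad_pts was constructed with size 4 above, so the four
                        // push_back calls append four MORE points, leaving 8 entries.
                        // checkVector(2, CV_32F) then returns 8 instead of 4, which is
                        // what trips the assertion in getPerspectiveTransform below.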

//                      quad_pts[0] = Point(0, 0);
//                      quad_pts[1] = Point(quad.cols, 0);
//                      quad_pts[2] = Point(quad.cols, quad.rows);
//                      quad_pts[3] = Point(0, quad.rows);

                        imageSrc = Mat(src);
                        imageDst = Mat(quad_pts);


                        Mat transmtx = getPerspectiveTransform(src, quad_pts);
                        warpPerspective(src, quad, transmtx, quad.size());

                        imageCropped = quad.clone();

                    }
                }
            }
        }

    }

//  imageCropped = getPolygon(newSrc);

    return 1;

}

Change the input cv::Mat type to CV_32FC2.

Also take a look.
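getPerspectiveTransform treats its two InputArrays as 4x1 CV_32FC2 Mats, so each must hold exactly four Point2f. In the posted JNI code two things break this: quad_pts ends up with 8 entries because push_back is used on a vector already constructed with size 4, and warpPerspective is called on the point vector src instead of the image. A sketch of what the fixed fragment could look like, keeping the rest of the loop as posted:

// quad_pts already holds 4 elements; assign instead of push_back
// so both point sets satisfy checkVector(2, CV_32F) == 4.
vector<Point2f> quad_pts(4);
quad_pts[0] = Point2f(0, 0);
quad_pts[1] = Point2f(quad.cols, 0);
quad_pts[2] = Point2f(quad.cols, quad.rows);
quad_pts[3] = Point2f(0, quad.rows);

Mat transmtx = getPerspectiveTransform(src, quad_pts);

// Warp the actual image (newSrc), not the point vector,
// into the 300x220 output quad.
warpPerspective(newSrc, quad, transmtx, quad.size());

imageCropped = quad.clone();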

Could you post the updated code, or explain it to me...? I tried to implement the accepted answer... but I'm still getting the same error... Can you help me...?

Thanks for posting this question. I was facing the same problem and the answer below solved it! Huge thanks to this guy!! Keep it up, bro!!