Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/opencv/3.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Android 埃福兰德。所以我使用ffplay来显示这个网络流,以证明它是有效的。这是显示流的脚本。因此,您必须在此基础上作为opencv插件工作 ffplay -f h264 -i udp://192.168.1.45:23003 你想用imshow把你的画面形象_Android_Opencv_Ffmpeg_Javacv_Dji Sdk - Fatal编程技术网

Android 埃福兰德。所以我使用ffplay来显示这个网络流,以证明它是有效的。这是显示流的脚本。因此,您必须在此基础上作为opencv插件工作 ffplay -f h264 -i udp://192.168.1.45:23003 你想用imshow把你的画面形象化吗?

Android 埃福兰德。所以我使用ffplay来显示这个网络流,以证明它是有效的。这是显示流的脚本。因此,您必须在此基础上作为opencv插件工作 ffplay -f h264 -i udp://192.168.1.45:23003 你想用imshow把你的画面形象化吗,android,opencv,ffmpeg,javacv,dji-sdk,Android,Opencv,Ffmpeg,Javacv,Dji Sdk,埃福兰德。所以我使用ffplay来显示这个网络流,以证明它是有效的。这是显示流的脚本。因此,您必须在此基础上作为opencv插件工作 ffplay -f h264 -i udp://192.168.1.45:23003 你想用imshow把你的画面形象化吗?在这种情况下,Y通道将被映射到蓝色,U通道将被映射到绿色,V通道将被映射到红色通道以便可视化。这就是保存到我的平板电脑上的jpeg。我没有使用imshow,这绝对是一个很好的建议,可以解释为什么它不起作用。但是我只是用DJI的示例代码来生成这些结果。

埃福兰德。所以我使用ffplay来显示这个网络流,以证明它是有效的。这是显示流的脚本。因此,您必须在此基础上作为opencv插件工作

ffplay -f h264 -i udp://192.168.1.45:23003 

你想用imshow把你的画面形象化吗?在这种情况下,Y通道将被映射到蓝色,U通道将被映射到绿色,V通道将被映射到红色通道以便可视化。这就是保存到我的平板电脑上的jpeg。我没有使用imshow,这绝对是一个很好的建议,可以解释为什么它不起作用。但是我只是用DJI的示例代码来生成这些结果。我从DJI的示例代码中为那些感兴趣的人添加了YUV save to JPEG函数。你想用imshow可视化YUV帧吗?在这种情况下,Y通道将被映射到蓝色,U通道将被映射到绿色,V通道将被映射到红色通道以便可视化。这就是保存到我的平板电脑上的jpeg。我没有使用imshow,这绝对是一个很好的建议,可以解释为什么它不起作用。但我只是使用DJI示例代码来生成这些结果。我为感兴趣的人从DJI示例代码中添加了YUV save to JPEG函数
    @Override
    public void onYuvDataReceived(final ByteBuffer yuvFrame, int dataSize, final int width, final int height) {
        // Sample roughly one frame out of every 30; the counter advances on every
        // callback regardless of whether the frame is processed.
        if (count++ % 30 != 0 || yuvFrame == null) {
            return;
        }
        // Copy the buffer contents out before handing them to a background thread,
        // since the ByteBuffer may be recycled by the decoder after this returns.
        final byte[] frameData = new byte[dataSize];
        yuvFrame.get(frameData);
        AsyncTask.execute(new Runnable() {
            @Override
            public void run() {
                // Only proceed when at least a full luma (Y) plane is present.
                if (frameData.length < width * height) {
                    return;
                }
                Log.d("MatWidth", "Made it");
                YuvImage yuvImage = saveYuvDataToJPEG(frameData, width, height);
                Bitmap rgbYuvConvert = convertYuvImageToRgb(yuvImage, width, height);

                // Wrap the raw bytes in a single-channel Mat (Y plane only fits
                // height x width) for downstream OpenCV processing.
                Mat yuvMat = new Mat(height, width, CvType.CV_8UC1);
                yuvMat.put(0, 0, frameData);
                //OpenCv Stuff
            }
        });
    }
private YuvImage saveYuvDataToJPEG(byte[] yuvFrame, int width, int height){
        // Converts one 4:2:0 frame (Y plane followed by an interleaved chroma plane)
        // into NV21 byte order and hands it to screenShot() for JPEG compression.
        // Returns the YuvImage that was compressed.
        // NOTE(review): this reads up to width*height*3/2 bytes from yuvFrame, but the
        // caller only checks for width*height — a short frame throws
        // ArrayIndexOutOfBoundsException in the chroma loop below. TODO confirm sizes.
        byte[] y = new byte[width * height];
        byte[] u = new byte[width * height / 4];
        byte[] v = new byte[width * height / 4];
        byte[] nu = new byte[width * height / 4]; // re-interleaved U plane
        byte[] nv = new byte[width * height / 4]; // re-interleaved V plane

        System.arraycopy(yuvFrame, 0, y, 0, y.length);
        // Bug fix: the original logged y.toString(), which is only the array's
        // identity hash (e.g. "[B@1a2b3c"), never the data — log something useful.
        Log.d("MatY", "y plane length: " + y.length);
        // De-interleave the chroma plane. This ordering assumes V comes first in
        // each pair (NV21-style) — presumably matching the decoder output; verify.
        for (int i = 0; i < u.length; i++) {
            v[i] = yuvFrame[y.length + 2 * i];
            u[i] = yuvFrame[y.length + 2 * i + 1];
        }
        int uvWidth = width / 2;
        int uvHeight = height / 2;
        // NOTE(review): this quadrant-based duplication (left/right halves of u,
        // top/bottom halves of v) does not match standard 4:2:0 chroma layout and
        // is a plausible cause of garbled colours — kept as-is from the DJI sample.
        for (int j = 0; j < uvWidth / 2; j++) {
            for (int i = 0; i < uvHeight / 2; i++) {
                byte uSample1 = u[i * uvWidth + j];
                byte uSample2 = u[i * uvWidth + j + uvWidth / 2];
                byte vSample1 = v[(i + uvHeight / 2) * uvWidth + j];
                byte vSample2 = v[(i + uvHeight / 2) * uvWidth + j + uvWidth / 2];
                nu[2 * (i * uvWidth + j)] = uSample1;
                nu[2 * (i * uvWidth + j) + 1] = uSample1;
                nu[2 * (i * uvWidth + j) + uvWidth] = uSample2;
                nu[2 * (i * uvWidth + j) + 1 + uvWidth] = uSample2;
                nv[2 * (i * uvWidth + j)] = vSample1;
                nv[2 * (i * uvWidth + j) + 1] = vSample1;
                nv[2 * (i * uvWidth + j) + uvWidth] = vSample2;
                nv[2 * (i * uvWidth + j) + 1 + uvWidth] = vSample2;
            }
        }
        // Reassemble as NV21 (full Y plane, then V/U interleaved) for YuvImage.
        byte[] bytes = new byte[yuvFrame.length];
        System.arraycopy(y, 0, bytes, 0, y.length);
        for (int i = 0; i < u.length; i++) {
            bytes[y.length + (i * 2)] = nv[i];
            bytes[y.length + (i * 2) + 1] = nu[i];
        }
        Log.d(TAG,
              "onYuvDataReceived: frame index: "
                  + DJIVideoStreamDecoder.getInstance().frameIndex
                  + ",array length: "
                  + bytes.length);
        YuvImage yuver = screenShot(bytes,Environment.getExternalStorageDirectory() + "/DJI_ScreenShot", width, height);
        return yuver;
    }

    /**
     * Save the buffered data into a JPG image file.
     *
     * Wraps {@code buf} as an NV21 {@link YuvImage}, compresses it to a
     * timestamped JPEG under {@code shotDir} (creating the directory if needed),
     * posts the saved path to the UI thread via {@code displayPath}, and returns
     * the YuvImage whether or not the file write succeeded.
     *
     * @param buf     pixel data in NV21 byte order; presumably at least
     *                width*height*3/2 bytes — confirm against caller
     * @param shotDir directory the screenshot is written into
     * @param width   frame width in pixels
     * @param height  frame height in pixels
     * @return the YuvImage constructed from {@code buf}
     */
    private YuvImage screenShot(byte[] buf, String shotDir, int width, int height) {
        File dir = new File(shotDir);
        if (!dir.exists() || !dir.isDirectory()) {
            dir.mkdirs();
        }
        YuvImage yuvImage = new YuvImage(buf,
                ImageFormat.NV21,
                width,
                height,
                null);

        OutputStream outputFile = null;

        final String path = dir + "/ScreenShot_" + System.currentTimeMillis() + ".jpg";

        try {
            outputFile = new FileOutputStream(new File(path));
        } catch (FileNotFoundException e) {
            Log.e(TAG, "test screenShot: new bitmap output file error: " + e);
            // Intentionally no early return: the YuvImage is still returned below.
        }
        if (outputFile != null) {
            yuvImage.compressToJpeg(new Rect(0,
                    0,
                    width,
                    height), 100, outputFile);
            // Bug fix: close only when the stream was actually opened. The original
            // called outputFile.close() unconditionally, so a failed open led to a
            // NullPointerException that the catch (IOException) below cannot catch.
            try {
                outputFile.close();
            } catch (IOException e) {
                Log.e(TAG, "test screenShot: compress yuv image error: " + e);
                e.printStackTrace();
            }
        }

        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                displayPath(path);
            }
        });
        return yuvImage;
    }

    /* Async OpenCV Code */
    private class OpenCvAndModelAsync extends AsyncTask<byte[], Void, double[]> {
        /**
         * Background worker: wraps the raw frame bytes in a Mat, dumps every pixel
         * to the log, then runs the OpenCV pipeline. No UI access allowed here.
         *
         * @param params params[0] is the raw video buffer for one frame
         * @return the centre coordinates produced by openCVThingy()
         */
        @Override
        protected double[] doInBackground(byte[]... params) {
            // NOTE(review): a 4 x length CV_8UC4 Mat holds 16*length bytes, but
            // put() below supplies only length bytes, so most of the matrix stays
            // uninitialised. This layout looks wrong for a video frame — verify
            // against the decoder's actual output format; raw H.264 bytes cannot
            // simply be wrapped as pixels.
            Mat videoBufMat = new Mat(4, params[0].length, CvType.CV_8UC4);
            videoBufMat.put(0, 0, params[0]);
            //if I add this in it says the bytes are empty.
            //Mat videoBufMat = Imgcodecs.imdecode(encodeVideoBuf, Imgcodecs.IMREAD_ANYCOLOR);
            //encodeVideoBuf.release();
            Log.d("MatRgba", videoBufMat.toString());
            // NOTE(review): per-pixel logging is extremely slow — debug-only; remove
            // once the pipeline is confirmed working.
            for (int i = 0; i < videoBufMat.rows(); i++) {
                for (int j = 0; j < videoBufMat.cols(); j++) {
                    double[] rgb = videoBufMat.get(i, j);
                    Log.i("Matrix", "red: " + rgb[0] + " green: " + rgb[1] + " blue: " + rgb[2] + " alpha: "
                            + rgb[3] + " Length: " + rgb.length + " Rows: "
                            + videoBufMat.rows() + " Columns: " + videoBufMat.cols());
                }
            }
            double[] center = openCVThingy(videoBufMat);
            return center;
        }

        /** UI-thread callback; handle results or chain another task if necessary. */
        @Override // consistency fix: the original omitted @Override on this override
        protected void onPostExecute(double[] center) {
            //handle ui or another async task if necessary
        }
    }

2019-05-23 21:14:29.601 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 2425
2019-05-23 21:14:29.802 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 2659
2019-05-23 21:14:30.004 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:30.263 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6015
2019-05-23 21:14:30.507 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:30.766 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4682
2019-05-23 21:14:31.005 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:31.234 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 2840
2019-05-23 21:14:31.433 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4482
2019-05-23 21:14:31.664 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:31.927 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4768
2019-05-23 21:14:32.174 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:32.433 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4700
2019-05-23 21:14:32.668 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:32.864 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4740
2019-05-23 21:14:33.102 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 6
2019-05-23 21:14:33.365 21431-22086/com.dji.simulatorDemo D/VideoBufferSize: 4640
Mat videoBufMat = Imgcodecs.imdecode(new MatOfByte(params[0]), Imgcodecs.IMREAD_UNCHANGED);
Mat encodeVideoBuf = new Mat(4, params[0].length, CvType.CV_8UC4);
encodeVideoBuf.put(0,0, params[0]);
Mat videoBufMat = Imgcodecs.imdecode(encodeVideoBuf, Imgcodecs.IMREAD_UNCHANGED);
ffplay -f h264 -i udp://192.168.1.45:23003