Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/352.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
什么是OpenCV模板匹配最大最小值范围?需要用作存储/c++/JAVA_Java_C++_Opencv_Image Processing_Template Matching - Fatal编程技术网

什么是OpenCV模板匹配最大最小值范围?需要用作存储/c++/JAVA

什么是OpenCV模板匹配最大最小值范围?需要用作存储/c++/JAVA,java,c++,opencv,image-processing,template-matching,Java,C++,Opencv,Image Processing,Template Matching,我正在使用模板匹配创建一个简单的openCV应用程序,我需要在一个大图像中比较查找一个小图像,并将结果返回为true(如果找到匹配项)或false(未找到匹配项) 现在的问题是使用这个minMaxValue进行决策(真/假)。我知道上述两种方法TM_SQDIFF和TM_SQDIFF_NORMED返回低值,而其他方法返回高值,因此我可以有两个不同的阈值,并比较其中一个阈值(取决于模板方法类型) 因此,如果有人能解释MinMaxLocResult返回的minVal和maxVal范围,那就太好了 是0

我正在使用模板匹配创建一个简单的openCV应用程序,我需要在一个大图像中比较查找一个小图像,并将结果返回为true(如果找到匹配项)或false(未找到匹配项)

现在的问题是使用这个minMaxValue进行决策(真/假)。我知道上述两种方法TM_SQDIFF和TM_SQDIFF_NORMED返回低值,而其他方法返回高值,因此我可以有两个不同的阈值,并比较其中一个阈值(取决于模板方法类型)

因此,如果有人能解释MinMaxLocResult返回的minVal和maxVal范围,那就太好了

是0到1的范围吗


如果是,对于最大类型模板,方法值1是完美匹配?

MinMaxLocResult 并不返回 minVal 和 maxVal 的取值范围；minVal 和 maxVal 只是结果矩阵中最低和最高的匹配分数，如图所示。

结构 MinMaxLocResult 还具有 minLoc 和 maxLoc 属性，其类型为 Point，提供匹配位置。假设您使用 TM_SQDIFF 或 TM_SQDIFF_NORMED 作为匹配标准，最佳匹配位置将是 mmr.minLoc。


为了设置检测阈值，您可以声明一个变量 double thresholdMatch，并通过实验设置其值。如果不对结果矩阵做归一化，minVal 就会给出正确（可直接比较）的值——我的意思是删除这一行：

   Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());
的答案很好,但这里有一些实际的代码在本质上实现了它。使用
0.1
作为阈值,我取得了良好的成绩:

import lombok.val;
import org.opencv.core.*;
import org.springframework.core.io.ClassPathResource;

import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;

import static javax.imageio.ImageIO.read;
import static javax.imageio.ImageIO.write;
import static javax.swing.SwingUtilities.invokeAndWait;
import static org.opencv.core.CvType.CV_32FC1;
import static org.opencv.highgui.HighGui.imshow;
import static org.opencv.highgui.HighGui.waitKey;
import static org.opencv.imgcodecs.Imgcodecs.CV_LOAD_IMAGE_UNCHANGED;
import static org.opencv.imgcodecs.Imgcodecs.imdecode;
import static org.opencv.imgproc.Imgproc.*;

public class TemplateMatcher
{
    static
    {
        // loadNativeOpenCVLibrary();
    }

    private static final int MATCH_METHOD = TM_SQDIFF_NORMED;

    private static Mat BufferedImage2Mat(BufferedImage image) throws IOException
    {
        try (val byteArrayOutputStream = new ByteArrayOutputStream())
        {
            write(image, "jpg", byteArrayOutputStream);
            byteArrayOutputStream.flush();
            val matOfByte = new MatOfByte(byteArrayOutputStream.toByteArray());
            return imdecode(matOfByte, CV_LOAD_IMAGE_UNCHANGED);
        }
    }

    public static Point performTemplateMatching(BufferedImage bigImage, BufferedImage templateImage,
                                                double detectionThreshold, boolean showMatch) throws IOException
    {
        val image = BufferedImage2Mat(bigImage);
        val template = BufferedImage2Mat(templateImage);

        // Create the result matrix
        val result_cols = image.cols() - template.cols() + 1;
        val result_rows = image.rows() - template.rows() + 1;
        val result = new Mat(result_rows, result_cols, CV_32FC1);

        // Do the matching
        matchTemplate(image, template, result, MATCH_METHOD);

        // Localize the best match
        val minMaxLocResult = Core.minMaxLoc(result);

        // / Show me what you got
        val matchedLocation = minMaxLocResult.minLoc;
        rectangle(image, matchedLocation, new Point(matchedLocation.x + template.cols(),
                matchedLocation.y + template.rows()), new Scalar(0, 255, 0));

        if (showMatch)
        {
            try
            {
                invokeAndWait(() -> imshow("Image Search", image));
            } catch (InterruptedException | InvocationTargetException exception)
            {
                exception.printStackTrace();
            }
            waitKey();
        }

        // Determine whether this sub image has been found
        val minVal = minMaxLocResult.minVal;
        if (minVal < detectionThreshold)
        {
            return minMaxLocResult.maxLoc;
        }

        return null;
    }

    public static BufferedImage getBufferedImage(String classpathFile) throws IOException
    {
        val classPathResource = new ClassPathResource(classpathFile);
        val filePath = classPathResource.getFile();
        return read(filePath);
    }
}
import lombok.val;
import org.opencv.core.*;
import org.springframework.core.io.ClassPathResource;

import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;

import static javax.imageio.ImageIO.read;
import static javax.imageio.ImageIO.write;
import static javax.swing.SwingUtilities.invokeAndWait;
import static org.opencv.core.CvType.CV_32FC1;
import static org.opencv.highgui.HighGui.imshow;
import static org.opencv.highgui.HighGui.waitKey;
import static org.opencv.imgcodecs.Imgcodecs.CV_LOAD_IMAGE_UNCHANGED;
import static org.opencv.imgcodecs.Imgcodecs.imdecode;
import static org.opencv.imgproc.Imgproc.*;

public class TemplateMatcher
{
    static
    {
        // loadNativeOpenCVLibrary();
    }

    private static final int MATCH_METHOD = TM_SQDIFF_NORMED;

    private static Mat BufferedImage2Mat(BufferedImage image) throws IOException
    {
        try (val byteArrayOutputStream = new ByteArrayOutputStream())
        {
            write(image, "jpg", byteArrayOutputStream);
            byteArrayOutputStream.flush();
            val matOfByte = new MatOfByte(byteArrayOutputStream.toByteArray());
            return imdecode(matOfByte, CV_LOAD_IMAGE_UNCHANGED);
        }
    }

    public static Point performTemplateMatching(BufferedImage bigImage, BufferedImage templateImage,
                                                double detectionThreshold, boolean showMatch) throws IOException
    {
        val image = BufferedImage2Mat(bigImage);
        val template = BufferedImage2Mat(templateImage);

        // Create the result matrix
        val result_cols = image.cols() - template.cols() + 1;
        val result_rows = image.rows() - template.rows() + 1;
        val result = new Mat(result_rows, result_cols, CV_32FC1);

        // Do the matching
        matchTemplate(image, template, result, MATCH_METHOD);

        // Localize the best match
        val minMaxLocResult = Core.minMaxLoc(result);

        // / Show me what you got
        val matchedLocation = minMaxLocResult.minLoc;
        rectangle(image, matchedLocation, new Point(matchedLocation.x + template.cols(),
                matchedLocation.y + template.rows()), new Scalar(0, 255, 0));

        if (showMatch)
        {
            try
            {
                invokeAndWait(() -> imshow("Image Search", image));
            } catch (InterruptedException | InvocationTargetException exception)
            {
                exception.printStackTrace();
            }
            waitKey();
        }

        // Determine whether this sub image has been found
        val minVal = minMaxLocResult.minVal;
        if (minVal < detectionThreshold)
        {
            return minMaxLocResult.maxLoc;
        }

        return null;
    }

    public static BufferedImage getBufferedImage(String classpathFile) throws IOException
    {
        val classPathResource = new ClassPathResource(classpathFile);
        val filePath = classPathResource.getFile();
        return read(filePath);
    }
}
术语

  • 模板=我们试图找到的图像
  • haystack=我们正在搜索的图像
  • region=当前与模板匹配的干草堆中的面积

根据模板匹配类型，最小值和最大值将具有不同的可能范围。干草堆中每个位置的比较结果由以下公式确定（取自 OpenCV matchTemplate 文档；T() 为模板，I() 为干草堆）：

正如您所注意到的,随着模板和区域之间的差异越来越大,平方差方法(SQDIFF)也越来越大,因此最佳匹配将具有最低的值。对于其他方法(互相关、相关系数),最佳匹配将具有最高值

如果你不懂数学（像我一样），范围本身很难确定，但就平方差方法（SQDIFF）而言，范围应该是（假设图像是 1 字节灰度）：0（完美匹配）到 255² × 模板像素总数（最差匹配）；而归一化版本 TM_SQDIFF_NORMED 的范围则是 0 到 1。
import lombok.val;
import org.opencv.core.*;
import org.springframework.core.io.ClassPathResource;

import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;

import static javax.imageio.ImageIO.read;
import static javax.imageio.ImageIO.write;
import static javax.swing.SwingUtilities.invokeAndWait;
import static org.opencv.core.CvType.CV_32FC1;
import static org.opencv.highgui.HighGui.imshow;
import static org.opencv.highgui.HighGui.waitKey;
import static org.opencv.imgcodecs.Imgcodecs.CV_LOAD_IMAGE_UNCHANGED;
import static org.opencv.imgcodecs.Imgcodecs.imdecode;
import static org.opencv.imgproc.Imgproc.*;

public class TemplateMatcher
{
    static
    {
        // loadNativeOpenCVLibrary();
    }

    private static final int MATCH_METHOD = TM_SQDIFF_NORMED;

    private static Mat BufferedImage2Mat(BufferedImage image) throws IOException
    {
        try (val byteArrayOutputStream = new ByteArrayOutputStream())
        {
            write(image, "jpg", byteArrayOutputStream);
            byteArrayOutputStream.flush();
            val matOfByte = new MatOfByte(byteArrayOutputStream.toByteArray());
            return imdecode(matOfByte, CV_LOAD_IMAGE_UNCHANGED);
        }
    }

    public static Point performTemplateMatching(BufferedImage bigImage, BufferedImage templateImage,
                                                double detectionThreshold, boolean showMatch) throws IOException
    {
        val image = BufferedImage2Mat(bigImage);
        val template = BufferedImage2Mat(templateImage);

        // Create the result matrix
        val result_cols = image.cols() - template.cols() + 1;
        val result_rows = image.rows() - template.rows() + 1;
        val result = new Mat(result_rows, result_cols, CV_32FC1);

        // Do the matching
        matchTemplate(image, template, result, MATCH_METHOD);

        // Localize the best match
        val minMaxLocResult = Core.minMaxLoc(result);

        // / Show me what you got
        val matchedLocation = minMaxLocResult.minLoc;
        rectangle(image, matchedLocation, new Point(matchedLocation.x + template.cols(),
                matchedLocation.y + template.rows()), new Scalar(0, 255, 0));

        if (showMatch)
        {
            try
            {
                invokeAndWait(() -> imshow("Image Search", image));
            } catch (InterruptedException | InvocationTargetException exception)
            {
                exception.printStackTrace();
            }
            waitKey();
        }

        // Determine whether this sub image has been found
        val minVal = minMaxLocResult.minVal;
        if (minVal < detectionThreshold)
        {
            return minMaxLocResult.maxLoc;
        }

        return null;
    }

    public static BufferedImage getBufferedImage(String classpathFile) throws IOException
    {
        val classPathResource = new ClassPathResource(classpathFile);
        val filePath = classPathResource.getFile();
        return read(filePath);
    }
}