Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/swift/19.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios 如何使用CAShapeLayer和UIBezeirPath在实时摄影机提要上绘制检测到的矩形路径_Ios_Swift_Uibezierpath_Cashapelayer_Cidetector - Fatal编程技术网

Ios 如何使用CAShapeLayer和UIBezeirPath在实时摄影机提要上绘制检测到的矩形路径

Ios 如何使用CAShapeLayer和UIBezeirPath在实时摄影机提要上绘制检测到的矩形路径,ios,swift,uibezierpath,cashapelayer,cidetector,Ios,Swift,Uibezierpath,Cashapelayer,Cidetector,我正在开发一个应用程序来检测实时摄像机提要中的矩形,并突出显示检测到的矩形。我使用AVFoundation进行摄像,并使用以下方法检测并突出显示检测到的矩形 var detector: CIDetector?; override func viewDidLoad() { super.viewDidLoad(); detector = self.prepareRectangleDetector(); } func captureOutput(captureOutput: AV

我正在开发一个应用程序来检测实时摄像机提要中的矩形,并突出显示检测到的矩形。我使用AVFoundation进行摄像,并使用以下方法检测并突出显示检测到的矩形

/// Rectangle detector, created once and reused for every camera frame.
var detector: CIDetector?

override func viewDidLoad() {
    super.viewDidLoad()

    // Build the high-accuracy detector up front; constructing a CIDetector
    // per frame in the capture callback would be prohibitively expensive.
    detector = self.prepareRectangleDetector()
}

/// AVCaptureVideoDataOutput sample-buffer callback: runs rectangle detection
/// on each incoming camera frame, then renders the frame into the GLKView.
///
/// NOTE(review): this is invoked many times per second on the capture
/// output's queue, so any per-frame allocation here must be cheap.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { // re check this method

    // Need to shimmy this through type-hell
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)

    // Force the type change - pass through opaque buffer
    // (CVImageBuffer -> CVPixelBuffer via an unretained round-trip: no
    // ownership transfer; the sample buffer keeps the pixels alive.)
    let opaqueBuffer = Unmanaged<CVImageBuffer>.passUnretained(imageBuffer!).toOpaque()
    let pixelBuffer = Unmanaged<CVPixelBuffer>.fromOpaque(opaqueBuffer).takeUnretainedValue()

    let sourceImage = CIImage(CVPixelBuffer: pixelBuffer, options: nil)

    // Do some detection on the image
    self.performRectangleDetection(sourceImage);

    var outputImage = sourceImage

    // Do some clipping
    // Center-crop the image rect so its aspect ratio matches the view's
    // (aspect-fill): trim width when the image is wider, height otherwise.
    var drawFrame = outputImage.extent
    let imageAR = drawFrame.width / drawFrame.height
    let viewAR = videoDisplayViewBounds.width / videoDisplayViewBounds.height

    // NOTE(review): the origin.x shift below assumes a new width of
    // drawFrame.height * viewAR, but size.width is then set to
    // drawFrame.height / viewAR — these two disagree; confirm which is
    // intended (the `else` branch is internally consistent).
    if imageAR > viewAR {
        drawFrame.origin.x += (drawFrame.width - drawFrame.height * viewAR) / 2.0
        drawFrame.size.width = drawFrame.height / viewAR
    } else {
        drawFrame.origin.y += (drawFrame.height - drawFrame.width / viewAR) / 2.0
        drawFrame.size.height = drawFrame.width / viewAR
    }

    //videoDisplayView is a GLKView which is used to display camera feed
    // Make the view's EAGL context current before issuing GL calls.
    videoDisplayView.bindDrawable()
    if videoDisplayView.context != EAGLContext.currentContext() {
        EAGLContext.setCurrentContext(videoDisplayView.context)
    }

    // clear eagl view to grey
    glClearColor(0.5, 0.5, 0.5, 1.0);
    glClear(0x00004000)    // 0x00004000 == GL_COLOR_BUFFER_BIT

    // set the blend mode to "source over" so that CI will use that
    glEnable(0x0BE2);          // 0x0BE2 == GL_BLEND
    glBlendFunc(1, 0x0303);    // GL_ONE, GL_ONE_MINUS_SRC_ALPHA

    // Draw the (cropped) frame into the view's bounds via the CIContext.
    renderContext.drawImage(outputImage, inRect: videoDisplayViewBounds, fromRect: drawFrame);

    // Present the drawable.
    videoDisplayView.display();

}

/// Builds a Core Image rectangle detector.
///
/// - Returns: A `CIDetector` of type `CIDetectorTypeRectangle` configured
///   for `CIDetectorAccuracyHigh`.
func prepareRectangleDetector() -> CIDetector {
    let detectorOptions: [String: AnyObject] = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
    return CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: detectorOptions)
}

/// Detects rectangles in `image` and overlays each one as a stroked
/// `CAShapeLayer` on `videoDisplayParentView`.
///
/// - Parameter image: The camera frame to run rectangle detection on.
func performRectangleDetection(image: CIImage){

    if let detector = detector {

        // Get the detections. The aspect-ratio hint biases detection toward
        // rectangles with a ~1.43 width/height ratio.
        let features = detector.featuresInImage(image, options: [CIDetectorAspectRatio: NSNumber(float: 1.43)]);

        // Clear the previous frame's overlays ONCE per frame, and on the
        // SAME view the new layers are added to. The original code cleared
        // `previewImageView` instead (and did so inside the loop), so stale
        // CAShapeLayers piled up on `videoDisplayParentView` every frame —
        // the reported slowdown. Clearing unconditionally also removes the
        // overlay when no rectangle is found in the current frame.
        self.videoDisplayParentView.layer.sublayers = nil;

        for feature in features as! [CIRectangleFeature] {

            let line: CAShapeLayer = CAShapeLayer();
            line.frame = self.videoDisplayView.bounds;

            // Trace the detected quadrilateral; closePath() draws the final
            // edge back to topLeft, so no explicit closing line is needed.
            let linePath: UIBezierPath = UIBezierPath();
            linePath.moveToPoint(feature.topLeft);
            linePath.addLineToPoint(feature.topRight);
            linePath.addLineToPoint(feature.bottomRight);
            linePath.addLineToPoint(feature.bottomLeft);
            linePath.closePath();

            line.lineWidth = 5.0;
            line.path = linePath.CGPath;
            line.fillColor = UIColor.clearColor().CGColor;
            line.strokeColor = UIColor(netHex: 0x3399CC, alpha: 1.0).CGColor;

            // videoDisplayParentView is the parent of videoDisplayView and
            // they both have the same bounds.
            self.videoDisplayParentView.layer.addSublayer(line);
        }
    }
}
var检测器:CIDetector?;
重写func viewDidLoad(){
super.viewDidLoad();
检测器=self.prepareRectangleDetector();
}
func captureOutput(captureOutput:AVCaptureOutput!,didOutputSampleBuffer sampleBuffer:CMSampleBuffer!,fromConnection connection:AVCaptureConnection!){//重新检查此方法
//需要在地狱里摇动它
让imageBuffer=CMSampleBufferGetImageBuffer(sampleBuffer)
//强制类型更改-通过不透明缓冲区
让opaqueBuffer=Unmanaged.passUnretained(imageBuffer!).toOpaque()
设pixelBuffer=Unmanaged.from不透明(opaqueBuffer).takeUnrepainedValue()
让sourceImage=CIImage(CVPixelBuffer:pixelBuffer,选项:nil)
//对图像进行一些检测
self.performRectangleDetection(sourceImage); // 对图像进行矩形检测
var outputImage=sourceImage
//剪短
var drawFrame=outputImage.extent
设imageAR=drawFrame.width/drawFrame.height
让viewAR=videoDisplayViewBounds.width/videoDisplayViewBounds.height
如果imageAR>viewAR{
drawFrame.origin.x+=(drawFrame.width-drawFrame.height*viewAR)/2.0
drawFrame.size.width=drawFrame.height/viewAR
}否则{
drawFrame.origin.y+=(drawFrame.height-drawFrame.width/viewAR)/2.0
drawFrame.size.height=drawFrame.width/viewAR
}
//videoDisplayView是一个GLKView,用于显示相机馈送
videoDisplayView.bindDrawable()
如果videoDisplayView.context!=EAGLContext.currentContext(){
EAGLContext.setCurrentContext(videoDisplayView.context)
}
//将eagl视图清除为灰色
glClearColor(0.5,0.5,0.5,1.0);
glClear(0x00004000)
//将混合模式设置为“source over”（源覆盖），以便 Core Image 使用该模式
glEnable(0x0BE2);
glBlendFunc(1,0x0303);
drawImage(outputImage,inRect:videoDisplayViewBounds,fromRect:drawFrame);
videoDisplayView.display();
}
func prepareRectangleDetector()->CIDetector{
let选项:[String:AnyObject]=[CIDetectorAccuracy:CIDetectorAccuracyHigh];
返回CIDetector(ofType:CIDetectorTypeRectangle,context:nil,options:options);
}
func performRectangleDetection(图像:CIImage){
let resultImage:CIImage?=nil;
如果let检测器=检测器{
//获取检测结果
让 features = detector.featuresInImage(图像, 选项: [CIDetectorAspectRatio: NSNumber(float: 1.43)]);
如果features.count!=0{//找到特征
功能中的功能为![CIRectangleFeature]{
self.previewImageView.layer.sublayers=nil;
let line:CAShapeLayer=CAShapeLayer();
line.frame=self.videoDisplayView.bounds;
让linePath:UIBezierPath=UIBezierPath();
linePath.moveToPoint(feature.topLeft);
linePath.addLineToPoint(feature.topRight);
linePath.addLineToPoint(feature.bottomRight);
linePath.addLineToPoint(feature.bottomLeft);
linePath.addLineToPoint(feature.topLeft);
linePath.closePath();
线宽=5.0;
line.path=linePath.CGPath;
line.fillColor=UIColor.clearColor().CGColor;
line.strokeColor=UIColor(netHex:0x3399CC,alpha:1.0);
//videoDisplayParentView是videoDisplayView的父级,它们具有相同的边界
self.videoDisplayParentView.layer.addSublayer(行);
}    
}                    
}
}
我使用
CAShapeLayer
UIBezierPath
绘制矩形。这非常非常缓慢：路径要过好一会儿（接近一分钟）之后才会显示出来。

有人能帮我弄清楚为什么速度慢,或者让我知道我在这里做错了什么。任何帮助都将不胜感激


或者,如果有比这更简单的方法,我也想知道。

如果你开始在GLKView中添加一个子层,那么速度会很慢。此处的GLKView每秒刷新多次(正如captureOutput:didOutputSampleBuffer:…方法中一样),每次创建和添加子层的过程都无法跟上


更好的方法是使用CoreImage绘制路径,并在resultImage上进行合成。

有关于您迄今为止尝试的内容的信息吗?很抱歉,我没有确切地了解您需要了解的内容。我已经完成了在一个实时视频提要中检测矩形,我得到了正确的分数。现在我需要用这4个点在我的实时视频提要上画一个矩形。我用UIBezierPath和CAShapeLayer绘制了那个矩形。但是它非常慢;博士好啊这就是我不知道的,它很慢,直到你编辑…啊!很抱歉。我在打字之前误按了post按钮。