
Android CameraX Analyzer glitching preview

Tags: android, kotlin, android-camerax

I'm using the CameraX Analyzer use case to detect circles in the image with OpenCV Hough circles, but even though I believe I'm running this analysis on a separate thread, the camera preview sometimes gets really slow when too many circles are detected.

I realize my detector implementation isn't the most efficient, but shouldn't that processing time show up only in the analysis rate rather than in the preview?

Most of this code comes from crysxd.

This is my main activity:

class MainActivity : AppCompatActivity() {
    private lateinit var overlayTextureView: DetectionOverlayView

    private val camera
        get() = supportFragmentManager.findFragmentById(R.id.cameraFragment) as CameraFragment

    override fun onCreate(savedInstanceState: Bundle?) {
        OpenCVLoader.initDebug()

        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        if (Timber.treeCount() == 0) {
            Timber.plant(Timber.DebugTree())
        }

        overlayTextureView = findViewById(R.id.detectionOverlayView)
        camera.imageAnalyzer = ViewModelProviders.of(this).get(HoughPupilDetector::class.java)
    }
}
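As posted, nothing forwards detection results to the DetectionOverlayView (the listener dispatch in PupilDetector further down is commented out). A hypothetical wiring inside onCreate() could look like the sketch below; the update() method and the assumption that PupilDetectionListener is a (Pupil?) -> Unit alias are illustrative, not from the question.

// Hypothetical wiring (not in the original onCreate()): forward detections to the overlay.
// Assumes PupilDetectionListener is a (Pupil?) -> Unit alias and DetectionOverlayView exposes
// an update(Pupil?) method – neither is shown in the question.
val detector = ViewModelProviders.of(this).get(HoughPupilDetector::class.java)
detector.addListener { pupil ->
    // analyze() runs on the detector's worker thread, so post back to the UI thread
    overlayTextureView.post { overlayTextureView.update(pupil) }
}
camera.imageAnalyzer = detector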
This is my CameraFragment initialization:

open class CameraFragment : Fragment() {
    var cameraRunning = false
        private set
    var imageAnalyzer: ThreadedImageAnalyzer? = null
        set(value) {
            field = value
            if (cameraRunning) {
                startCamera()
            }
        }

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View =
        inflater.inflate(R.layout.fragment_camera, container, false)

    override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
        super.onViewCreated(view, savedInstanceState)

        CameraPermissionHelper().requestCameraPermission(childFragmentManager) {
            if (it) {
                startCamera()
            } else {
                activity?.finish()
            }
        }
    }

    override fun onDestroyView() {
        super.onDestroyView()

        if (cameraRunning) {
            CameraX.unbindAll()
            cameraRunning = false
            Timber.i("Stopping camera")
        }
    }

    private fun startCamera() {
        preview.post {
            try {
                val usesCases = mutableListOf<UseCase>()

                // Make sure that there are no other use cases bound to CameraX
                CameraX.unbindAll()

                // Create configuration object for the viewfinder use case
                val previewConfig = onCreatePreviewConfigBuilder().build()
                usesCases.add(AutoFitPreviewBuilder.build(previewConfig, preview))

                // Set up the image analysis use case (the detector runs on its own worker thread)
                if (imageAnalyzer != null) {
                    val analyzerConfig = onCreateAnalyzerConfigBuilder().build()
                    usesCases.add(ImageAnalysis(analyzerConfig).apply {
                        analyzer = imageAnalyzer
                    })
                }

                // Bind use cases to lifecycle
                CameraX.bindToLifecycle(this, *usesCases.toTypedArray())
                cameraRunning = true
                Timber.i("Started camera with useCases=$usesCases")
            } catch (e: Exception) {
                Timber.e(e)
                AlertDialog.Builder(requireContext())
                    .setMessage(getString(R.string.camera_error))
                    .setPositiveButton(android.R.string.ok) { _, _ ->
                        activity?.finish()
                    }
                    .show() // show() instead of create(), otherwise the error dialog never appears
            }
        }
    }

    @Suppress("MemberVisibilityCanBePrivate")
    protected open fun onCreateAnalyzerConfigBuilder() = ImageAnalysisConfig.Builder().apply {

        // Use a worker thread for image analysis to prevent preview glitches
        setCallbackHandler(imageAnalyzer!!.getHandler())
        // In our analysis, we care more about the latest image than analyzing *every* image
        setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
        setTargetAspectRatio(Rational(1, 1))
        setTargetResolution(Size(preview.width, preview.height))
    }

    @Suppress("MemberVisibilityCanBePrivate")
    protected open fun onCreatePreviewConfigBuilder() = PreviewConfig.Builder().apply {
        setTargetAspectRatio(Rational(1, 1))
        setTargetResolution(Size(preview.width, preview.height))
    }
}
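Both config builders are protected open, so a subclass can tune the analysis configuration without copying startCamera(). A hypothetical example (class name and resolution are illustrative only):

// Hypothetical subclass: request a smaller analysis resolution to keep the Hough transform cheap.
class LowResCameraFragment : CameraFragment() {
    override fun onCreateAnalyzerConfigBuilder() = super.onCreateAnalyzerConfigBuilder().apply {
        setTargetResolution(Size(640, 640))
    }
}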
This is my analyzer interface initialization:

abstract class PupilDetector(listener: PupilDetectionListener? = null) : ViewModel(), ThreadedImageAnalyzer {
    private val listeners = ArrayList<PupilDetectionListener>().apply { listener?.let { add(it) } }
    private val isBusy = AtomicBoolean(false)
    private val handlerThread = HandlerThread("PupilDetector").apply { start() }

    fun addListener(listener: PupilDetectionListener) = listeners.add(listener)

    override fun analyze(image: ImageProxy, rotationDegrees: Int) {
        if (isBusy.compareAndSet(false, true)) {
            Timber.d("Running analysis...")
            val pupil = detect(image, rotationDegrees)
            Timber.d("Analysis done.")
            isBusy.set(false)

            // listeners.forEach { it(pupil) }
        }
    }

    override fun getHandler() = Handler(handlerThread.looper)

    abstract fun detect(image: ImageProxy, rotationDegrees: Int): Pupil?
}
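The ThreadedImageAnalyzer interface itself isn't shown in the question; judging from the members overridden above, it is presumably something along these lines (an assumption, following crysxd's sample):

// Assumed shape of the interface (needs android.os.Handler and androidx.camera.core.ImageAnalysis)
interface ThreadedImageAnalyzer : ImageAnalysis.Analyzer {
    fun getHandler(): Handler
}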
This is my Hough circles analyzer:

class HoughPupilDetector(listener: PupilDetectionListener? = null): PupilDetector(listener) {
    val maxCircles = 5

    override fun detect(image: ImageProxy, rotationDegrees: Int): Pupil? {
        val bitmap = image.toBitmap(rotationDegrees)
        val circles = detectCircles(bitmap)
        return if (circles.isNotEmpty()) {
            Pupil(circles[0].point, circles[0].r)
        } else {
            null
        }
    }

    private fun detectCircles(bitmap: Bitmap): List<Circle> {
        // Generate Mat object
        val img = Mat()
        Utils.bitmapToMat(bitmap, img)

        // Detect circles
        val cannyUpperThreshold = 100.0
        val minRadius = 10
        val maxRadius = 400
        val accumulator = 100.0
        val circles = Mat()
        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2GRAY)
        Imgproc.GaussianBlur(img, img, org.opencv.core.Size(3.0, 3.0), 1.0)
        Imgproc.HoughCircles(img, circles, Imgproc.CV_HOUGH_GRADIENT,
            2.0, 2.0 / 8.0, cannyUpperThreshold, accumulator,
            minRadius, maxRadius)
        Imgproc.cvtColor(img, img, Imgproc.COLOR_GRAY2BGR)

        // Convert Mat to list of circles
        val result = toCircles(circles)

        // Return detection
        return result
    }

    private fun toCircles(circles: Mat): List<Circle> {
        if (circles.cols() > 0) {
            return (0 until circles.cols().coerceAtMost(maxCircles)).map {
                val vCircle = circles.get(0, it)
                val pt = Point(vCircle[0].toInt(), vCircle[1].toInt())
                val radius = Math.round(vCircle[2]).toInt()
                // return circle
                Circle(pt, radius)
            }
        } else {
            return emptyList()
        }
    }
}
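detect() relies on an image.toBitmap(rotationDegrees) extension that isn't shown in the question. A minimal sketch of such a conversion, assuming YUV_420_888 frames with tightly packed planes (a production version should respect rowStride/pixelStride):

// Sketch only: converts a YUV_420_888 ImageProxy to a rotated Bitmap via an NV21/JPEG round trip.
// Requires android.graphics.*, java.io.ByteArrayOutputStream and androidx.camera.core.ImageProxy.
fun ImageProxy.toBitmap(rotationDegrees: Int): Bitmap {
    val yBuffer = planes[0].buffer
    val uBuffer = planes[1].buffer
    val vBuffer = planes[2].buffer
    val ySize = yBuffer.remaining()
    val uSize = uBuffer.remaining()
    val vSize = vBuffer.remaining()

    // NV21 layout: full Y plane followed by interleaved V/U samples
    val nv21 = ByteArray(ySize + uSize + vSize)
    yBuffer.get(nv21, 0, ySize)
    vBuffer.get(nv21, ySize, vSize)
    uBuffer.get(nv21, ySize + vSize, uSize)

    // Compress to JPEG and decode back to a Bitmap (simple, not the fastest path)
    val yuvImage = YuvImage(nv21, ImageFormat.NV21, width, height, null)
    val jpegStream = ByteArrayOutputStream()
    yuvImage.compressToJpeg(Rect(0, 0, width, height), 90, jpegStream)
    val jpegBytes = jpegStream.toByteArray()
    val bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)

    // Apply the rotation reported by the analyzer so OpenCV sees an upright frame
    val matrix = Matrix().apply { postRotate(rotationDegrees.toFloat()) }
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
}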

I updated the CameraX dependencies from alpha01 to alpha05 and the glitches stopped happening.
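For reference, the fix boils down to bumping both CameraX artifacts in the module's build script (Kotlin DSL shown; the coordinates assume the standard androidx.camera artifacts):

// build.gradle.kts (module level) – keep camera-core and camera-camera2 on the same version
dependencies {
    implementation("androidx.camera:camera-core:1.0.0-alpha05")
    implementation("androidx.camera:camera-camera2:1.0.0-alpha05")
}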

The CameraX library is in alpha because its API surface is not final yet. We do not recommend using alpha libraries in production; libraries should strictly avoid depending on alpha libraries in production, since their API surface can change in source- and binary-incompatible ways.

I'm just experimenting with CameraX and wanted to know what I was doing wrong.