Java GraphicOverlay未按预期绘制边界框

Java GraphicOverlay未按预期绘制边界框（标签：java、android、firebase-mlkit、android-camerax）

我正在使用CameraX(来自GitHub的QuickStart CameraX Basic)和firebase ML工具包进行实时人脸检测。除了边界框不是绘制在面上,而是远离面之外,其他一切都可以正常工作。 我正在使用ML工具包quickStart GraphicOverlay.Class和FaceGraphic.Class

graphicsoverlay.java

/**
 * Transparent overlay view on top of the camera preview. Detector results are
 * wrapped in {@link Graphic} objects, added to this view, and rendered in
 * {@link #onDraw(Canvas)} after being scaled from preview coordinates into
 * view coordinates.
 *
 * NOTE(review): the scale factors below map preview-space linearly onto the
 * full view with independent X/Y stretch and no translation. If the preview
 * is letterboxed/center-cropped into the view (AutoFitPreviewBuilder applies
 * a center-crop matrix to the TextureView), or if setCameraInfo() is given
 * dimensions from a different coordinate space than the detector output,
 * drawn graphics will be offset from the real face — likely the cause of the
 * reported bug. Confirm what is passed to setCameraInfo().
 */
public class GraphicOverlay extends View {
private final Object mLock = new Object(); // guards mGraphics and the preview info fields
private int mPreviewWidth;
private float mWidthScaleFactor = 1.5f; // NOTE(review): arbitrary default; overwritten in onDraw() once preview size is known
private int mPreviewHeight;
private float mHeightScaleFactor = 1.5f; // NOTE(review): same arbitrary default as above
private int mFacing = CameraSource.CAMERA_FACING_BACK;
private Set<Graphic> mGraphics = new HashSet<>();

public GraphicOverlay(Context context, AttributeSet attrs) {
    super(context, attrs);
}

/** Removes all graphics and schedules a redraw. */
public void clear() {
    synchronized (mLock) {
        mGraphics.clear();
    }
    postInvalidate();
}

/** Adds a graphic to be rendered on the next draw pass. */
public void add(Graphic graphic) {
    synchronized (mLock) {
        mGraphics.add(graphic);
    }
    postInvalidate();
}

/** Removes a single graphic and schedules a redraw. */
public void remove(Graphic graphic) {
    synchronized (mLock) {
        mGraphics.remove(graphic);
    }
    postInvalidate();
}

/**
 * Sets the source-image dimensions and camera facing used to transform
 * detector coordinates into view coordinates.
 *
 * NOTE(review): for the transform to be correct these must be the
 * dimensions of the image the detector ran on, not the screen size.
 */
public void setCameraInfo(int previewWidth, int previewHeight, int facing) {
    synchronized (mLock) {
        mPreviewWidth = previewWidth;
        mPreviewHeight = previewHeight;
        mFacing = facing;
    }
    postInvalidate();
}

@Override
protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);

    synchronized (mLock) {
        // Recompute the preview->view stretch each pass (view size may change).
        if ((mPreviewWidth != 0) && (mPreviewHeight != 0)) {
            mWidthScaleFactor = (float) getWidth() / (float) mPreviewWidth;
            mHeightScaleFactor = (float)getHeight() / (float) mPreviewHeight;
        }

        for (Graphic graphic : mGraphics) {
            graphic.draw(canvas);
        }
    }
}

/**
 * Base class for an item rendered on the overlay. Subclasses implement
 * {@link #draw(Canvas)} and use the scale/translate helpers to convert
 * detector coordinates into view coordinates.
 */
public static abstract class Graphic {
    private GraphicOverlay mOverlay;

    public Graphic(GraphicOverlay overlay) {
        mOverlay = overlay;
    }

    public abstract void draw(Canvas canvas);

    /** Scales a horizontal distance from preview space to view space. */
    public float scaleX(float horizontal) {
        return horizontal * mOverlay.mWidthScaleFactor;
    }

    /** Scales a vertical distance from preview space to view space. */
    public float scaleY(float vertical) {
        return vertical * mOverlay.mHeightScaleFactor;
    }

    /**
     * Converts a preview-space x coordinate to view space, mirroring it
     * for the front camera so graphics match the mirrored preview.
     */
    public float translateX(float x) {
        if (mOverlay.mFacing == CameraSource.CAMERA_FACING_FRONT) {
            return mOverlay.getWidth() - scaleX(x);
        } else {
            return scaleX(x);
        }
    }

    /** Converts a preview-space y coordinate to view space. */
    public float translateY(float y) {
        return scaleY(y);
    }

    public void postInvalidate() {
        mOverlay.postInvalidate();
    }
}
 }
/**
 * Overlay graphic that renders one detected face: a dot at the face center
 * and a bounding box around it. The face to render is supplied via
 * {@link #updateFace(FirebaseVisionFace, int)} from the analyzer thread.
 */
public class FaceGraphic extends GraphicOverlay.Graphic {
    private static final float FACE_POSITION_RADIUS = 10.0f;
    private static final float ID_TEXT_SIZE = 40.0f;
    private static final float ID_Y_OFFSET = 50.0f;
    private static final float ID_X_OFFSET = -50.0f;
    private static final float BOX_STROKE_WIDTH = 5.0f;
    private static final int[] COLOR_CHOICES = {
            Color.BLUE //, Color.CYAN, Color.GREEN, Color.MAGENTA, Color.RED, Color.WHITE, Color.YELLOW
    };
    // Rotates through COLOR_CHOICES so successive faces get distinct colors.
    private static int currentColorIndex = 0;
    private final Paint facePositionPaint;
    private final Paint idPaint, centerPoint;
    private final Paint boxPaint, screenCenterPaint;
    private final Paint movePaint;
    GraphicOverlay graphicOverlay;
    private int facing;
    // volatile: written by the analyzer thread, read by the UI thread in draw().
    private volatile FirebaseVisionFace firebaseVisionFace;

    public FaceGraphic(GraphicOverlay overlay) {
        super(overlay);
        this.graphicOverlay = overlay;

        currentColorIndex = (currentColorIndex + 1) % COLOR_CHOICES.length;
        final int selectedColor = COLOR_CHOICES[currentColorIndex];

        screenCenterPaint = new Paint();
        screenCenterPaint.setColor(Color.GREEN);

        facePositionPaint = new Paint();
        facePositionPaint.setColor(selectedColor);

        idPaint = new Paint();
        idPaint.setColor(Color.WHITE);
        idPaint.setTextSize(ID_TEXT_SIZE);

        boxPaint = new Paint();
        boxPaint.setColor(Color.WHITE);
        boxPaint.setStyle(Paint.Style.STROKE);
        boxPaint.setStrokeWidth(BOX_STROKE_WIDTH);

        centerPoint = new Paint();
        centerPoint.setStrokeWidth(5f);
        centerPoint.setColor(Color.RED);
        centerPoint.setStyle(Paint.Style.STROKE);

        movePaint = new Paint();
        movePaint.setColor(Color.RED);
        movePaint.setTextSize(38);
    }

    /**
     * Publishes a new detection result and requests a redraw.
     *
     * @param face   the detected face, or null to render nothing
     * @param facing camera facing used for horizontal mirroring
     */
    public void updateFace(FirebaseVisionFace face, int facing) {
        firebaseVisionFace = face;
        this.facing = facing;
        postInvalidate();
    }

    @Override
    public void draw(Canvas canvas) {
        // Reference marker at the screen center (debugging aid).
        canvas.drawCircle(canvas.getWidth() / 2, canvas.getHeight() / 2, 10, screenCenterPaint);
        FirebaseVisionFace face = firebaseVisionFace;
        if (face == null) {
            return;
        }
        // Draws a circle at the position of the detected face.
        float x = translateX(face.getBoundingBox().centerX());
        float y = translateY(face.getBoundingBox().centerY());
        canvas.drawCircle(x, y, FACE_POSITION_RADIUS, facePositionPaint);

        Log.d("myFaceBounds", String.valueOf(face.getBoundingBox()));

        // Draws a bounding box around the face.
        float xOffset = scaleX(face.getBoundingBox().width() / 2.0f);
        float yOffset = scaleY(face.getBoundingBox().height() / 2.0f);
        // BUG FIX: the original code padded every edge with a hard-coded 100px,
        // which both inflated the box and displaced it from the face. The box
        // must be exactly the scaled half-extents around the translated center.
        float left = x - xOffset;
        float top = y - yOffset;
        float right = x + xOffset;
        float bottom = y + yOffset;

        canvas.drawRect(left, top, right, bottom, boxPaint);
    }
 }
CameraFragment.kt

/**
 * Fragment hosting the CameraX preview, still-capture, and image-analysis
 * use cases. The analyzer runs ML Kit face detection on camera frames and
 * renders the results as [FaceGraphic]s on [graphicOverlay].
 */
class CameraFragment : Fragment() {
private lateinit var container: FrameLayout
private lateinit var viewFinder: TextureView
private lateinit var outputDirectory: File
private lateinit var broadcastManager: LocalBroadcastManager

private var displayId = -1
private var lensFacing = CameraX.LensFacing.BACK
private var preview: Preview? = null
private var imageCapture: ImageCapture? = null
private var imageAnalyzer: ImageAnalysis? = null
private lateinit var graphicOverlay: GraphicOverlay

// Simulates a shutter-button click when the hardware volume-down key fires.
private val volumeDownReceiver = object : BroadcastReceiver() {
    override fun onReceive(context: Context, intent: Intent) {
        val keyCode = intent.getIntExtra(KEY_EVENT_EXTRA, KeyEvent.KEYCODE_UNKNOWN)
        when (keyCode) {
            // When the volume down button is pressed, simulate a shutter button click
            KeyEvent.KEYCODE_VOLUME_DOWN -> {
                val shutter = container
                        .findViewById<ImageButton>(R.id.camera_capture_button)
                shutter.simulateClick()
            }
        }
    }
}
private val analyzerThread = HandlerThread("LuminosityAnalysis").apply { start() }
private lateinit var displayManager: DisplayManager
// Keeps the use cases' target rotation in sync with display rotations that
// do not trigger a configuration change (e.g. 180-degree flips).
private val displayListener = object : DisplayManager.DisplayListener {
    override fun onDisplayAdded(displayId: Int) = Unit
    override fun onDisplayRemoved(displayId: Int) = Unit
    override fun onDisplayChanged(displayId: Int) = view?.let { view ->
        if (displayId == this@CameraFragment.displayId) {
            Log.d(TAG, "Rotation changed: ${view.display.rotation}")
            preview?.setTargetRotation(view.display.rotation)
            imageCapture?.setTargetRotation(view.display.rotation)
            imageAnalyzer?.setTargetRotation(view.display.rotation)
        }
    } ?: Unit
}

override fun onCreate(savedInstanceState: Bundle?) {
    super.onCreate(savedInstanceState)
    retainInstance = true
}

override fun onResume() {
    super.onResume()
    // Permissions may have been revoked while the app was in the background.
    if (!PermissionsFragment.hasPermissions(requireContext())) {
        Navigation.findNavController(requireActivity(), R.id.fragment_container).navigate(
                CameraFragmentDirections.actionCameraToPermissions())
    }
}

override fun onDestroyView() {
    super.onDestroyView()
    broadcastManager.unregisterReceiver(volumeDownReceiver)
    displayManager.unregisterDisplayListener(displayListener)
}

override fun onCreateView(
        inflater: LayoutInflater,
        container: ViewGroup?,
        savedInstanceState: Bundle?): View? =
        inflater.inflate(R.layout.fragment_camera, container, false)

/** Loads [file] into the circular gallery thumbnail button. */
private fun setGalleryThumbnail(file: File) {
    val thumbnail = container.findViewById<ImageButton>(R.id.photo_view_button)
    thumbnail.post {
        thumbnail.setPadding(resources.getDimension(R.dimen.stroke_small).toInt())
        Glide.with(thumbnail)
                .load(file)
                .apply(RequestOptions.circleCropTransform())
                .into(thumbnail)
    }
}

// Updates the gallery thumbnail and notifies the media scanner after capture.
private val imageSavedListener = object : ImageCapture.OnImageSavedListener {
    override fun onError(
            error: ImageCapture.UseCaseError, message: String, exc: Throwable?) {
        Log.e(TAG, "Photo capture failed: $message")
        exc?.printStackTrace()
    }

    override fun onImageSaved(photoFile: File) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            setGalleryThumbnail(photoFile)
        }
        // Pre-N devices need the legacy broadcast for other apps to see the photo.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
            requireActivity().sendBroadcast(
                    Intent(Camera.ACTION_NEW_PICTURE, Uri.fromFile(photoFile)))
        }
        val mimeType = MimeTypeMap.getSingleton()
                .getMimeTypeFromExtension(photoFile.extension)
        MediaScannerConnection.scanFile(
                context, arrayOf(photoFile.absolutePath), arrayOf(mimeType), null)
    }
}

/*---------------------------------------------------------------------------------------------------------------------------------------------*/
@SuppressLint("MissingPermission")
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
    super.onViewCreated(view, savedInstanceState)
    container = view as FrameLayout
    viewFinder = container.findViewById(R.id.view_finder)
    graphicOverlay = container.findViewById(R.id.graphicOverlay)
    broadcastManager = LocalBroadcastManager.getInstance(view.context)
    val filter = IntentFilter().apply { addAction(KEY_EVENT_ACTION) }
    broadcastManager.registerReceiver(volumeDownReceiver, filter)
    displayManager = viewFinder.context
            .getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
    displayManager.registerDisplayListener(displayListener, null)
    outputDirectory = MainActivity.getOutputDirectory(requireContext())
    // Wait until the view is laid out so the display is available.
    viewFinder.post {
        displayId = viewFinder.display.displayId
        updateCameraUi()
        bindCameraUseCases()
        // Locate the most recent photo (if any) for the gallery thumbnail, off the UI thread.
        lifecycleScope.launch(Dispatchers.IO) {
            outputDirectory.listFiles { file ->
                EXTENSION_WHITELIST.contains(file.extension.toUpperCase())
            }.sorted().reversed().firstOrNull()?.let { setGalleryThumbnail(it) }
        }
    }
}

/** Declares and binds the preview, capture, and analysis use cases. */
private fun bindCameraUseCases() {
    val metrics = DisplayMetrics().also { viewFinder.display.getRealMetrics(it) }
    val screenAspectRatio = Rational(metrics.widthPixels, metrics.heightPixels)
    Log.d(TAG, "Screen metrics: ${metrics.widthPixels} x ${metrics.heightPixels}")
    val viewFinderConfig = PreviewConfig.Builder().apply {
        setLensFacing(lensFacing)
        setTargetAspectRatio(screenAspectRatio)
        setTargetRotation(viewFinder.display.rotation)
    }.build()
    preview = AutoFitPreviewBuilder.build(viewFinderConfig, viewFinder)

    // NOTE(review): screen metrics are passed here, but FaceGraphic receives
    // coordinates in the analysis image's coordinate space. If those spaces
    // differ, drawn boxes will be offset from the face — feed the overlay the
    // analysis image dimensions instead. TODO confirm against GraphicOverlay.
    graphicOverlay.setCameraInfo(metrics.widthPixels, metrics.heightPixels, getLensFacing())

    val imageCaptureConfig = ImageCaptureConfig.Builder().apply {
        setLensFacing(lensFacing)
        setCaptureMode(CaptureMode.MIN_LATENCY)
        setTargetAspectRatio(screenAspectRatio)
        setTargetRotation(viewFinder.display.rotation)
    }.build()

    imageCapture = ImageCapture(imageCaptureConfig)

    // Setup image analysis pipeline that runs face detection in real time
    val analyzerConfig = ImageAnalysisConfig.Builder().apply {
        setLensFacing(lensFacing)
        setCallbackHandler(Handler(analyzerThread.looper))
        // Drop stale frames instead of queueing them.
        setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
        setTargetRotation(viewFinder.display.rotation)
    }.build()

    imageAnalyzer = ImageAnalysis(analyzerConfig).apply {
        analyzer = LuminosityAnalyzer(graphicOverlay)
    }
    CameraX.bindToLifecycle(
            viewLifecycleOwner, preview, imageCapture, imageAnalyzer)
}

/** (Re-)inflates the camera UI controls and wires up their click handlers. */
@SuppressLint("RestrictedApi")
private fun updateCameraUi() {
    container.findViewById<ConstraintLayout>(R.id.camera_ui_container)?.let {
        container.removeView(it)
    }
    val controls = View.inflate(requireContext(), R.layout.camera_ui_container, container)
    controls.findViewById<ImageButton>(R.id.camera_capture_button).setOnClickListener {
        imageCapture?.let { imageCapture ->
            val photoFile = createFile(outputDirectory, FILENAME, PHOTO_EXTENSION)
            val metadata = Metadata().apply {
                // Mirror front-camera captures to match the preview.
                isReversedHorizontal = lensFacing == CameraX.LensFacing.FRONT
            }
            imageCapture.takePicture(photoFile, imageSavedListener, metadata)
            // Brief white-flash animation as shutter feedback.
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                container.postDelayed({
                    container.foreground = ColorDrawable(Color.WHITE)
                    container.postDelayed(
                            { container.foreground = null }, ANIMATION_FAST_MILLIS)
                }, ANIMATION_SLOW_MILLIS)
            }
        }
    }
    controls.findViewById<ImageButton>(R.id.camera_switch_button).setOnClickListener {
        lensFacing = if (CameraX.LensFacing.FRONT == lensFacing) {
            CameraX.LensFacing.BACK
        } else {
            CameraX.LensFacing.FRONT
        }
        try {
            // Only rebind if a camera with the requested facing exists.
            CameraX.getCameraWithLensFacing(lensFacing)
            CameraX.unbindAll()
            bindCameraUseCases()
        } catch (exc: Exception) {
            // Requested lens facing not available on this device; keep the
            // current binding. FIX: was silently swallowed — log it.
            Log.e(TAG, "Failed to switch to lens facing $lensFacing", exc)
        }
    }
    controls.findViewById<ImageButton>(R.id.photo_view_button).setOnClickListener {
        Navigation.findNavController(requireActivity(), R.id.fragment_container).navigate(
                CameraFragmentDirections.actionCameraToGallery(outputDirectory.absolutePath))
    }
}

/**
 * ImageAnalysis analyzer that — despite its legacy name — runs ML Kit face
 * detection on camera frames, throttled to one run per ~300 ms, and publishes
 * the results as [FaceGraphic]s on [graphicOverlay].
 */
private class LuminosityAnalyzer(private val graphicOverlay: GraphicOverlay) : ImageAnalysis.Analyzer {

    private var lastAnalyzedTimestamp = 0L

    // FIX: the detector options never change and building a detector per
    // frame is wasteful — create both once and reuse them for every frame.
    private val options = FirebaseVisionFaceDetectorOptions.Builder()
            .setPerformanceMode(FirebaseVisionFaceDetectorOptions.FAST)
            .setLandmarkMode(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS)
            .enableTracking()
            .build()
    private val detector by lazy {
        FirebaseVision.getInstance().getVisionFaceDetector(options)
    }

    /** Maps CameraX rotation degrees onto the ML Kit rotation constants. */
    private fun getRotation(rotationCompensation: Int): Int {
        val result: Int
        when (rotationCompensation) {
            0 -> result = FirebaseVisionImageMetadata.ROTATION_0
            90 -> result = FirebaseVisionImageMetadata.ROTATION_90
            180 -> result = FirebaseVisionImageMetadata.ROTATION_180
            270 -> result = FirebaseVisionImageMetadata.ROTATION_270
            else -> {
                result = FirebaseVisionImageMetadata.ROTATION_0
            }
        }
        return result
    }

    override fun analyze(image: ImageProxy, rotationDegrees: Int) {

        val currentTimestamp = System.currentTimeMillis()

        // Throttle: run detection at most once every 300 ms.
        if (currentTimestamp - lastAnalyzedTimestamp >= 300L) {
            lastAnalyzedTimestamp = currentTimestamp

            try {
                val y = image.planes[0]
                val u = image.planes[1]
                val v = image.planes[2]
                //Then we can then get the number of pixels in each plane
                val Yb = y.buffer.remaining()
                val Ub = u.buffer.remaining()
                val Vb = v.buffer.remaining()
                //and convert them into a single YUV formatted ByteArray
                val data = ByteArray(Yb + Ub + Vb)

                y.buffer.get(data, 0, Yb)
                u.buffer.get(data, Yb, Ub)
                v.buffer.get(data, Yb + Ub, Vb)

                val metadata = FirebaseVisionImageMetadata.Builder()
                        .setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_YV12)
                        .setHeight(image.height)
                        .setWidth(image.width)
                        .setRotation(getRotation(rotationDegrees))
                        .build()

                val labelImage = FirebaseVisionImage.fromByteArray(data, metadata)

                detector.detectInImage(labelImage)
                        .addOnSuccessListener { faces ->
                            graphicOverlay.clear()
                            for (face in faces) {
                                val faceGraphic = FaceGraphic(graphicOverlay)
                                graphicOverlay.add(faceGraphic)
                                faceGraphic.updateFace(face, 1)
                            }
                        }
                        .addOnFailureListener { e ->
                            // FIX: failures were silently dropped — log them.
                            Log.e(TAG, "Face detection failed", e)
                        }
            } catch (e: IllegalStateException) {
                // The frame's buffers can be closed underneath us; skip it.
                // FIX: was silently swallowed — log at warn level.
                Log.w(TAG, "Skipping frame: ${e.message}")
            }
        }
    }
}

companion object {
    private const val TAG = "CameraXBasic"
    private const val FILENAME = "yyyy-MM-dd-HH-mm-ss-SSS"
    private const val PHOTO_EXTENSION = ".jpg"
    /** Builds a timestamped photo file inside [baseFolder]. */
    private fun createFile(baseFolder: File, format: String, extension: String) =
            File(baseFolder, SimpleDateFormat(format, Locale.US)
                    .format(System.currentTimeMillis()) + extension)
}

/** Maps the CameraX lens-facing enum onto GraphicOverlay's int convention (0 = back, 1 = front). */
private fun getLensFacing(): Int {
    return if (lensFacing == CameraX.LensFacing.BACK) {
        0
    } else
        1
}
}
/**
 * Builder for a [Preview] use case that automatically applies a center-crop
 * transform to the supplied [TextureView] so the camera buffer fills the view
 * at the correct aspect ratio and rotation. Use [AutoFitPreviewBuilder.build].
 *
 * The view finder is held via a [WeakReference] so this adapter never keeps a
 * detached view (or its Fragment) alive.
 */
class AutoFitPreviewBuilder private constructor(
    config: PreviewConfig, viewFinderRef: WeakReference<TextureView>) {

/** Public instance of preview use-case which can be used by consumers of this adapter */
val useCase: Preview

/** Internal variable used to keep track of the use case's output rotation */
private var bufferRotation: Int = 0

/** Internal variable used to keep track of the view's rotation */
private var viewFinderRotation: Int? = null

/** Internal variable used to keep track of the use-case's output dimension */
private var bufferDimens: Size = Size(0, 0)

/** Internal variable used to keep track of the view's dimension */
private var viewFinderDimens: Size = Size(0, 0)

/** Internal variable used to keep track of the view's display */
private var viewFinderDisplay: Int = -1

/** Internal reference of the [DisplayManager] */
private lateinit var displayManager: DisplayManager

/**
 * We need a display listener for orientation changes that do not trigger a configuration
 * change, for example if we choose to override config change in manifest or for 180-degree
 * orientation changes.
 */
private val displayListener = object : DisplayManager.DisplayListener {
    override fun onDisplayAdded(displayId: Int) = Unit
    override fun onDisplayRemoved(displayId: Int) = Unit
    override fun onDisplayChanged(displayId: Int) {
        val viewFinder = viewFinderRef.get() ?: return
        if (displayId == viewFinderDisplay) {
            val display = displayManager.getDisplay(displayId)
            val rotation = getDisplaySurfaceRotation(display)
            updateTransform(viewFinder, rotation, bufferDimens, viewFinderDimens)
        }
    }
}

init {
    // Make sure that the view finder reference is valid
    val viewFinder = viewFinderRef.get() ?:
        throw IllegalArgumentException("Invalid reference to view finder used")

    // Initialize the display and rotation from texture view information
    viewFinderDisplay = viewFinder.display.displayId
    viewFinderRotation = getDisplaySurfaceRotation(viewFinder.display) ?: 0

    // Initialize public use-case with the given config
    useCase = Preview(config)

    // Every time the view finder is updated, recompute layout
    useCase.onPreviewOutputUpdateListener = Preview.OnPreviewOutputUpdateListener {
        val viewFinder =
                viewFinderRef.get() ?: return@OnPreviewOutputUpdateListener
        Log.d(TAG, "Preview output changed. " +
                "Size: ${it.textureSize}. Rotation: ${it.rotationDegrees}")

        // To update the SurfaceTexture, we have to remove it and re-add it
        val parent = viewFinder.parent as ViewGroup
        parent.removeView(viewFinder)
        parent.addView(viewFinder, 0)

        // Update internal texture
        viewFinder.surfaceTexture = it.surfaceTexture

        // Apply relevant transformations
        bufferRotation = it.rotationDegrees
        val rotation = getDisplaySurfaceRotation(viewFinder.display)
        updateTransform(viewFinder, rotation, it.textureSize, viewFinderDimens)
    }

    // Every time the provided texture view changes, recompute layout
    viewFinder.addOnLayoutChangeListener { view, left, top, right, bottom, _, _, _, _ ->
        val viewFinder = view as TextureView
        val newViewFinderDimens = Size(right - left, bottom - top)
        Log.d(TAG, "View finder layout changed. Size: $newViewFinderDimens")
        val rotation = getDisplaySurfaceRotation(viewFinder.display)
        updateTransform(viewFinder, rotation, bufferDimens, newViewFinderDimens)
    }

    // Every time the orientation of device changes, recompute layout
    // NOTE: This is unnecessary if we listen to display orientation changes in the camera
    //  fragment and call [Preview.setTargetRotation()] (like we do in this sample), which will
    //  trigger [Preview.OnPreviewOutputUpdateListener] with a new
    //  [PreviewOutput.rotationDegrees]. CameraX Preview use case will not rotate the frames for
    //  us, it will just tell us about the buffer rotation with respect to sensor orientation.
    //  In this sample, we ignore the buffer rotation and instead look at the view finder's
    //  rotation every time [updateTransform] is called, which gets triggered by
    //  [CameraFragment] display listener -- but the approach taken in this sample is not the
    //  only valid one.
    displayManager = viewFinder.context
            .getSystemService(Context.DISPLAY_SERVICE) as DisplayManager
    displayManager.registerDisplayListener(displayListener, null)

    // Remove the display listeners when the view is detached to avoid holding a reference to
    //  it outside of the Fragment that owns the view.
    // NOTE: Even though using a weak reference should take care of this, we still try to avoid
    //  unnecessary calls to the listener this way.
    viewFinder.addOnAttachStateChangeListener(object : View.OnAttachStateChangeListener {
        override fun onViewAttachedToWindow(view: View?) =
                displayManager.registerDisplayListener(displayListener, null)
        override fun onViewDetachedFromWindow(view: View?) =
                displayManager.unregisterDisplayListener(displayListener)
    })
}

/** Helper function that fits a camera preview into the given [TextureView] */
private fun updateTransform(textureView: TextureView?, rotation: Int?, newBufferDimens: Size,
                            newViewFinderDimens: Size) {
    // This should not happen anyway, but now the linter knows
    val textureView = textureView ?: return

    if (rotation == viewFinderRotation &&
            Objects.equals(newBufferDimens, bufferDimens) &&
            Objects.equals(newViewFinderDimens, viewFinderDimens)) {
        // Nothing has changed, no need to transform output again
        return
    }

    if (rotation == null) {
        // Invalid rotation - wait for valid inputs before setting matrix
        return
    } else {
        // Update internal field with new inputs
        viewFinderRotation = rotation
    }

    if (newBufferDimens.width == 0 || newBufferDimens.height == 0) {
        // Invalid buffer dimens - wait for valid inputs before setting matrix
        return
    } else {
        // Update internal field with new inputs
        bufferDimens = newBufferDimens
    }

    if (newViewFinderDimens.width == 0 || newViewFinderDimens.height == 0) {
        // Invalid view finder dimens - wait for valid inputs before setting matrix
        return
    } else {
        // Update internal field with new inputs
        viewFinderDimens = newViewFinderDimens
    }

    val matrix = Matrix()
    Log.d(TAG, "Applying output transformation.\n" +
            "View finder size: $viewFinderDimens.\n" +
            "Preview output size: $bufferDimens\n" +
            "View finder rotation: $viewFinderRotation\n" +
            "Preview output rotation: $bufferRotation")

    // Compute the center of the view finder
    val centerX = viewFinderDimens.width / 2f
    val centerY = viewFinderDimens.height / 2f

    // Correct preview output to account for display rotation
    matrix.postRotate(-viewFinderRotation!!.toFloat(), centerX, centerY)

    // Buffers are rotated relative to the device's 'natural' orientation: swap width and height
    val bufferRatio = bufferDimens.height / bufferDimens.width.toFloat()

    val scaledWidth: Int
    val scaledHeight: Int
    // Match longest sides together -- i.e. apply center-crop transformation
    // NOTE: the width/height "swap" below is intentional: the buffer is rotated
    // 90 degrees relative to the view, so the view's long side is matched
    // against the buffer's long side via bufferRatio.
    if (viewFinderDimens.width > viewFinderDimens.height) {
        scaledHeight = viewFinderDimens.width
        scaledWidth = Math.round(viewFinderDimens.width * bufferRatio)
    } else {
        scaledHeight = viewFinderDimens.height
        scaledWidth = Math.round(viewFinderDimens.height * bufferRatio)
    }

    // Compute the relative scale value
    val xScale = scaledWidth / viewFinderDimens.width.toFloat()
    val yScale = scaledHeight / viewFinderDimens.height.toFloat()

    // Scale input buffers to fill the view finder
    matrix.preScale(xScale, yScale, centerX, centerY)

    // Finally, apply transformations to our TextureView
    textureView.setTransform(matrix)
}

companion object {
    private val TAG = AutoFitPreviewBuilder::class.java.simpleName

    /** Helper function that gets the rotation of a [Display] in degrees */
    fun getDisplaySurfaceRotation(display: Display?) = when(display?.rotation) {
        Surface.ROTATION_0 -> 0
        Surface.ROTATION_90 -> 90
        Surface.ROTATION_180 -> 180
        Surface.ROTATION_270 -> 270
        else -> null
    }

    /**
     * Main entrypoint for users of this class: instantiates the adapter and returns an instance
     * of [Preview] which automatically adjusts in size and rotation to compensate for
     * config changes.
     */
    fun build(config: PreviewConfig, viewFinder: TextureView) =
            AutoFitPreviewBuilder(config, WeakReference(viewFinder)).useCase
}
 }
类CameraFragment:Fragment(){
私有lateinit变量容器:FrameLayout
私有lateinit var取景器:TextureView
私有lateinit var输出目录:文件
私有lateinit var broadcastManager:LocalBroadcastManager
私有变量displayId=-1
私有变量lensFacing=CameraX.lensFacing.BACK
私有变量预览:预览?=null
私有变量imageCapture:imageCapture?=null
专用var imageAnalyzer:ImageAnalysis?=null
私有lateinit var graphicOverlay:graphicOverlay
private val volumeDownReceiver=对象:BroadcastReceiver(){
覆盖接收(上下文:上下文,意图:意图){
val keyCode=intent.getIntExtra(KEY\u EVENT\u EXTRA,KeyEvent.keyCode\u未知)
何时(键码){
//按下音量降低按钮时,模拟快门按钮的单击
KeyEvent.KEYCODE\u音量\u下降->{
val快门=容器
.findViewById(R.id.摄像头捕捉按钮)
shutter.simulateClick()命令
}
}
}
}
私有val analyzerThread=HandlerThread(“发光分析”)。应用{start()}
私有lateinit变量displayManager:displayManager
私有val displayListener=对象:DisplayManager.displayListener{
覆盖已添加的显示(显示ID:Int)=单位
覆盖显示已删除(显示ID:Int)=单位
覆盖显示更改(displayId:Int)=视图?.let{view->
如果(displayId==this@CameraFragment.displayId) {
d(标记“旋转已更改:${view.display.Rotation}”)
预览?.setTargetRotation(查看、显示、旋转)
imageCapture?.setTargetRotation(查看、显示、旋转)
图像分析仪?设置目标(视图、显示、旋转)
}
}?:单位
}
重写创建时的乐趣(savedInstanceState:Bundle?){
super.onCreate(savedInstanceState)
保持值=真
}
重写onResume(){
super.onResume()
如果(!PermissionsFragment.hasPermissions(requireContext())){
findNavController(requireActivity(),R.id.fragment_容器)。导航(
CameraFragmentDirections.actionCameraToPermissions())
}
}
重写onDestroyView(){
super.onDestroyView()
broadcastManager.UnregistereReceiver(volumeDownReceiver)
displayManager.unregisterDisplayListener(displayListener)
}
覆盖创建视图(
充气机,
容器:视图组?,
savedInstanceState:捆绑?:查看=
充气机。充气(右布局图。摄像机、容器、假)
private fun setGalleryThumbnail(文件:file){
val缩略图=container.findViewById(R.id.photo\u view\u按钮)
邮件{
thumbnail.setPadding(resources.getdimen(R.dimen.stroke_small).toInt())
使用(缩略图)滑动
.load(文件)
.apply(RequestOptions.circleCropTransform())
.插入(缩略图)
}
}
private val imageSavedListener=对象:ImageCapture.OnImageSavedListener{
忽略错误(
错误:ImageCapture.UseCaseError,消息:字符串,exc:Throwable?){
Log.e(标记“照片捕获失败:$message”)
exc?.printStackTrace()
}
覆盖已保存的图像(照片文件:文件){
if(Build.VERSION.SDK\u INT>=Build.VERSION\u code.M){
setGalleryThumbnail(照片文件)
}
if(Build.VERSION.SDK_INT
扩展名\白名单.contains(文件扩展名.toUpperCase())
}.sorted().reversed().firstOrNull()?.let{setGalleryThumbnail(it)}
}
}
}
私人娱乐杂志{
val metrics=DisplayMetrics()。也是{viewFinder.display.getRealMetrics(it)}
val screenspectratio=Rational(metrics.widthPixels,metrics.heightPixels)
Log.d(标记,“屏幕度量:${metrics.widthPixels}x${metrics.heightPixels}”)
val viewFinderConfig=PreviewConfig.Builder().apply{
设置透镜面(透镜面)
SetTargetSpectratio(屏幕显示)
塞塔