Android: 3D object scales while rotating around the Y axis

Tags: android, opengl-es-2.0, gesture

I am new to OpenGL and ARCore, which I am using as the base for an app I'm building. I'm on OpenGL ES 2.0. I was able to implement pinch-to-zoom (two fingers) using android.view.ScaleGestureDetector.SimpleOnScaleGestureListener. Using a rotation gesture detector class, I can get the rotation in degrees, and it works well with my 3D object.

When I rotate the 3D object, the object also scales. I want to stop the scaling while the user is rotating. How can I achieve this? Alternatively, how can I pass the scale and the rotation through separate methods so that each updates its own matrix? I don't want to use any 3rd-party library for this.
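To make it clearer what I mean by "updating their respective matrices", the sketch below is the kind of separation I'm after. This is not my actual ObjectRenderer; setScale/setRotationY are made-up names, and it simply composes anchor * rotation(Y) * scale using android.opengl.Matrix.

import android.opengl.Matrix;

// Hypothetical renderer-side separation (illustration only, not my real code).
public class SeparatedModelMatrix {
    private final float[] rotationMatrix = new float[16];
    private final float[] scaleMatrix = new float[16];
    private final float[] modelMatrix = new float[16];
    private final float[] temp = new float[16];

    public SeparatedModelMatrix() {
        Matrix.setIdentityM(rotationMatrix, 0);
        Matrix.setIdentityM(scaleMatrix, 0);
    }

    // Called only from the rotation gesture listener.
    public void setRotationY(float degrees) {
        Matrix.setRotateM(rotationMatrix, 0, degrees, 0f, 1f, 0f);
    }

    // Called only from the scale gesture listener.
    public void setScale(float scale) {
        Matrix.setIdentityM(scaleMatrix, 0);
        Matrix.scaleM(scaleMatrix, 0, scale, scale, scale);
    }

    // anchorMatrix comes from anchor.getPose().toMatrix(...) as in onDrawFrame below.
    public void updateModelMatrix(float[] anchorMatrix) {
        Matrix.multiplyMM(temp, 0, rotationMatrix, 0, scaleMatrix, 0);   // rotation * scale
        Matrix.multiplyMM(modelMatrix, 0, anchorMatrix, 0, temp, 0);     // anchor * rotation * scale
    }
}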

Please help me with this. Below is my code; please point out where I'm going wrong.

ScaleGesture

private class CustomScaleGesture extends ScaleGestureDetector.SimpleOnScaleGestureListener {
        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            DebugHelper.log("detector.getScaleFactor(): " + detector.getScaleFactor() + " scaleFactor = " + scaleFactor);
            scaleFactor *= detector.getScaleFactor();
            DebugHelper.log("final scaleFactor: " + scaleFactor);
            return true;
        }
    }
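Is something like the change below the right direction for keeping a two-finger rotate from also changing scaleFactor? This is only a sketch of the idea; the 0.02 threshold is an arbitrary number I made up, not something from my working code.

        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            float delta = detector.getScaleFactor();
            // Ignore very small per-event changes so a rotation gesture
            // does not also drift scaleFactor.
            if (Math.abs(delta - 1.0f) < 0.02f) {
                return true; // consume the event, keep scaleFactor unchanged
            }
            scaleFactor *= delta;
            return true;
        }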
RotateGesture

private class RotateListener extends RotateGestureDetector.SimpleOnRotateGestureListener {
        @Override
        public boolean onRotate(RotateGestureDetector detector) {
            DebugHelper.log("RotateListener called..");
            mRotationDegrees -= detector.getRotationDegreesDelta();
            DebugHelper.log("RotateListener: " + mRotationDegrees);
            return true;
        }
    }
MainActivity

public class MyARActivity extends BaseActivity<MyActivityArBinding> implements GLSurfaceView.Renderer {


    //AR Variables
    private int mWidth;
    private int mHeight;
    private boolean capturePicture = false;
    private boolean installRequested;
    private boolean moving;
    float[] projmtx = new float[16];
    float[] viewmtx = new float[16];
    private Session session;

    private Snackbar messageSnackbar;
    private DisplayRotationHelper displayRotationHelper;
    private final BackgroundRenderer backgroundRenderer = new BackgroundRenderer();
    private ObjectRenderer virtualObject;
    private ObjectRenderer virtualObjectShadow;
    private final PlaneRenderer planeRenderer = new PlaneRenderer();
    private PointCloudRenderer pointCloud = new PointCloudRenderer();

    // Temporary matrix allocated here to reduce number of allocations for each frame.
    private float[] anchorMatrix = new float[16];
    // Tap handling and UI.
    private ArrayBlockingQueue<MotionEvent> queuedSingleTaps = new ArrayBlockingQueue<>(16);
    private ArrayList<Anchor> anchors = new ArrayList<>();

    //load and manipulate obj
    private SQLiteHelper sqlHelper;
    private boolean isObjectChanged = false;
    private String objectPath;
    private List<CharacterModel> characterModelList = new ArrayList<>();
    private boolean isFirstTimeLoad = true;
    //Gestures
    private float mRotationDegrees = 0.f;
    private RotateGestureDetector mRotateDetector;
    private float scaleFactor = 1.0f;
    private ScaleGestureDetector scaleDetector;
    private GestureDetector gestureDetector;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHeaderVisible(false);
        doDefaults();
    }

    private void doDefaults() {
        binding.setPresenter(this);
        sqlHelper = SQLiteHelper.getInstance(this);
        load3DCharacters();
        initAR();
    }

    @SuppressLint("ClickableViewAccessibility")
    private void initAR() {
        displayRotationHelper = new DisplayRotationHelper(this);
        scaleDetector = new ScaleGestureDetector(this, new CustomScaleGesture());
        mRotateDetector = new RotateGestureDetector(getApplicationContext(), new RotateListener());

        // Set up tap listener.
        gestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            @Override
            public boolean onSingleTapUp(MotionEvent e) {
                if (anchors.size() <= 0) {
                    onSingleTap(e);
                }
                return true;
            }

            @Override
            public boolean onDown(MotionEvent e) {
                return true;
            }
        });

        binding.surfaceView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                DebugHelper.log("binding.surfaceView.setOnTouchListener called..");
                mRotateDetector.onTouchEvent(event);
                scaleDetector.onTouchEvent(event);
                switch (event.getAction()) {
                    case MotionEvent.ACTION_DOWN:
                        moving = true;
                        DebugHelper.log("ACTION_DOWN");
                        break;

                    case MotionEvent.ACTION_UP:
                        DebugHelper.log("ACTION_UP");
                        moving = false;
                        break;
                    case MotionEvent.ACTION_MOVE:
                        DebugHelper.log("ACTION_MOVE");
                        if (anchors.size() > 0) {
                            onSecondTouch(event);
                        }
                        break;
                }
                return gestureDetector.onTouchEvent(event);

            }
        });

        // Set up renderer.
        binding.surfaceView.setPreserveEGLContextOnPause(true);
        binding.surfaceView.setEGLContextClientVersion(2);
        binding.surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0); // Alpha used for plane blending.
        binding.surfaceView.setRenderer(this);
        binding.surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
        installRequested = false;
    }
    private void onSecondTouch(MotionEvent e) {
        Log.e("Second Touch", "Executed");
        if (e.getPointerCount() > 1) {
            scaleDetector.onTouchEvent(e);
        } else {
            queuedSingleTaps.offer(e);
        }
    }
    private void onSingleTap(MotionEvent e) {
        // Queue tap if there is space. Tap is lost if queue is full.
        DebugHelper.log("onSingleTap()");
        queuedSingleTaps.offer(e);
    }

    private void load3DCharacters() {
        CharacterModel model = new CharacterModel();
        model.setName("Cat");
        model.setObjectPath("cat/cat.obj");
        model.setScaleFactor(0.25f);
        model.setResourceId(R.drawable.cat);
        characterModelList.add(model);

        model = new CharacterModel();
        model.setName("Old Man");
        model.setObjectPath("man/muro.obj");
        model.setScaleFactor(0.0085f);
        model.setResourceId(R.drawable.old_man);
        characterModelList.add(model);


        model = new CharacterModel();
        model.setName("Bloodwing");
        model.setObjectPath("bloodwing/bloodwing.obj");
        model.setScaleFactor(0.0009f);
        model.setResourceId(R.drawable.bat);
        characterModelList.add(model);
    }

    private void loadObject(CharacterModel model) {
        try {
            this.objectPath = model.getObjectPath();
            this.scaleFactor = model.getScaleFactor();
            if (virtualObject == null) {
                virtualObject = new ObjectRenderer(objectPath);
                virtualObject.createOnGlThread(this);
                virtualObject.setMaterialProperties(0.0f, 1.0f, 1.0f, 6.0f);
            } else {
                // Clear screen to notify driver it should not load any pixels from previous frame.
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
                isObjectChanged = true;
                virtualObject.updateObjectPath(model.getObjectPath());
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }


    @Override
    public void onDrawFrame(GL10 gl) {
        if (isObjectChanged) {
            isObjectChanged = false;
            try {
                virtualObject.createOnGlThread(this);
                virtualObject.setMaterialProperties(0.0f, 2.0f, 0.5f, 6.0f);
            } catch (IOException e) {
                e.printStackTrace();
            }
            return;
        }

        // Clear screen to notify driver it should not load any pixels from previous frame.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        if (session == null) {
            return;
        }
        // Notify ARCore session that the view size changed so that the perspective matrix and
        // the video background can be properly adjusted.
        displayRotationHelper.updateSessionIfNeeded(session);

        try {
            session.setCameraTextureName(backgroundRenderer.getTextureId());

            // Obtain the current frame from ARSession. When the configuration is set to
            // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
            // camera framerate.
            Frame frame = session.update();
            Camera camera = frame.getCamera();

            // Handle taps. Handling only one tap per frame, as taps are usually low frequency
            // compared to frame rate.
            MotionEvent tap = queuedSingleTaps.poll();
            if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
                for (HitResult hit : frame.hitTest(tap)) {
                    // Check if any plane was hit, and if it was hit inside the plane polygon
                    Trackable trackable = hit.getTrackable();
                    // Creates an anchor if a plane or an oriented point was hit.
                    if ((trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) || (trackable instanceof Point && ((Point) trackable).getOrientationMode() == Point.OrientationMode.ESTIMATED_SURFACE_NORMAL)) {
                        // Hits are sorted by depth. Consider only closest hit on a plane or oriented point.
                        // Cap the number of objects created. This avoids overloading both the
                        // rendering system and ARCore.
                        //if (!isUpdate) {
                        DebugHelper.log("Anchor size = " + anchors.size());
                        if (anchors.size() >= 1) {
                            anchors.get(0).detach();
                            anchors.remove(0);
                        }
                        // Adding an Anchor tells ARCore that it should track this position in
                        // space. This anchor is created on the Plane to place the 3D model
                        // in the correct position relative both to the world and to the plane.
                        if (anchors.size() > 0) {
                            DebugHelper.log("anchor list has data");
                            for (Anchor anchor : anchors) {
                                anchor.detach();
                                anchors.remove(anchor);
                            }
                        }
                        Anchor anchor = hit.createAnchor();
                        if (anchor != null)
                            anchors.add(anchor);
                        else
                            DebugHelper.log("anchor is null");
                        //}
                        break;
                    }
                }
            }

            // Draw background.
            backgroundRenderer.draw(frame);

            // If not tracking, don't draw 3d objects.
            if (camera.getTrackingState() == TrackingState.PAUSED) {
                return;
            }

            // Get projection matrix.
            camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

            // Get camera matrix and draw.
            camera.getViewMatrix(viewmtx, 0);

            // Compute lighting from average intensity of the image.
            final float lightIntensity = frame.getLightEstimate().getPixelIntensity();

            // Visualize tracked points.
            PointCloud pointCloud = frame.acquirePointCloud();
            this.pointCloud.update(pointCloud);
            if (!capturePicture)
                this.pointCloud.draw(viewmtx, projmtx);

            // Application is responsible for releasing the point cloud resources after
            // using it.
            pointCloud.release();

            // Check if we detected at least one plane. If so, hide the loading message.
            if (messageSnackbar != null) {
                for (Plane plane : session.getAllTrackables(Plane.class)) {
                    if (plane.getType() == Plane.Type.HORIZONTAL_UPWARD_FACING
                            && plane.getTrackingState() == TrackingState.TRACKING) {
                        hideLoadingMessage();
                        break;
                    }
                }
            }
            // Visualize planes.
            if (!capturePicture)
                planeRenderer.drawPlanes(session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);

            // Visualize anchors created by touch.
            for (Anchor anchor : anchors) {
                if (anchor.getTrackingState() != TrackingState.TRACKING) {
                    continue;
                }
                // Get the current pose of an Anchor in world space. The Anchor pose is updated
                // during calls to session.update() as ARCore refines its estimate of the world.
                anchor.getPose().toMatrix(anchorMatrix, 0);

                // Update and draw the model and its shadow.
                if (virtualObject != null) {
                    // Passing my scaleFactor and rotationDegrees to update the model matrix.
                    virtualObject.updateModelMatrix(anchorMatrix, scaleFactor, mRotationDegrees);
                    if (viewmtx != null && projmtx != null) {
                        virtualObject.draw(viewmtx, projmtx, lightIntensity);
                    }
                }

            }

            if (capturePicture) {
                capturePicture = false;
                onSavePicture();
            }

        } catch (Throwable t) {
            Log.e(TAG, "Exception on the OpenGL thread", t);
        }
    }
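For reference, with a separation like the sketch near the top of the question, I imagine the draw loop would call something like the following instead of passing everything into a single updateModelMatrix call (again, just a sketch with hypothetical methods, not working code):

                if (virtualObject != null) {
                    // Gesture listeners only touch scaleFactor / mRotationDegrees;
                    // the renderer composes them into the model matrix here.
                    virtualObject.setScale(scaleFactor);          // hypothetical method
                    virtualObject.setRotationY(mRotationDegrees); // hypothetical method
                    virtualObject.updateModelMatrix(anchorMatrix);
                    virtualObject.draw(viewmtx, projmtx, lightIntensity);
                }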