
Android: how do I move an object from one anchor to another?


My use case is:

  • Tap the screen and save the "point" as the start anchor
  • Tap the screen again and save the "point" as the end anchor
  • Press a button that moves the object from the start point to the end point

I have already built my own node using ObjectAnimator, similar to the one in the solar system sample. My only problem is that I don't know how to determine the start and end points for the evaluator. My first idea was to take the x, y, z from the poses of the start and end anchors:

    Vector3 start = new Vector3(startAnchor.getPose().tx(), startAnchor.getPose().ty(), startAnchor.getPose().tz());
    Vector3 end = new Vector3(endAnchor.getPose().tx(), endAnchor.getPose().ty(), endAnchor.getPose().tz());
    

But when I do that, the animation plays out in a completely different place.

I haven't found any reference to built-in tooling for this kind of operation. I am using Sceneform.


So the question is: how do I make a smooth animation that moves an object from anchor A to anchor B (a simple slide would be enough)?

I did this in the HelloSceneform sample. I created the first AnchorNode and added the "andy" node as its child. On the next tap, I created the endPosition anchor node and started the animation that moves to it.

The thing to remember is that when you use the positions of objects that have different parents, you need to work with worldPosition rather than localPosition.

      private void onPlaneTap(HitResult hitResult, Plane plane, MotionEvent motionEvent) {
          if (andyRenderable == null) {
            return;
          }
          // Create the Anchor.
          Anchor anchor = hitResult.createAnchor();
    
          // Create the starting position.
          if (startNode == null) {
            startNode = new AnchorNode(anchor);
            startNode.setParent(arFragment.getArSceneView().getScene());
    
            // Create the transformable andy and add it to the anchor.
            andy = new Node();
            andy.setParent(startNode);
            andy.setRenderable(andyRenderable);
          } else {
            // Create the end position and start the animation.
            endNode = new AnchorNode(anchor);
            endNode.setParent(arFragment.getArSceneView().getScene());
            startWalking();
          }
      }
    
      private void startWalking() {
        objectAnimation = new ObjectAnimator();
        objectAnimation.setAutoCancel(true);
        objectAnimation.setTarget(andy);
    
        // All the positions should be world positions
        // The first position is the start, and the second is the end.
        objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());
    
        // Use setWorldPosition to position andy.
        objectAnimation.setPropertyName("worldPosition");
    
        // The Vector3Evaluator interpolates between the two Vector3 values and
        // returns the next Vector3 for each animation frame (a linear lerp).
        objectAnimation.setEvaluator(new Vector3Evaluator());
        // This makes the animation linear (smooth and uniform).
        objectAnimation.setInterpolator(new LinearInterpolator());
        // Duration in ms of the animation.
        objectAnimation.setDuration(500);
        objectAnimation.start();
      }
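
For reference, the pose-based vectors from the question also work, as long as they are applied to the node's worldPosition: an anchor's pose translation is expressed in world space, so feeding those values to the parent-relative localPosition is what makes the animation run "from a completely different place". A minimal sketch of that variant (the helper name animateBetweenAnchors and its parameters are illustrative, not part of the original code):

      // Minimal sketch, not from the original answer: drive the same animation from
      // the two ARCore anchors directly. Pose translations are world-space values,
      // so they must be animated through "worldPosition", not "localPosition".
      private void animateBetweenAnchors(Anchor startAnchor, Anchor endAnchor) {
        Vector3 start = new Vector3(
            startAnchor.getPose().tx(), startAnchor.getPose().ty(), startAnchor.getPose().tz());
        Vector3 end = new Vector3(
            endAnchor.getPose().tx(), endAnchor.getPose().ty(), endAnchor.getPose().tz());

        ObjectAnimator animator = new ObjectAnimator();
        animator.setAutoCancel(true);
        animator.setTarget(andy);                        // the node to move
        animator.setObjectValues(start, end);            // world-space start and end
        animator.setPropertyName("worldPosition");       // maps to Node.setWorldPosition
        animator.setEvaluator(new Vector3Evaluator());   // lerps between the two vectors
        animator.setInterpolator(new LinearInterpolator());
        animator.setDuration(500);                       // milliseconds
        animator.start();
      }

The full MainActivity listing below combines this approach with augmented-image tracking: the model is placed when the "car" reference image is detected, and each subsequent tap creates a new end anchor and calls startWalking().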
    
    
    /**
     * This is an example activity that uses the Sceneform UX package to make common AR tasks easier.
     */
    public class MainActivity extends AppCompatActivity {
        private static final String TAG = MainActivity.class.getSimpleName();
        private static final double MIN_OPENGL_VERSION = 3.1;
        Session mSession;
        private ArFragment arFragment;
        private ArSceneView arSceneView;
        private ModelRenderable andyRenderable;
        private boolean shouldConfigureSession = false;
        private boolean modelAdded = false;
        private ObjectAnimator objectAnimation;
        private TransformableNode andy;
        private AnchorNode endNode;
        private GestureDetector trackableGestureDetector;
    
        /**
         * Returns false and displays an error message if Sceneform can not run, true if Sceneform can run
         * on this device.
         * <p>
         * <p>Sceneform requires Android N on the device as well as OpenGL 3.1 capabilities.
         * <p>
         * <p>Finishes the activity if Sceneform can not run
         */
        public static boolean checkIsSupportedDeviceOrFinish(final Activity activity) {
            if (Build.VERSION.SDK_INT < VERSION_CODES.N) {
                Log.e(TAG, "Sceneform requires Android N or later");
                Toast.makeText(activity, "Sceneform requires Android N or later", Toast.LENGTH_LONG).show();
                activity.finish();
                return false;
            }
    
            String openGlVersionString =
                    ((ActivityManager) activity.getSystemService(Context.ACTIVITY_SERVICE))
                            .getDeviceConfigurationInfo()
                            .getGlEsVersion();
            if (Double.parseDouble(openGlVersionString) < MIN_OPENGL_VERSION) {
                Log.e(TAG, "Sceneform requires OpenGL ES 3.1 later");
                Toast.makeText(activity, "Sceneform requires OpenGL ES 3.1 or later", Toast.LENGTH_LONG)
                        .show();
                activity.finish();
                return false;
            }
            return true;
        }
    
        @Override
        @SuppressWarnings({"AndroidApiChecker", "FutureReturnValueIgnored"})
        // CompletableFuture requires api level 24
        // FutureReturnValueIgnored is not valid
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
    
            if (!checkIsSupportedDeviceOrFinish(this)) {
                return;
            }
    
            setContentView(R.layout.activity_main);
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 105);
    
            arFragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.ux_fragment);
            if (arFragment != null) {
                arFragment.getPlaneDiscoveryController().hide();
                arFragment.getPlaneDiscoveryController().setInstructionView(null);
    
            }
            arSceneView = arFragment.getArSceneView();
            arSceneView.getScene().addOnUpdateListener((this::onUpdateFrame));
    
            arFragment.getArSceneView().getScene().addOnPeekTouchListener(this::handleOnTouch);
            this.trackableGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
                public boolean onSingleTapUp(MotionEvent e) {
                    onSingleTap(e);
                    return true;
                }
    
                public boolean onDown(MotionEvent e) {
                    return true;
                }
            });
    
            // When you build a Renderable, Sceneform loads its resources in the background while returning
            // a CompletableFuture. Call thenAccept(), handle(), or check isDone() before calling get().
    
            File file = new File(Environment.getExternalStorageDirectory(), "model.sfb");
            Uri photoURI = Uri.fromFile(file);
            Callable callable = () -> (InputStream) new FileInputStream(file);
            FutureTask task = new FutureTask<>(callable);
            new Thread(task).start();
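            // Note: photoURI and the FutureTask above appear unused in the code shown
            // here; the renderable is loaded from R.raw.model, and the callable-based
            // setSource(...) variant is left commented out.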
            ModelRenderable.builder()
                    .setSource(this, R.raw.model) //.setSource(this, callable)
                    .build()
                    .thenAccept(renderable -> andyRenderable = renderable)
                    .exceptionally(
                            throwable -> {
                                Toast toast =
                                        Toast.makeText(this, "Unable to load andy renderable", Toast.LENGTH_LONG);
                                toast.setGravity(Gravity.CENTER, 0, 0);
                                toast.show();
                                return null;
                            });
            arFragment.setOnTapArPlaneListener(
                    (HitResult hitResult, Plane plane, MotionEvent motionEvent) -> {
                        if (andyRenderable == null) {
                            return;
                        }
    
                        if (modelAdded) {
                            endNode = new AnchorNode(hitResult.createAnchor());
                            endNode.setParent(arFragment.getArSceneView().getScene());
                            startWalking();
                        }
                    });
    
        }
    
        private void handleOnTouch(HitTestResult hitTestResult, MotionEvent motionEvent) {
            // First call ArFragment's listener to handle TransformableNodes.
            arFragment.onPeekTouch(hitTestResult, motionEvent);
    
            // Check for touching a Sceneform node
            if (hitTestResult.getNode() != null) {
                return;
            }
    
            // Otherwise call gesture detector.
            trackableGestureDetector.onTouchEvent(motionEvent);
        }
    
        private void onSingleTap(MotionEvent motionEvent) {
            Frame frame = arFragment.getArSceneView().getArFrame();
            if (frame != null && motionEvent != null && frame.getCamera().getTrackingState() == TrackingState.TRACKING) {
                for (HitResult hit : frame.hitTest(motionEvent)) {
                    Trackable trackable = hit.getTrackable();
                    if (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
                        Plane plane = (Plane) trackable;
                        endNode = new AnchorNode(plane.createAnchor(plane.getCenterPose()));
                        endNode.setParent(arFragment.getArSceneView().getScene());
                        startWalking();
                        // Handle plane hits.
                        break;
                    } else if (trackable instanceof Point) {
                        // Handle point hits
                        Point point = (Point) trackable;
                        endNode = new AnchorNode(point.createAnchor(hit.getHitPose()));
                        endNode.setParent(arFragment.getArSceneView().getScene());
                        startWalking();
                    } else if (trackable instanceof AugmentedImage) {
                        // Handle image hits.
                        AugmentedImage image = (AugmentedImage) trackable;
                        endNode = new AnchorNode(image.createAnchor(image.getCenterPose()));
                        endNode.setParent(arFragment.getArSceneView().getScene());
                        startWalking();
                    }
                }
            }
        }
    
        private void startWalking() {
            objectAnimation = new ObjectAnimator();
            objectAnimation.setAutoCancel(true);
            objectAnimation.setTarget(andy);
    
            // All the positions should be world positions
            // The first position is the start, and the second is the end.
            objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());
    
            // Use setWorldPosition to position andy.
            objectAnimation.setPropertyName("worldPosition");
    
            // The Vector3Evaluator interpolates between the two Vector3 values and
            // returns the next Vector3 for each animation frame (a linear lerp).
            objectAnimation.setEvaluator(new Vector3Evaluator());
    
            // This makes the animation linear (smooth and uniform).
            objectAnimation.setInterpolator(new LinearInterpolator());
    
            // Duration in ms of the animation.
            objectAnimation.setDuration(500);
            objectAnimation.start();
        }
    
        private void configureSession() {
            Config config = new Config(mSession);
            if (!setupAugmentedImageDb(config)) {
                Toast.makeText(this, "Could not setup augmented", Toast.LENGTH_SHORT).show();
            }
            config.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
            mSession.configure(config);
        }
    
        @Override
        public void onPause() {
            super.onPause();
            if (mSession != null) {
                // Note that the order matters - GLSurfaceView is paused first so that it does not try
                // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
                // still call session.update() and get a SessionPausedException.
                arSceneView.pause();
                mSession.pause();
            }
        }
    
        @Override
        protected void onResume() {
            super.onResume();
            if (mSession == null) {
                String message = null;
                Exception exception = null;
                try {
                    mSession = new Session(this);
                } catch (UnavailableArcoreNotInstalledException e) {
                    message = "Please install ARCore";
                    exception = e;
                } catch (UnavailableApkTooOldException e) {
                    message = "Please update ARCore";
                    exception = e;
                } catch (UnavailableSdkTooOldException e) {
                    message = "Please update this app";
                    exception = e;
                } catch (Exception e) {
                    message = "This device does not support AR";
                    exception = e;
                }
    
                if (message != null) {
                    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
                    Log.e(TAG, "Exception creating session", exception);
                    return;
                }
                shouldConfigureSession = true;
    
            }
            if (shouldConfigureSession) {
                configureSession();
                shouldConfigureSession = false;
    
                arSceneView.setupSession(mSession);
            }
    
    
        }
    
        private void onUpdateFrame(FrameTime frameTime) {
            Frame frame = arSceneView.getArFrame();
    
    
            Collection<AugmentedImage> updatedAugmentedImages =
                    frame.getUpdatedTrackables(AugmentedImage.class);
            Log.d("size----", String.valueOf(updatedAugmentedImages.size()));
    
            for (AugmentedImage augmentedImage : updatedAugmentedImages) {
                if (augmentedImage.getTrackingState() == TrackingState.TRACKING) {
                    // Check camera image matches our reference image
                    if (augmentedImage.getName().contains("car")) {
    
                        if (!modelAdded) {
                            modelAdded = true;
                            Anchor anchor = augmentedImage.createAnchor(augmentedImage.getCenterPose());
                            AnchorNode anchorNode = new AnchorNode(anchor);
                            anchorNode.setParent(arFragment.getArSceneView().getScene());
    
                            // Create the transformable andy and add it to the anchor.
                            andy = new TransformableNode(arFragment.getTransformationSystem());
                            andy.setParent(anchorNode);
                            andy.setRenderable(andyRenderable);
                            andy.select();
    
                        }
                    }
    
                }
            }
    
        }
    
        private boolean setupAugmentedImageDb(Config config) {
            AugmentedImageDatabase augmentedImageDatabase;
    
            Bitmap augmentedImageBitmap = loadAugmentedImage();
            if (augmentedImageBitmap == null) {
                return false;
            }
    
            augmentedImageDatabase = new AugmentedImageDatabase(mSession);
            augmentedImageDatabase.addImage("car", augmentedImageBitmap);
    
            config.setAugmentedImageDatabase(augmentedImageDatabase);
            return true;
        }
    
        private Bitmap loadAugmentedImage() {
            try (InputStream is = getAssets().open("car.jpeg")) {
                return BitmapFactory.decodeStream(is);
            } catch (IOException e) {
                Log.e(TAG, "IO exception loading augmented image bitmap.", e);
            }
            return null;
        }
    }
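
The use case in the question starts the move with a button rather than with a second tap on a plane. A minimal sketch of that wiring, to be added at the end of onCreate() (the Button id move_button is hypothetical and would have to exist in activity_main.xml):

        // Minimal sketch, not from the original code: trigger the walk from a button.
        // R.id.move_button is a hypothetical id for a Button in activity_main.xml.
        Button moveButton = findViewById(R.id.move_button);
        moveButton.setOnClickListener(view -> {
            // Only animate once the model has been placed and an end anchor exists.
            if (andy != null && endNode != null) {
                startWalking();
            }
        });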