How do I move an object from one anchor to another?

My use case is as follows:
  1. Tap the screen and save that "point" as the start anchor
  2. Tap the screen a second time and save that "point" as the end anchor
  3. Press a button to move the object from the start anchor to the end anchor

I built my own node, similar to the ObjectAnimator used in the Solar System sample. My only problem is that I don't know how to determine the start and end points for the evaluator. My first thought was to take the x, y, z from the poses of the start and end anchors:
Vector3 start = new Vector3(startAnchor.getPose().tx(), startAnchor.getPose().ty(), startAnchor.getPose().tz());
Vector3 end = new Vector3(endAnchor.getPose().tx(), endAnchor.getPose().ty(), endAnchor.getPose().tz());

movingAnimation.setObjectValues(start, end);
movingAnimation.setPropertyName("localPosition");
movingAnimation.setEvaluator(new Vector3Evaluator());

But when I do this, the start and end points of the animation are completely different.
I haven't found any reference to built-in tooling for this kind of operation. I am using Sceneform.
So the question is: how do I get a smooth animation from anchor A to anchor B (a simple slide is enough)?
2 Answers


I did this in the HelloSceneform sample. I created the first AnchorNode and added the "andy" node as its child. On the next tap, I created the endPosition AnchorNode and started the animation to move to it.

The thing to remember is that if you are working with the positions of objects that have different parents, you should use worldPosition rather than localPosition.
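
As a quick illustration of the difference (using the same names as the code below):

  // localPosition is relative to the node's parent, so values under different
  // AnchorNodes are not comparable; worldPosition is relative to the scene.
  Vector3 local = andy.getLocalPosition();  // relative to startNode, andy's parent
  Vector3 world = andy.getWorldPosition();  // relative to the scene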

  private void onPlaneTap(HitResult hitResult, Plane plane, MotionEvent motionEvent) {
      if (andyRenderable == null) {
        return;
      }
      // Create the Anchor.
      Anchor anchor = hitResult.createAnchor();

      // Create the starting position.
      if (startNode == null) {
        startNode = new AnchorNode(anchor);
        startNode.setParent(arFragment.getArSceneView().getScene());

        // Create the transformable andy and add it to the anchor.
        andy = new Node();
        andy.setParent(startNode);
        andy.setRenderable(andyRenderable);
      } else {
        // Create the end position and start the animation.
        endNode = new AnchorNode(anchor);
        endNode.setParent(arFragment.getArSceneView().getScene());
        startWalking();
      }
  }

  private void startWalking() {
    objectAnimation = new ObjectAnimator();
    objectAnimation.setAutoCancel(true);
    objectAnimation.setTarget(andy);

    // All the positions should be world positions
    // The first position is the start, and the second is the end.
    objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());

    // Use setWorldPosition to position andy.
    objectAnimation.setPropertyName("worldPosition");

    // The Vector3Evaluator interpolates between two Vector3 values and returns
    // the in-between vector. The default is to use lerp.
    objectAnimation.setEvaluator(new Vector3Evaluator());
    // This makes the animation linear (smooth and uniform).
    objectAnimation.setInterpolator(new LinearInterpolator());
    // Duration in ms of the animation.
    objectAnimation.setDuration(500);
    objectAnimation.start();
  }
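
One detail worth handling after the slide finishes: andy is still parented to startNode, so it keeps following the start anchor if ARCore later refines the anchor poses. A minimal sketch (an addition of mine, not part of the original answer, using android.animation.AnimatorListenerAdapter and the fields above) reparents the node to the end anchor when the animation ends:

  objectAnimation.addListener(new AnimatorListenerAdapter() {
    @Override
    public void onAnimationEnd(Animator animation) {
      // Keep the same world placement while switching the parent to the end
      // anchor, so future anchor updates track the destination.
      Vector3 world = andy.getWorldPosition();
      andy.setParent(endNode);
      andy.setWorldPosition(world);
    }
  });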

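The example below drives the same startWalking() animation from augmented-image detection: the model is placed once ARCore recognizes a reference image named "car", and each later tap on a plane, feature point, or augmented image creates a new end AnchorNode and starts the walk.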
/**
 * This is an example activity that uses the Sceneform UX package to make common AR tasks easier.
 */
public class MainActivity extends AppCompatActivity {
    private static final String TAG = MainActivity.class.getSimpleName();
    private static final double MIN_OPENGL_VERSION = 3.1;
    Session mSession;
    private ArFragment arFragment;
    private ArSceneView arSceneView;
    private ModelRenderable andyRenderable;
    private boolean shouldConfigureSession = false;
    private boolean modelAdded = false;
    private ObjectAnimator objectAnimation;
    private TransformableNode andy;
    private AnchorNode endNode;
    private GestureDetector trackableGestureDetector;

    /**
     * Returns false and displays an error message if Sceneform cannot run, true if Sceneform
     * can run on this device.
     *
     * <p>Sceneform requires Android N on the device as well as OpenGL 3.1 capabilities.
     *
     * <p>Finishes the activity if Sceneform cannot run.
     */
    public static boolean checkIsSupportedDeviceOrFinish(final Activity activity) {
        if (Build.VERSION.SDK_INT < VERSION_CODES.N) {
            Log.e(TAG, "Sceneform requires Android N or later");
            Toast.makeText(activity, "Sceneform requires Android N or later", Toast.LENGTH_LONG).show();
            activity.finish();
            return false;
        }

        String openGlVersionString =
                ((ActivityManager) activity.getSystemService(Context.ACTIVITY_SERVICE))
                        .getDeviceConfigurationInfo()
                        .getGlEsVersion();
        if (Double.parseDouble(openGlVersionString) < MIN_OPENGL_VERSION) {
            Log.e(TAG, "Sceneform requires OpenGL ES 3.1 later");
            Toast.makeText(activity, "Sceneform requires OpenGL ES 3.1 or later", Toast.LENGTH_LONG)
                    .show();
            activity.finish();
            return false;
        }
        return true;
    }

    @Override
    @SuppressWarnings({"AndroidApiChecker", "FutureReturnValueIgnored"})
    // CompletableFuture requires api level 24
    // FutureReturnValueIgnored is not valid
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        if (!checkIsSupportedDeviceOrFinish(this)) {
            return;
        }

        setContentView(R.layout.activity_main);
        ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 105);
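        // READ_EXTERNAL_STORAGE backs the alternative model-loading path below,
        // which reads model.sfb from external storage.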

        arFragment = (ArFragment) getSupportFragmentManager().findFragmentById(R.id.ux_fragment);
        if (arFragment != null) {
            arFragment.getPlaneDiscoveryController().hide();
            arFragment.getPlaneDiscoveryController().setInstructionView(null);
        }
        arSceneView = arFragment.getArSceneView();
        arSceneView.getScene().addOnUpdateListener((this::onUpdateFrame));

        arFragment.getArSceneView().getScene().addOnPeekTouchListener(this::handleOnTouch);
        this.trackableGestureDetector = new GestureDetector(this, new GestureDetector.SimpleOnGestureListener() {
            public boolean onSingleTapUp(MotionEvent e) {
                onSingleTap(e);
                return true;
            }

            public boolean onDown(MotionEvent e) {
                return true;
            }
        });

        // When you build a Renderable, Sceneform loads its resources in the background while returning
        // a CompletableFuture. Call thenAccept(), handle(), or check isDone() before calling get().

        // Alternative source: load model.sfb from external storage through a Callable
        // and pass it to setSource(this, callable) instead of the bundled raw resource.
        File file = new File(Environment.getExternalStorageDirectory(), "model.sfb");
        Callable<InputStream> callable = () -> new FileInputStream(file);
        ModelRenderable.builder()
                .setSource(this, R.raw.model) // or .setSource(this, callable)
                .build()
                .thenAccept(renderable -> andyRenderable = renderable)
                .exceptionally(
                        throwable -> {
                            Toast toast =
                                    Toast.makeText(this, "Unable to load andy renderable", Toast.LENGTH_LONG);
                            toast.setGravity(Gravity.CENTER, 0, 0);
                            toast.show();
                            return null;
                        });
        arFragment.setOnTapArPlaneListener(
                (HitResult hitResult, Plane plane, MotionEvent motionEvent) -> {
                    if (andyRenderable == null) {
                        return;
                    }

                    if (modelAdded) {
                        endNode = new AnchorNode(hitResult.createAnchor());
                        endNode.setParent(arFragment.getArSceneView().getScene());
                        startWalking();
                    }
                });

    }

    private void handleOnTouch(HitTestResult hitTestResult, MotionEvent motionEvent) {
        // First call ArFragment's listener to handle TransformableNodes.
        arFragment.onPeekTouch(hitTestResult, motionEvent);

        // Check for touching a Sceneform node
        if (hitTestResult.getNode() != null) {
            return;
        }

        // Otherwise call gesture detector.
        trackableGestureDetector.onTouchEvent(motionEvent);
    }

    private void onSingleTap(MotionEvent motionEvent) {
        Frame frame = arFragment.getArSceneView().getArFrame();
        if (frame != null && motionEvent != null && frame.getCamera().getTrackingState() == TrackingState.TRACKING) {
            for (HitResult hit : frame.hitTest(motionEvent)) {
                Trackable trackable = hit.getTrackable();
                if (trackable instanceof Plane && ((Plane) trackable).isPoseInPolygon(hit.getHitPose())) {
                    Plane plane = (Plane) trackable;
                    endNode = new AnchorNode(plane.createAnchor(plane.getCenterPose()));
                    endNode.setParent(arFragment.getArSceneView().getScene());
                    startWalking();
                    // Handle plane hits.
                    break;
                } else if (trackable instanceof Point) {
                    // Handle point hits
                    Point point = (Point) trackable;
                    endNode = new AnchorNode(point.createAnchor(hit.getHitPose()));
                    endNode.setParent(arFragment.getArSceneView().getScene());
                    startWalking();
                } else if (trackable instanceof AugmentedImage) {
                    // Handle image hits.
                    AugmentedImage image = (AugmentedImage) trackable;
                    endNode = new AnchorNode(image.createAnchor(image.getCenterPose()));
                    endNode.setParent(arFragment.getArSceneView().getScene());
                    startWalking();
                }
            }
        }
    }

    private void startWalking() {
        objectAnimation = new ObjectAnimator();
        objectAnimation.setAutoCancel(true);
        objectAnimation.setTarget(andy);

        // All the positions should be world positions
        // The first position is the start, and the second is the end.
        objectAnimation.setObjectValues(andy.getWorldPosition(), endNode.getWorldPosition());

        // Use setWorldPosition to position andy.
        objectAnimation.setPropertyName("worldPosition");

        // The Vector3Evaluator interpolates between two Vector3 values and returns
        // the in-between vector. The default is to use lerp.
        objectAnimation.setEvaluator(new Vector3Evaluator());

        // This makes the animation linear (smooth and uniform).
        objectAnimation.setInterpolator(new LinearInterpolator());

        // Duration in ms of the animation.
        objectAnimation.setDuration(500);
        objectAnimation.start();
    }

    private void configureSession() {
        Config config = new Config(mSession);
        if (!setupAugmentedImageDb(config)) {
            Toast.makeText(this, "Could not setup augmented", Toast.LENGTH_SHORT).show();
        }
        config.setUpdateMode(Config.UpdateMode.LATEST_CAMERA_IMAGE);
        mSession.configure(config);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mSession != null) {
            // Note that the order matters - GLSurfaceView is paused first so that it does not try
            // to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
            // still call session.update() and get a SessionPausedException.
            arSceneView.pause();
            mSession.pause();
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mSession == null) {
            String message = null;
            Exception exception = null;
            try {
                mSession = new Session(this);
            } catch (UnavailableArcoreNotInstalledException e) {
                message = "Please install ARCore";
                exception = e;
            } catch (UnavailableApkTooOldException e) {
                message = "Please update ARCore";
                exception = e;
            } catch (UnavailableSdkTooOldException e) {
                message = "Please update this app";
                exception = e;
            } catch (Exception e) {
                message = "This device does not support AR";
                exception = e;
            }

            if (message != null) {
                Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
                Log.e(TAG, "Exception creating session", exception);
                return;
            }
            shouldConfigureSession = true;
        }
        if (shouldConfigureSession) {
            configureSession();
            shouldConfigureSession = false;

            arSceneView.setupSession(mSession);
        }
    }

    private void onUpdateFrame(FrameTime frameTime) {
        Frame frame = arSceneView.getArFrame();
        if (frame == null) {
            return;
        }

        Collection<AugmentedImage> updatedAugmentedImages =
                frame.getUpdatedTrackables(AugmentedImage.class);

        for (AugmentedImage augmentedImage : updatedAugmentedImages) {
            if (augmentedImage.getTrackingState() == TrackingState.TRACKING) {
                // Check camera image matches our reference image
                if (augmentedImage.getName().contains("car")) {

                    if (!modelAdded) {
                        modelAdded = true;
                        Anchor anchor = augmentedImage.createAnchor(augmentedImage.getCenterPose());
                        AnchorNode anchorNode = new AnchorNode(anchor);
                        anchorNode.setParent(arFragment.getArSceneView().getScene());

                        // Create the transformable andy and add it to the anchor.
                        andy = new TransformableNode(arFragment.getTransformationSystem());
                        andy.setParent(anchorNode);
                        andy.setRenderable(andyRenderable);
                        andy.select();
                    }
                }
            }
        }
    }

    private boolean setupAugmentedImageDb(Config config) {
        AugmentedImageDatabase augmentedImageDatabase;

        Bitmap augmentedImageBitmap = loadAugmentedImage();
        if (augmentedImageBitmap == null) {
            return false;
        }

        augmentedImageDatabase = new AugmentedImageDatabase(mSession);
        augmentedImageDatabase.addImage("car", augmentedImageBitmap);

        config.setAugmentedImageDatabase(augmentedImageDatabase);
        return true;
    }

    private Bitmap loadAugmentedImage() {
        try (InputStream is = getAssets().open("car.jpeg")) {
            return BitmapFactory.decodeStream(is);
        } catch (IOException e) {
            Log.e(TAG, "IO exception loading augmented image bitmap.", e);
        }
        return null;
    }
}
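
In both answers the slide uses a fixed 500 ms duration, so the model moves faster the farther apart the anchors are. If a roughly constant walking speed is preferable, one hedged tweak (the 0.5 m/s figure is an arbitrary assumption of mine) is to derive the duration from the distance before starting the animation:

  // Scale the duration with the distance so the speed stays roughly constant.
  float meters = Vector3.subtract(endNode.getWorldPosition(), andy.getWorldPosition()).length();
  objectAnimation.setDuration((long) (meters / 0.5f * 1000f));  // assumed 0.5 m/s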
