我将尝试基于触摸坐标通过交叉光线测试来实现对象拾取。但是,我在寻找将触摸坐标转换为世界坐标系以构建该光线的信息时遇到了困难。
目前我的理解是,应用于场景中每个顶点的矩阵为:
这个测试应用的目标是展示场景从几个不同角度看的情况,以便我可以根据我的代码来看交点线看起来如何。我想画一条线,从相机的原点开始,结束于交点处,但它表现得很奇怪。终点似乎被推到了x轴正方向上更远的位置,在某些地方似乎会跳过,就像那个位置有一个洞一样。虽然我还记得一些来自微积分的线性代数知识,但我不记得足够多,无法确定我在这里做什么,我已经搜索了许多在线资源,但没有找到解决方法。我希望读到这篇文章的人比我更有处理这个问题的经验,并且愿意帮助我,或者如果我做错了其他事情或者以低效的方式做了其他事情,给我一些提示。
目前我的理解是,应用于场景中每个顶点的矩阵为:
projectionMatrix * viewMatrix * modelMatrix
以下是我用来反转该过程的步骤,试图找到场景中光线的终点,以及在我错误地应用不同矩阵的情况下我的绘图循环:
public float[] getMouseRayProjection(float touchX, float touchY, float windowWidth, float windowHeight, float[] modelView, float[] projection)
{
float[] rayDirection = new float[4];
float normalizedX = 2 * touchX/windowWidth - 1;
float normalizedY = 1 - 2*touchY/windowHeight;
float[] unviewMatrix = new float[16];
float[] viewMatrix = new float[16];
Matrix.multiplyMM(viewMatrix, 0, projection, 0, modelView, 0);
Matrix.invertM(unviewMatrix, 0, viewMatrix, 0);
float[] nearPoint = multiplyMat4ByVec4(projection, new float[]{normalizedX, normalizedY, 0, 1});
float[] modelviewInverse = new float[16];
Matrix.invertM(modelviewInverse, 0, modelView, 0);
float[] cameraPos = new float[4];
cameraPos[0] = modelviewInverse[12];
cameraPos[1] = modelviewInverse[13];
cameraPos[2] = modelviewInverse[14];
cameraPos[3] = modelviewInverse[15];
rayDirection[0] = nearPoint[0] - cameraPos[0];
rayDirection[1] = nearPoint[1] - cameraPos[1];
rayDirection[2] = nearPoint[2] - cameraPos[2];
rayDirection[3] = nearPoint[3] - cameraPos[3];
return rayDirection;
}
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
float[] returnMatrix = new float[4];
returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
return returnMatrix;
}
@Override
public void onDrawFrame(GL10 gl10) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
long time = SystemClock.uptimeMillis() % 10000L;
float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
GLES20.glViewport(0, 0, (int)(width/2), (int)(height/2));
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 1.5f, 0f, 0f, -5f, 0f, 1f, 0f);
//Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);
drawTriangle(triangleVertices);
//Matrix.translateM(mModelMatrix, 0, 1.5f, 0, -1f);
//Matrix.frustumM(mProjectionMatrix, 0, left, right, -1.0f, 1.0f, 1.0f, 10.0f);
Matrix.setIdentityM(mModelMatrix, 0);
Matrix.setLookAtM(viewMatrix, 0, 1.5f, 0.8f, 0.5f, 0f, 0f, 0f, 0f, 1f, 0f);
GLES20.glViewport((int)(width/2), (int)(height/2), (int)(width/2), (int)(height/2));
drawTriangle(triangleVertices);
drawIntersectionLine();
/*
Matrix.setLookAtM(viewMatrix, 0, 0, 1.5f, 0.5f, 0, 0, 0, 0, 0, -1f);
GLES20.glViewport((int)(width/2), (int)height, (int)(width/2), (int)(height/2));
drawTriangle(triangleVertices);
drawIntersectionLine();
*/
}
private void drawTriangle(final FloatBuffer triangleBuffer)
{
triangleBuffer.position(positionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
triangleBuffer.position(colorOffset);
GLES20.glVertexAttribPointer(mColorHandle, colorDataSize, GLES20.GL_FLOAT, false, strideBytes, triangleBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, mModelMatrix, 0);
mMVMatrix = mMVPMatrix;
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);
//Log.d("OpenGLES2Test", "The intersection ray is: " + floatArrayAsString(getCameraPos(mMVMatrix)) + " + " + floatArrayAsString(getMouseRayProjection((int)(width / 2), (int)(height / 2), 1.0f, (int)width, (int)height, mMVMatrix, mProjectionMatrix)));
}
private void drawIntersectionLine()
{
lineVertices.position(0);
GLES20.glVertexAttribPointer(mPositionHandle, positionDataSize, GLES20.GL_FLOAT, false, lineStrideBytes, lineVertices);
GLES20.glEnableVertexAttribArray(mPositionHandle);
GLES20.glDrawArrays(GLES20.GL_LINES, 0, 2);
}
private void moveIntersectionLineEndPoint(float[] lineEndPoint)
{
this.lineEndPoint = lineEndPoint;
float[] lineVerticesData = {
lineStartPoint[0], lineStartPoint[1], lineStartPoint[2],
lineEndPoint[0], lineEndPoint[1], lineEndPoint[2]
};
lineVertices = ByteBuffer.allocateDirect(lineVerticesData.length * bytesPerFloat).order(ByteOrder.nativeOrder()).asFloatBuffer();
lineVertices.put(lineVerticesData).position(0);
}
虽然我相信我的四维矩阵乘以四维向量的方法是正确的,但为了保险起见,以下是该方法:
public float[] multiplyMat4ByVec4(float[] matrix4, float[] vector4)
{
float[] returnMatrix = new float[4];
returnMatrix[0] = (matrix4[0] * vector4[0]) + (matrix4[1] * vector4[1]) + (matrix4[2] * vector4[2]) + (matrix4[3] * vector4[3]);
returnMatrix[1] = (matrix4[4] * vector4[0]) + (matrix4[5] * vector4[1]) + (matrix4[6] * vector4[2]) + (matrix4[7] * vector4[3]);
returnMatrix[2] = (matrix4[8] * vector4[0]) + (matrix4[9] * vector4[1]) + (matrix4[10] * vector4[2]) + (matrix4[11] * vector4[3]);
returnMatrix[3] = (matrix4[12] * vector4[0]) + (matrix4[13] * vector4[1]) + (matrix4[14] * vector4[2]) + (matrix4[15] * vector4[3]);
return returnMatrix;
}
这个测试应用的目标是展示场景从几个不同角度看的情况,以便我可以根据我的代码来看交点线看起来如何。我想画一条线,从相机的原点开始,结束于交点处,但它表现得很奇怪。终点似乎被推到了x轴正方向上更远的位置,在某些地方似乎会跳过,就像那个位置有一个洞一样。虽然我还记得一些来自微积分的线性代数知识,但我不记得足够多,无法确定我在这里做什么,我已经搜索了许多在线资源,但没有找到解决方法。我希望读到这篇文章的人比我更有处理这个问题的经验,并且愿意帮助我,或者如果我做错了其他事情或者以低效的方式做了其他事情,给我一些提示。
变量参考:矩阵都是长度为16的浮点数组
mProjectionMatrix = projection matrix
mModelMatrix = model matrix
mMVPMatrix = projection * modelview matrix
mMVMatrix = modelview matrix
private final FloatBuffer triangleVertices;
private FloatBuffer lineVertices;
private final int bytesPerFloat = 4;
private float[] viewMatrix = new float[16];
private static Context context;
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mColorHandle;
private float[] mProjectionMatrix = new float[16];
private float[] mModelMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private float[] mMVMatrix = new float[16];
private final int strideBytes = 7 * bytesPerFloat;
private final int lineStrideBytes = 3 * bytesPerFloat;
private final int positionOffset = 0;
private final int positionDataSize = 3;
private final int colorOffset = 3;
private final int colorDataSize = 4;
private float width, height;
private float[] lineStartPoint = new float[]{0, 0, 1.5f};
private float[] lineEndPoint = new float[]{0, 0, 0};