使用视频流作为OpenGL ES 2.0纹理

15

我正在尝试通过将Open GL ES纹理设置为Android surfaceTexture来捕获视频并将其显示到屏幕上。由于我正在使用Google Cardboard,因此无法像这个教程中所述那样使用TextureView并实现SurfaceTextureListener

我已经按照Android文档中关于如何初始化Open GL ES 2.0并使用它的说明,以及这个教程上的纹理贴图说明进行了操作。

将两者结合起来,我得到一个空白屏幕,并偶尔在控制台窗口中得到<core_glBindTexture:572>: GL_INVALID_OPERATION。由于对许多我不知道的新概念感到不知所措,因此我无法调试或理解这两种方法是否可以像这样使用。这是我的绘图代码,它是在MainActivity类的onSurfaceCreated()中初始化的,并从Cardboard的绘图函数onEyeDraw()中绘制。

package com.example.rich.test3;

import android.hardware.Camera;
import android.opengl.GLES20;
import android.view.TextureView;

import java.nio.ShortBuffer;
import java.nio.FloatBuffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/**
 * Created by rich on 03/05/2015.
 */
public class Square {

private java.nio.FloatBuffer vertexBuffer;
private java.nio.ShortBuffer drawListBuffer;
private final java.nio.FloatBuffer mCubeTextureCoordinates;

float color[] = { 1.f, 1.f, 1.f, 1.0f };

private final String vertexShaderCode =
        "attribute vec4 vPosition;" +
        "attribute vec2 a_TexCoordinate;" +
        "varying vec2 v_TexCoordinate;" +
                "void main() {" +
                " gl_Position = vPosition;" +
                " v_TexCoordinate = a_TexCoordinate;" +
                "}";

private final String fragmentShaderCode =
        "precision mediump float;" +
                "uniform vec4 vColor;" +
                "uniform sampler2D u_Texture;" +
                "varying vec2 v_TexCoordinate;" +
                "void main() {" +
                "gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
                "}";

// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 3;
static float squareCoords[] = {
        -0.5f, -0.5f, 0.0f,   // bottom left
        0.5f, -0.5f, 0.0f,   // bottom right
        -0.5f,  0.5f, 0.0f,   // top left
        0.5f,  0.5f, 0.0f}; // top right

private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices

private int mProgram;

private int mPositionHandle;
private int mColorHandle;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;

private final int vertexCount = squareCoords.length / COORDS_PER_VERTEX;
private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

private int mTextureDataHandle;

float textureCoordinates[] =
        {0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f };

Camera _camera;
TextureView _textureView;
int[] textures;
android.graphics.SurfaceTexture _surface;

public Square()
{
    ByteBuffer bb = ByteBuffer.allocateDirect(
            // (# of coordinate values * 4 bytes per float)
            squareCoords.length * 4);
    bb.order(ByteOrder.nativeOrder());
    vertexBuffer = bb.asFloatBuffer();
    vertexBuffer.put(squareCoords);
    vertexBuffer.position(0);

    // initialize byte buffer for the draw list
    ByteBuffer dlb = ByteBuffer.allocateDirect(
            // (# of coordinate values * 2 bytes per short)
            drawOrder.length * 2);
    dlb.order(ByteOrder.nativeOrder());
    drawListBuffer = dlb.asShortBuffer();
    drawListBuffer.put(drawOrder);
    drawListBuffer.position(0);

    mCubeTextureCoordinates = ByteBuffer.allocateDirect(textureCoordinates.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeTextureCoordinates.put(textureCoordinates).position(0);

    // create empty OpenGL ES Program
    mProgram = GLES20.glCreateProgram();

    textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    _surface = new android.graphics.SurfaceTexture(textures[0]);
    _camera = Camera.open();
    Camera.Size previewSize = _camera.getParameters().getPreviewSize();

    try
    {
        _camera.setPreviewTexture(_surface);
    }
    catch (java.io.IOException ex)
    {
        // Console.writeLine (ex.Message);
    }

    final int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
    GLES20.glShaderSource(vertexShaderHandle, vertexShaderCode);
    GLES20.glCompileShader(vertexShaderHandle);
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] == 0)
    {
        //do check here
    }

    final int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
    GLES20.glShaderSource(fragmentShaderHandle, fragmentShaderCode);
    GLES20.glCompileShader(fragmentShaderHandle);
    GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] == 0)
    {
        //do check here
    }

    GLES20.glAttachShader(mProgram, vertexShaderHandle);
    GLES20.glAttachShader(mProgram, fragmentShaderHandle);
    GLES20.glBindAttribLocation(mProgram, 0, "a_Position");
    GLES20.glBindAttribLocation(mProgram, 0, "a_TexCoordinate");

    GLES20.glLinkProgram(mProgram);
    final int[] linkStatus = new int[1];
    GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] == 0)
    {
        //do check here
    }

    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    mTextureDataHandle = textures[0];

    // Set filtering
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}

public void draw()
{
    _surface.updateTexImage();
    GLES20.glUseProgram(mProgram);

    mTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "u_Texture");
    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "a_Position");
    mColorHandle = GLES20.glGetAttribLocation(mProgram, "a_Color");
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);

    GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false,
            vertexStride, vertexBuffer);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false,
            0, mCubeTextureCoordinates);

    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glUniform1i(mTextureUniformHandle, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, vertexCount);
    GLES20.glDisableVertexAttribArray(mPositionHandle);
}

}

你有没有发现 https://github.com/google/grafika?"texture from camera" 这个活动可能已经实现了你想要的大部分功能。 - fadden
2个回答

8
当渲染一个SurfaceTexture纹理对象时,您需要使用GL_TEXTURE_EXTERNAL_OES纹理目标:

纹理对象使用GL_TEXTURE_EXTERNAL_OES纹理目标,该目标由GL_OES_EGL_image_external OpenGL ES扩展定义。这限制了纹理的使用方式。每次绑定纹理时,必须将其绑定到GL_TEXTURE_EXTERNAL_OES目标而不是GL_TEXTURE_2D目标。此外,任何从纹理中取样的OpenGL ES 2.0着色器都必须使用例如“#extension GL_OES_EGL_image_external : require”指令声明其使用此扩展。这样的着色器还必须使用samplerExternalOES GLSL采样器类型访问纹理。

因此,您需要修改片段着色器,添加#extension声明,并将纹理uniform声明为samplerExternalOES
private final String fragmentShaderCode =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;" +
    "uniform vec4 vColor;" +
    "uniform samplerExternalOES u_Texture;" +
    "varying vec2 v_TexCoordinate;" +
    "void main() {" +
            "gl_FragColor = (texture2D(u_Texture, v_TexCoordinate));" +
    "}";

在您的draw()函数中,以以下方式绑定纹理:

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureDataHandle);

刚刚有机会尝试并出现了 W/Adreno-ES20﹕<core_glBindTexture:572>: GL_INVALID_OPERATION。 - cool mr croc
1
如果绑定到不同的目标,您可能会遇到该错误:https://www.khronos.org/opengles/sdk/docs/man/xhtml/glBindTexture.xml。您在构造函数和draw()中都调用了glBindTexture,请将其更改为使用GLES11Ext.GL_TEXTURE_EXTERNAL_OES。 - samgak
发现得不错。我现在遇到了这个错误W/Adreno-ES20﹕<core_glVertexAttribPointer:533>:GL_INVALID_VALUE。谷歌上没有相关信息。你看到有什么可能引起这个错误的吗? - cool mr croc
1
我认为您在mPositionHandle中传递了一个无效的值。您对glGetAttribLocation的调用要求为“a_Position”,但在着色器中它被称为“vPosition”。对于“a_Color”/“vColor”也是如此(尽管您似乎没有使用它)。因为颜色是一种统一的而不是属性,所以您应该调用GetUniformLocation来获取颜色。 - samgak
谢谢,我仍然什么也没有得到,但至少现在我知道没有错误了。 - cool mr croc

5
您不能使用普通纹理来渲染相机或视频预览,必须使用GL_TEXTURE_EXTERNAL_OES扩展。我遇到了同样的问题,在github上找到了一个完整的工作解决方案。该项目的名称是android_instacam。 在这里您可以找到源代码进行学习。如果您想直接在设备上查看它的效果,请前往Play商店此处

网页内容由stack overflow 提供, 点击上面的
可以查看英文原文,
原文链接