Generating a WebGL texture from YouTube video frames

I'm using the technique described here (code demo) for using video frames as a WebGL texture, combined with the simple scene from here (just showing an image in 2D rather than a 3D spinning cube).
The goal is a Tampermonkey userscript for YouTube with WebGL shaders, i.e. video effects.
The canvas is filled gray because of gl.clearColor(0.5, 0.5, 0.5, 1), but the following lines, which should draw the video frame, have no visible effect. Which part is likely wrong? There are no error messages.
I tried to shorten the code before posting, but apparently even a simple WebGL scene needs a lot of boilerplate.
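For reference, WebGL fails silently unless you poll for errors yourself; no exception is thrown and nothing is logged. A minimal sketch of such a check (the helper below is not part of the original script):

function checkGlErrors(gl, label) {
    // Drain the WebGL error queue and log anything pending.
    let err;
    while ((err = gl.getError()) !== gl.NO_ERROR) {
        const names = {
            [gl.INVALID_ENUM]: 'INVALID_ENUM',
            [gl.INVALID_VALUE]: 'INVALID_VALUE',
            [gl.INVALID_OPERATION]: 'INVALID_OPERATION',
            [gl.OUT_OF_MEMORY]: 'OUT_OF_MEMORY',
            [gl.CONTEXT_LOST_WEBGL]: 'CONTEXT_LOST_WEBGL',
        };
        console.warn(label + ': ' + (names[err] || err));
    }
}

// Usage: call after a suspect operation, e.g.
// gl.drawArrays(gl.TRIANGLES, 0, 6); checkGlErrors(gl, 'drawArrays');

The full userscript follows: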
// ==UserScript==
// @name         tmp
// @namespace    http://tampermonkey.net/
// @version      0.1
// @description  try to take over the world!
// @author       You
// @match        https://www.youtube.com/*
// @icon         https://www.google.com/s2/favicons?domain=youtube.com
// @grant        none
// ==/UserScript==

(function() {

    // will set to true when video can be copied to texture
    var copyVideo = false;

    const video = document.getElementsByTagName("video")[0];

    // immediately after finding the video, create canvas and set its dimensions
    let canvas = document.createElement('canvas');
    canvas.setAttribute('id', 'glcanvas');
    canvas.setAttribute('width', '300');
    canvas.setAttribute('height', '200');
    canvas.setAttribute('style', 'position: absolute;');
    video.parentElement.appendChild(canvas);

    var playing = false;
    var timeupdate = false;

    // Waiting for these 2 events ensures
    // there is data in the video
    video.addEventListener('playing', function() {
        playing = true;
        checkReady();
    }, true);
    video.addEventListener('timeupdate', function() {
        timeupdate = true;
        checkReady();
    }, true);
    function checkReady() {
        if (playing && timeupdate) {
            copyVideo = true;
        }
    }

    // Initialize the GL context
    const gl = canvas.getContext("webgl");

    // Only continue if WebGL is available and working
    if (gl === null) {
        alert("Unable to initialize WebGL. Your browser or machine may not support it.");
        return;
    }

    // Vertex shader program
    const vsSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;

uniform vec2 u_resolution;

varying vec2 v_texCoord;

void main() {
   // convert the rectangle from pixels to 0.0 to 1.0
   vec2 zeroToOne = a_position / u_resolution;

   // convert from 0->1 to 0->2
   vec2 zeroToTwo = zeroToOne * 2.0;

   // convert from 0->2 to -1->+1 (clipspace)
   vec2 clipSpace = zeroToTwo - 1.0;

   gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);

   // pass the texCoord to the fragment shader
   // The GPU will interpolate this value between points.
   v_texCoord = a_texCoord;
}
`;

    // Fragment shader program
    const fsSource = `
precision mediump float;

// our texture
uniform sampler2D u_image;

// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;

void main() {
   gl_FragColor = texture2D(u_image, v_texCoord).bgra;
}
  `;

    // Initialize a shader program, so WebGL knows how to draw our data
    function initShaderProgram(gl, vsSource, fsSource) {
        const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
        const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);

        // Create the shader program
        const shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, vertexShader);
        gl.attachShader(shaderProgram, fragmentShader);
        gl.linkProgram(shaderProgram);

        // If creating the shader program failed, alert
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
            return null;
        }

        return shaderProgram;
    }

    // creates a shader of the given type, uploads the source and compiles it.
    function loadShader(gl, type, source) {
        const shader = gl.createShader(type);

        // Send the source to the shader object
        gl.shaderSource(shader, source);

        // Compile the shader program
        gl.compileShader(shader);

        // See if it compiled successfully
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
            gl.deleteShader(shader);
            return null;
        }

        return shader;
    }

    // Initialize a shader program; this is where all the lighting
    // for the vertices and so forth is established.
    const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

    // look up where the vertex data needs to go.
    var positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
    var texcoordLocation = gl.getAttribLocation(shaderProgram, "a_texCoord");

    // Create a buffer to put three 2d clip space points in
    var positionBuffer = gl.createBuffer();

    // Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    // Set a rectangle the same size as the image.
    setRectangle(gl, 0, 0, video.width, video.height);

    // provide texture coordinates for the rectangle.
    var texcoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
        0.0,  0.0,
        1.0,  0.0,
        0.0,  1.0,
        0.0,  1.0,
        1.0,  0.0,
        1.0,  1.0,
    ]), gl.STATIC_DRAW);

    // Create a texture.
    var texture = initTexture(gl);


    function drawScene() {

        // lookup uniforms
        var resolutionLocation = gl.getUniformLocation(shaderProgram, "u_resolution");

        //webglUtils.resizeCanvasToDisplaySize(gl.canvas);

        // Tell WebGL how to convert from clip space to pixels
        gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

        // Clear the canvas
        gl.clearColor(0.5,0.5,0.5,1);
        gl.clear(gl.COLOR_BUFFER_BIT);

        // Tell it to use our program (pair of shaders)
        gl.useProgram(shaderProgram);

        // Turn on the position attribute
        gl.enableVertexAttribArray(positionLocation);

        // Bind the position buffer.
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);

        // Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
        var size = 2;          // 2 components per iteration
        var type = gl.FLOAT;   // the data is 32bit floats
        var normalize = false; // don't normalize the data
        var stride = 0;        // 0 = move forward size * sizeof(type) each iteration to get the next position
        var offset = 0;        // start at the beginning of the buffer
        gl.vertexAttribPointer(
            positionLocation, size, type, normalize, stride, offset);

        // Turn on the texcoord attribute
        gl.enableVertexAttribArray(texcoordLocation);

        // bind the texcoord buffer.
        gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);

        // Tell the texcoord attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
        var size = 2;          // 2 components per iteration
        var type = gl.FLOAT;   // the data is 32bit floats
        var normalize = false; // don't normalize the data
        var stride = 0;        // 0 = move forward size * sizeof(type) each iteration to get the next position
        var offset = 0;        // start at the beginning of the buffer
        gl.vertexAttribPointer(
            texcoordLocation, size, type, normalize, stride, offset);

        // set the resolution
        gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);

        // Draw the rectangle.
        var primitiveType = gl.TRIANGLES;
        var offset = 0;
        var count = 6;
        gl.drawArrays(primitiveType, offset, count);
    }


    function setRectangle(gl, x, y, width, height) {
        var x1 = x;
        var x2 = x + width;
        var y1 = y;
        var y2 = y + height;
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
            x1, y1,
            x2, y1,
            x1, y2,
            x1, y2,
            x2, y1,
            x2, y2,
        ]), gl.STATIC_DRAW);
    }


    var then = 0;

    // Draw the scene repeatedly
    function render(now) {
        now *= 0.001;  // convert to seconds
        const deltaTime = now - then;
        then = now;



        if (copyVideo) {
            updateTexture(gl, texture, video);
        }

        drawScene();

        requestAnimationFrame(render);
    }
    requestAnimationFrame(render);


    function initTexture(gl) {
        const texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, texture);

        // Because video has to be download over the internet
        // they might take a moment until it's ready so
        // put a single pixel in the texture so we can
        // use it immediately.
        const level = 0;
        const internalFormat = gl.RGBA;
        const width = 1;
        const height = 1;
        const border = 0;
        const srcFormat = gl.RGBA;
        const srcType = gl.UNSIGNED_BYTE;
        const pixel = new Uint8Array([0, 0, 255, 255]);  // opaque blue
        gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                      width, height, border, srcFormat, srcType,
                      pixel);

        // Turn off mips and set  wrapping to clamp to edge so it
        // will work regardless of the dimensions of the video.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

        return texture;
    }


    function updateTexture(gl, texture, video) {
        const level = 0;
        const internalFormat = gl.RGBA;
        const srcFormat = gl.RGBA;
        const srcType = gl.UNSIGNED_BYTE;
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                      srcFormat, srcType, video);
    }

})();
2 Answers

The first problem, correctly identified by Atekihcan, is that you mixed something up in the NDC coordinate computation; it is actually much easier to send NDC coordinates directly. On top of that, the texture coordinates can be derived trivially from those positions, which saves you the setup of a second buffer.
The second problem is that your events do not fire in the order you expect (at least not for me, when reloading a page with a playing video and running the script). I think listening for the timeupdate event alone is sufficient, because the time cannot advance if the video isn't playing. Working code:
// ==UserScript==
// @name         tmp
// @namespace    http://tampermonkey.net/
// @version      0.1
// @description  try to take over the world!
// @author       You
// @match        https://www.youtube.com/*
// @icon         https://www.google.com/s2/favicons?domain=youtube.com
// @grant        none
// ==/UserScript==

(function() {
    // will set to true when video can be copied to texture
    var copyVideo = false;
    const video = document.getElementsByTagName("video")[0];

    // immediately after finding the video, create canvas and set its dimensions
    let canvas = document.createElement('canvas');
    canvas.setAttribute('id', 'glcanvas');
    canvas.setAttribute('width', '300');
    canvas.setAttribute('height', '200');
    canvas.setAttribute('style', 'position: absolute;');
    video.parentElement.appendChild(canvas);    
    video.addEventListener('timeupdate', function() {
        copyVideo=true;
    }, true);

    // Initialize the GL context
    const gl = canvas.getContext("webgl");

    // Only continue if WebGL is available and working
    if (gl === null) {
        alert("Unable to initialize WebGL. Your browser or machine may not support it.");
        return;
    }

    // Vertex shader program
    const vsSource = `
attribute vec2 a_position;
varying vec2 v_texCoord;

void main() {
   gl_Position = vec4(a_position, 0.0, 1.0);
   v_texCoord = a_position*.5+.5;
   v_texCoord.y = 1.-v_texCoord.y;
}
`;

    // Fragment shader program
    const fsSource = `
precision mediump float;

uniform sampler2D u_image;
varying vec2 v_texCoord;

void main() {
   gl_FragColor = texture2D(u_image, v_texCoord);
}
  `;

    const positionData = new Float32Array([
        -1.0,-1.0,
         1.0,-1.0,
        -1.0, 1.0,
         1.0,-1.0,
         1.0, 1.0,
        -1.0, 1.0
    ]);


    // Initialize a shader program, so WebGL knows how to draw our data
    function initShaderProgram(gl, vsSource, fsSource) {
        const shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, loadShader(gl, gl.VERTEX_SHADER, vsSource));
        gl.attachShader(shaderProgram, loadShader(gl, gl.FRAGMENT_SHADER, fsSource));
        gl.linkProgram(shaderProgram);

        // If creating the shader program failed, alert
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
            return null;
        }

        return shaderProgram;
    }

    // creates a shader of the given type, uploads the source and compiles it.
    function loadShader(gl, type, source) {
        const shader = gl.createShader(type);
        gl.shaderSource(shader, source);
        gl.compileShader(shader);

        // See if it compiled successfully
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
            gl.deleteShader(shader);
            return null;
        }

        return shader;
    }

    // Initialize shader program
    const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

    // look up where the vertex data needs to go.
    var positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
    var textureLoc = gl.getUniformLocation(shaderProgram, "u_image");

    // Create a vertex buffer
    var positionBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, positionData, gl.STATIC_DRAW);

    // Create texture
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 255]));
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    // Initialize rendering
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(1.0,0.0,0.0,1.0);

    function drawScene() {
        gl.clear(gl.COLOR_BUFFER_BIT);
        gl.useProgram(shaderProgram);

        // Turn on the vertex attribute
        gl.enableVertexAttribArray(positionLocation);
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
        gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);

        // Draw the rectangle
        gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    // Draw the scene repeatedly
    function render() {
        if (copyVideo)
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);

        drawScene();
        requestAnimationFrame(render);
    }
    requestAnimationFrame(render);
})();

Note: I also changed the texture format to RGB (the alpha channel is then implicitly 1), but that is beside the point.
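Concretely, only the format arguments of the upload call change; sampling an RGB texture returns 1.0 for alpha. Both variants for comparison (a sketch, matching the calls used above):

// RGBA upload (as in the MDN tutorial): alpha is taken from the source.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
// RGB upload (as in this answer): alpha reads as 1.0 in the shader.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);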

Silly me! I didn't even consider that the video playback itself could be the problem, and went down a rabbit hole of wrong conclusions instead. Thanks for pointing that out. And nice work deriving the texture coordinates from the positions. - Atekihcan
Thanks, this works! :) One small remaining issue is some aliasing, even though I create the canvas with the same width and height as the <video> element. Next I'll try to figure out how to draw some 2D shapes (on the canvas) over the video, and deal with the aliasing after that. Let me know if you have hints for either, or encourage me to post them as follow-up Stack Overflow questions. - root
@root This code is untested, but try the following two steps: (1) in the line gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); replace gl.LINEAR with gl.NEAREST ... (2) then add the line gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); ... and see whether those settings help reduce the aliasing. - VC.One
@VC.One The magnification filter only applies when the input image is upsampled, which doesn't happen here because the framebuffer resolution is smaller than or equal to the input resolution (and it defaults to LINEAR anyway). For the minification filter (applied when downsampling), NEAREST yields harsher aliasing, since it is simple point filtering, whereas LINEAR bilinearly interpolates the four closest texels; without mipmaps that is the best hardware filtering you can get (custom filtering via shaders aside), and in WebGL1 mipmaps are unsupported for non-power-of-two textures. - LJᛃ
@LJᛃ Good points. Still, let the asker try my suggestion, since it has been an unusual but effective workaround for aliasing problems I've had myself with multiple videos. The asker's code setup is similar to mine (I believe we learned from the same people), so it may well see a similar benefit. PS: the aliasing is most noticeable in fullscreen mode (that is, setting gl.TEXTURE_MAG_FILTER does address it). - VC.One
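For reference, a minimal sketch of the texture-parameter combinations debated above (untested, as noted in the comments; which looks better depends on how the canvas scales the video):

gl.bindTexture(gl.TEXTURE_2D, texture);
// VC.One's suggestion: point sampling for both filters.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// LJᛃ's counterpoint: without mipmaps (unavailable for non-power-of-two
// textures in WebGL1), LINEAR minification is the best hardware filtering:
// gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);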

Edit: As has been pointed out below, the first two paragraphs of this answer are completely wrong.
In short: this may not be feasible without a backend server fetching the video data first.
If you look at the MDN tutorial you followed, the video object passed to texImage2D is an actual MP4 video. In your script, however, the video object you have access to (document.getElementsByTagName("video")[0]) is just a DOM object. You don't have the actual video data, and getting hold of YouTube's video data is not easy. The YouTube player doesn't fetch the video data in one piece; it streams it in chunks. I'm not sure about this, but if your goal is real-time video effects, getting around that will be very difficult. I found some discussions on the matter (link1, link2) which may help.
That said, there are some problems with your code from a WebGL point of view. Ideally it should display a blue rectangle, since that is the texture data you create, rather than the initial glClearColor color. And once the video starts playing, it should switch to the video texture (which, for the reasons explained above, would render as black).
I believe this is caused by how you set up the position data and do the clip-space computation in the shader. You can skip that step and send normalized device coordinates directly. Below is the updated code, cleaned up a bit to keep it shorter, which behaves as expected:
// ==UserScript==
// @name         tmp
// @namespace    http://tampermonkey.net/
// @version      0.1
// @description  try to take over the world!
// @author       You
// @match        https://www.youtube.com/*
// @icon         https://www.google.com/s2/favicons?domain=youtube.com
// @grant        none
// ==/UserScript==

(function() {
    // will set to true when video can be copied to texture
    var copyVideo = false;
    const video = document.getElementsByTagName("video")[0];

    // immediately after finding the video, create canvas and set its dimensions
    let canvas = document.createElement('canvas');
    canvas.setAttribute('id', 'glcanvas');
    canvas.setAttribute('width', '300');
    canvas.setAttribute('height', '200');
    canvas.setAttribute('style', 'position: absolute;');
    video.parentElement.appendChild(canvas);

    var playing = false;
    var timeupdate = false;

    // Waiting for these 2 events ensures
    // there is data in the video
    video.addEventListener('playing', function() {
        playing = true;
        checkReady();
    }, true);
    video.addEventListener('timeupdate', function() {
        timeupdate = true;
        checkReady();
    }, true);
    function checkReady() {
        if (playing && timeupdate) {
            copyVideo = true;
        }
    }

    // Initialize the GL context
    const gl = canvas.getContext("webgl");

    // Only continue if WebGL is available and working
    if (gl === null) {
        alert("Unable to initialize WebGL. Your browser or machine may not support it.");
        return;
    }

    // Vertex shader program
    const vsSource = `
attribute vec2 a_position;
attribute vec2 a_texCoord;

varying vec2 v_texCoord;

void main() {
   gl_Position = vec4(a_position, 0.0, 1.0);
   v_texCoord = a_texCoord;
}
`;

    // Fragment shader program
    const fsSource = `
precision mediump float;

uniform sampler2D u_image;
varying vec2 v_texCoord;

void main() {
   gl_FragColor = texture2D(u_image, v_texCoord);
}
  `;

    const positionData = new Float32Array([
        -1.0,-1.0,
         1.0,-1.0,
        -1.0, 1.0,
         1.0,-1.0,
         1.0, 1.0,
        -1.0, 1.0
    ]);

    const texcoordData = new Float32Array([
        0.0, 0.0,
        1.0, 0.0,
        0.0, 1.0,
        0.0, 1.0,
        1.0, 0.0,
        1.0, 1.0,
    ]);

    // Initialize a shader program, so WebGL knows how to draw our data
    function initShaderProgram(gl, vsSource, fsSource) {
        const shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, loadShader(gl, gl.VERTEX_SHADER, vsSource));
        gl.attachShader(shaderProgram, loadShader(gl, gl.FRAGMENT_SHADER, fsSource));
        gl.linkProgram(shaderProgram);

        // If creating the shader program failed, alert
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
            return null;
        }

        return shaderProgram;
    }

    // creates a shader of the given type, uploads the source and compiles it.
    function loadShader(gl, type, source) {
        const shader = gl.createShader(type);
        gl.shaderSource(shader, source);
        gl.compileShader(shader);

        // See if it compiled successfully
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
            gl.deleteShader(shader);
            return null;
        }

        return shader;
    }

    // Initialize shader program
    const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

    // look up where the vertex data needs to go.
    var positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
    var texcoordLocation = gl.getAttribLocation(shaderProgram, "a_texCoord");
    var textureLoc = gl.getUniformLocation(shaderProgram, "u_image");

    // Create a vertex buffer
    var positionBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, positionData, gl.STATIC_DRAW);

    // Create texture coordinate buffer
    var texcoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, texcoordData, gl.STATIC_DRAW);

    // Create texture
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 255, 255]));

    // Initialize rendering
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(1.0,0.0,0.0,1.0);

    function drawScene() {
        gl.clear(gl.COLOR_BUFFER_BIT);
        gl.useProgram(shaderProgram);

        // Turn on the vertex attribute
        gl.enableVertexAttribArray(positionLocation);
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
        gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);

        // Turn on the texcoord attribute
        gl.enableVertexAttribArray(texcoordLocation);
        gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
        gl.vertexAttribPointer(texcoordLocation, 2, gl.FLOAT, false, 0, 0);

        // Draw the rectangle
        gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    // Draw the scene repeatedly
    function render() {
        if (copyVideo) {
            gl.bindTexture(gl.TEXTURE_2D, texture);
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
        }

        drawScene();
        requestAnimationFrame(render);
    }
    requestAnimationFrame(render);
})();

+1 for sending NDC directly, -2 because this is just plain wrong: a) YouTube has been using an HTML5 player for years now; b) every existing video player streams its data in chunks; c) the very first snippet at the top of the tutorial shows how to set up a <video> element to load the data. - LJᛃ
I upvoted because part of this answer may have helped the other one, and punishing that with an overall negative score didn't seem right to me. Thanks! - root
