如何将 HTML5 播放器中的视频绘制并投影到 WebGL 中的 3D 平面上?

问题描述 投票:0回答:1

我正在使用 Unity 构建 WebGL 应用程序,我发现自己需要从 HTML 播放器抓取视频并将其绘制在 3D 空间的平面上。

我知道你可以在 CanvasRenderingContext2D 上调用 drawImage() 并传入对视频播放器的引用,当函数运行时,当前帧将被绘制到画布上。

我能找到的与该函数最接近的 3D 等效项是 WebGL2RenderingContext.texImage3D()。但是,我并不完全理解它是如何工作的,当我尝试测试它时,我收到了以下异常:
Uncaught DOMException: The operation is insecure.
我使用了自己的本地视频文件,所以它不可能是CORS,但我不知道它是什么。

您可以在此 GitHub 存储库中查看测试项目。


我在这里发现了一个类似的问题,但不幸的是,答案展示的似乎是如何绘制预加载的纹理。我不知道如何从视频播放器中获取纹理并传递它们,也不确定这样做是否快到足以处理每一帧。


为了提供一些背景信息:我试图在我的 Unity/WebGL 应用程序中显示 HLS 实时流。我可以下载 .ts(MPEG-2 传输流)视频片段并将它们排列成连贯的视频流,但 Unity 的内置视频播放器不支持这种格式。

作为解决方案,我想我可以在 HTML5 播放器中抓取视频(如有必要,使用 hls.js),并使用 JavaScript 将纹理注入到 WebGL 应用程序中,每帧。

Unity 允许您从其 C# 脚本内部运行 JavaScript 代码,因此计时可能不会成为问题,获取目标平面的世界比例/位置也不会成为问题。我只需要编写 JavaScript 函数来以某种方式绘制纹理。

这是我当前的代码

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>WebGL</title>
    <script src="https://unpkg.com/@ffmpeg/[email protected]/dist/ffmpeg.min.js"></script>
    <style>
        body {
            background-color: aquamarine;
        }
    </style>
</head>
<body>
    <video muted autoplay width="480" height="270">
        <source src="./test.mp4" type="video/mp4" />
    </video>
    <br>
    <canvas width="1080" height="720"></canvas>
    <button onclick="takeScreenshot()">Capture</button>
    
    <script>
        // Question's original attempt: grab the current video frame and upload it
        // into a WebGL2 context with texImage3D.
        // NOTE(review): no texture is ever created or bound (gl.createTexture /
        // gl.bindTexture are missing), so this call cannot work as written; a
        // video frame is normally uploaded with texImage2D to a TEXTURE_2D target.
        function takeScreenshot() {
            var video = document.querySelector("video");
            var canvas = document.querySelector("canvas");

            var gl = canvas.getContext("webgl2");

            gl.texImage3D(
                gl.TEXTURE_3D,    // target (enum)
                0,                // level of detail
                gl.RGBA,          // internalFormat
                1920,             // width of texture
                1080,             // height of texture
                1,                // depth
                0,                // border
                gl.RGBA,          // format
                gl.UNSIGNED_BYTE, // type
                video,            // source
            );
        }
    </script>
</body>
</html>
unity-game-engine video webgl http-live-streaming unity-webgl
1个回答
2
投票

这里是设置 webGL 对象(平面)的示例代码,它可以接收视频的像素。

基本上:

  • 使用两个三角形创建一个盒子/矩形形状...

  • 然后将视频像素投影到该矩形上(作为纹理图)。

      0-------1
      |       |
      3-------2
    
    //# two sets of... connected 3-points of a triangle
    var vertexIndices = [ 0,  2,  1,      0,  2,  3, ];
    

下面的示例代码还创建了一些所需的 GPU 着色器和程序。
尝试一下

如果任何编码人员只想做 GPU 像素效果,请在片段着色器中编写效果代码。
(请参阅代码部分:

//# example of basic colour effect
)。

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>WebGL</title>
    
    <!--
    <script src="https://unpkg.com/@ffmpeg/[email protected]/dist/ffmpeg.min.js"></script>
    -->
    
    <style> body {background-color: aquamarine; } </style>  
</head>

<body>
    <video id="myVideo" controls muted autoplay width="480" height="270">
    <source src="video.mp4" type="video/mp4" />
    </video>
    <br>
    
    <button id="capture" onclick="takeScreenshot()"> Capture </button>
    <br><br>
    
    <!--
    <canvas id="myCanvas" width="1080" height="720"></canvas>
    -->
    
     <canvas id="myCanvas" width="480" height="270"></canvas>
    
<!-- ########## Shader code ###### -->
<!-- ### Shader code here -->


<!-- Fragment shader program -->
<script id="shader-fs" type="x-shader/x-fragment">

//<!-- //## code for pixel effects goes here if needed -->

//# vDirection: interpolated quad-space position from the vertex shader
//# uSampler:   2D texture refreshed with the current video frame
varying mediump vec2 vDirection;
uniform sampler2D uSampler;

void main(void) 
{
    //# get current video pixel's color (no FOR-loops needed like in JS Canvas)
    //# remap vDirection from [-1,1] into texture coordinates [0,1] before sampling
    gl_FragColor = texture2D(uSampler, vec2(vDirection.x * 0.5 + 0.5, vDirection.y * 0.5 + 0.5));
    
    /*
    //# example of basic colour effect
    gl_FragColor.r = ( gl_FragColor.r * 1.15 );
    gl_FragColor.g = ( gl_FragColor.g * 0.8 );
    gl_FragColor.b = ( gl_FragColor.b * 0.45 );
    */
}

</script>


<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">

    //# aVertexPosition: 2D corner of the quad, supplied per-vertex
    attribute mediump vec2 aVertexPosition;
    varying mediump vec2 vDirection;

    void main( void ) 
    {
        //# NOTE(review): multiplying the whole vec4 by 2.0 also scales w to 2.0,
        //# so after the perspective divide x/y are effectively unchanged — confirm
        //# this factor is intentional.
        gl_Position = vec4(aVertexPosition, 1.0, 1.0) * 2.0;
        //# pass the raw [-1,1] position to the fragment shader for texture lookup
        vDirection = aVertexPosition;
    }
    


</script>



<!-- ### END Shader code... -->

    
<script>

//# WebGL setup

//# the HTML5 <video> element whose frames are copied into a texture each capture
var video = document.getElementById('myVideo');

const glcanvas = document.getElementById('myCanvas');
//# prefer the standard "webgl" context, fall back to the legacy prefixed name
const gl = ( ( glcanvas.getContext("webgl") ) || ( glcanvas.getContext("experimental-webgl") ) );

//# check if WebGL is available..
if (gl && gl instanceof WebGLRenderingContext) { console.log( "WebGL is available"); }
else { console.log( "WebGL is NOT available" ); } //# use regular JS canvas functions if this happens...


//# create and attach the shader program to the webGL context
//# (attachShader fills these: location maps keyed by attribute/uniform name)
var attributes, uniforms, program;

/**
 * Compile both shaders, link them into a program, activate it, and cache
 * attribute/uniform locations in the module-level `attributes`/`uniforms` maps.
 *
 * @param {Object} params
 * @param {string} params.fragmentShaderName - id of the fragment shader <script>
 * @param {string} params.vertexShaderName   - id of the vertex shader <script>
 * @param {string[]} params.attributes       - attribute names to look up/enable
 * @param {string[]} params.uniforms         - uniform names to look up
 */
function attachShader( params ) 
{
    //# `var` added: these were implicit globals in the original
    var fragmentShader = getShaderByName(params.fragmentShaderName);
    var vertexShader = getShaderByName(params.vertexShaderName);

    program = gl.createProgram();
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);

    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) 
    { alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(program)); }

    gl.useProgram(program);

    //# cache attribute locations and enable each vertex attribute array
    attributes = {};

    for (var i = 0; i < params.attributes.length; i++) 
    {
        var attributeName = params.attributes[i];
        attributes[attributeName] = gl.getAttribLocation(program, attributeName);
        gl.enableVertexAttribArray(attributes[attributeName]);
    }

    //# cache uniform locations
    //# Bug fix: the original also called
    //# gl.enableVertexAttribArray(attributes[uniformName]) here, which looked up
    //# a uniform name in the *attributes* map (always undefined) and is not a
    //# valid operation for uniforms in any case. Removed.
    uniforms = {};

    for (i = 0; i < params.uniforms.length; i++) 
    {
        var uniformName = params.uniforms[i];
        uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
    }
    
}

/**
 * Look up a <script> element by id, read its raw GLSL text, and compile it as
 * a fragment or vertex shader (chosen by the element's "type" attribute).
 * Returns the compiled WebGLShader, or null when compilation fails.
 */
function getShaderByName( id ) 
{
    var scriptEl = document.getElementById(id);

    //# concatenate the source from the element's text child nodes (nodeType 3)
    var source = "";
    for (var node = scriptEl.firstChild; node; node = node.nextSibling) 
    {
        if (node.nodeType === 3) { source += node.textContent; }
    }

    //# anything not explicitly a fragment shader is treated as a vertex shader
    var shaderType = (scriptEl.type === "x-shader/x-fragment")
        ? gl.FRAGMENT_SHADER
        : gl.VERTEX_SHADER;

    var shader = gl.createShader(shaderType);
    gl.shaderSource(shader, source);
    gl.compileShader(shader);

    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) 
    {
        alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
        return null;
    }
    return shader;
}

//# attach shader
//# NOTE(review): 'someVal' is not declared in either shader, so its location
//# will be null — getUniformLocation tolerates this; kept as in the original.
attachShader({
fragmentShaderName: 'shader-fs',
vertexShaderName: 'shader-vs',
attributes: ['aVertexPosition'],
uniforms: ['someVal', 'uSampler'],
});

// some webGL initialization
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clearDepth(1.0);
gl.disable(gl.DEPTH_TEST);

//# full-quad geometry: 4 corners, 2 floats per vertex
positionsBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
var positions = [
  -1.0, -1.0,
   1.0, -1.0,
   1.0,  1.0,
  -1.0,  1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);

//# NOTE(review): vertexColors and cBuffer are never used again below — dead setup
var vertexColors = [0xff00ff88,0xffffffff];
    
var cBuffer = gl.createBuffer();

//# index buffer: two triangles (0,1,2) and (0,2,3) forming the rectangle
verticesIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);

var vertexIndices = [ 0,  1,  2,      0,  2,  3, ];

gl.bufferData(  
                gl.ELEMENT_ARRAY_BUFFER,
                new Uint16Array(vertexIndices), gl.STATIC_DRAW
            );

//# the texture that updateTexture() refreshes with video pixels
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);

//# must be LINEAR to avoid subtle pixelation (double-check this... test other options like NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

//# CLAMP_TO_EDGE + non-mipmapped filters permit non-power-of-two video sizes
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);

//# Copy the <video> element's current frame into our TEXTURE_2D.
updateTexture = function() 
{
    gl.bindTexture(gl.TEXTURE_2D, texture);

    //# flip rows: video frames are top-down, GL texture rows are bottom-up
    gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

    //# next line fails in Safari if input video is NOT from same domain/server as this html code
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);

    //# leave no texture bound once the upload is done
    gl.bindTexture(gl.TEXTURE_2D, null);
};

</script>

<script>


//# Vars for video frame grabbing when system/browser provides a new frame
//# (vendor-prefixed fallbacks for older browsers)
var requestAnimationFrame = (window.requestAnimationFrame || window.mozRequestAnimationFrame ||
                            window.webkitRequestAnimationFrame || window.msRequestAnimationFrame);

var cancelAnimationFrame = (window.cancelAnimationFrame || window.mozCancelAnimationFrame);
///////////////////////////////////////////////


/**
 * Render one video frame onto the WebGL quad, then schedule the next capture
 * via requestAnimationFrame — the function loops itself once started.
 */
function takeScreenshot( ) 
{
    //# readyState >= 3 (HAVE_FUTURE_DATA) means the current frame is displayable
    var frameReady = ( video.readyState >= 3 );

    if( frameReady )
    {
        //# refresh texture pixels with the current video frame
        updateTexture();

        //# apply our program
        gl.useProgram(program);

        //# point the position attribute at the quad's vertex buffer
        gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
        gl.vertexAttribPointer(attributes['aVertexPosition'], 2, gl.FLOAT, false, 0, 0);

        //# bind the video texture to unit 0 and expose it to the fragment shader
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.uniform1i(uniforms['uSampler'], 0);

        //# draw the two indexed triangles on the GPU
        gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
        gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
    }

    //# consider adding event listener for video pause to set value as... cancelAnimationFrame( takeScreenshot ); 
    requestAnimationFrame( takeScreenshot ); 
}

//////////////////////////////////////

// Kept for reference: the question's original (non-working) approach.
// NOTE(review): `canvas` is not defined anywhere in this page (the canvas
// variable above is `glcanvas`), and that canvas already holds a "webgl"
// context, so getContext("webgl2") would return null anyway.
function takeScreenshot_old() 
{
    var gl = canvas.getContext("webgl2");

    // NOTE(review): called with no texture created or bound; a video frame is
    // normally uploaded via texImage2D to a TEXTURE_2D target instead.
    gl.texImage3D(
        gl.TEXTURE_3D,    // target (enum)
        0,                // level of detail
        gl.RGBA,          // internalFormat
        1920,             // width of texture
        1080,             // height of texture
        1,                // depth
        0,                // border
        gl.RGBA,          // format
        gl.UNSIGNED_BYTE, // type
        video,            // source
    );
}


    </script>
</body>
</html>
© www.soinside.com 2019 - 2024. All rights reserved.