Three.js Uncaught SecurityError: Failed to execute 'texImage2D' on 'WebGLRenderingContext'


This question stems from an earlier issue where an HTML5 video would not play on an Android phone. ()

As mrdoob mentioned, video.play() has to be triggered explicitly on mobile devices from a user-initiated event such as a click. After doing this I was able to play the file. However, I can only hear the audio track of the video; the screen stays blank. In addition, I see the following exception in the dev console:

Uncaught SecurityError: Failed to execute 'texImage2D' on 'WebGLRenderingContext': The video element contains cross-origin data, and may not be loaded. three.min.js:507

I am not loading any images from my own code, and as you can see below, the error appears to be thrown inside the three.min.js file. (The code below is a slightly modified version of the three.js webgl materials video example, tweaked to run on mobile devices.)

Note that this only happens on mobile devices; the example works perfectly on desktop.

Any help is greatly appreciated.

<!DOCTYPE html>
<html lang="en">
    <head>
        <title>three.js webgl - materials - video</title>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
        <style>
            body {
                background-color: #000;
                color: #fff;
                margin: 0px;
                overflow: hidden;
                font-family:Monospace;
                font-size:13px;
                text-align:center;
                font-weight: bold;
                text-align:center;
            }

            a {
                color:#0078ff;
            }

            #info {
                color:#fff;
                position: absolute;
                top: 0px; width: 100%;
                padding: 5px;
                z-index:100;
            }

        </style>
    </head>
    <body>

        <div id="info">
            <a href="http://threejs.org" target="_blank">three.js</a> - webgl video demo. playing <a href="http://durian.blender.org/" target="_blank">sintel</a> trailer
        </div>

        <script src="js/three.min.js"></script>

        <script src="js/shaders/ConvolutionShader.js"></script>
        <script src="js/shaders/CopyShader.js"></script>

        <script src="js/postprocessing/EffectComposer.js"></script>
        <script src="js/postprocessing/RenderPass.js"></script>
        <script src="js/postprocessing/MaskPass.js"></script>
        <script src="js/postprocessing/BloomPass.js"></script>
        <script src="js/postprocessing/ShaderPass.js"></script>

        <script src="js/Detector.js"></script>

        <video id="video" autoplay loop style="display:none">
            <source src="textures/sintel.mp4" type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
            <source src="textures/sintel.ogv" type='video/ogg; codecs="theora, vorbis"'>
        </video>

        <script>

            if ( ! Detector.webgl ) Detector.addGetWebGLMessage();

            var container;

            var camera, scene, renderer, element;

            var video, texture, material, mesh;

            var composer;

            var mouseX = 0;
            var mouseY = 0;

            var windowHalfX = window.innerWidth / 2;
            var windowHalfY = window.innerHeight / 2;

            var cube_count,

                meshes = [],
                materials = [],

                xgrid = 20,
                ygrid = 10;

            init();
            animate();

            function init() {

                container = document.createElement( 'div' );
                document.body.appendChild( container );

                camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 10000 );
                camera.position.z = 500;

                scene = new THREE.Scene();

                var light = new THREE.DirectionalLight( 0xffffff );
                light.position.set( 0.5, 1, 1 ).normalize();
                scene.add( light );

                renderer = new THREE.WebGLRenderer( { antialias: false } );
                renderer.setSize( window.innerWidth, window.innerHeight );

                element=renderer.domElement;
                element.addEventListener('click', fullscreen, false);

                container.appendChild( renderer.domElement );

                video = document.getElementById( 'video' );

                texture = new THREE.Texture( video );
                texture.minFilter = THREE.LinearFilter;
                texture.magFilter = THREE.LinearFilter;
                texture.format = THREE.RGBFormat;
                texture.generateMipmaps = false;

                //

                var i, j, ux, uy, ox, oy,
                    geometry,
                    xsize, ysize;

                ux = 1 / xgrid;
                uy = 1 / ygrid;

                xsize = 480 / xgrid;
                ysize = 204 / ygrid;

                var parameters = { color: 0xffffff, map: texture },
                    material_base = new THREE.MeshLambertMaterial( parameters );

                renderer.initMaterial( material_base, scene.__lights, scene.fog );

                cube_count = 0;

                for ( i = 0; i < xgrid; i ++ )
                for ( j = 0; j < ygrid; j ++ ) {

                    ox = i;
                    oy = j;

                    geometry = new THREE.BoxGeometry( xsize, ysize, xsize );

                    change_uvs( geometry, ux, uy, ox, oy );

                    materials[ cube_count ] = new THREE.MeshLambertMaterial( parameters );

                    material = materials[ cube_count ];

                    material.hue = i/xgrid;
                    material.saturation = 1 - j/ygrid;

                    material.color.setHSL( material.hue, material.saturation, 0.5 );

                    mesh = new THREE.Mesh( geometry, material );

                    mesh.position.x =   ( i - xgrid/2 ) * xsize;
                    mesh.position.y =   ( j - ygrid/2 ) * ysize;
                    mesh.position.z = 0;

                    mesh.scale.x = mesh.scale.y = mesh.scale.z = 1;

                    scene.add( mesh );

                    mesh.dx = 0.001 * ( 0.5 - Math.random() );
                    mesh.dy = 0.001 * ( 0.5 - Math.random() );

                    meshes[ cube_count ] = mesh;

                    cube_count += 1;

                }

                renderer.autoClear = false;

                document.addEventListener( 'mousemove', onDocumentMouseMove, false );

                // postprocessing

                var renderModel = new THREE.RenderPass( scene, camera );
                var effectBloom = new THREE.BloomPass( 1.3 );
                var effectCopy = new THREE.ShaderPass( THREE.CopyShader );

                effectCopy.renderToScreen = true;

                composer = new THREE.EffectComposer( renderer );

                composer.addPass( renderModel );
                composer.addPass( effectBloom );
                composer.addPass( effectCopy );

                //

                window.addEventListener( 'resize', onWindowResize, false );

            }
            function fullscreen() {

                video.play();
                console.log(video);
                  if (container.requestFullscreen) {
                    container.requestFullscreen();
                  } else if (container.msRequestFullscreen) {
                    container.msRequestFullscreen();
                  } else if (container.mozRequestFullScreen) {
                    container.mozRequestFullScreen();
                  } else if (container.webkitRequestFullscreen) {
                    container.webkitRequestFullscreen();
                  }
                }

            function onWindowResize() {

                windowHalfX = window.innerWidth / 2;
                windowHalfY = window.innerHeight / 2;

                camera.aspect = window.innerWidth / window.innerHeight;
                camera.updateProjectionMatrix();

                renderer.setSize( window.innerWidth, window.innerHeight );
                composer.reset();

            }

            function change_uvs( geometry, unitx, unity, offsetx, offsety ) {

                var faceVertexUvs = geometry.faceVertexUvs[ 0 ];

                for ( var i = 0; i < faceVertexUvs.length; i ++ ) {

                    var uvs = faceVertexUvs[ i ];

                    for ( var j = 0; j < uvs.length; j ++ ) {

                        var uv = uvs[ j ];

                        uv.x = ( uv.x + offsetx ) * unitx;
                        uv.y = ( uv.y + offsety ) * unity;

                    }

                }

            }


            function onDocumentMouseMove(event) {

                mouseX = ( event.clientX - windowHalfX );
                mouseY = ( event.clientY - windowHalfY ) * 0.3;

            }

            //

            function animate() {

                requestAnimationFrame( animate );

                render();

            }

            var h, counter = 1;

            function render() {

                var time = Date.now() * 0.00005;

                camera.position.x += ( mouseX - camera.position.x ) * 0.05;
                camera.position.y += ( - mouseY - camera.position.y ) * 0.05;

                camera.lookAt( scene.position );

                if ( video.readyState === video.HAVE_ENOUGH_DATA ) {

                    if ( texture ) texture.needsUpdate = true;

                }

                for ( i = 0; i < cube_count; i ++ ) {

                    material = materials[ i ];

                    h = ( 360 * ( material.hue + time ) % 360 ) / 360;
                    material.color.setHSL( h, material.saturation, 0.5 );

                }

                if ( counter % 1000 > 200 ) {

                    for ( i = 0; i < cube_count; i ++ ) {

                        mesh = meshes[ i ];

                        mesh.rotation.x += 10 * mesh.dx;
                        mesh.rotation.y += 10 * mesh.dy;

                        mesh.position.x += 200 * mesh.dx;
                        mesh.position.y += 200 * mesh.dy;
                        mesh.position.z += 400 * mesh.dx;

                    }

                }

                if ( counter % 1000 === 0 ) {

                    for ( i = 0; i < cube_count; i ++ ) {

                        mesh = meshes[ i ];

                        mesh.dx *= -1;
                        mesh.dy *= -1;

                    }

                }

                counter ++;

                renderer.clear();
                composer.render();

            }


        </script>

    </body>
</html>

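The SecurityError means WebGL is refusing to upload frames from a video it treats as cross-origin, which is what happens when the page is opened from file:// or the video is fetched from another origin without CORS. The snippet below shows the usual fix: declare the video element CORS-enabled before assigning its source, then serve the page and the video over HTTP.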
video.setAttribute('crossorigin', 'anonymous'); // must be set before the source is assigned
video.src = "video/test.m4v";   // or:
video.src = "video/test.mp4";
video.load(); // must call after setting/changing source
video.play();

Then serve the directory over HTTP instead of opening the file from the file system, for example:

$ python -m SimpleHTTPServer
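For context, here is a minimal sketch of how this maps onto the example above, assuming three.min.js is loaded as in the question and that the page and textures/sintel.mp4 are served from the same HTTP origin (the ids and texture settings mirror the question's code):

<video id="video" autoplay loop crossorigin="anonymous" style="display:none">
    <source src="textures/sintel.mp4" type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
</video>

<script>
    var video = document.getElementById( 'video' );

    // With crossorigin set and the file served over HTTP, texImage2D no
    // longer treats the video frames as tainted.
    var texture = new THREE.Texture( video );
    texture.minFilter = THREE.LinearFilter;
    texture.magFilter = THREE.LinearFilter;
    texture.format = THREE.RGBFormat;
    texture.generateMipmaps = false;

    // Mobile browsers still require playback to start from a user gesture.
    document.addEventListener( 'click', function () {
        video.play();
    }, false );
</script>

Newer three.js releases also provide THREE.VideoTexture, which wraps this pattern and updates the texture from the video automatically on each frame.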