Javascript 使用GLSL着色器将WebRTC视频传递到几何体
这是我第一次在WebGL环境中使用顶点着色器。我想用视频对基本体进行纹理处理,但不只是将视频映射到曲面I;我试图将视频的亮度转换为顶点位移。这有点像Rutt Etra,但是是数字格式的。明亮的像素应将顶点向前推,而较暗的像素则相反。谁能告诉我我做错了什么?我找不到此错误的引用 在编译代码时,使用sampler2D和texture2D时,我会得到以下结果: Mozilla/5.0(Macintosh;Intel Mac OS X 10_8_4)AppleWebKit/537.36(KHTML,像Gecko)Chrome/29.0.1547.65 Safari/537.36 | WebGL 1.0(OpenGL ES 2.0 Chromium)| WebKit | WebKit WebGL | WebGL GLSL ES 1.0(OpenGL ES 1.0 Chromium)三个。js:264 错误:0:57:“ftTransform”:未找到匹配的重载函数 错误:0:57:“分配”:无法从“常量mediump float”转换为“Position highp float的4分量向量” 错误:0:60:“gl_TextureMatrix”:未声明的标识符 错误:0:60:'gl_TextureMatrix'“[”的左侧不是数组、矩阵或向量类型Javascript 使用GLSL着色器将WebRTC视频传递到几何体,javascript,opengl-es,three.js,webgl,Javascript,Opengl Es,Three.js,Webgl,这是我第一次在WebGL环境中使用顶点着色器。我想用视频对基本体进行纹理处理,但不只是将视频映射到曲面I;我试图将视频的亮度转换为顶点位移。这有点像Rutt Etra,但是是数字格式的。明亮的像素应将顶点向前推,而较暗的像素则相反。谁能告诉我我做错了什么?我找不到此错误的引用 在编译代码时,使用sampler2D和texture2D时,我会得到以下结果: Mozilla/5.0(Macintosh;Intel Mac OS X 10_8_4)AppleWebKit/537.36(KHTML,像G
错误:0:60:“gl_MultiTexCoord0”:未声明的标识符 3.js:257
<!doctype html>
<html>
<head>
<title>boiler plate for three.js</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<script src="vendor/three.js/Three.js"></script>
<script src="vendor/three.js/Detector.js"></script>
<script src="vendor/three.js/Stats.js"></script>
<script src="vendor/threex/THREEx.screenshot.js"></script>
<script src="vendor/threex/THREEx.FullScreen.js"></script>
<script src="vendor/threex/THREEx.WindowResize.js"></script>
<script src="vendor/threex.dragpancontrols.js"></script>
<script src="vendor/headtrackr.js"></script>
<style>
body {
overflow : hidden;
padding : 0;
margin : 0;
color : #222;
background-color: #BBB;
font-family : arial;
font-size : 100%;
}
#info .top {
position : absolute;
top : 0px;
width : 100%;
padding : 5px;
text-align : center;
}
#info a {
color : #66F;
text-decoration : none;
}
#info a:hover {
text-decoration : underline;
}
#info .bottom {
position : absolute;
bottom : 0px;
right : 5px;
padding : 5px;
}
</style>
</head>
<body>
<!-- three.js container -->
<div id="container"></div>
<!-- info on screen display -->
<div id="info">
<!--<div class="top">
<a href="http://learningthreejs.com/blog/2011/12/20/boilerplate-for-three-js/" target="_blank">LearningThree.js</a>
boiler plate for
<a href="https://github.com/mrdoob/three.js/" target="_blank">three.js</a>
</div>-->
<div class="bottom" id="inlineDoc" >
- <i>p</i> for screenshot
</div>
</div>
<canvas id="compare" width="320" height="240" style="display:none"></canvas>
<video id="vid" autoplay loop></video>
<script type="x-shader/x-vertex" id="vertexShader">
varying vec2 texcoord0;
void main()
{
// perform standard transform on vertex
gl_Position = ftransform();
// transform texcoords
texcoord0 = vec2(gl_TextureMatrix[0] * gl_MultiTexCoord0);
}
</script>
<script type="x-shader/x-vertex" id="fragmentShader">
varying vec2 texcoord0;
uniform sampler2D tex0;
uniform vec2 imageSize;
uniform float coef;
const vec4 lumcoeff = vec4(0.299,0.587,0.114,0.);
void main (void)
{
vec4 pixel = texture2D(tex0, texcoord0);
float luma = dot(lumcoeff, pixel);
gl_FragColor = vec4((texcoord0.x / imageSize.x), luma, (texcoord0.y / imageSize.y) , 1.0);
}
</script>
<script type="text/javascript">
var stats, scene, renderer;
var camera, cameraControls;
var videoInput = document.getElementById('vid');
var canvasInput = document.getElementById('compare');
var projector = new THREE.Projector();
var gl;
var mesh,
cube,
attributes,
uniforms,
material,
materials;
var videoTexture = new THREE.Texture( videoInput );
if( !init() ) animate();
// init the scene
function init(){
if( Detector.webgl ){
renderer = new THREE.WebGLRenderer({
antialias : true, // to get smoother output
preserveDrawingBuffer : true // to allow screenshot
});
renderer.setClearColorHex( 0xBBBBBB, 1 );
// uncomment if webgl is required
//}else{
// Detector.addGetWebGLMessage();
// return true;
}else{
renderer = new THREE.CanvasRenderer();
gl=renderer;
}
renderer.setSize( window.innerWidth, window.innerHeight );
document.getElementById('container').appendChild(renderer.domElement);
// create a scene
scene = new THREE.Scene();
// put a camera in the scene
camera = new THREE.PerspectiveCamera( 23, window.innerWidth / window.innerHeight, 1, 100000 );
camera.position.z = 0;
scene.add( camera );
//
// // create a camera contol
// cameraControls = new THREEx.DragPanControls(camera)
// transparently support window resize
// THREEx.WindowResize.bind(renderer, camera);
// allow 'p' to make screenshot
THREEx.Screenshot.bindKey(renderer);
// allow 'f' to go fullscreen where this feature is supported
if( THREEx.FullScreen.available() ){
THREEx.FullScreen.bindKey();
document.getElementById('inlineDoc').innerHTML += "- <i>f</i> for fullscreen";
}
materials = new THREE.MeshLambertMaterial({
map : videoTexture
});
attributes = {};
uniforms = {
tex0: {type: 'mat2', value: materials},
imageSize: {type: 'f', value: []},
coef: {type: 'f', value: 1.0}
};
//Adding a directional light source to see anything..
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(1, 1, 1).normalize();
scene.add(directionalLight);
// video styling
videoInput.style.position = 'absolute';
videoInput.style.top = '50px';
videoInput.style.zIndex = '100001';
videoInput.style.display = 'block';
// set up camera controller
headtrackr.controllers.three.realisticAbsoluteCameraControl(camera, 1, [0,0,0], new THREE.Vector3(0,0,0), {damping : 1.1});
var htracker = new headtrackr.Tracker();
htracker.init(videoInput, canvasInput);
htracker.start();
// var stats = new Stats();
// stats.domElement.style.position = 'absolute';
// stats.domElement.style.top = '0px';
// document.body.appendChild( stats.domElement );
document.addEventListener('headtrackrStatus',
function (event) {
if (event.status == "found") {
addCube();
}
}
);
}
// animation loop
function animate() {
// loop on request animation loop
// - it has to be at the begining of the function
// - see details at http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
requestAnimationFrame( animate );
// do the render
render();
// update stats
//stats.update();
}
function render() {
// convert matrix of every frame of video -> texture
uniforms.tex0 = materials;
uniforms.coef = 0.2;
uniforms.imageSize.x = window.innerWidth;
uniforms.imageSize.y = window.innerHeight;
// update camera controls
// cameraControls.update();
if( videoInput.readyState === videoInput.HAVE_ENOUGH_DATA ){
videoTexture.needsUpdate = true;
}
// actually render the scene
renderer.render( scene, camera );
}
function addCube(){
material = new THREE.ShaderMaterial({
uniforms: uniforms,
attributes: attributes,
vertexShader: document.getElementById('vertexShader').textContent,
fragmentShader: document.getElementById('fragmentShader').textContent,
transparent: true
});
//The cube
cube = new THREE.Mesh(new THREE.CubeGeometry(40, 30, 10, 1, 1, 1, material), new THREE.MeshFaceMaterial());
cube.overdraw = true;
scene.add(cube);
}
</script>
</body>
</html>
3.js锅炉板
身体{
溢出:隐藏;
填充:0;
保证金:0;
颜色:#222;
背景色:#BBB;
字体系列:arial;
字体大小:100%;
}
#信息,顶部{
位置:绝对位置;
顶部:0px;
宽度:100%;
填充物:5px;
文本对齐:居中;
}
#信息a{
颜色:#66F;
文字装饰:无;
}
#信息a:悬停{
文字装饰:下划线;
}
#信息,底部{
位置:绝对位置;
底部:0px;
右:5px;
填充物:5px;
}
-截图
可变矢量2 texcoord0;
void main()
{
//对顶点执行标准变换
gl_位置=F变换();
//变换坐标
texcoord0=vec2(gl_TextureMatrix[0]*gl_MultiTexCoord0);
}
可变矢量2 texcoord0;
均匀采样2d-tex0;
均匀的vec2图像大小;
均匀浮动系数;
常数vec4 lumcoeff=vec4(0.299,0.587,0.114,0.);
真空总管(真空)
{
vec4像素=纹理2d(tex0,texcoord0);
float luma=点(lumcoeff,像素);
gl_FragColor=vec4(texcoord0.x/imageSize.x),luma(texcoord0.y/imageSize.y),1.0;
}
var统计、场景、渲染器;
var摄像机、摄像机控制器;
var videoInput=document.getElementById('vid');
var canvasInput=document.getElementById('compare');
var投影仪=新的三个投影仪();
var-gl;
var网格,
立方体,
属性,
制服,
材料,
材料;
var videoTexture=新的三种纹理(videoInput);
如果(!init())设置动画();
//现场
函数init(){
if(Detector.webgl){
renderer=new THREE.WebGLRenderer({
antialias:true,//以获得更平滑的输出
preserveDrawingBuffer:true//允许屏幕截图
});
setClearColorHex(0xBBBBBB,1);
//如果需要webgl,请取消注释
//}否则{
//Detector.addGetWebGLMessage();
//返回true;
}否则{
renderer=new THREE.CanvasRenderer();
gl=渲染器;
}
renderer.setSize(window.innerWidth、window.innerHeight);
document.getElementById('container').appendChild(renderer.doElement);
//创造一个场景
场景=新的三个。场景();
//在场景中放一个摄像机
摄像头=新的三个透视摄像头(23,window.innerWidth/window.innerHeight,11000);
摄像机位置z=0;
场景。添加(摄影机);
//
////创建摄影机控件
//CameraControl=新的三轴拖动控制(摄像头)
//透明支持窗口大小调整
//3X.WindowResize.bind(渲染器、相机);
//允许“p”制作屏幕截图
THREEx.Screenshot.bindKey(渲染器);
//允许“f”在支持此功能的地方全屏显示
if(三倍全屏可用(){
THREEx.FullScreen.bindKey();
document.getElementById('inlineDoc')。innerHTML+=“-f表示全屏”;
}
材质=新的3.0网格LambertMaterial({
贴图:视频纹理
});
属性={};
制服={
tex0:{type:'mat2',value:materials},
imageSize:{type:'f',值:[]},
coef:{type:'f',value:1.0}
};
//添加定向光源以查看任何内容。。
var方向灯=新的三个方向灯(0xffffff);
directionalLight.position.set(1,1,1).normalize();
场景。添加(方向光);
//视频样式
videoInput.style.position='绝对';
videoInput.style.top='50px';
videoInput.style.zIndex='100001';
videoInput.style.display='block';
//设置摄像机控制器
headtrackr.控制器。三个。现实的解决方案控制(摄像头,1,[0,0,0],新的三个。矢量3(0,0,0),{阻尼:1.1});
var htracker=new headtracker.Tracker();
htracker.init(视频输入、画布输入);
htracker.start();
//var stats=newstats();
//stats.domElement.style.position='绝对';
//stats.domElement.style.top='0px';
//文件正文