Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/three.js/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Three.js/置换后在顶点着色器中计算索引平面几何体的顶点法线_Three.js_Glsl_Webgl_Normals_Buffer Geometry - Fatal编程技术网

Three.js/置换后在顶点着色器中计算索引平面几何体的顶点法线

Three.js/置换后在顶点着色器中计算索引平面几何体的顶点法线,three.js,glsl,webgl,normals,buffer-geometry,Three.js,Glsl,Webgl,Normals,Buffer Geometry,这件事我已经忙了一段时间了。我有一个用于GPGPU布料物理模拟的索引PlaneBuffer几何体,在我的一生中,在将模拟渲染到纹理后,无法在最终顶点着色器中正确计算法线 这是我当前设置的相关部分,我认为它不起作用,因为我需要一种方法来知道当前垂直的顺序和它的相邻面。只是不能完全正确 javascript: 片段着色器/照明: 不确定我是否遗漏了一些明显的东西/做了一些愚蠢的事情,或者这个问题确实很难解决。没有发布我尝试过的许多失败的方法,有人有什么想法吗 非常感谢您的帮助 [编辑1]:我添加了两

这件事我已经忙了一段时间了。我有一个用于GPGPU布料物理模拟的索引PlaneBuffer几何体,在我的一生中,在将模拟渲染到纹理后,无法在最终顶点着色器中正确计算法线

这是我当前设置的相关部分，我认为它不起作用，因为我需要一种方法来知道当前顶点的顺序和它的相邻面。只是不能完全正确。

javascript:

片段着色器/照明:

不确定我是否遗漏了一些明显的东西/做了一些愚蠢的事情,或者这个问题确实很难解决。没有发布我尝试过的许多失败的方法,有人有什么想法吗

非常感谢您的帮助

[编辑1]:我添加了两个示例,使用平面着色和面法线,并显示当前混乱的平滑顶点法线进度。很难找到我的错误

[编辑2]:这是我将面数据管道化到每个顶点的方式。从数据的角度来看,一切看起来都是正确的,但在视觉上完全是一团糟。我一辈子都找不到我错在哪里

javascript

顶点着色器相关部分


由于@luigi de rosa的帮助,我最终放弃了上面的共享脸方法,改用了邻居查找方法。我以前尝试过这个,但是使用了不正确的值从查找纹理中抓取邻居,这让我觉得它不起作用

下面是我现在在顶点着色器中计算法线的方法

// Finite-difference normal: sample this vertex's simulated position from
// the position texture plus two nearby samples, and cross the two
// difference vectors to get a surface normal.
float texelStep = 0.06; // UV offset between samples; tweak this value to yield different results.
vec2 uv = positionReference.xy;
vec3 transformed = texture2D(tPositions, uv).xyz;
vec3 posRight = texture2D(tPositions, uv + vec2(texelStep, 0.0)).xyz;
vec3 posUp    = texture2D(tPositions, uv + vec2(0.0, texelStep)).xyz;
vec3 du = posRight - transformed;
vec3 dv = posUp - transformed;
vec3 n = cross(du, dv);
vNormal = normalMatrix * -normalize(n); // negated, as in the working solution; pass to fragment shader

更简单的方法……唉。

如果您先用一个实例演示，您可能会更幸运地获得帮助。在这种情况下不需要法线。此外，你的UV应该索引到像素的中心，因此如果你的纹理是4x4，那么你的UV将是1/8、3/8、5/8、7/8。@WestLangley我添加了两个代码笔来显示我当前的进度。平面明暗处理-，我当前的法线进度非常糟糕-你显然有能力调试它我将大小设置为4以进行调试。顺便说一句，在指定制服时，您不再需要设置类型。感谢您的道义支持:我显然一直坚持这样做，早在我最初发布之前，我就一直这样做，讨厌寻求帮助，只是时间不够了。我知道这是一件非常复杂的事情，需要调试，可能要求人们看它太大了，但是值得一试…@WestLangley我在编辑2中的方法听起来合理吗？或者，这是一种试图通过管道输入面（face）数据的愚蠢方式？
// How I'd calculate the normals if I could get a properly ordered reference:
// each faceVert*UvReference attribute addresses one vertex of the face in
// the position texture; the face normal is the cross product of two edges.
vec3 pos1 = texture2D(tPositions, faceVert1UvReference.xy).xyz;
vec3 pos2 = texture2D(tPositions, faceVert2UvReference.xy).xyz;
vec3 pos3 = texture2D(tPositions, faceVert3UvReference.xy).xyz;

// Two edges sharing vertex 2.
vec3 edgeA = pos3 - pos2;
vec3 edgeB = pos1 - pos2;
vec3 normal = normalMatrix * normalize(cross(edgeA, edgeB));
// Fragment shader: simple diffuse (Lambert-style) lighting term.
vec3 lightDirection = normalize(lightPosition); // also tried normalize(lightPosition - vWorldPosition);
vec3 normal = normalize(vNormal);
// Clamp the cosine at zero so light behind the surface contributes nothing.
float lightValue = max(0.0, dot(normal, lightDirection)) * lightIntensity;
finalColor.rgb *= lightValue;
const indices = geometry.index.array;
const faces = [];

// Build, for every vertex, the list of faces (index triples) it belongs to.
// faces[v] ends up holding one [a, b, c] triple per face that touches v.
for (let i = 0; i < indices.length; i += 3)
{
    const a = indices[i + 0];
    const b = indices[i + 1];
    const c = indices[i + 2];

    for (const v of [a, b, c])
    {
        if (!faces[v]) faces[v] = [];
        faces[v].push([a, b, c]);
    }
}

// ------------------------------------------------------------------
// Pack the per-vertex face adjacency into a second data texture so the
// vertex shader can look up every face a given vertex belongs to.
// ------------------------------------------------------------------
const size = 128; // the position data texture is size x size (one texel per vertex)
const vertices = geometry.attributes.position;
// Per vertex, three slots: [first pixel, last pixel, unused] — the pixel
// range this vertex occupies in the face-index texture below. Consumed in
// the vertex shader as the 'faceIndices' attribute.
const faceIndices = new Uint16Array( vertices.array.length );
// NOTE(review): (2*size)^2 = 65536 texels, but an indexed grid produces one
// entry per (vertex, face) incidence — about 3 * faceCount (~96k for a
// 128x128 plane) — which can exceed this capacity; verify for the mesh used.
const indices0 = gpuCompute.createTexture(size * 2, size * 2); // need 256x256 texture for all the data.
const indicesPixels0 = indices0.image.data;

let faceVertPixelIndex = 0,   // next free pixel in the face-index texture
    faceIndexRangeStart = 0,  // first pixel used by the current vertex
    faceIndexRangeEnd = -1,   // last pixel used by the previous vertex
    index;                    // flat vertex index: j + i * size

for(let i = 0; i < size; i++)
{
    for(let j = 0; j < size; j++)
    {
        index = j + (i * size);

        // ----------------------------------------------
        // writing vertex positions to data texture here
        // ----------------------------------------------


        if(faces[index])
        {
            const face = faces[index];
            const fLen = face.length; // up to 6 faces share an interior grid vertex

            // Allocate a contiguous run of pixels for this vertex's faces.
            faceIndexRangeStart = faceIndexRangeEnd + 1;
            faceIndexRangeEnd = faceIndexRangeStart + fLen - 1;

            // face index range for looking up up all faces a single vertex is in
            faceIndices[index * 3 + 0] = faceIndexRangeStart;
            faceIndices[index * 3 + 1] = faceIndexRangeEnd;
            faceIndices[index * 3 + 2] = 0; // unused

            for(let v = 0; v < fLen; v++)
            {
                // store face vertex indices in each pixel rgb
                indicesPixels0[faceVertPixelIndex * 4 + 0] = face[v][0]; // current face, vertex 1 index
                indicesPixels0[faceVertPixelIndex * 4 + 1] = face[v][1]; // current face, vertex 2 index
                indicesPixels0[faceVertPixelIndex * 4 + 2] = face[v][2]; // current face, vertex 3 index
                indicesPixels0[faceVertPixelIndex * 4 + 3] = 0; // unused

                faceVertPixelIndex++;
            }
        }
    }
}

// Expose the per-vertex lookup range as a 3-component vertex attribute.
geometry.addAttribute('faceIndices', new THREE.BufferAttribute(faceIndices, 3));

uniforms.tIndices.value = indices0; // face-index texture sampled in the vertex shader
uniform vec2 resolution;
uniform sampler2D tPositions;
uniform sampler2D tIndices;

attribute vec3 faceIndices;

varying vec3 vNormal;

// Convert a flat float index into the UV of the CENTER of the matching
// texel in a size.x by size.y data texture.
// Fixes two defects in the original:
//  1. The row must come from the texture WIDTH: row = floor(index / size.x).
//     The original divided by size.y inside floor(), which only happened to
//     work because the textures here are square.
//  2. Sampling must target texel centers (+0.5), not texel corners —
//     corner UVs land on filtering boundaries and fetch the wrong texel
//     (for a 4x4 texture the correct UVs are 1/8, 3/8, 5/8, 7/8).
vec2 getCoord(in float index, in vec2 size)
{
    float col = mod(index, size.x);
    float row = floor(index / size.x);
    return vec2((col + 0.5) / size.x, (row + 0.5) / size.y);
}

// Accumulate into 'nrml' the unit normal of the face whose index triple is
// stored at pixel 'index' of the face-index texture.
void addNormal(inout vec3 nrml, in float index)
{
    // The face-index texture is twice the resolution of the position texture
    // (256x256 here); its rgb channels hold the face's three vertex indices.
    vec4 face = texture2D(tIndices, getCoord(index, resolution * 2.0));

    // Turn each stored vertex index into a UV and fetch its simulated position.
    vec3 pA = texture2D(tPositions, getCoord(face.x, resolution)).xyz;
    vec3 pB = texture2D(tPositions, getCoord(face.y, resolution)).xyz;
    vec3 pC = texture2D(tPositions, getCoord(face.z, resolution)).xyz;

    // Two edges sharing the middle vertex; their cross product is the face normal.
    vec3 edge1 = pC - pB;
    vec3 edge2 = pA - pB;

    nrml += normalize(cross(edge1, edge2));
}

void main()
{
    // Sum the normals of every face this vertex belongs to, then normalize
    // the sum for a smooth vertex normal. faceIndices.xy carries the
    // [first, last] pixel range in the face texture; a grid vertex touches
    // at most 6 faces, hence the fixed loop bound (GLSL ES needs a constant).
    vec3 accumulated = vec3(0.0);

    float cursor = faceIndices.x;
    float last = faceIndices.y;

    for(int i = 0; i < 6; i++)
    {
        if(cursor > last)
        {
            break;
        }
        addNormal(accumulated, cursor);
        cursor += 1.0;
    }

    vNormal = normalMatrix * normalize(accumulated);
}
// Final working approach (duplicated from the answer above): approximate the
// vertex normal by finite differences in the simulated position texture.
float diff = 0.06; // tweak this value to yield different results.
vec2 coord = positionReference.xy;
// This vertex's simulated position, plus two samples offset along the
// texture's x and y axes.
// NOTE(review): 0.06 is a hand-tuned UV step; one texel would be
// 1.0 / textureSize — confirm which spacing the lookup texture expects.
vec3 transformed = texture2D(tPositions, coord).xyz;
vec3 neighbour1 = texture2D(tPositions, coord + vec2(diff, 0.0)).xyz;
vec3 neighbour2 = texture2D(tPositions, coord + vec2(0.0, diff)).xyz;
vec3 tangent = neighbour1 - transformed;
vec3 bitangent = neighbour2 - transformed;            
vec3 nrml = cross(tangent, bitangent);
vNormal = normalMatrix * -normalize(nrml); // pass to fragment shader