Java Android:OpenGL ES2纹理不工作

Java Android:OpenGL ES2纹理不工作,java,android,opengl-es,textures,opengl-es-2.0,Java,Android,Opengl Es,Textures,Opengl Es 2.0,更新:去掉了线GLES20.glEnable(GLES20.GL_纹理_2D)但行GLES20.glTexImage2D(GLES20.GL_纹理_2D,0,GLES20.GL_RGB,256,256,0,GLES20.GL_RGB,GLES20.GL_字节,ByteBuffer.wrap(像素))提供了无效的GL_枚举。。。像素缓冲区长度为196608 项目文件: 我正在尝试将相机数据传输到OpenGL ES2着色器,相机的东西似乎可以工作,但即使尝试自己的值,也无法使纹理工作。我有一个黑屏。

更新:去掉了线
GLES20.glEnable(GLES20.GL_纹理_2D)但行
GLES20.glTexImage2D(GLES20.GL_纹理_2D,0,GLES20.GL_RGB,256,256,0,GLES20.GL_RGB,GLES20.GL_字节,ByteBuffer.wrap(像素))提供了无效的GL_枚举。。。像素缓冲区长度为196608

项目文件:

我正在尝试将相机数据传输到OpenGL ES2着色器,相机的东西似乎可以工作,但即使尝试自己的值,也无法使纹理工作。我有一个黑屏。代码如下:

package com.matthewmitchell.nightcam;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Scanner;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.content.res.AssetManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        Debug.out("VS LOG: " + GLES20.glGetShaderInfoLog(vertexShader)); 
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        Debug.out("FS LOG: " + GLES20.glGetShaderInfoLog(fragmentShader)); 
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        Debug.out("PROG LOG: " + GLES20.glGetProgramInfoLog(mProgram)); 
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        camera_texture = null;
        GLES20.glEnable(GLES20.GL_TEXTURE_2D);
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        }; //Even if wrong way around it should produce a texture with these coordinates on the screen.

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        //Load file from assets folder using context given by the activity class
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        //Bind texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        //Disable arrays
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context mcontext) {
        context = mcontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        //Takes pixel data from camera and makes texture
        byte[] pixels = new byte[256*256*3]; //Testing simple 256x256 texture. Will update for camera resolution
        for(int x = 0;x < 256;x++){
            for(int y = 0;y < 256;y++){
                //Ignore camera data, use test values.
                pixels[(x*256+y)*3] = 0;
                pixels[(x*256+y)*3+1] = 100;
                pixels[(x*256+y)*3+2] = 120;
            }
        }
        //Debug.out("PX:" + pixels[0] + " " + pixels[1] + " " + pixels[2]);
        //Make new texture for new data
        if (camera_texture == null){
            camera_texture = new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    }
}
以下是片段着色器代码:

attribute vec4 vPosition;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main(){ 
    gl_Position = vPosition;
    v_texCoord = a_texCoord;
}    
precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D s_texture;
void main(){
    gl_FragColor = texture2D(s_texture, v_texCoord);
}
我们可以忽略摄像机的东西,因为我使用的是测试值。我正在使用测试256x256纹理。我已经完成了我在例子中看到的一切


为什么它是黑色的,我如何使它显示?

从代码中看不到它,但在我看来,您并不是在有渲染上下文的地方调用bindCameraTexture(而您应该在onSurfaceCreated或onSurfaceChanged中这样做)。

我看到您使用的是
glGetAttribLocation()
检索
s\u纹理的位置
。这是一个统一变量,不是属性。尝试使用
glGetUniformLocation()
代替此选项


我不知道这是否能解决您所有的问题,但这是必须要确定的。

我用相机预览作为纹理完成了示例。代码的关键区别在于:

  • 我使用SurfaceTexture将相机预览连接到openGL ES中使用的纹理

  • 我使用SurfaceTexture生成的矩阵来调整相机预览的输出,否则会出现黑色闪烁区域

  • 我没有对用于相机预览的纹理显式调用glBindTexture()

    祝你好运


  • 好的,但是你的着色器代码在哪里?对不起。我忘了,不是吗。我马上就去拿。如果你只是将一些恒定的颜色设置为碎片而不是纹理样本,会发生什么?是的,如果我删除设置纹理以进行绘制的调用,那就可以了。谢谢你的回答。将调用该方法。我将在一个文件中发布整个项目…是的,该方法在onPreviewFrame中调用,其中没有渲染上下文(这就是纹理实际上没有创建的原因)。您应该保存字节数组,然后将其分配给onDrawFrame中的纹理。哦,我明白了。我会确保稍后再做,然后再报告。非常感谢。好的,我将纹理调用移动到onDrawFrame,但它仍然是黑色的:(好的,谢谢,我这样做了,但它确实没有完全解决问题。太糟糕了。我刚刚看到你关于无效枚举的更新:这可能是由于在glTexImage2D中使用GL_字节作为“type”参数造成的,它不是一个有效的类型。在这里使用GL_UNSIGNED_字节。请参阅以获取可接受的值。啊,它正在工作!谢谢。尽管它闪烁着黑色部分这很奇怪。除了渲染,我把它都整理好了,但这是一个单独的问题。我会把赏金给你,因为你的帮助最终把它整理好了。也要感谢Max。