Android 将gstreamer视频源显示到Google Cardboard SurfaceTexture中

Android 将gstreamer视频源显示到Google Cardboard SurfaceTexture中,android,video,opengl-es,google-cardboard,Android,Video,Opengl Es,Google Cardboard,我正在使用gstreamer检索一个视频提要(从一个RaspPi发送),我需要将它显示在谷歌硬纸板上 我把我的工作建立在这个基础上。我通过提供我的Surface(从SurfaceView.SurfaceHolder.getSurface()检索)将视频显示到SurfaceView中,但现在我需要将其与Google硬纸板连接 如果我没有弄错的话,Google Cardboard依赖于一些SurfaceTexture。因此我认为使用Surface(SurfaceTexture)构造函数从Surface

我正在使用gstreamer检索一个视频提要(从一个RaspPi发送),我需要将它显示在谷歌硬纸板上

我把我的工作建立在这个基础上。我通过提供我的Surface(从
SurfaceView.SurfaceHolder.getSurface()
检索)将视频显示到SurfaceView中,但现在我需要将其与Google硬纸板连接

如果我没有弄错的话，Google Cardboard依赖于一些SurfaceTexture。因此我认为使用
Surface(SurfaceTexture)
构造函数从SurfaceTexture简单地获取曲面是很容易的

问题是它根本不起作用。我的谷歌纸板应用程序,我没有接触过OpenGL代码,因为我对它一无所知

调试时,我发现我使用的代码(至少)有一个问题

GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
正在给我带来一些麻烦,因为
GL_TEXTURE_EXTERNAL_OES
不在
glActiveTexture
方法所需的范围内(这要求GL从0到
GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS
)。 这是我的日志:

GLConsumer  W  [unnamed-12520-0] bindTextureImage: clearing GL error: 0x500
Adreno-ES20  W  <core_glActiveTexture:348>: GL_INVALID_ENUM
有关更多代码,以下是两个主要文件的要点:


编辑:尝试在gstreamer上使用相机应用程序时,logcat中显示的错误与我前面描述的错误相同。因此,这可能不重要……

glActiveTexture()的参数是纹理单位,例如
GL_TEXTURE0
glBindTexture()的第一个参数
是一个纹理目标,例如,
GL_TEXTURE_EXTERNAL_OES
。您可以在Grafika()中找到各种混合格斗和视频的示例@fadden没什么帮助。我在grafika上找不到任何能帮我在SurfacetTexture上放视频的东西。我知道GlactivetTexture的论点,这似乎很奇怪,上面引用的演示应用程序使用了这个错误。“来自摄影机的纹理”“活动”从摄影机中获取视频,在SurfaceTexture上播放,并使用GLES渲染纹理。“连续捕获”执行类似的操作。将“摄影机输出”替换为“视频解码器输出”。尽管出现错误,它仍可能工作,因为它仍然设置为有用的默认值(GL_TEXTURE0)@fadden问题是,在您的示例中,他们仍然使用SurfaceView来显示曲面,但我无法使用,因为我无法使用硬纸板访问此类元素,您可以在何处工作?我正在尝试做类似的操作,但也只得到黑色。如果我使用grafika的MediaPlayer,效果很好,但是MediaPlayer不够灵活,无法满足我的需要。
@Override
public void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    CardboardView cardboardView = (CardboardView) findViewById(R.id.cardboard_view);
    cardboardView.setRenderer(this);
    setCardboardView(cardboardView);

    // Initialize GStreamer and warn if it fails.
    try {
        GStreamer.init(this);
    } catch (Exception e) {
        // BUG FIX: this catch block was empty, so a failed GStreamer init was
        // silently swallowed and the activity carried on to nativeInit()
        // anyway. At minimum, make the failure visible in logcat.
        Log.e(TAG, "GStreamer initialization failed", e);
    }
    // Matrices used by the Cardboard head-tracking/camera pipeline
    // (filled in by onNewFrame()/onDrawEye()).
    mCamera = new float[16];
    mView = new float[16];
    mHeadView = new float[16];
    // Start the native (gstreamer) side.
    nativeInit();
}


@Override
public void onSurfaceCreated(EGLConfig eglConfig) {
            Log.d(TAG, "onSurfaceCreated start");
    GLES20.glClearColor(0.5f, 0.1f, 0.1f, 0.5f);

    // Vertex positions of the quad the video is rendered onto (4 bytes/float).
    ByteBuffer vertexBytes = ByteBuffer.allocateDirect(squareVertices.length * 4);
    vertexBytes.order(ByteOrder.nativeOrder());
    vertexBuffer = vertexBytes.asFloatBuffer();
    vertexBuffer.put(squareVertices);
    vertexBuffer.position(0);

    // Triangle index list (2 bytes/short).
    ByteBuffer indexBytes = ByteBuffer.allocateDirect(drawOrder.length * 2);
    indexBytes.order(ByteOrder.nativeOrder());
    drawListBuffer = indexBytes.asShortBuffer();
    drawListBuffer.put(drawOrder);
    drawListBuffer.position(0);

    // Texture coordinates matching the quad vertices.
    ByteBuffer texCoordBytes = ByteBuffer.allocateDirect(textureVertices.length * 4);
    texCoordBytes.order(ByteOrder.nativeOrder());
    textureVerticesBuffer = texCoordBytes.asFloatBuffer();
    textureVerticesBuffer.put(textureVertices);
    textureVerticesBuffer.position(0);

    // Compile both shaders and link them into the program used by onDrawEye().
    int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
    int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
    mProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mProgram, vertexShader);
    GLES20.glAttachShader(mProgram, fragmentShader);
    GLES20.glLinkProgram(mProgram);
    checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());
    Log.d(TAG, "Surface created");

    // Create the external texture and hand a Surface wrapping it to gstreamer.
    texture = createTexture();
    initSurface(texture);
}
// Creates the external (OES) texture that the SurfaceTexture will render
// video frames into, and configures its sampling parameters.
static private int createTexture()
{
    Log.d(TAG + "_cardboard", "createTexture");

    int[] handles = new int[1];
    GLES20.glGenTextures(1, handles, 0);
    checkGLError("GenTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());

    int textureId = handles[0];
    // GL_TEXTURE_EXTERNAL_OES is the required *target* for SurfaceTexture-backed textures.
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId);
    checkGLError("BindTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());

    // Clamp at the edges; linear min/mag filtering (external textures have no mipmaps).
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber());

    return textureId;
}
// Wraps the GL texture in a SurfaceTexture and hands a Surface over to gstreamer.
private void initSurface(int texture) {
    mSurface = new SurfaceTexture(texture);
    mSurface.setOnFrameAvailableListener(this);
    Log.d(TAG, "OnFrameAvailableListener set");

    // The native side keeps its own reference to the underlying buffer queue,
    // so the Java-side Surface wrapper can be released immediately.
    Surface videoSurface = new Surface(mSurface);
    nativeSurfaceInit(videoSurface);
    videoSurface.release();
}

// Called by the SurfaceTexture whenever gstreamer has produced a new frame.
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    Log.d(TAG, "onFrameAvailable");
    // Ask the Cardboard view for a render pass so the new frame gets drawn.
    CardboardView view = this.getCardboardView();
    view.requestRender();
}

// Per-frame head-tracking update; runs once before the two onDrawEye() calls.
@Override
public void onNewFrame(HeadTransform headTransform) {
    headTransform.getHeadView(mHeadView, 0);

    // Camera sits just off the origin, looking at the origin with +Y up.
    Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, 0.01f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    // Latch the most recent video frame into the external texture and read
    // back its texture-coordinate transform.
    float[] texTransform = new float[16];
    mSurface.updateTexImage();
    mSurface.getTransformMatrix(texTransform);

    float[] eulerAngles = new float[3];
    headTransform.getEulerAngles(eulerAngles, 0);

    //if(networkThread != null){
    //    networkThread.setRegValue(eulerAngles);
    //}
}

// Renders the latest video frame (held in the external texture) for one eye.
@Override
public void onDrawEye(Eye eye) {
    // Log.d(TAG, "onDrawEye");

    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);

    // BUG FIX: glActiveTexture() takes a texture *unit* (GL_TEXTURE0 ..
    // GL_TEXTURE0 + GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1), whereas
    // GL_TEXTURE_EXTERNAL_OES is a texture *target*, valid only for
    // glBindTexture()/glTexParameter*. Passing the target here is what raised
    // the GL_INVALID_ENUM (0x500) seen in logcat.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);

    mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, vertexStride, vertexBuffer);

    mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
    GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
    GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, vertexStride, textureVerticesBuffer);

    // BUG FIX: "s_texture" is a sampler *uniform*, not a vertex attribute, so
    // it must be looked up with glGetUniformLocation — and then pointed at
    // texture unit 0, where the external texture was bound above.
    mColorHandle = GLES20.glGetUniformLocation(mProgram, "s_texture");
    GLES20.glUniform1i(mColorHandle, 0);

    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
            GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

    // Disable vertex arrays now that the draw call has been issued.
    GLES20.glDisableVertexAttribArray(mPositionHandle);
    GLES20.glDisableVertexAttribArray(mTextureCoordHandle);

    Matrix.multiplyMM(mView, 0, eye.getEyeView(), 0, mCamera, 0);
}