Android 在摄影机流(GLSL)上绘制文本或图像

Android 在摄像机流(GLSL)上绘制文本或图像(标签:android、opengl-es、glsl)。我有一个直播应用,通过 RTMP 发送视频源进行直播。我现在想通过在视频流上叠加文本或徽标来为视频添加水印。我知道这可以通过 GLSL 过滤实现,但不知道如何基于我链接的示例来实现。我尝试使用 Alpha 混合,但这两种纹理格式似乎在某种程度上不兼容(一种是 GL_TEXTURE_EXTERNAL_OES,另一种是 GL_TEXTURE_2D),结果我得到了黑帧。(完整代码见下文"编辑"部分。)

我有一个基于off的直播应用程序,我通过RTMP发送我的视频源进行直播

我现在想通过在视频流上叠加文本或徽标来为视频添加水印。我知道这可以通过GLSL过滤实现,但我不知道如何基于我链接的示例实现这一点

我尝试使用 Alpha 混合,但这两种纹理格式似乎在某种程度上不兼容(一种是 GL_TEXTURE_EXTERNAL_OES,另一种是 GL_TEXTURE_2D),结果我得到了黑帧。

编辑:

我的代码基于以下示例(原文中的链接在转录时丢失):

(原文在此处粘贴了 CameraSurfaceRenderer 类的一份机器翻译版本,内容已严重损坏;完整的原始代码见下方的英文代码块。)这段代码在屏幕(GLSurfaceView)上渲染图像,但实际上并没有把它叠加到录制的视频上。如果我没弄错的话,所链接的示例正是这样做的。

问题是,将代码从CameraSurfaceRenderer复制到CameraEncoder(当涉及到过滤器时,它们都有类似的代码)不会提供叠加的文本/图像

纹理对象使用 GL_TEXTURE_EXTERNAL_OES 纹理目标,该目标由 GL_OES_EGL_image_external OpenGL ES 扩展定义。这限制了纹理的使用方式。每次绑定纹理时,都必须绑定到 GL_TEXTURE_EXTERNAL_OES 目标,而不是 GL_TEXTURE_2D 目标。此外,任何从该纹理采样的 OpenGL ES 2.0 着色器都必须声明使用此扩展,例如使用 "#extension GL_OES_EGL_image_external : require" 指令。
class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "CameraSurfaceRenderer";
    private static final boolean VERBOSE = false;

    // Identity transform for the 2D overlay quad. The SurfaceTexture transform
    // matrix (mSTMatrix) is only meaningful for the external-OES camera texture;
    // applying it to an ordinary GL_TEXTURE_2D overlay distorts or hides it.
    private static final float[] IDENTITY_MATRIX = {
            1f, 0f, 0f, 0f,
            0f, 1f, 0f, 0f,
            0f, 0f, 1f, 0f,
            0f, 0f, 0f, 1f,
    };

    private CameraEncoder mCameraEncoder;

    private FullFrameRect mFullScreenCamera;      // samples GL_TEXTURE_EXTERNAL_OES (camera)
    private FullFrameRect mFullScreenOverlay;     // samples GL_TEXTURE_2D (watermark overlay)

    private final float[] mSTMatrix = new float[16];
    private int mOverlayTextureId;
    private int mCameraTextureId;

    private boolean mRecordingEnabled;

    private int mFrameCount;

    // Keep track of selected filters + relevant state
    private boolean mIncomingSizeUpdated;
    private int mIncomingWidth;
    private int mIncomingHeight;
    private int mCurrentFilter;
    private int mNewFilter;

    boolean showBox = false;


    /**
     * Constructs CameraSurfaceRenderer.
     * <p>
     * @param recorder video encoder object; supplies the video size and the
     *                 SurfaceTexture the camera preview arrives on
     */
    public CameraSurfaceRenderer(CameraEncoder recorder) {
        mCameraEncoder = recorder;

        mCameraTextureId = -1;
        mFrameCount = -1;

        SessionConfig config = recorder.getConfig();
        mIncomingWidth = config.getVideoWidth();
        mIncomingHeight = config.getVideoHeight();
        mIncomingSizeUpdated = true;        // Force texture size update on next onDrawFrame

        mCurrentFilter = -1;
        mNewFilter = Filters.FILTER_NONE;

        mRecordingEnabled = false;
    }


    /**
     * Notifies the renderer that we want to stop or start recording.
     *
     * @param isRecording true to start recording, false to stop
     */
    public void changeRecordingState(boolean isRecording) {
        Log.d(TAG, "changeRecordingState: was " + mRecordingEnabled + " now " + isRecording);
        mRecordingEnabled = isRecording;
    }

    @Override
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        Log.d(TAG, "onSurfaceCreated");
        // Set up the texture blitter that will be used for on-screen display.  This
        // is *not* applied to the recording, because that uses a separate shader.
        // The camera frame arrives on a GL_TEXTURE_EXTERNAL_OES target, so it needs
        // the TEXTURE_EXT program; a plain TEXTURE_2D program cannot sample it.
        mFullScreenCamera = new FullFrameRect(
                new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
        // Separate blitter for the watermark overlay, which lives in an ordinary
        // GL_TEXTURE_2D.  The two texture targets cannot share one program.
        mFullScreenOverlay = new FullFrameRect(
                new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_2D));
        // Render the watermark text into a GL_TEXTURE_2D once, up front.
        // To use an image watermark instead, replace this with e.g.
        //   mOverlayTextureId = GlUtil.createTextureFromImage(context, R.drawable.red_dot);
        // (Was previously assigned twice, leaking the first texture and referencing
        // an undeclared mCameraView field.)
        mOverlayTextureId = GlUtil.createTextureWithTextContent("hello!");
        mCameraTextureId = mFullScreenCamera.createTextureObject();

        mCameraEncoder.onSurfaceCreated(mCameraTextureId);
        mFrameCount = 0;
    }

    @Override
    public void onSurfaceChanged(GL10 unused, int width, int height) {
        Log.d(TAG, "onSurfaceChanged " + width + "x" + height);
    }

    @Override
    public void onDrawFrame(GL10 unused) {
        if (VERBOSE){
            if(mFrameCount % 30 == 0){
                Log.d(TAG, "onDrawFrame tex=" + mCameraTextureId);
                mCameraEncoder.logSavedEglState();
            }
        }

        if (mCurrentFilter != mNewFilter) {
            Filters.updateFilter(mFullScreenCamera, mNewFilter);
            mCurrentFilter = mNewFilter;
            mIncomingSizeUpdated = true;
        }

        if (mIncomingSizeUpdated) {
            mFullScreenCamera.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
            mFullScreenOverlay.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
            mIncomingSizeUpdated = false;
            Log.i(TAG, "setTexSize on display Texture");
        }

        // Draw the video frame.
        if(mCameraEncoder.isSurfaceTextureReadyForDisplay()){
            mCameraEncoder.getSurfaceTextureForDisplay().updateTexImage();
            mCameraEncoder.getSurfaceTextureForDisplay().getTransformMatrix(mSTMatrix);
            // Draw the (opaque) camera frame FIRST; drawing it last would simply
            // paint over the overlay, which is why no watermark was visible.
            mFullScreenCamera.drawFrame(mCameraTextureId, mSTMatrix);
            // Then alpha-blend the overlay on top.  Blend state is (re)set here
            // every frame because the encoder's EGL work may have changed it
            // since onSurfaceCreated.  The overlay uses an identity transform,
            // not the camera's SurfaceTexture matrix.
            GLES20.glEnable(GLES20.GL_BLEND);
            GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
            mFullScreenOverlay.drawFrame(mOverlayTextureId, IDENTITY_MATRIX);
            GLES20.glDisable(GLES20.GL_BLEND);
        }
        mFrameCount++;
    }

    // NOTE(review): method name kept with its original typo ("Vertial") so existing
    // callers keep compiling; prefer signalVerticalVideo() in new code.
    public void signalVertialVideo(FullFrameRect.SCREEN_ROTATION isVertical) {
        if (mFullScreenCamera != null) mFullScreenCamera.adjustForVerticalVideo(isVertical, false);
    }

    /**
     * Correctly spelled alias for {@link #signalVertialVideo}.
     *
     * @param isVertical requested screen rotation for vertical video
     */
    public void signalVerticalVideo(FullFrameRect.SCREEN_ROTATION isVertical) {
        signalVertialVideo(isVertical);
    }

    /**
     * Changes the filter that we're applying to the camera preview.
     * Takes effect on the next onDrawFrame call.
     *
     * @param filter one of the Filters.FILTER_* constants
     */
    public void changeFilterMode(int filter) {
        mNewFilter = filter;
    }

    /** Forwards touch events to the camera blitter (e.g. for touch-to-focus/zoom effects). */
    public void handleTouchEvent(MotionEvent ev){
        mFullScreenCamera.handleTouchEvent(ev);
    }

}