Android GLSurfaceView在调用"onResume"时不会恢复其OpenGL线程

Android GLSurfaceView在调用"onResume"时不会恢复其OpenGL线程（标签：android, opengl-es, glsurfaceview）。我的问题是：在"旧"Android设备（V2.2和2.3）上，经过一次旋转后，我的GLSurfaceView是空白的。我可以在日志中看到下面列出的调用。

我的问题是:在“旧”Android设备(V2.2和2.3)上,经过一次旋转后,我的GLSurfaceView是空白的。我可以在日志中看到这些呼叫:

- rotation detected! -
CTestApp(10669): entering onConfigurationChanged method.
MainActivity(10669): entering onPause method.
*WEBRTC*(10669): ViEAndroidGLES20::onPause
*WEBRTC*(10669): ContextFactory::destroyContext
*WEBRTC*(10669): ViEAndroidGLES20::onPause
*WEBRTC*(10669): ContextFactory::destroyContext
MainActivity(10669): end of onPause method.
MainActivity(10669): entering onStop method.
*WEBRTC*(10669): ViEAndroidGLES20::onDetachedFromWindow
*WEBRTC*(10669): ViEAndroidGLES20::onDetachedFromWindow
MainActivity(10669): end of onStop method.
MainActivity(10669): entering onDestroy method.
MainActivity(10669): end of onDestroy method.
MainActivity(10669): entering onCreate method.
MainActivity(10669): entering onStart method.
MainActivity(10669): end of onStart method.
MainActivity(10669): entering onResume method.
*WEBRTC*(10669): ViEAndroidGLES20::onResume
*WEBRTC*(10669): ViEAndroidGLES20::onResume
MainActivity(10669): end of onResume method.
*WEBRTC*(10669): ViEAndroidGLES20::onAttachedToWindow
*WEBRTC*(10669): ViEAndroidGLES20::onAttachedToWindow
在较新的Android设备上，视频流的渲染在设备旋转后正确恢复。

工作设备的日志与前一个(不工作)日志相似,只是这些跟踪出现在“onAttachedToWindow”调用之后:

在Eclipse调试器中，我注意到在活动销毁期间暂停的2个OpenGL线程没有恢复。Android 2.3和4.0之间的GLSurfaceView行为似乎存在差异，导致OpenGL线程仅在较新版本上恢复。有人对此有线索吗？

以下是我用于测试的设备的详细信息:

工作装置:

  • 运行Android版本4.1.1的galaxy nexus
  • 运行Android版本4.0.4的galaxy tab 10.1
"坏"设备:

  • 运行Android版本2.3.5的HTC Desire
  • 运行Android版本2.2的摩托罗拉Droid

下面是关于我使用的代码的附加信息

我有以下类，它扩展了GLSurfaceView：

public class ViEAndroidGLES20 extends GLSurfaceView
    implements GLSurfaceView.Renderer {
    private static String TAG = "WEBRTC-JR";
    private static final boolean DEBUG = true;
  // True if onSurfaceCreated has been called.
  private boolean surfaceCreated = false;
  private boolean openGLCreated = false;
  // True if NativeFunctionsRegistered has been called.
  private boolean nativeFunctionsRegisted = false;
  private ReentrantLock nativeFunctionLock = new ReentrantLock();
  // Address of Native object that will do the drawing.
  private long nativeObject = 0;
  private int viewWidth = 0;
  private int viewHeight = 0;

  public static boolean UseOpenGL2(Object renderWindow) {
    return ViEAndroidGLES20.class.isInstance(renderWindow);
  }

  public ViEAndroidGLES20(Context context) {
    super(context);
        init(false, 0, 0);
    }

    public ViEAndroidGLES20(Context context, boolean translucent,
            int depth, int stencil) {
        super(context);
        init(translucent, depth, stencil);
    }

    private void init(boolean translucent, int depth, int stencil) {

        // By default, GLSurfaceView() creates a RGB_565 opaque surface.
        // If we want a translucent one, we should change the surface's
        // format here, using PixelFormat.TRANSLUCENT for GL Surfaces
        // is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
        if (translucent) {
            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        }

    // Setup the context factory for 2.0 rendering.
    // See ContextFactory class definition below
    setEGLContextFactory(new ContextFactory());

        // We need to choose an EGLConfig that matches the format of
        // our surface exactly. This is going to be done in our
        // custom config chooser. See ConfigChooser class definition
        // below.
        setEGLConfigChooser( translucent ?
                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :
                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );

        // Set the renderer responsible for frame rendering
     this.setRenderer(this);
     this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
  }

    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
            Log.w(TAG, "creating OpenGL ES 2.0 context");
            checkEglError("Before eglCreateContext", egl);
            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
            EGLContext context = egl.eglCreateContext(display, eglConfig,
                    EGL10.EGL_NO_CONTEXT, attrib_list);
            checkEglError("After eglCreateContext", egl);
            return context;
        }

        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
            Log.d("*WEBRTC*", "ContextFactory::destroyContext");
            egl.eglDestroyContext(display, context);
        }
    }

  private static void checkEglError(String prompt, EGL10 egl) {
    int error;
    while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
      Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
    }
  }

    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {

    public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
      mRedSize = r;
      mGreenSize = g;
      mBlueSize = b;
      mAlphaSize = a;
      mDepthSize = depth;
      mStencilSize = stencil;
    }

    // This EGL config specification is used to specify 2.0 rendering.
    // We use a minimum size of 4 bits for red/green/blue, but will
    // perform actual matching in chooseConfig() below.
    private static int EGL_OPENGL_ES2_BIT = 4;
    private static int[] s_configAttribs2 =
    {
      EGL10.EGL_RED_SIZE, 4,
      EGL10.EGL_GREEN_SIZE, 4,
      EGL10.EGL_BLUE_SIZE, 4,
      EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
      EGL10.EGL_NONE
    };

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {

      // Get the number of minimally matching EGL configurations
      int[] num_config = new int[1];
      egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);

      int numConfigs = num_config[0];

      if (numConfigs <= 0) {
        throw new IllegalArgumentException("No configs match configSpec");
      }

      // Allocate then read the array of minimally matching EGL configs
      EGLConfig[] configs = new EGLConfig[numConfigs];
      egl.eglChooseConfig(display, s_configAttribs2, configs,                          numConfigs, num_config);

      // Now return the "best" one
      return chooseConfig(egl, display, configs);
    }

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                                  EGLConfig[] configs) {
      for(EGLConfig config : configs) {
        int d = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_DEPTH_SIZE, 0);
        int s = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_STENCIL_SIZE, 0);

        // We need at least mDepthSize and mStencilSize bits
        if (d < mDepthSize || s < mStencilSize)
          continue;

        // We want an *exact* match for red/green/blue/alpha
        int r = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_RED_SIZE, 0);
        int g = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_GREEN_SIZE, 0);
        int b = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_BLUE_SIZE, 0);
        int a = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_ALPHA_SIZE, 0);

        if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
          return config;
      }
      return null;
    }

    private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                                 EGLConfig config, int attribute, int defaultValue) {

      if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
        return mValue[0];
      }
      return defaultValue;
    }

    // Subclasses can adjust these values:
    protected int mRedSize;
    protected int mGreenSize;
    protected int mBlueSize;
    protected int mAlphaSize;
    protected int mDepthSize;
    protected int mStencilSize;
    private int[] mValue = new int[1];
  }

  // IsSupported
  // Return true if this device support Open GL ES 2.0 rendering.
  public static boolean IsSupported(Context context) {
    ActivityManager am =
        (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    ConfigurationInfo info = am.getDeviceConfigurationInfo();
    if(info.reqGlEsVersion >= 0x20000) {
      // Open GL ES 2.0 is supported.
      return true;
    }
    return false;
  }

   public void onDrawFrame(GL10 gl) {
    nativeFunctionLock.lock();
    if(!nativeFunctionsRegisted || !surfaceCreated) {
      nativeFunctionLock.unlock();
      return;
    }

    if(!openGLCreated) {
      if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
        return; // Failed to create OpenGL
      }
      openGLCreated = true; // Created OpenGL successfully
    }
    DrawNative(nativeObject); // Draw the new frame
    nativeFunctionLock.unlock();
  }

   public void onSurfaceChanged(GL10 gl, int width, int height) {

    if (DEBUG)
    {
      Log.d("*WEBRTC*", "ViEAndroidGLES20::onSurfaceChanged");
    }

    surfaceCreated = true;
    viewWidth = width;
    viewHeight = height;

    nativeFunctionLock.lock();
    if(nativeFunctionsRegisted) {
      if(CreateOpenGLNative(nativeObject,width,height) == 0)
      {
        openGLCreated = true;
      }
      else
      {
        Log.e("*WEBRTC*", "ViEAndroidGLES20::onSurfaceChanged - failed to openGlCreated!");
      }
    }
    nativeFunctionLock.unlock();
  }

   public void onSurfaceCreated(GL10 gl, EGLConfig config) {

    if (DEBUG)
    {
      Log.d("*WEBRTC*", "ViEAndroidGLES20::onSurfaceCreated");
    }
  }

  public void ReDraw() {
    if(surfaceCreated) {
      // Request the renderer to redraw using the render thread context.
      this.requestRender();
    }
  }

  private native int CreateOpenGLNative(long nativeObject,
                                        int width, int height);
  private native void DrawNative(long nativeObject);

  protected void onAttachedToWindow()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onAttachedToWindow");
      }

      super.onAttachedToWindow();
  }

  protected void onDetachedFromWindow()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onDetachedFromWindow");
      }

      super.onDetachedFromWindow();
  }

  public void onPause()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onPause");
      }

      super.onPause();
  }

  public void onResume()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onResume");
      }

      super.onResume();
  }
}

请注意,我还包括了我的主要活动的onResume()回调实现,以表明我在活动恢复时调用了GLSurfaceView.onResume()。

我终于找到了问题所在。问题源于android 2.3和4.x之间在类android.opengl.GLSurfaceView中的行为差异,导致了问题。在GLSurfaceView的Android 4.x实现上,回调“onAttachedToWindow”导致关联的GLThread重新启动

Android 2.2和2.3实现中缺少GLThread的重新启动。在调用ViewGroup::removeView/addView后,不恢复OpenGL线程导致渲染视图变为空白,就像在旋转场景中一样

为了纠正这个问题,我在我的项目中添加了一个类newGLSurfaceView,它是Android 4.1源代码的GLSurfaceView.java类的副本


谢谢,

您是如何编译的?您是否也必须复制所有依赖项?
public class ViEAndroidGLES20 extends GLSurfaceView
    implements GLSurfaceView.Renderer {
    private static String TAG = "WEBRTC-JR";
    private static final boolean DEBUG = true;
  // True if onSurfaceCreated has been called.
  private boolean surfaceCreated = false;
  private boolean openGLCreated = false;
  // True if NativeFunctionsRegistered has been called.
  private boolean nativeFunctionsRegisted = false;
  private ReentrantLock nativeFunctionLock = new ReentrantLock();
  // Address of Native object that will do the drawing.
  private long nativeObject = 0;
  private int viewWidth = 0;
  private int viewHeight = 0;

  public static boolean UseOpenGL2(Object renderWindow) {
    return ViEAndroidGLES20.class.isInstance(renderWindow);
  }

  public ViEAndroidGLES20(Context context) {
    super(context);
        init(false, 0, 0);
    }

    public ViEAndroidGLES20(Context context, boolean translucent,
            int depth, int stencil) {
        super(context);
        init(translucent, depth, stencil);
    }

    private void init(boolean translucent, int depth, int stencil) {

        // By default, GLSurfaceView() creates a RGB_565 opaque surface.
        // If we want a translucent one, we should change the surface's
        // format here, using PixelFormat.TRANSLUCENT for GL Surfaces
        // is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
        if (translucent) {
            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        }

    // Setup the context factory for 2.0 rendering.
    // See ContextFactory class definition below
    setEGLContextFactory(new ContextFactory());

        // We need to choose an EGLConfig that matches the format of
        // our surface exactly. This is going to be done in our
        // custom config chooser. See ConfigChooser class definition
        // below.
        setEGLConfigChooser( translucent ?
                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :
                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );

        // Set the renderer responsible for frame rendering
     this.setRenderer(this);
     this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
  }

    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
            Log.w(TAG, "creating OpenGL ES 2.0 context");
            checkEglError("Before eglCreateContext", egl);
            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
            EGLContext context = egl.eglCreateContext(display, eglConfig,
                    EGL10.EGL_NO_CONTEXT, attrib_list);
            checkEglError("After eglCreateContext", egl);
            return context;
        }

        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
            Log.d("*WEBRTC*", "ContextFactory::destroyContext");
            egl.eglDestroyContext(display, context);
        }
    }

  private static void checkEglError(String prompt, EGL10 egl) {
    int error;
    while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
      Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
    }
  }

    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {

    public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
      mRedSize = r;
      mGreenSize = g;
      mBlueSize = b;
      mAlphaSize = a;
      mDepthSize = depth;
      mStencilSize = stencil;
    }

    // This EGL config specification is used to specify 2.0 rendering.
    // We use a minimum size of 4 bits for red/green/blue, but will
    // perform actual matching in chooseConfig() below.
    private static int EGL_OPENGL_ES2_BIT = 4;
    private static int[] s_configAttribs2 =
    {
      EGL10.EGL_RED_SIZE, 4,
      EGL10.EGL_GREEN_SIZE, 4,
      EGL10.EGL_BLUE_SIZE, 4,
      EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
      EGL10.EGL_NONE
    };

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {

      // Get the number of minimally matching EGL configurations
      int[] num_config = new int[1];
      egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);

      int numConfigs = num_config[0];

      if (numConfigs <= 0) {
        throw new IllegalArgumentException("No configs match configSpec");
      }

      // Allocate then read the array of minimally matching EGL configs
      EGLConfig[] configs = new EGLConfig[numConfigs];
      egl.eglChooseConfig(display, s_configAttribs2, configs,                          numConfigs, num_config);

      // Now return the "best" one
      return chooseConfig(egl, display, configs);
    }

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                                  EGLConfig[] configs) {
      for(EGLConfig config : configs) {
        int d = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_DEPTH_SIZE, 0);
        int s = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_STENCIL_SIZE, 0);

        // We need at least mDepthSize and mStencilSize bits
        if (d < mDepthSize || s < mStencilSize)
          continue;

        // We want an *exact* match for red/green/blue/alpha
        int r = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_RED_SIZE, 0);
        int g = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_GREEN_SIZE, 0);
        int b = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_BLUE_SIZE, 0);
        int a = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_ALPHA_SIZE, 0);

        if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
          return config;
      }
      return null;
    }

    private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                                 EGLConfig config, int attribute, int defaultValue) {

      if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
        return mValue[0];
      }
      return defaultValue;
    }

    // Subclasses can adjust these values:
    protected int mRedSize;
    protected int mGreenSize;
    protected int mBlueSize;
    protected int mAlphaSize;
    protected int mDepthSize;
    protected int mStencilSize;
    private int[] mValue = new int[1];
  }

  // IsSupported
  // Return true if this device support Open GL ES 2.0 rendering.
  public static boolean IsSupported(Context context) {
    ActivityManager am =
        (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    ConfigurationInfo info = am.getDeviceConfigurationInfo();
    if(info.reqGlEsVersion >= 0x20000) {
      // Open GL ES 2.0 is supported.
      return true;
    }
    return false;
  }

   public void onDrawFrame(GL10 gl) {
    nativeFunctionLock.lock();
    if(!nativeFunctionsRegisted || !surfaceCreated) {
      nativeFunctionLock.unlock();
      return;
    }

    if(!openGLCreated) {
      if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
        return; // Failed to create OpenGL
      }
      openGLCreated = true; // Created OpenGL successfully
    }
    DrawNative(nativeObject); // Draw the new frame
    nativeFunctionLock.unlock();
  }

   public void onSurfaceChanged(GL10 gl, int width, int height) {

    if (DEBUG)
    {
      Log.d("*WEBRTC*", "ViEAndroidGLES20::onSurfaceChanged");
    }

    surfaceCreated = true;
    viewWidth = width;
    viewHeight = height;

    nativeFunctionLock.lock();
    if(nativeFunctionsRegisted) {
      if(CreateOpenGLNative(nativeObject,width,height) == 0)
      {
        openGLCreated = true;
      }
      else
      {
        Log.e("*WEBRTC*", "ViEAndroidGLES20::onSurfaceChanged - failed to openGlCreated!");
      }
    }
    nativeFunctionLock.unlock();
  }

   public void onSurfaceCreated(GL10 gl, EGLConfig config) {

    if (DEBUG)
    {
      Log.d("*WEBRTC*", "ViEAndroidGLES20::onSurfaceCreated");
    }
  }

  public void ReDraw() {
    if(surfaceCreated) {
      // Request the renderer to redraw using the render thread context.
      this.requestRender();
    }
  }

  private native int CreateOpenGLNative(long nativeObject,
                                        int width, int height);
  private native void DrawNative(long nativeObject);

  protected void onAttachedToWindow()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onAttachedToWindow");
      }

      super.onAttachedToWindow();
  }

  protected void onDetachedFromWindow()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onDetachedFromWindow");
      }

      super.onDetachedFromWindow();
  }

  public void onPause()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onPause");
      }

      super.onPause();
  }

  public void onResume()
  {
      if (DEBUG)
      {
          Log.d("*WEBRTC*", "ViEAndroidGLES20::onResume");
      }

      super.onResume();
  }
}
// The activity is about to become visible.
// The activity is about to become visible.
@Override protected void onStart() {

    Log.d("MainActivity", "entering onStart method.");

    super.onStart();

    // The application object owns the surface views used for local capture
    // and remote stream rendering; fetch them and re-attach them to the
    // layouts every time the activity becomes visible.
    CTestApp app = (CTestApp) getApplication();
    m_RemoteView1 = app.GetRemoteVideoView();
    m_RemoteView2 = app.GetRemoteVideoView2();

    if (m_RemoteView1 != null) {
        ((LinearLayout) findViewById(R.id.remoteVideoRenderLayout1)).addView(m_RemoteView1);
    }

    if (m_RemoteView2 != null) {
        ((LinearLayout) findViewById(R.id.remoteVideoRenderLayout2)).addView(m_RemoteView2);
    }
}

// The activity has become visible, it is now resumed.
// The activity has become visible, it is now resumed.
@Override protected void onResume() {

    Log.d("MainActivity", "entering onResume method.");

    super.onResume();

    // A GLSurfaceView must be notified when the activity is paused and resumed.  GLSurfaceView clients
    // are required to call onPause() when the activity pauses and onResume() when the activity resumes.
    // BUG FIX: onStart() tolerates null remote views, but this method
    // dereferenced them unconditionally, crashing with an NPE whenever a
    // view was absent. Guard each call the same way onStart() does.
    if (m_RemoteView1 != null) {
        ((GLSurfaceView) m_RemoteView1).onResume();
    }
    if (m_RemoteView2 != null) {
        ((GLSurfaceView) m_RemoteView2).onResume();
    }
}