C++ 双缓冲在笔记本电脑上不起作用,在台式机上起作用
我最近遇到了一个奇怪的问题。我编写了一个应用程序类,它使用一个非常简单的渲染器在屏幕上绘制一些模型,照相机是可移动的。我在笔记本电脑上运行了这个程序。起初,我注意到屏幕上没有画任何东西(但屏幕被正确的颜色清除了)。然后我注意到,如果我点击窗口装饰框并移动窗口,屏幕就会更新:这样模型就会可见,但不会移动,除非我再次点击并拖动装饰框。我在台式电脑上测试了同一个程序,一切正常,照相机运转平稳。最后,我让程序在笔记本电脑上也能运行了,但前提是我必须设置
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER,0)代码>并禁用缓冲区交换
下面是主要的应用程序类。在执行循环中,我调用应用程序状态堆栈来循环和渲染(应用程序状态实际上拥有渲染器)
如果有任何后果,我的笔记本电脑有intel HD 4000图形,桌面电脑有GTX 670
// Default-constructs the application in an idle state: timers zeroed,
// no window or GL context yet. All real setup happens in _initialize().
App::App() : _running( false ),
_deltaTime( 0u ),
_elapsedTime( 0u ),
_mainWindow( nullptr ),
_glContext(),
_stack() {
//ctor — intentionally empty; members are set via the initializer list
}
// Tears resources down in reverse order of creation: the GL context
// depends on the window, and both depend on SDL being initialized,
// so context -> window -> SDL_Quit.
App::~App() {
SDL_GL_DeleteContext( _glContext );
SDL_DestroyWindow( _mainWindow );
SDL_Quit();
}
/// Main loop with a fixed ~17 ms frame budget (~59 FPS). Each iteration
/// measures the frame delta, pumps events, updates and renders the state
/// stack, then sleeps away whatever is left of the budget.
void App::execute() {
    _initialize();
    static const float millisecondsPerFrame = 17;
    while ( _running ) {
        // Milliseconds elapsed since the previous iteration.
        const uint32_t previousTime = _elapsedTime;
        _elapsedTime = SDL_GetTicks();
        _deltaTime = _elapsedTime - previousTime;

        _processEvents();
        _loop( _deltaTime / 1000.0f );   // simulation wants seconds
        _render();

        // Apply state pushes/pops that were queued during this frame.
        _stack.applyPendingChanges();

        // Sleep off the unused remainder of the frame budget, if any.
        const int consumedTime = SDL_GetTicks() - int ( _elapsedTime );
        if ( millisecondsPerFrame - consumedTime > 0 ) {
            SDL_Delay( uint32_t ( millisecondsPerFrame - consumedTime ) );
        }
    }
}
/// One-time startup: seeds the RNG, creates window + GL context, chooses
/// the swap interval, and pushes the initial application state.
void App::_initialize() {
    // initialize random number generator
    nge::srand();
    _running = true;
    _initializeSDL();
    _initializeOpenGL();
    SDL_GL_MakeCurrent( _mainWindow, _glContext );
    // Attempt to set late swap tearing ("adaptive vsync", interval -1).
    // SDL_GL_SetSwapInterval returns 0 on success and -1 when the
    // requested interval is unsupported; fall back to plain vsync then.
    int res = SDL_GL_SetSwapInterval( -1 );
    if ( res == -1 ) {
        // Fixed the log tag: this code lives in _initialize, not
        // _initializeSDL, so the message no longer points reviewers at
        // the wrong function.
        std::cout << "App::_initialize> " << SDL_GetError() << "\n\n";
        SDL_GL_SetSwapInterval( 1 );
    }
    _stack.registerState<GameState>( AppStateID::Game );
    _stack.pushState( AppStateID::Game );
    _stack.applyPendingChanges();
}
void App::_initializeSDL() {
SDL_Init( SDL_INIT_VIDEO );
SDL_Init( SDL_INIT_TIMER );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MAJOR_VERSION, 3 );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_MINOR_VERSION, 3 );
SDL_GL_SetAttribute( SDL_GL_CONTEXT_PROFILE_MASK,
SDL_GL_CONTEXT_PROFILE_CORE );
SDL_GL_SetAttribute( SDL_GL_ACCELERATED_VISUAL, 1 );
/**
For some reason, on my Samsung Series 9, double buffering does not
work.
*/
SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 0 );
SDL_GL_SetAttribute( SDL_GL_DEPTH_SIZE, 16 );
SDL_GL_SetAttribute( SDL_GL_STENCIL_SIZE, 8 );
//anti-aliasing
SDL_GL_SetAttribute( SDL_GL_MULTISAMPLEBUFFERS, 1 );
SDL_GL_SetAttribute( SDL_GL_MULTISAMPLESAMPLES, 4 );
_mainWindow = SDL_CreateWindow( "window",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
800,
600,
SDL_WINDOW_OPENGL |
SDL_WINDOW_RESIZABLE |
SDL_WINDOW_MAXIMIZED |
SDL_WINDOW_SHOWN );
_glContext = SDL_GL_CreateContext( _mainWindow );
}
void App::_initializeOpenGL() {
//initialize GLEW
glewExperimental = GL_TRUE;
if ( glewInit() != GLEW_OK ) {
std::cerr << "glewInit failed." << std::endl;
std::exit( EXIT_FAILURE );
}
glEnable( GL_DEPTH_TEST );
//enable culling
glEnable( GL_CULL_FACE );
glCullFace( GL_BACK );
glDepthFunc( GL_LEQUAL );
glEnable( GL_TEXTURE_CUBE_MAP_SEAMLESS );
std::cout << "OpenGL version: " << glGetString( GL_VERSION ) << std::endl;
std::cout << "GLSL version: " << glGetString( GL_SHADING_LANGUAGE_VERSION ) << std::endl;
std::cout << "Vendor: " << glGetString( GL_VENDOR ) << std::endl;
std::cout << "Renderer: " << glGetString( GL_RENDERER ) << std::endl << std::endl;
//make sure OpenGL 3.3 is available
ASSERT( GLEW_VERSION_3_3, "OpenGL 3.3 API is not available" );
}
void App::_processEvents() {
SDL_Event event;
while ( SDL_PollEvent( &event ) ) {
if ( event.type == SDL_QUIT ) {
_running = false;
}
}
}
// Advances the active application states by `delta` seconds
// (the state stack owns the actual game/render logic).
void App::_loop( float delta ) {
_stack.loop( delta );
}
void App::_render() {
_stack.render();
//SDL_GL_SwapWindow( _mainWindow );
}
App::App() : _running( false ),
_deltaTime( 0u ),
_elapsedTime( 0u ),
_mainWindow( nullptr ),
_glContext(),
_stack() {
//ctor
}
App::~App() {
SDL_GL_DeleteContext( _glContext );
SDL_DestroyWindow( _mainWindow );
SDL_Quit();
}
void App::execute() {
_initialize();
static const float millisecondsPerFrame = 17;
while ( _running ) {
//get the delta time & update elapsed time
uint32_t oldTime = _elapsedTime;
_elapsedTime = SDL_GetTicks();
_deltaTime = _elapsedTime - oldTime;
_processEvents();
_loop( _deltaTime / 1000.0f );
_render();
//apply possible state changes made to the stack
_stack.applyPendingChanges();
int usedTime = SDL_GetTicks() - int ( _elapsedTime );
//sleep the remainder of the cycle if we didn't use the entire update cycle
if ( millisecondsPerFrame - usedTime > 0 ) {
SDL_Delay( uint32_t ( millisecondsPerFrame - usedTime ) );
}
}
}
void App::_initialize() {
//initialize random number generator
nge::srand();
_running = true;
_initializeSDL();
_initializeOpenGL();
SDL_GL_MakeCurrent( _mainWindow, _glContext );
//attempt to set late swap tearing
int res = SDL_GL_SetSwapInterval( -1 );
//returns 0 on success
//returns -1 if swap interval is not supported
if ( res == -1 ) {
我要检查的第一件事是笔记本电脑上的GPU驱动程序。确保其版本与台式机上的驱动程序版本一致。
第二件事是添加错误打印,例如:
第三件事是检查所请求的缓冲区参数。可能GPU或驱动程序不支持双缓冲、16位深度缓冲,或你请求的其他某个属性。因此,
请调整 initializeSDL() 函数中的各项参数,找出能在笔记本电脑上正常工作的组合。
// Create the window first and bail out loudly instead of continuing
// with a null handle; SDL_GetError() explains what the driver rejected.
window = SDL_CreateWindow("OpenGL Window", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 640, 480, SDL_WINDOW_OPENGL);
if (!window) {
fprintf(stderr, "Couldn't create window: %s\n", SDL_GetError());
return;
}
// A GL context can only be created against a valid window — check it too.
context = SDL_GL_CreateContext(window);
if (!context) {
fprintf(stderr, "Couldn't create context: %s\n", SDL_GetError());
return;
}