libgdx:如何通过另一个正交摄影机获得正交摄影机的结果?
我想在我的舞台上有一个电视屏幕。我有一个主场景,包含电视屏幕和一些其他东西(一些精灵),以及与我要在电视中渲染的内容相对应的场景。因此,我想通过一个正交摄影机渲染电视场景,然后将结果放入类似精灵的东西中,再通过主正交摄影机渲染。如何做到这一点?我知道可以在多个视口中使用多个摄影机,但这些摄影机直接渲染到我的计算机屏幕的像素上。
任何建议都将不胜感激这可以通过从屏幕摄影机的角度将场景渲染到
帧缓冲区
,然后从帧缓冲区
获取纹理区域
并将其渲染到屏幕,然后再次渲染场景来实现
例如:
在上面的示例中,青色矩形显示了场景中摄影机看到的内容,并将其渲染到灰色框中,可以独立地移动场景摄影机和主摄影机
通过从帧缓冲区创建并抓取纹理区域
screenFrameBuffer = new FrameBuffer(Pixmap.Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(),true);
screenTexture = new TextureRegion(screenFrameBuffer.getColorBufferTexture());
screenTexture.flip(false, true);
screenFrameBuffer.begin();
renderScene(screenCamera, Color.DARK_GRAY);
screenFrameBuffer.end();
通过调用screenFrameBuffer.begin()
,可以进行所有渲染调用以仅影响screenFrameBuffer
,在调用screenFrameBuffer.end()
后,下一次绘制调用将再次影响实际屏幕/窗口
因此,在渲染
方法中,您可以首先将场景绘制到帧缓冲区
screenFrameBuffer.begin();
renderScene(screenCamera, Color.DARK_GRAY);
screenFrameBuffer.end();
然后再次绘制,然后将“屏幕”作为精灵:
renderScene(sceneCamera, Color.BLACK);
batch.setProjectionMatrix(sceneCamera.combined);
batch.begin();
batch.draw(screenTexture, -sceneCamera.viewportWidth / 2.0f,-sceneCamera.viewportHeight / 2.0f,0,0,screenCamera.viewportWidth,screenCamera.viewportHeight,1.0f, 1.0f, 0.0f);
batch.end();
以上gif的完整源代码是:
package com.bornander.sandbox;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.*;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.graphics.glutils.FrameBuffer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.RandomXS128;
import com.badlogic.gdx.math.Vector2;
/**
 * Demonstrates rendering a scene through one OrthographicCamera into a
 * FrameBuffer, then drawing that framebuffer's colour texture as a sprite
 * inside a second scene rendered through another camera ("TV screen" effect).
 *
 * Arrow keys move the main (scene) camera; WASD moves the "TV" (screen) camera.
 */
public class MyGdxGame implements ApplicationListener {

    /** A moving ball: position, velocity, radius and draw colour. */
    public static class Ball {
        public Vector2 position = new Vector2();
        public Vector2 velocity = new Vector2();
        public float size = 1.0f;
        public Color color = new Color();

        /**
         * Draws this ball as a filled circle (16 segments).
         * The ShapeRenderer must already be between begin()/end().
         */
        public void render(ShapeRenderer shapeRenderer) {
            shapeRenderer.setColor(color);
            shapeRenderer.circle(position.x, position.y, size, 16);
        }

        /** Advances the position by one frame using the frame's delta time. */
        public void update() {
            position.x += velocity.x * Gdx.graphics.getDeltaTime();
            position.y += velocity.y * Gdx.graphics.getDeltaTime();
        }
    }

    static RandomXS128 rnd = new RandomXS128();

    OrthographicCamera sceneCamera;   // main camera: draws the full scene to the window
    OrthographicCamera screenCamera;  // "TV" camera: draws the scene into the framebuffer
    ShapeRenderer shapeRenderer;
    SpriteBatch batch;
    FrameBuffer screenFrameBuffer;    // off-screen render target for the TV picture
    TextureRegion screenTexture;      // region over the framebuffer's colour attachment
    Ball[] balls;

    /** Returns a uniformly distributed float in [min, max). */
    private static float rnd(float min, float max) {
        return min + rnd.nextFloat() * (max - min);
    }

    @Override
    public void create() {
        float aspectRatio = (float) Gdx.graphics.getWidth() / (float) Gdx.graphics.getHeight();
        sceneCamera = new OrthographicCamera(100.0f, 100.0f / aspectRatio);
        screenCamera = new OrthographicCamera(32.0f, 32.0f / aspectRatio);
        batch = new SpriteBatch();
        // Framebuffer matches the window size; 'true' requests a depth attachment.
        screenFrameBuffer = new FrameBuffer(Pixmap.Format.RGBA8888, Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), true);
        screenTexture = new TextureRegion(screenFrameBuffer.getColorBufferTexture());
        // Framebuffer textures are upside-down relative to screen space, so flip vertically.
        screenTexture.flip(false, true);
        shapeRenderer = new ShapeRenderer();
        balls = new Ball[128];
        for (int i = 0; i < balls.length; ++i) {
            balls[i] = new Ball();
            balls[i].position.set(0, 0);
            balls[i].velocity.set(rnd(-4, 4), rnd(-4, 4));
            balls[i].size = rnd(1, 1); // NOTE: rnd(1, 1) always yields 1 — all balls share radius 1
            balls[i].color.set(rnd(0.5f, 1.0f), rnd(0.5f, 1.0f), rnd(0.5f, 1.0f), 1.0f);
        }
    }

    /**
     * Clears the current render target to {@code background} and draws all
     * balls as seen by {@code camera}. Works both for the framebuffer (when
     * called between FrameBuffer.begin()/end()) and for the default target.
     */
    private void renderScene(Camera camera, Color background) {
        camera.update();
        Gdx.gl.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        Gdx.gl.glClearColor(background.r, background.g, background.b, background.a);
        Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
        shapeRenderer.setProjectionMatrix(camera.combined);
        shapeRenderer.begin(ShapeRenderer.ShapeType.Filled);
        for (int i = 0; i < balls.length; ++i) {
            balls[i].render(shapeRenderer);
        }
        shapeRenderer.end();
    }

    @Override
    public void render() {
        float cs = 8.0f; // camera scroll speed, world units per second
        for (int i = 0; i < balls.length; ++i)
            balls[i].update();
        // NOTE(review): UP/DOWN (and W/S) move the cameras in the opposite
        // vertical direction to the key pressed — possibly intentional for the
        // original demo; confirm before "fixing".
        if (Gdx.input.isKeyPressed(Input.Keys.LEFT))
            sceneCamera.position.x -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.RIGHT))
            sceneCamera.position.x += cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.UP))
            sceneCamera.position.y -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.DOWN))
            sceneCamera.position.y += cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.A))
            screenCamera.position.x -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.D))
            screenCamera.position.x += cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.W))
            screenCamera.position.y -= cs * Gdx.graphics.getDeltaTime();
        if (Gdx.input.isKeyPressed(Input.Keys.S))
            screenCamera.position.y += cs * Gdx.graphics.getDeltaTime();

        // Render to framebuffer, clear the background to DARK_GRAY
        screenFrameBuffer.begin();
        renderScene(screenCamera, Color.DARK_GRAY);
        screenFrameBuffer.end();

        // Render to window/screen, clear background to BLACK
        renderScene(sceneCamera, Color.BLACK);

        // Draw the framebuffer's texture as a sprite using a normal SpriteBatch
        batch.setProjectionMatrix(sceneCamera.combined);
        batch.begin();
        batch.draw(screenTexture, -sceneCamera.viewportWidth / 2.0f, -sceneCamera.viewportHeight / 2.0f, 0, 0, screenCamera.viewportWidth, screenCamera.viewportHeight, 1.0f, 1.0f, 0.0f);
        batch.end();

        // This just draws the outline of what the screen camera looks at
        shapeRenderer.setProjectionMatrix(sceneCamera.combined);
        shapeRenderer.begin(ShapeRenderer.ShapeType.Line);
        shapeRenderer.setColor(Color.CYAN);
        shapeRenderer.rect(
                screenCamera.position.x - screenCamera.viewportWidth / 2.0f,
                screenCamera.position.y - screenCamera.viewportHeight / 2.0f,
                screenCamera.viewportWidth,
                screenCamera.viewportHeight
        );
        shapeRenderer.end();
    }

    @Override
    public void dispose() {
        // FIX: these wrap native GL resources (Disposable) and must be freed
        // explicitly; the original left this method empty and leaked them.
        batch.dispose();
        shapeRenderer.dispose();
        screenFrameBuffer.dispose();
    }

    @Override
    public void resize(int width, int height) {
        // Cameras and framebuffer are sized once in create(); resizing the
        // window is not handled by this demo.
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }
}
package com.bornander.sandbox;
导入com.badlogic.gdx.ApplicationListener;
导入com.badlogic.gdx.gdx;
导入com.badlogic.gdx.Input;
导入com.badlogic.gdx.graphics.*;
导入com.badlogic.gdx.graphics.g2d.SpriteBatch;
导入com.badlogic.gdx.graphics.g2d.TextureRegion;
导入com.badlogic.gdx.graphics.glutils.FrameBuffer;
导入com.badlogic.gdx.graphics.glutils.shaperender;
导入com.badlogic.gdx.math.RandomXS128;
导入com.badlogic.gdx.math.Vector2;
公共类MyGdxGame实现ApplicationListener{
公共静态类球{
公共向量2位置=新向量2();
公共向量2速度=新向量2();
公共浮点数=1.0f;
公共颜色=新颜色();
公共无效渲染(ShaperEnder ShaperEnder){
设置颜色(颜色);
圆(位置x,位置y,尺寸16);
}
公共无效更新(){
position.x+=velocity.x*Gdx.graphics.getDeltaTime();
position.y+=velocity.y*Gdx.graphics.getDeltaTime();
}
}
静态RandomXS128 rnd=新的RandomXS128();
正交摄影机场景镜头;
正交摄影机;屏幕摄影机;
shaperender shaperender;
喷雾批;
帧缓冲区屏幕帧缓冲区;
纹理区域纹理;
球[]球;
专用静态浮动rnd(最小浮动、最大浮动){
返回min+rnd.nextFloat()*(max-min);
}
@凌驾
公共void create(){
float aspectRatio=(float)Gdx.graphics.getWidth()/(float)Gdx.graphics.getHeight();
sceneCamera=新的正交摄影机(100.0f,100.0f/aspectRatio);
屏幕摄像机=新的正交摄像机(32.0f,32.0f/aspectRatio);
批次=新的SpriteBatch();
screenFrameBuffer=新的帧缓冲区(Pixmap.Format.rgba888,Gdx.graphics.getWidth(),Gdx.graphics.getHeight(),true);
screenTexture=新的TextureRegion(screenFrameBuffer.getColorBufferTexture());
screenTexture.flip(假、真);
ShaperEnder=新的ShaperEnder();
球=新球[128];
对于(int i=0;i