Android 图像目标 Vuforia 示例
我想制作自己的跟踪器对象，就像在图像目标（Image Targets）示例中一样。我想用另一个 3D 形状替换茶壶，问题是我不能很好地理解代码。代码如下。茶壶类（Teapot）有两个函数：setVerts() 和 setIndexes()，包含大量索引和顶点数据；而 ImageTargetRenderer 是：
// The renderer class for the ImageTargets sample.
//
// Bridges the Android GLSurfaceView render loop and the Vuforia SDK: every
// frame it draws the camera video background, then renders an augmentation
// on top of each trackable Vuforia reported for that frame — a teapot in
// normal tracking mode, or a buildings model when extended tracking is on.
public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
    private static final String LOGTAG = "ImageTargetRenderer";

    private SampleApplicationSession vuforiaAppSession;
    private ImageTargets mActivity;

    // Textures injected by the activity via setTextures(); initRendering()
    // uploads them to the GPU. Must be set before the GL surface is created,
    // otherwise initRendering() will NPE on the for-each loop.
    private Vector<Texture> mTextures;

    // Shader program and its attribute/uniform locations, resolved once in
    // initRendering() and reused every frame.
    private int shaderProgramID;
    private int vertexHandle;
    private int normalHandle;
    private int textureCoordHandle;
    private int mvpMatrixHandle;
    private int texSampler2DHandle;

    // Geometry for the default augmentation (vertices/normals/texcoords and
    // triangle indices are hard-coded inside the Teapot class).
    private Teapot mTeapot;

    // Uniform scale applied to the buildings model in extended tracking mode.
    private float kBuildingScale = 12.0f;
    private SampleApplication3DModel mBuildingsModel;

    // Cached Vuforia renderer singleton (assigned in initRendering()).
    private Renderer mRenderer;

    // Set by the activity once Vuforia is fully started; onDrawFrame skips
    // rendering until then.
    boolean mIsActive = false;

    // Uniform scale applied to the teapot in normal tracking mode.
    private static final float OBJECT_SCALE_FLOAT = 3.0f;

    public ImageTargetRenderer(ImageTargets activity,
        SampleApplicationSession session)
    {
        mActivity = activity;
        vuforiaAppSession = session;
    }

    // Called to draw the current frame.
    @Override
    public void onDrawFrame(GL10 gl)
    {
        if (!mIsActive)
            return;

        // Call our function to render content
        renderFrame();
    }

    // Called when the surface is created or recreated.
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config)
    {
        Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");

        initRendering();

        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        vuforiaAppSession.onSurfaceCreated();
    }

    // Called when the surface changed size.
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height)
    {
        Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");

        // Call Vuforia function to handle render surface size changes:
        vuforiaAppSession.onSurfaceChanged(width, height);
    }

    // One-time GL setup: create models, upload textures, compile the shader
    // program, and resolve attribute/uniform handles. Runs on the GL thread
    // from onSurfaceCreated, so it is also re-run after a context loss.
    private void initRendering()
    {
        mTeapot = new Teapot();
        mRenderer = Renderer.getInstance();

        // Clear to transparent black when Vuforia needs an alpha channel
        // (e.g. see-through devices), opaque black otherwise.
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f
            : 1.0f);

        // Upload every texture the activity handed us as a GL_TEXTURE_2D
        // with bilinear filtering.
        for (Texture t : mTextures)
        {
            GLES20.glGenTextures(1, t.mTextureID, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
                t.mWidth, t.mHeight, 0, GLES20.GL_RGBA,
                GLES20.GL_UNSIGNED_BYTE, t.mData);
        }

        shaderProgramID = SampleUtils.createProgramFromShaderSrc(
            CubeShaders.CUBE_MESH_VERTEX_SHADER,
            CubeShaders.CUBE_MESH_FRAGMENT_SHADER);

        vertexHandle = GLES20.glGetAttribLocation(shaderProgramID,
            "vertexPosition");
        normalHandle = GLES20.glGetAttribLocation(shaderProgramID,
            "vertexNormal");
        textureCoordHandle = GLES20.glGetAttribLocation(shaderProgramID,
            "vertexTexCoord");
        mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID,
            "modelViewProjectionMatrix");
        texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID,
            "texSampler2D");

        // The buildings model is loaded from a text asset; a failure is
        // logged but non-fatal (the teapot path still works).
        try
        {
            mBuildingsModel = new SampleApplication3DModel();
            mBuildingsModel.loadModel(mActivity.getResources().getAssets(),
                "ImageTargets/Buildings.txt");
        } catch (IOException e)
        {
            Log.e(LOGTAG, "Unable to load buildings");
        }

        // Hide the Loading Dialog
        mActivity.loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
    }

    // Renders one frame: video background first, then one augmentation per
    // trackable result found in this frame's State.
    private void renderFrame()
    {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        // begin() returns the tracking State for this frame and must be
        // paired with mRenderer.end() below.
        State state = mRenderer.begin();
        mRenderer.drawVideoBackground();

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);

        // Handle face culling: the triangle winding must be flipped when the
        // video background is mirrored (front camera / reflection on).
        GLES20.glEnable(GLES20.GL_CULL_FACE);
        GLES20.glCullFace(GLES20.GL_BACK);
        // FIX(consistency): use the cached mRenderer singleton instead of
        // re-fetching Renderer.getInstance().
        if (mRenderer.getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON)
            GLES20.glFrontFace(GLES20.GL_CW); // Front camera
        else
            GLES20.glFrontFace(GLES20.GL_CCW); // Back camera

        // Did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
        {
            TrackableResult result = state.getTrackableResult(tIdx);
            Trackable trackable = result.getTrackable();
            printUserData(trackable);

            // Convert the 6-DOF pose into a column-major OpenGL model-view
            // matrix.
            Matrix44F modelViewMatrix_Vuforia = Tool
                .convertPose2GLMatrix(result.getPose());
            float[] modelViewMatrix = modelViewMatrix_Vuforia.getData();

            // Pick a texture by target name: "stones" -> 0, "tarmac" -> 2,
            // anything else -> 1.
            int textureIndex = trackable.getName().equalsIgnoreCase("stones") ? 0
                : 1;
            textureIndex = trackable.getName().equalsIgnoreCase("tarmac") ? 2
                : textureIndex;

            // Deal with the modelview and projection matrices.
            float[] modelViewProjection = new float[16];

            if (!mActivity.isExtendedTrackingActive())
            {
                // Teapot: lift it off the target plane and scale it up.
                Matrix.translateM(modelViewMatrix, 0, 0.0f, 0.0f,
                    OBJECT_SCALE_FLOAT);
                Matrix.scaleM(modelViewMatrix, 0, OBJECT_SCALE_FLOAT,
                    OBJECT_SCALE_FLOAT, OBJECT_SCALE_FLOAT);
            } else
            {
                // Buildings: stand the model upright relative to the target.
                Matrix.rotateM(modelViewMatrix, 0, 90.0f, 1.0f, 0, 0);
                Matrix.scaleM(modelViewMatrix, 0, kBuildingScale,
                    kBuildingScale, kBuildingScale);
            }
            Matrix.multiplyMM(modelViewProjection, 0, vuforiaAppSession
                .getProjectionMatrix().getData(), 0, modelViewMatrix, 0);

            // Activate the shader program and bind the vertex/normal/tex
            // coords.
            GLES20.glUseProgram(shaderProgramID);

            if (!mActivity.isExtendedTrackingActive())
            {
                GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT,
                    false, 0, mTeapot.getVertices());
                GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT,
                    false, 0, mTeapot.getNormals());
                GLES20.glVertexAttribPointer(textureCoordHandle, 2,
                    GLES20.GL_FLOAT, false, 0, mTeapot.getTexCoords());

                GLES20.glEnableVertexAttribArray(vertexHandle);
                GLES20.glEnableVertexAttribArray(normalHandle);
                GLES20.glEnableVertexAttribArray(textureCoordHandle);

                // Activate texture 0, bind it, and pass to shader.
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                    mTextures.get(textureIndex).mTextureID[0]);
                GLES20.glUniform1i(texSampler2DHandle, 0);

                // Pass the model view projection matrix to the shader.
                GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false,
                    modelViewProjection, 0);

                // Finally draw the teapot (indexed triangles).
                GLES20.glDrawElements(GLES20.GL_TRIANGLES,
                    mTeapot.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT,
                    mTeapot.getIndices());

                // Disable the enabled arrays.
                GLES20.glDisableVertexAttribArray(vertexHandle);
                GLES20.glDisableVertexAttribArray(normalHandle);
                GLES20.glDisableVertexAttribArray(textureCoordHandle);
            } else
            {
                // Buildings are drawn with culling off for this draw only.
                GLES20.glDisable(GLES20.GL_CULL_FACE);
                GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT,
                    false, 0, mBuildingsModel.getVertices());
                GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT,
                    false, 0, mBuildingsModel.getNormals());
                GLES20.glVertexAttribPointer(textureCoordHandle, 2,
                    GLES20.GL_FLOAT, false, 0, mBuildingsModel.getTexCoords());

                GLES20.glEnableVertexAttribArray(vertexHandle);
                GLES20.glEnableVertexAttribArray(normalHandle);
                GLES20.glEnableVertexAttribArray(textureCoordHandle);

                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                // NOTE(review): hard-coded index 3 — assumes the activity
                // loads the buildings texture as the 4th entry; confirm
                // against the activity's texture loading order.
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                    mTextures.get(3).mTextureID[0]);
                GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false,
                    modelViewProjection, 0);
                GLES20.glUniform1i(texSampler2DHandle, 0);

                GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0,
                    mBuildingsModel.getNumObjectVertex());

                // FIX: mirror the teapot branch — disable the attribute
                // arrays and restore face culling so GL state does not leak
                // into subsequent draws (the original left all three arrays
                // enabled and GL_CULL_FACE disabled).
                GLES20.glDisableVertexAttribArray(vertexHandle);
                GLES20.glDisableVertexAttribArray(normalHandle);
                GLES20.glDisableVertexAttribArray(textureCoordHandle);
                GLES20.glEnable(GLES20.GL_CULL_FACE);

                SampleUtils.checkGLError("Renderer DrawBuildings");
            }

            SampleUtils.checkGLError("Render Frame");
        }

        GLES20.glDisable(GLES20.GL_DEPTH_TEST);

        mRenderer.end();
    }

    // Logs the user data string (if any) attached to a trackable.
    private void printUserData(Trackable trackable)
    {
        String userData = (String) trackable.getUserData();
        Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
    }

    // Must be called by the activity before the GL surface is created so
    // initRendering() has textures to upload.
    public void setTextures(Vector<Texture> textures)
    {
        mTextures = textures;
    }
}
//ImageTargets示例的渲染器类。
公共类ImageTargetRenderer实现GLSurfaceView.Renderer
{
私有静态最终字符串LOGTAG=“ImageTargetRenderer”;
私有样本应用程序会话vuforiaAppSession;
私密性;
私有向量文本;
私有int shaderProgramID;
私人内勤;
私有int-normalHandle;
私有int-TextureCordHandle;
私有int mvpMatrixHandle;
私有int-texhandle;
私人茶壶;
私有浮动kBuildingScale=12.0f;
私有样本应用3DModel mBuildingsModel;
私家侦探;
布尔值mIsActive=false;
私有静态最终浮动对象\u比例\u浮动=3.0f;
公共ImageTargetRenderer(ImageTargets活动,
示例应用程序会话(会话)
{
活动性=活动性;
vuforiaAppSession=会话;
}
//调用以绘制当前帧。
@凌驾
公共框架(GL10 gl)
{
如果(!误用)
返回;
//调用我们的函数来呈现内容
renderFrame();
}
//创建或重新创建曲面时调用。
@凌驾
已创建曲面上的公共void(GL10 gl、EGLConfig配置)
{
Log.d(LOGTAG,“GLRenderer.onSurfaceCreated”);
initRendering();
//首次使用后调用Vuforia函数(重新)初始化渲染
//或在OpenGL ES上下文丢失后(例如,在暂停/恢复后):
vuforiaAppSession.onSurfaceCreated();
}
//当曲面大小更改时调用。
@凌驾
表面上的公共空隙已更改(GL10 gl,整型宽度,整型高度)
{
Log.d(LOGTAG,“GLRenderer.onSurfaceChanged”);
//调用Vuforia函数以处理渲染曲面大小更改:
vuforiaAppSession.onSurfaceChanged(宽度、高度);
}
//用于初始化渲染器的函数。
私有void initRendering()
{
mTeapot=新茶壶();
mrender=Renderer.getInstance();
GLES20.glClearColor(0.0f、0.0f、0.0f、Vuforia.requiresAlpha()?0.0f
:1.0f);
对于(纹理t:mTextures)
{
GLES20.glGenTextures(1,t.mTextureID,0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,t.mTextureID[0]);
GLES20.glTexParameterf(GLES20.GL_纹理_2D,
GLES20.GL_纹理_最小_过滤器,GLES20.GL_线性);
GLES20.glTexParameterf(GLES20.GL_纹理_2D,
GLES20.GL_纹理(MAG_过滤器,GLES20.GL_线性);
GLES20.glTexImage2D(GLES20.GL_纹理_2D,0,GLES20.GL_RGBA,
t、 mWidth,t.mHeight,0,GLES20.GL_RGBA,
GLES20.GL_无符号字节,t.mData);
}
shaderProgramID=SampleUtils.createProgramFromShaderSrc(
CubeShaders.CUBE_网格_顶点_着色器,
立方体着色器。立方体\网格\碎片\着色器);
vertexHandle=GLES20.GlGetAttriblLocation(着色器程序,
“博览会”);
normalHandle=GLES20.glGetAttriblLocation(shaderProgramID,
“顶点正常”);
TextureCordHandle=GLES20.glGetAttriblLocation(shaderProgramID,
"顶点";;
mvpMatrixHandle=GLES20.glGetUniformLocation(shaderProgramID,
“modelViewProjectionMatrix”);
texSampler2DHandle=GLES20.glGetUniformLocation(shaderProgramID,
“第2D条”);
尝试
{
mBuildingsModel=新样本应用程序3DModel();
mBuildingsModel.loadModel(mActivity.getResources().getAssets(),
“ImageTargets/Buildings.txt”);
}捕获(IOE异常)
{
Log.e(LOGTAG,“无法加载建筑物”);
}
//隐藏加载对话框
mActivity.loadingDialogHandler
.sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
}
//渲染函数。
私有void renderFrame()
{
GLES20.glClear(GLES20.GL_颜色_缓冲_位| GLES20.GL_深度_缓冲_位);
State State=mrender.begin();
mrender.drawVideoBackground();
GLES20.glEnable(GLES20.Glu深度试验);
//在处理人脸剔除时,我们需要检测是否使用了反射
//确定剔除的方向
GLES20.glEnable(GLES20.Glu CULL_面);
GLES20.glCullFace(GLES20.Glu-BACK);
if(Renderer.getInstance().getVideoBackgroundConfig().getReflection()==VIDEO\u BACKGROUND\u REFLECTION.VIDEO\u BACKGROUND\u REFLECTION\u ON)
GLES20.glFrontFace(GLES20.GL_CW);//前置摄像头
其他的
GLES20.glFrontFace(GLES20.GL_CCW);//后摄像头
//我们在这架飞机上找到任何可追踪的东西了吗?
对于(int-tIdx=0;tIdx