压缩纹理在Android中变为白色
我对android和OpenGL开发都是新手 我试着只做一个带有纹理的立方体,但我需要在ETC1压缩中对这个纹理进行压缩,我制作了一个类似于develope.android网站的代码,但代码只在模拟器上运行 我在两台设备上进行测试 1) Nexus mobile,在这个设备上立方体没有出现,我在互联网上搜索,我发现这个设备原来的安卓是2.1,并且它升级到了2.3,这是为什么让这个设备能够画出立方体呢 2) 摩托罗拉Xoom平板电脑,它是安卓3.1设备,立方体出现在这里,但颜色为白色,而不是未压缩的纹理,它支持CTE1,没有OPENGL错误 压缩的纹理是PNG 265*265/我把压缩的纹理放在raw文件夹中 代码如下所示压缩纹理在Android中变为白色,android,opengl-es,textures,Android,Opengl Es,Textures,我对android和OpenGL开发都是新手 我试着只做一个带有纹理的立方体,但我需要在ETC1压缩中对这个纹理进行压缩,我制作了一个类似于develope.android网站的代码,但代码只在模拟器上运行 我在两台设备上进行测试 1) Nexus mobile,在这个设备上立方体没有出现,我在互联网上搜索,我发现这个设备原来的安卓是2.1,并且它升级到了2.3,这是为什么让这个设备能够画出立方体呢 2) 摩托罗拉Xoom平板电脑,它是安卓3.1设备,立方体出现在这里,但颜色为白色,而不是未压缩
public class TextureCube {
private String TAG;
private Context context;
private FloatBuffer vertexBuffer; // Buffer for vertex-array
private FloatBuffer texBuffer; // Buffer for texture-coords-array
private float[] vertices = { // Vertices for a face
-1.0f, -1.0f, 0.0f, // 0. left-bottom-front
1.0f, -1.0f, 0.0f, // 1. right-bottom-front
-1.0f, 1.0f, 0.0f, // 2. left-top-front
1.0f, 1.0f, 0.0f // 3. right-top-front
};
float[] texCoords = { // Texture coords for the above face
0.0f, 1.0f, // A. left-bottom
1.0f, 1.0f, // B. right-bottom
0.0f, 0.0f, // C. left-top
1.0f, 0.0f // D. right-top
};
int[] textureIDs = new int[1]; // Array for 1 texture-ID
// Constructor - Set up the buffers
public TextureCube(Context context) {
this.context = context;
TAG = "Sam Messages: " + this.getClass().getName();
// Setup vertex-array buffer. Vertices in float. An float has 4 bytes
ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
vbb.order(ByteOrder.nativeOrder()); // Use native byte order
vertexBuffer = vbb.asFloatBuffer(); // Convert from byte to float
vertexBuffer.put(vertices); // Copy data into buffer
vertexBuffer.position(0); // Rewind
// Setup texture-coords-array buffer, in float. An float has 4 bytes
ByteBuffer tbb = ByteBuffer.allocateDirect(texCoords.length * 4);
tbb.order(ByteOrder.nativeOrder());
texBuffer = tbb.asFloatBuffer();
texBuffer.put(texCoords);
texBuffer.position(0);
}
// Draw the shape
public void draw(GL10 gl) {
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glFrontFace(GL10.GL_CCW); // Front face in counter-clockwise orientation
gl.glEnable(GL10.GL_CULL_FACE); // Enable cull face
gl.glCullFace(GL10.GL_BACK); // Cull the back face (don't display)
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Enable texture-coords-array
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texBuffer); // Define texture-coords buffer
// front
gl.glPushMatrix();
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// left
gl.glPushMatrix();
gl.glRotatef(270.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// back
gl.glPushMatrix();
gl.glRotatef(180.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// right
gl.glPushMatrix();
gl.glRotatef(90.0f, 0.0f, 1.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// top
gl.glPushMatrix();
gl.glRotatef(270.0f, 1.0f, 0.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
// bottom
gl.glPushMatrix();
gl.glRotatef(90.0f, 1.0f, 0.0f, 0.0f);
gl.glTranslatef(0.0f, 0.0f, 1.0f);
gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
gl.glPopMatrix();
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY); // Disable
// texture-coords-array
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
gl.glDisable(GL10.GL_CULL_FACE);
}
// Load an image into GL texture
public void loadTexture(GL10 gl) {
Boolean loadCompressed = true;
if (loadCompressed) {
/****************************************************/
/** LOAD A COMPRESSED TEXTURE IMAGE */
/***********************************/
Log.w(TAG, ": ETC1 texture support: " + ETC1Util.isETC1Supported());
try {
ETC1Util.loadTexture(GLES20 .GL_TEXTURE_2D, 0, 0,
GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5,
context.getResources().openRawResource(R.raw.pic3));
Log.w(TAG, ": OpenGL Error -After LoadTexture()-:" + gl.glGetError());
Log.w(TAG,"OpenGL Extensions: " + gl.glGetString(GL10.GL_EXTENSIONS));
} catch (IOException e) {
Log.w(TAG, ": Could not load texture: " + e);
} finally {
Log.w(TAG, ": OpenGL Error -In Final()-:" + gl.glGetError());
}
} else {
/*****************************************************/
/** LOAD A TEXTURE IMAGE */
/************************/
gl.glGenTextures(1, textureIDs, 0); // Generate texture-ID array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureIDs[0]); // Bind to texture ID Set up texture filters
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,GL10.GL_LINEAR);
InputStream istream = context.getResources().openRawResource(R.drawable.pic5);
Bitmap bitmap;
try {
bitmap = BitmapFactory.decodeStream(istream);
} finally {
try {
istream.close();
} catch (IOException e) {
}
}
// Build Texture from loaded bitmap for the currently-bind texture
// ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
/******************************************************/
}
}
无法为压缩纹理加载设置纹理名称,这两个函数调用
gl.glGenTextures(1, textureIDs, 0); // Generate texture-ID array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureIDs[0]); // Bind to texture
这样做,所以把它们放在函数的开头
“白色”纹理通常表示纹理不完整。如果启用mipmap但未提供所有必需的mipmap级别,则通常会发生这种情况。在上面的代码中,您禁用未压缩纹理的mipmapping:
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,GL10.GL_LINEAR);
但是,对于compressed,请保持原样–如果您将这两个调用放在名称生成之后,即如果(compressed),也放在if(compressed)
之前,则会发生什么情况:
datenwolf,我不知道该如何感谢你,伙计,这是有效的,但我真的不太明白你的意思,你能帮我寄一本好的教程给我吗?谢谢you@Samy:这只是OpenGL纹理创建的基本模式:使用
glGenTexture
将纹理名称分配给目标纹理单元,使用glBindTexture
设置纹理参数(使用mipmapping、过滤器类型)通过glTextureParameter
,设置纹理环境(多纹理所需),并最终使用直接glTexImage2D
加载实际纹理数据,或使用最终将调用它的某个辅助函数。深入教程是OpenGL编程指南中关于纹理的章节,NeHe纹理教程非常感谢您的快速帮助
public void loadTexture(GL10 gl) {
gl.glGenTextures(1, textureIDs, 0);
gl.glBindTexture(GL10.GL_TEXTURE_2D, textureIDs[0]);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,GL10.GL_LINEAR);
Boolean loadCompressed = true;
if (loadCompressed) {
/****************************************************/
/** LOAD A COMPRESSED TEXTURE IMAGE */
/***********************************/
Log.w(TAG, ": ETC1 texture support: " + ETC1Util.isETC1Supported());
try {
ETC1Util.loadTexture(GLES20 .GL_TEXTURE_2D, 0, 0,
GLES20.GL_RGB, GLES20.GL_UNSIGNED_SHORT_5_6_5,
context.getResources().openRawResource(R.raw.pic3));
Log.w(TAG, ": OpenGL Error -After LoadTexture()-:" + gl.glGetError());
Log.w(TAG,"OpenGL Extensions: " + gl.glGetString(GL10.GL_EXTENSIONS));
} catch (IOException e) {
Log.w(TAG, ": Could not load texture: " + e);
} finally {
Log.w(TAG, ": OpenGL Error -In Final()-:" + gl.glGetError());
}
} else {
/*****************************************************/
/** LOAD A TEXTURE IMAGE */
/************************/
InputStream istream = context.getResources().openRawResource(R.drawable.pic5);
Bitmap bitmap;
try {
bitmap = BitmapFactory.decodeStream(istream);
} finally {
try {
istream.close();
} catch (IOException e) {
}
}
// Build Texture from loaded bitmap for the currently-bind texture
// ID
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
/******************************************************/
}
}