How do I replace the cube object with a file.obj in Android's GLSurfaceView?


I am following a tutorial for creating 3D objects on Android. Lesson six of that tutorial (texture filtering) builds a cube, and I want to replace the cube with my own object, a strawberry. To show the object in the view I parse its file.obj in my renderer class, but what the view displays is a jumble of random triangles instead of the model.
Here is my parsing code:

public ObjLoader(Context mActivityContext) {
    FileReader fr;
    String str;

    ArrayList<Float> tempModelVertices = new ArrayList<Float>();
    ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
    ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
    ArrayList<Integer> facesM = new ArrayList<Integer>();
    ArrayList<Integer> facesT = new ArrayList<Integer>();
    ArrayList<Integer> facesN = new ArrayList<Integer>();

    try {
        fr = new FileReader(new File("model/straw_obj"));
        BufferedReader br = new BufferedReader(fr);
        while((str = br.readLine())!=null){
            if(str.startsWith("f")){
                String[] strAr = str.replaceAll("f", "").trim().split(" ");
                for(String s : strAr){
                    String[] cornerAr = s.split("/");
                    facesM.add(Integer.parseInt(cornerAr[0].trim())-1);
                    facesT.add(Integer.parseInt(cornerAr[1].trim())-1);
                    facesN.add(Integer.parseInt(cornerAr[2].trim())-1);
                }
            }
            else if(str.startsWith("vt")){
                String[] strAr = str.replaceAll("vt", "").trim().split(" ");
                tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
                tempTextureVertices.add(-1*Float.valueOf(strAr[1].trim()));
            }
            else if(str.startsWith("vn")){
                String[] strAr = str.replaceAll("vn", "").trim().split(" ");
                tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
            }
            else if(str.startsWith("v")){               
                String[] strAr = str.replaceAll("v", "").trim().split(" ");
                tempModelVertices.add(Float.valueOf(strAr[0].trim()));
                tempModelVertices.add(Float.valueOf(strAr[1].trim()));
                tempModelVertices.add(Float.valueOf(strAr[2].trim()));      
            }
        }
        //Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
    } catch (IOException e) {
        // TODO Auto-generated catch block
        Log.v(TAG, "error");
    }
    Log.v(TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));

    ModelPositionData = new float[facesM.size()];
    ModelTextureCoordinateData = new float[facesT.size()];
    ModelNormalData = new float[facesN.size()];

    for(int i=0; i<facesM.size(); i++){
        ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
    }
    for(int i=0; i<facesT.size(); i++){
        ModelTextureCoordinateData[i] = tempTextureVertices.get(facesT.get(i));
    }
    for(int i=0; i<facesN.size(); i++){
        ModelNormalData[i] = tempNormalVertices.get(facesN.get(i));
    }
}
// private final FloatBuffer fields for the 3D coordinate data (model and plane)

/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;

/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;

/** This will be used to pass in the light position. */
private int mLightPosHandle;

/** This will be used to pass in the texture. */
private int mTextureUniformHandle;

/** This will be used to pass in model position information. */
private int mPositionHandle;

/** This will be used to pass in model normal information. */
private int mNormalHandle;

/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;

/** How many bytes per float. */
private final int mBytesPerFloat = 4;   

/** Size of the position data in elements. */
private final int mPositionDataSize = 3;    

/** Size of the normal data in elements. */
private final int mNormalDataSize = 3;

/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;

/** Used to hold a light centered on the origin in model space. We need a 4th coordinate so we can get translations to work when
 *  we multiply this by our transformation matrices. */
private final float[] mLightPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};

/** Used to hold the current position of the light in world space (after transformation via model matrix). */
private final float[] mLightPosInWorldSpace = new float[4];

/** Used to hold the transformed position of the light in eye space (after transformation via modelview matrix) */
private final float[] mLightPosInEyeSpace = new float[4];

/** This is a handle to our cube shading program. */
private int mProgramHandle;

/** This is a handle to our light point program. */
private int mPointProgramHandle;

/** These are handles to our texture data. */
private int mTextureDataHandle;
// private int mGrassDataHandle;

/** Temporary place to save the min and mag filter, in case the activity was restarted. */
private int mQueuedMinFilter;
private int mQueuedMagFilter;

// These still work without volatile, but refreshes are not guaranteed to happen.                   
public volatile float mDeltaX;                  
public volatile float mDeltaY;                      


public TesterRenderer(final Context activityContext)
{   
    mActivityContext = activityContext;

    ObjLoader obj = new ObjLoader(mActivityContext);

    mModelPositions = ByteBuffer.allocateDirect(obj.ModelPositionData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();                            
    mModelPositions.put(obj.ModelPositionData).position(0);

    mModelNormals = ByteBuffer.allocateDirect(obj.ModelNormalData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();                            
    mModelNormals.put(obj.ModelNormalData).position(0);

    mModelTextureCoordinates = ByteBuffer.allocateDirect(obj.ModelTextureCoordinateData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mModelTextureCoordinates.put(obj.ModelTextureCoordinateData).position(0);
}

@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) 
{
    // Set the background clear color to black.
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    // Use culling to remove back faces.
    GLES20.glEnable(GLES20.GL_CULL_FACE);

    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // The below glEnable() call is a holdover from OpenGL ES 1, and is not needed in OpenGL ES 2.
    // Enable texture mapping
    // GLES20.glEnable(GLES20.GL_TEXTURE_2D);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = -0.5f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = -5.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    // Set the view matrix. This matrix can be said to represent the camera position.
    // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
    // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);        

    final String vertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_vertex_shader_tex_and_light);           
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_fragment_shader_tex_and_light);           

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);       
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);     

    mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle, 
            new String[] {"a_Position",  "a_Normal", "a_TexCoordinate"});                                                                                                  

    // Define a simple shader program for our point.
    final String pointVertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_vertex_shader);                   
    final String pointFragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_fragment_shader);

    final int pointVertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, pointVertexShader);
    final int pointFragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, pointFragmentShader);
    mPointProgramHandle = ShaderHelper.createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle, 
            new String[] {"a_Position"}); 

    // Load the texture
    mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.strawberry_texture);        
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    // mGrassDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.noised_grass_public_domain);
    // GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);

    if (mQueuedMinFilter != 0)
    {
        setMinFilter(mQueuedMinFilter);
    }

    if (mQueuedMagFilter != 0)
    {
        setMagFilter(mQueuedMagFilter);
    }

    // Initialize the accumulated rotation matrix
    Matrix.setIdentityM(mAccumulatedRotation, 0);
}   

@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) 
{
    // Set the OpenGL viewport to the same size as the surface.
    GLES20.glViewport(0, 0, width, height);

    // Create a new perspective projection matrix. The height will stay the same
    // while the width will vary as per aspect ratio.
    final float ratio = (float) width / height;
    final float left = -ratio;
    final float right = ratio;
    final float bottom = -1.0f;
    final float top = 1.0f;
    final float near = 1.0f;
    final float far = 1000.0f;

    Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}   

@Override
public void onDrawFrame(GL10 glUnused) 
{
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);                    

    // Do a complete rotation every 10 seconds.
    long time = SystemClock.uptimeMillis() % 10000L;
    long slowTime = SystemClock.uptimeMillis() % 100000L; 
    float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
    float slowAngleInDegrees = (360.0f / 100000.0f) * ((int) slowTime); 

    // Set our per-vertex lighting program.
    GLES20.glUseProgram(mProgramHandle);

    // Set program handles for cube drawing.
    mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
    mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix"); 
    mLightPosHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_LightPos");
    mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
    mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");        
    mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal"); 
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");                        

    // Calculate position of the light. Rotate and then push into the distance.
    Matrix.setIdentityM(mLightModelMatrix, 0);
    Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, -2.0f);      
    Matrix.rotateM(mLightModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
    Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, 3.5f);

    Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);                        

    // Draw a cube.
    // Translate the cube into the screen.
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);     

    // Set a matrix that contains the current rotation.
    Matrix.setIdentityM(mCurrentRotation, 0);        
    Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f);
    Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f);
    mDeltaX = 0.0f;
    mDeltaY = 0.0f;

    // Multiply the current rotation by the accumulated rotation, and then set the accumulated rotation to the result.
    Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
    System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);

    // Rotate the cube taking the overall rotation into account.        
    Matrix.multiplyMM(mTemporaryMatrix, 0, mModelMatrix, 0, mAccumulatedRotation, 0);
    System.arraycopy(mTemporaryMatrix, 0, mModelMatrix, 0, 16);

    // Set the active texture unit to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    // Bind the texture to this unit.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);

    // Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
    GLES20.glUniform1i(mTextureUniformHandle, 0);

    // Pass in the texture coordinate information
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    mModelTextureCoordinates.position(0);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 
            0, mModelTextureCoordinates);



    drawModel();  

    // Draw a plane
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, -2.0f, -5.0f);
    Matrix.scaleM(mModelMatrix, 0, 25.0f, 1.0f, 25.0f);
    Matrix.rotateM(mModelMatrix, 0, slowAngleInDegrees, 0.0f, 1.0f, 0.0f);

    // Set the active texture unit to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    // Bind the texture to this unit.
    //GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);

    // Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
    GLES20.glUniform1i(mTextureUniformHandle, 0);

    // Pass in the texture coordinate information
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);

    drawModel();

    GLES20.glUseProgram(mPointProgramHandle);        
    drawLight();
}   

public void setMinFilter(final int filter)
{
    if (mTextureDataHandle != 0)
    {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
        // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
        // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
    }
    else
    {
        mQueuedMinFilter = filter;
    }
}

public void setMagFilter(final int filter)
{
    if (mTextureDataHandle != 0)
    {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
        // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
        // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
    }
    else
    {
        mQueuedMagFilter = filter;
    }
}

private void drawModel()
{
    // Pass in the position information
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    mModelPositions.position(0);
    GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
            0, mModelPositions);

    // Pass in the normal information
    GLES20.glEnableVertexAttribArray(mNormalHandle);
    mModelNormals.position(0);
    GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false,
            0, mModelNormals);

    // This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
    // (which currently contains model * view).
    Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);

    // Pass in the modelview matrix.
    GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);

    // This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
    // (which now contains model * view * projection).
    Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
    System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);

    // Pass in the combined matrix.
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

    // Pass in the light position in eye space.
    GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);

    // Draw the cube.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);
}

/**
 * Draws a point representing the position of the light.
 */
private void drawLight()
{
    final int pointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointProgramHandle, "u_MVPMatrix");
    final int pointPositionHandle = GLES20.glGetAttribLocation(mPointProgramHandle, "a_Position");

    // Pass in the position.
    GLES20.glVertexAttrib3f(pointPositionHandle, mLightPosInModelSpace[0], mLightPosInModelSpace[1], mLightPosInModelSpace[2]);

    // Since we are not using a buffer object, disable vertex arrays for this attribute.
    GLES20.glDisableVertexAttribArray(pointPositionHandle);

    // Pass in the transformation matrix.
    Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mLightModelMatrix, 0);
    Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
    System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
    GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);

    // Draw the point.
    GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}

}


Can someone please help me fix this?

There seems to be a problem with the way you reorder the coordinates based on the indices in the faces:

for(int i=0; i<facesM.size(); i++){
    ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}

Each vertex has three components, so both the size of the array and the indexing need to take that into account:

ModelPositionData = new float[3 * facesM.size()];

for(int i=0; i<facesM.size(); i++){
    ModelPositionData[3 * i    ] = tempModelVertices.get(3 * facesM.get(i)    );
    ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
    ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}

Make the equivalent changes for the normals and the texture coordinates (texture coordinates have two components per vertex, so the factor there is 2).
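If it helps, the same correction can be written once and reused for all three attribute streams. The snippet below is only a minimal sketch, not code from the question or the answer; the helper name flatten and its signature are made up for illustration, and it assumes the temporary lists and face index lists built in the ObjLoader constructor above:

// De-indexes OBJ data: for every face corner index, copies the 'components'
// consecutive floats (3 for positions and normals, 2 for texture coordinates)
// from the temp list into a flat array suitable for glDrawArrays.
private static float[] flatten(ArrayList<Float> values, ArrayList<Integer> faceIndices, int components) {
    float[] out = new float[components * faceIndices.size()];
    for (int i = 0; i < faceIndices.size(); i++) {
        int base = components * faceIndices.get(i);
        for (int c = 0; c < components; c++) {
            out[components * i + c] = values.get(base + c);
        }
    }
    return out;
}

// Possible usage at the end of the ObjLoader constructor, replacing the three copy loops:
// ModelPositionData          = flatten(tempModelVertices,   facesM, 3);
// ModelTextureCoordinateData = flatten(tempTextureVertices, facesT, 2);
// ModelNormalData            = flatten(tempNormalVertices,  facesN, 3);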

Please, can somebody help me?

Could you reduce this to a simpler example? The idea of this site is not that you post your entire code and have people debug it for you. The intent is to post a specific problem with the minimum amount of code needed to reproduce it.

OK, thank you, I will reduce it to the problem. First I create an ObjLoader, then I combine the ObjLoader with the GLSurfaceView's renderer. Before combining them I created a cube in the renderer, but I want to replace the cube with an object I made in Blender, so I parse the file.obj into my Android project, and the shape of my obj comes out random. What happened?