How to replace a cube object with a file.obj in GLSurfaceView on Android?

I created a 3D object in Android following the Learn OpenGL ES tutorial, building the cube from lesson six (texture filtering). Now I want to replace that cube with my own object (a strawberry model) so it is displayed in the view. I parse my model (a .obj file) and pass the data to my renderer class, but the view only shows random triangles. This is my parsing code:

public ObjLoader(Context mActivityContext) {
    FileReader fr;
    String str;

    ArrayList<Float> tempModelVertices = new ArrayList<Float>();
    ArrayList<Float> tempTextureVertices = new ArrayList<Float>();
    ArrayList<Float> tempNormalVertices = new ArrayList<Float>();
    ArrayList<Integer> facesM = new ArrayList<Integer>();
    ArrayList<Integer> facesT = new ArrayList<Integer>();
    ArrayList<Integer> facesN = new ArrayList<Integer>();

    try {
        fr = new FileReader(new File("model/straw_obj"));
        BufferedReader br = new BufferedReader(fr);
        while((str = br.readLine())!=null){
            if(str.startsWith("f")){
                String[] strAr = str.replaceAll("f", "").trim().split(" ");
                for(String s : strAr){
                    String[] cornerAr = s.split("/");
                    facesM.add(Integer.parseInt(cornerAr[0].trim())-1);
                    facesT.add(Integer.parseInt(cornerAr[1].trim())-1);
                    facesN.add(Integer.parseInt(cornerAr[2].trim())-1);
                }
            }
            else if(str.startsWith("vt")){
                String[] strAr = str.replaceAll("vt", "").trim().split(" ");
                tempTextureVertices.add(Float.valueOf(strAr[0].trim()));
                tempTextureVertices.add(-1*Float.valueOf(strAr[1].trim()));
            }
            else if(str.startsWith("vn")){
                String[] strAr = str.replaceAll("vn", "").trim().split(" ");
                tempNormalVertices.add(Float.valueOf(strAr[0].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[1].trim()));
                tempNormalVertices.add(Float.valueOf(strAr[2].trim()));
            }
            else if(str.startsWith("v")){               
                String[] strAr = str.replaceAll("v", "").trim().split(" ");
                tempModelVertices.add(Float.valueOf(strAr[0].trim()));
                tempModelVertices.add(Float.valueOf(strAr[1].trim()));
                tempModelVertices.add(Float.valueOf(strAr[2].trim()));      
            }
        }
        //Log.v(LOG_TAG, "v :"+ String.valueOf(v) + "vt :"+ String.valueOf(vt) + "vn :"+ String.valueOf(vn) + "f :"+ String.valueOf(f));
    } catch (IOException e) {
        // TODO Auto-generated catch block
        Log.v(TAG, "error");
    }
    Log.v(TAG, "vt " + String.valueOf(tempTextureVertices.size()) + " vn " + String.valueOf(tempNormalVertices.size()) + " v " + String.valueOf(tempModelVertices.size()));

    ModelPositionData = new float[facesM.size()];
    ModelTextureCoordinateData = new float[facesT.size()];
    ModelNormalData = new float[facesN.size()];

    for(int i=0; i<facesM.size(); i++){
        ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
    }
    for(int i=0; i<facesT.size(); i++){
        ModelTextureCoordinateData[i] = tempTextureVertices.get(facesT.get(i));
    }
    for(int i=0; i<facesN.size(); i++){
        ModelNormalData[i] = tempNormalVertices.get(facesN.get(i));
    }
}

And this is how I create the GLSurfaceView renderer:

public class TesterRenderer implements GLSurfaceView.Renderer{
private static final String TAG = "TesterRenderer";


private final Context mActivityContext;

/**
 * Store the model matrix. This matrix is used to move models from object space (where each model can be thought
 * of being located at the center of the universe) to world space.
 */
private float[] mModelMatrix = new float[16];

/**
 * Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;
 * it positions things relative to our eye.
 */
private float[] mViewMatrix = new float[16];

/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */
private float[] mProjectionMatrix = new float[16];

/** Allocate storage for the final combined matrix. This will be passed into the shader program. */
private float[] mMVPMatrix = new float[16];

/** Store the accumulated rotation. */
private final float[] mAccumulatedRotation = new float[16];

/** Store the current rotation. */
private final float[] mCurrentRotation = new float[16];

/** A temporary matrix. */
private float[] mTemporaryMatrix = new float[16];

/** 
 * Stores a copy of the model matrix specifically for the light position.
 */
private float[] mLightModelMatrix = new float[16];  

/** Store our model data in a float buffer. */
private final FloatBuffer mModelPositions;  
private final FloatBuffer mModelNormals;
private final FloatBuffer mModelTextureCoordinates;

// private final FloatBuffer mModelTextureCoordinatesForPlane;

/** This will be used to pass in the transformation matrix. */
private int mMVPMatrixHandle;

/** This will be used to pass in the modelview matrix. */
private int mMVMatrixHandle;

/** This will be used to pass in the light position. */
private int mLightPosHandle;

/** This will be used to pass in the texture. */
private int mTextureUniformHandle;

/** This will be used to pass in model position information. */
private int mPositionHandle;

/** This will be used to pass in model normal information. */
private int mNormalHandle;

/** This will be used to pass in model texture coordinate information. */
private int mTextureCoordinateHandle;

/** How many bytes per float. */
private final int mBytesPerFloat = 4;   

/** Size of the position data in elements. */
private final int mPositionDataSize = 3;    

/** Size of the normal data in elements. */
private final int mNormalDataSize = 3;

/** Size of the texture coordinate data in elements. */
private final int mTextureCoordinateDataSize = 2;

/** Used to hold a light centered on the origin in model space. We need a 4th coordinate so we can get translations to work when
 *  we multiply this by our transformation matrices. */
private final float[] mLightPosInModelSpace = new float[] {0.0f, 0.0f, 0.0f, 1.0f};

/** Used to hold the current position of the light in world space (after transformation via model matrix). */
private final float[] mLightPosInWorldSpace = new float[4];

/** Used to hold the transformed position of the light in eye space (after transformation via modelview matrix) */
private final float[] mLightPosInEyeSpace = new float[4];

/** This is a handle to our cube shading program. */
private int mProgramHandle;

/** This is a handle to our light point program. */
private int mPointProgramHandle;

/** These are handles to our texture data. */
private int mTextureDataHandle;

// private int mGrassDataHandle;

/** Temporary place to save the min and mag filter, in case the activity was restarted. */
private int mQueuedMinFilter;
private int mQueuedMagFilter;

// These still work without volatile, but refreshes are not guaranteed to happen.                   
public volatile float mDeltaX;                  
public volatile float mDeltaY;                      


public TesterRenderer(final Context activityContext)
{   
    mActivityContext = activityContext;

    ObjLoader obj = new ObjLoader(mActivityContext);

    mModelPositions = ByteBuffer.allocateDirect(obj.ModelPositionData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();                            
    mModelPositions.put(obj.ModelPositionData).position(0);

    mModelNormals = ByteBuffer.allocateDirect(obj.ModelNormalData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();                            
    mModelNormals.put(obj.ModelNormalData).position(0);

    mModelTextureCoordinates = ByteBuffer.allocateDirect(obj.ModelTextureCoordinateData.length * mBytesPerFloat)
    .order(ByteOrder.nativeOrder()).asFloatBuffer();
    mModelTextureCoordinates.put(obj.ModelTextureCoordinateData).position(0);
}

@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) 
{
    // Set the background clear color to black.
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    // Use culling to remove back faces.
    GLES20.glEnable(GLES20.GL_CULL_FACE);

    // Enable depth testing
    GLES20.glEnable(GLES20.GL_DEPTH_TEST);

    // The below glEnable() call is a holdover from OpenGL ES 1, and is not needed in OpenGL ES 2.
    // Enable texture mapping
    // GLES20.glEnable(GLES20.GL_TEXTURE_2D);

    // Position the eye in front of the origin.
    final float eyeX = 0.0f;
    final float eyeY = 0.0f;
    final float eyeZ = -0.5f;

    // We are looking toward the distance
    final float lookX = 0.0f;
    final float lookY = 0.0f;
    final float lookZ = -5.0f;

    // Set our up vector. This is where our head would be pointing were we holding the camera.
    final float upX = 0.0f;
    final float upY = 1.0f;
    final float upZ = 0.0f;

    // Set the view matrix. This matrix can be said to represent the camera position.
    // NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
    // view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
    Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);        

    final String vertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_vertex_shader_tex_and_light);           
    final String fragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.per_pixel_fragment_shader_tex_and_light);           

    final int vertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);       
    final int fragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);     

    mProgramHandle = ShaderHelper.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle, 
            new String[] {"a_Position",  "a_Normal", "a_TexCoordinate"});                                                                                                  

    // Define a simple shader program for our point.
    final String pointVertexShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_vertex_shader);                   
    final String pointFragmentShader = RawResourceReader.readTextFileFromRawResource(mActivityContext, R.raw.point_fragment_shader);

    final int pointVertexShaderHandle = ShaderHelper.compileShader(GLES20.GL_VERTEX_SHADER, pointVertexShader);
    final int pointFragmentShaderHandle = ShaderHelper.compileShader(GLES20.GL_FRAGMENT_SHADER, pointFragmentShader);
    mPointProgramHandle = ShaderHelper.createAndLinkProgram(pointVertexShaderHandle, pointFragmentShaderHandle, 
            new String[] {"a_Position"}); 

    // Load the texture
    mTextureDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.strawberry_texture);        
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);

    // mGrassDataHandle = TextureHelper.loadTexture(mActivityContext, R.drawable.noisy_grass_public_domain);
    // GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);

    if (mQueuedMinFilter != 0)
    {
        setMinFilter(mQueuedMinFilter);
    }

    if (mQueuedMagFilter != 0)
    {
        setMagFilter(mQueuedMagFilter);
    }

    // Initialize the accumulated rotation matrix
    Matrix.setIdentityM(mAccumulatedRotation, 0);
}   

@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) 
{
    // Set the OpenGL viewport to the same size as the surface.
    GLES20.glViewport(0, 0, width, height);

    // Create a new perspective projection matrix. The height will stay the same
    // while the width will vary as per aspect ratio.
    final float ratio = (float) width / height;
    final float left = -ratio;
    final float right = ratio;
    final float bottom = -1.0f;
    final float top = 1.0f;
    final float near = 1.0f;
    final float far = 1000.0f;

    Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
}   

@Override
public void onDrawFrame(GL10 glUnused) 
{
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);                    

    // Do a complete rotation every 10 seconds.
    long time = SystemClock.uptimeMillis() % 10000L;
    long slowTime = SystemClock.uptimeMillis() % 100000L; 
    float angleInDegrees = (360.0f / 10000.0f) * ((int) time);
    float slowAngleInDegrees = (360.0f / 100000.0f) * ((int) slowTime); 

    // Set our per-vertex lighting program.
    GLES20.glUseProgram(mProgramHandle);

    // Set program handles for cube drawing.
    mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVPMatrix");
    mMVMatrixHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_MVMatrix"); 
    mLightPosHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_LightPos");
    mTextureUniformHandle = GLES20.glGetUniformLocation(mProgramHandle, "u_Texture");
    mPositionHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Position");        
    mNormalHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_Normal"); 
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgramHandle, "a_TexCoordinate");                        

    // Calculate position of the light. Rotate and then push into the distance.
    Matrix.setIdentityM(mLightModelMatrix, 0);
    Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, -2.0f);      
    Matrix.rotateM(mLightModelMatrix, 0, angleInDegrees, 0.0f, 1.0f, 0.0f);
    Matrix.translateM(mLightModelMatrix, 0, 0.0f, 0.0f, 3.5f);

    Matrix.multiplyMV(mLightPosInWorldSpace, 0, mLightModelMatrix, 0, mLightPosInModelSpace, 0);
    Matrix.multiplyMV(mLightPosInEyeSpace, 0, mViewMatrix, 0, mLightPosInWorldSpace, 0);                        

    // Draw a cube.
    // Translate the cube into the screen.
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, 0.0f, -7.0f);     

    // Set a matrix that contains the current rotation.
    Matrix.setIdentityM(mCurrentRotation, 0);        
    Matrix.rotateM(mCurrentRotation, 0, mDeltaX, 0.0f, 1.0f, 0.0f);
    Matrix.rotateM(mCurrentRotation, 0, mDeltaY, 1.0f, 0.0f, 0.0f);
    mDeltaX = 0.0f;
    mDeltaY = 0.0f;

    // Multiply the current rotation by the accumulated rotation, and then set the accumulated rotation to the result.
    Matrix.multiplyMM(mTemporaryMatrix, 0, mCurrentRotation, 0, mAccumulatedRotation, 0);
    System.arraycopy(mTemporaryMatrix, 0, mAccumulatedRotation, 0, 16);

    // Rotate the cube taking the overall rotation into account.        
    Matrix.multiplyMM(mTemporaryMatrix, 0, mModelMatrix, 0, mAccumulatedRotation, 0);
    System.arraycopy(mTemporaryMatrix, 0, mModelMatrix, 0, 16);

    // Set the active texture unit to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    // Bind the texture to this unit.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);

    // Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
    GLES20.glUniform1i(mTextureUniformHandle, 0);

    // Pass in the texture coordinate information
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
    mModelTextureCoordinates.position(0);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 
            0, mModelTextureCoordinates);



    drawModel();  

    // Draw a plane
    Matrix.setIdentityM(mModelMatrix, 0);
    Matrix.translateM(mModelMatrix, 0, 0.0f, -2.0f, -5.0f);
    Matrix.scaleM(mModelMatrix, 0, 25.0f, 1.0f, 25.0f);
    Matrix.rotateM(mModelMatrix, 0, slowAngleInDegrees, 0.0f, 1.0f, 0.0f);

    // Set the active texture unit to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    // Bind the texture to this unit.
    //GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);

    // Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
    GLES20.glUniform1i(mTextureUniformHandle, 0);

    // Pass in the texture coordinate information
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);

    drawModel();

    GLES20.glUseProgram(mPointProgramHandle);        
    drawLight();
}   

public void setMinFilter(final int filter)
{
    if (mTextureDataHandle != 0)
    {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);

        // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
        // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, filter);
    }
    else
    {
        mQueuedMinFilter = filter;
    }
}

public void setMagFilter(final int filter)
{
    if (mTextureDataHandle != 0)
    {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);

        // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mGrassDataHandle);
        // GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, filter);
    }
    else
    {
        mQueuedMagFilter = filter;
    }
}

private void drawModel()
{       
    // Pass in the position information
    GLES20.glEnableVertexAttribArray(mPositionHandle);
    mModelPositions.position(0);        
    GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
            0, mModelPositions);        



    // Pass in the normal information
    GLES20.glEnableVertexAttribArray(mNormalHandle);
    mModelNormals.position(0);
    GLES20.glVertexAttribPointer(mNormalHandle, mNormalDataSize, GLES20.GL_FLOAT, false, 
            0, mModelNormals);



    // This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
    // (which currently contains model * view).
    Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);   

    // Pass in the modelview matrix.
    GLES20.glUniformMatrix4fv(mMVMatrixHandle, 1, false, mMVPMatrix, 0);                

    // This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
    // (which now contains model * view * projection).        
    Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
    System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);

    // Pass in the combined matrix.
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

    // Pass in the light position in eye space.        
    GLES20.glUniform3f(mLightPosHandle, mLightPosInEyeSpace[0], mLightPosInEyeSpace[1], mLightPosInEyeSpace[2]);

    // Draw the cube.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 36);                               
}           

/**
 * Draws a point representing the position of the light.
 */
private void drawLight()
{
    final int pointMVPMatrixHandle = GLES20.glGetUniformLocation(mPointProgramHandle, "u_MVPMatrix");
    final int pointPositionHandle = GLES20.glGetAttribLocation(mPointProgramHandle, "a_Position");

    // Pass in the position.
    GLES20.glVertexAttrib3f(pointPositionHandle, mLightPosInModelSpace[0], mLightPosInModelSpace[1], mLightPosInModelSpace[2]);

    // Since we are not using a buffer object, disable vertex arrays for this attribute.
    GLES20.glDisableVertexAttribArray(pointPositionHandle);  

    // Pass in the transformation matrix.
    Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mLightModelMatrix, 0);
    Matrix.multiplyMM(mTemporaryMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
    System.arraycopy(mTemporaryMatrix, 0, mMVPMatrix, 0, 16);
    GLES20.glUniformMatrix4fv(pointMVPMatrixHandle, 1, false, mMVPMatrix, 0);

    // Draw the point.
    GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
}

}

Can someone help me fix this?


It looks like there is a problem with the way you reorder the coordinates based on the indices in the faces:

for(int i=0; i<facesM.size(); i++){
    ModelPositionData[i] = tempModelVertices.get(facesM.get(i));
}

Each position consists of 3 coordinates. This loop copies only one value per position, though. It should look something like this:

for(int i=0; i<facesM.size(); i++){
    ModelPositionData[3 * i    ] = tempModelVertices.get(3 * facesM.get(i)    );
    ModelPositionData[3 * i + 1] = tempModelVertices.get(3 * facesM.get(i) + 1);
    ModelPositionData[3 * i + 2] = tempModelVertices.get(3 * facesM.get(i) + 2);
}

You will also need to adjust the allocation accordingly:

ModelPositionData = new float[3 * facesM.size()];

and make the equivalent changes for the normals and texture coordinates.
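
For reference, those analogous loops might look something like this (just a sketch that assumes the same field names as in the ObjLoader above; note that texture coordinates have 2 components per vertex instead of 3):

ModelNormalData = new float[3 * facesN.size()];
ModelTextureCoordinateData = new float[2 * facesT.size()];

// Normals have 3 components per vertex, just like positions.
for(int i=0; i<facesN.size(); i++){
    ModelNormalData[3 * i    ] = tempNormalVertices.get(3 * facesN.get(i)    );
    ModelNormalData[3 * i + 1] = tempNormalVertices.get(3 * facesN.get(i) + 1);
    ModelNormalData[3 * i + 2] = tempNormalVertices.get(3 * facesN.get(i) + 2);
}

// Texture coordinates have only 2 components (u, v) per vertex.
for(int i=0; i<facesT.size(); i++){
    ModelTextureCoordinateData[2 * i    ] = tempTextureVertices.get(2 * facesT.get(i)    );
    ModelTextureCoordinateData[2 * i + 1] = tempTextureVertices.get(2 * facesT.get(i) + 1);
}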
