
Android OpenGL ES 2 Example: Rotating Triangles

Original article and source: http://www.learnopengles.com/android-lesson-one-getting-started/

Test environment: Eclipse, Android SDK, ADT

Test phone: Android 2.3.6 (Samsung Galaxy S). The Android emulator does not support OpenGL ES 2 programs.


GLSurfaceView

Makes OpenGL ES convenient to use on Android

Provides a rendering thread for OpenGL ES

Handles EGL configuration; EGL acts as the interface between OpenGL ES and the window system
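
For reference, the GLSurfaceView.Renderer callback contract looks roughly like the minimal sketch below. The class name MyRenderer and the method bodies are illustrative placeholders only; the full LessonOneRenderer later in this post fills these callbacks in. All three callbacks run on the rendering thread that GLSurfaceView provides.

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

// Hypothetical skeleton; the real renderer below fills these callbacks in.
class MyRenderer implements GLSurfaceView.Renderer
{
    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config)
    {
        // Surface created: set the clear color, build shaders, set up the view matrix.
        GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
    }

    @Override
    public void onSurfaceChanged(GL10 glUnused, int width, int height)
    {
        // Surface size changed: update the viewport and the projection matrix.
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 glUnused)
    {
        // Called once per frame: clear the buffers and draw the scene.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    }
}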


OpenGL ES programming procedure

1. Check whether the device supports OpenGL ES 2

2. Define vertex positions (x, y, z) and colors (RGBA) in a float array

3. Create a FloatBuffer (allocating as much memory as the array above requires)

4. Copy the contents of the array into the FloatBuffer (see the sketch after this list)

5. Set the view matrix (the observer, or camera): the camera position, the point the camera looks at, and the direction the top of the camera faces

6. Write the vertex shader and the fragment shader

7. Load the vertex shader and the fragment shader into OpenGL ES

8. Combine the vertex shader and the fragment shader into a program

9. Set the viewport and the projection matrix

10. Build and apply the model matrix (model transformation matrix)

11. Draw the vertices stored in the FloatBuffer to the screen
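
As a minimal sketch of steps 2 through 4 (the helper name buildTriangleBuffer and its variable names are illustrative, not part of the original example), a vertex array is wrapped in a direct FloatBuffer in native byte order so that OpenGL ES can read it directly:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

// Hypothetical helper illustrating steps 2-4; the full example builds three such buffers, one per triangle.
private static FloatBuffer buildTriangleBuffer()
{
    // Step 2: each vertex stores a position (x, y, z) followed by a color (r, g, b, a).
    final float[] vertexData = {
            -0.5f, -0.25f, 0.0f,   1.0f, 0.0f, 0.0f, 1.0f,
             0.5f, -0.25f, 0.0f,   0.0f, 0.0f, 1.0f, 1.0f,
             0.0f,  0.56f, 0.0f,   0.0f, 1.0f, 0.0f, 1.0f };

    // Step 3: allocate a direct buffer (4 bytes per float) in the platform's native byte order.
    final FloatBuffer buffer = ByteBuffer.allocateDirect(vertexData.length * 4)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();

    // Step 4: copy the array into the buffer and rewind it to position 0 before drawing.
    buffer.put(vertexData).position(0);
    return buffer;
}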


The example below does not run on the AVD (emulator) included in the Android SDK; it must be tested on a real device connected to the computer.

When the program is run from Eclipse, the following result appears on the connected mobile device.

package gl.test1;


import android.app.Activity;

import android.app.ActivityManager;

import android.content.Context;

import android.content.pm.ConfigurationInfo;

import android.opengl.GLSurfaceView;

import android.os.Bundle;


public class LessonOneActivity extends Activity 

{

private GLSurfaceView mGLSurfaceView;

@Override

public void onCreate(Bundle savedInstanceState) 

{

super.onCreate(savedInstanceState);

mGLSurfaceView = new GLSurfaceView(this);


// Check whether the device supports OpenGL ES 2.

final ActivityManager activityManager = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);

final ConfigurationInfo configurationInfo = activityManager.getDeviceConfigurationInfo();

final boolean supportsEs2 = configurationInfo.reqGlEsVersion >= 0x20000;


if (supportsEs2) 

{

// Request an OpenGL ES 2.0 compatible context.

mGLSurfaceView.setEGLContextClientVersion(2);


// Set the renderer to our demo renderer, defined below.

mGLSurfaceView.setRenderer(new LessonOneRenderer());

}

else

{

// This is where you could create an OpenGL ES 1.x compatible

// renderer if you wanted to support both ES 1 and ES 2.

return;

}

setContentView(mGLSurfaceView);

}

@Override

protected void onResume() 

{

// The activity must call the GL surface view's onResume() on activity onResume().

super.onResume();

mGLSurfaceView.onResume();

}

@Override

protected void onPause() 

{

// The activity must call the GL surface view's onPause() on activity onPause().

super.onPause();

mGLSurfaceView.onPause();

}

}


package gl.test1;


import java.nio.ByteBuffer;

import java.nio.ByteOrder;

import java.nio.FloatBuffer;


import javax.microedition.khronos.egl.EGLConfig;

import javax.microedition.khronos.opengles.GL10;


import android.opengl.GLES20;

import android.opengl.GLSurfaceView;

import android.opengl.Matrix;

import android.os.SystemClock;


/**

 * This class implements our custom renderer. Note that the GL10 parameter passed in is unused for OpenGL ES 2.0

 * renderers -- the static class GLES20 is used instead.

 */

public class LessonOneRenderer implements GLSurfaceView.Renderer 

{

/**

 * Store the model matrix. This matrix is used to move models from object space (where each model can be thought

 * of being located at the center of the universe) to world space.

 */

private float[] mModelMatrix = new float[16];


/**

 * Store the view matrix. This can be thought of as our camera. This matrix transforms world space to eye space;

 * it positions things relative to our eye.

 */

private float[] mViewMatrix = new float[16];


/** Store the projection matrix. This is used to project the scene onto a 2D viewport. */

private float[] mProjectionMatrix = new float[16];

/** Allocate storage for the final combined matrix. This will be passed into the shader program. */

private float[] mMVPMatrix = new float[16];          // The final combined matrix passed to the shader program

/** Store our model data in a float buffer. */

private final FloatBuffer mTriangle1Vertices;

private final FloatBuffer mTriangle2Vertices;

private final FloatBuffer mTriangle3Vertices;


/** This will be used to pass in the transformation matrix. */

private int mMVPMatrixHandle;

/** This will be used to pass in model position information. */

private int mPositionHandle;

/** This will be used to pass in model color information. */

private int mColorHandle;


/** How many bytes per float. */

private final int mBytesPerFloat = 4;

/** How many bytes per vertex (stride). */

private final int mStrideBytes = 7 * mBytesPerFloat; // Each vertex stores x, y, z, r, g, b, a together

/** Offset of the position data. */

private final int mPositionOffset = 0;

/** Size of the position data in elements. */

private final int mPositionDataSize = 3;

/** Offset of the color data. */

private final int mColorOffset = 3;

/** Size of the color data in elements. */

private final int mColorDataSize = 4;

/** Initialize the model data.*/

public LessonOneRenderer()

{

// Define points for equilateral triangles.

// This triangle is red, green, and blue.

final float[] triangle1VerticesData = {

// X, Y, Z, 

// R, G, B, A

            -0.5f, -0.25f, 0.0f, 

            1.0f, 0.0f, 0.0f, 1.0f,

            

            0.5f, -0.25f, 0.0f,

            0.0f, 0.0f, 1.0f, 1.0f,

            

            0.0f, 0.559016994f, 0.0f, 

            0.0f, 1.0f, 0.0f, 1.0f};

// This triangle is yellow, cyan, and magenta.

final float[] triangle2VerticesData = {

// X, Y, Z, 

// R, G, B, A

            -0.5f, -0.25f, 0.0f, 

            1.0f, 1.0f, 0.0f, 1.0f,

            

            0.5f, -0.25f, 0.0f, 

            0.0f, 1.0f, 1.0f, 1.0f,

            

            0.0f, 0.559016994f, 0.0f, 

            1.0f, 0.0f, 1.0f, 1.0f};

// This triangle is white, gray, and black.

final float[] triangle3VerticesData = {

// X, Y, Z, 

// R, G, B, A

            -0.5f, -0.25f, 0.0f, 

            1.0f, 1.0f, 1.0f, 1.0f,

            

            0.5f, -0.25f, 0.0f, 

            0.5f, 0.5f, 0.5f, 1.0f,

            

            0.0f, 0.559016994f, 0.0f, 

            0.0f, 0.0f, 0.0f, 1.0f};

// Initialize the buffers.

mTriangle1Vertices = ByteBuffer.allocateDirect(triangle1VerticesData.length * mBytesPerFloat)

        .order(ByteOrder.nativeOrder()).asFloatBuffer();

mTriangle2Vertices = ByteBuffer.allocateDirect(triangle2VerticesData.length * mBytesPerFloat)

        .order(ByteOrder.nativeOrder()).asFloatBuffer();

mTriangle3Vertices = ByteBuffer.allocateDirect(triangle3VerticesData.length * mBytesPerFloat)

        .order(ByteOrder.nativeOrder()).asFloatBuffer();

mTriangle1Vertices.put(triangle1VerticesData).position(0);

mTriangle2Vertices.put(triangle2VerticesData).position(0);

mTriangle3Vertices.put(triangle3VerticesData).position(0);

}

@Override

public void onSurfaceCreated(GL10 glUnused, EGLConfig config) 

{

// Set the background clear color to gray.

GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);

// Position the eye behind the origin.

final float eyeX = 0.0f;

final float eyeY = 0.0f;

final float eyeZ = 1.5f;


// We are looking toward the distance

final float lookX = 0.0f;

final float lookY = 0.0f;

final float lookZ = -5.0f;


// Set our up vector. This is where our head would be pointing were we holding the camera.

final float upX = 0.0f;

final float upY = 1.0f;

final float upZ = 0.0f;


// Set the view matrix. This matrix can be said to represent the camera position.

// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and

// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.

Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);


final String vertexShader =

   "uniform mat4 u_MVPMatrix;  \n" // A constant representing the combined model/view/projection matrix.

  + "attribute vec4 a_Position;  \n"  // Per-vertex position information we will pass in.

  + "attribute vec4 a_Color;    \n"  // Per-vertex color information we will pass in.   

  

  + "varying vec4 v_Color;   \n" // This will be passed into the fragment shader.

  

  + "void main()                    \n" // The entry point for our vertex shader.

  + "{                              \n"

  + "   v_Color = a_Color;          \n" // Pass the color through to the fragment shader. 

   // It will be interpolated across the triangle.

  + "   gl_Position = u_MVPMatrix   \n"  // gl_Position is a special variable used to store the final position.

  + "               * a_Position;   \n"     // Multiply the vertex by the matrix to get the final point in                                                

  + "}                              \n";    // normalized screen coordinates.

final String fragmentShader =

   "precision mediump float;       \n" // Set the default precision to medium. We don't need as high of a 

// precision in the fragment shader.

  + "varying vec4 v_Color;          \n" // This is the color from the vertex shader interpolated across the 

   // triangle per fragment.   

  + "void main()                    \n"         // The entry point for our fragment shader.

  + "{                              \n"

  + "   gl_FragColor = v_Color;     \n" // Pass the color directly through the pipeline.   

  + "}                              \n";

// Load in the vertex shader.

int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);


if (vertexShaderHandle != 0) 

{

// Pass in the shader source.

GLES20.glShaderSource(vertexShaderHandle, vertexShader);


// Compile the shader.

GLES20.glCompileShader(vertexShaderHandle);


// Get the compilation status.

final int[] compileStatus = new int[1];

GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);


// If the compilation failed, delete the shader.

if (compileStatus[0] == 0) 

{

GLES20.glDeleteShader(vertexShaderHandle);

vertexShaderHandle = 0;

}

}


if (vertexShaderHandle == 0)

{

throw new RuntimeException("Error creating vertex shader.");

}

// Load in the fragment shader.

int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);


if (fragmentShaderHandle != 0) 

{

// Pass in the shader source.

GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);


// Compile the shader.

GLES20.glCompileShader(fragmentShaderHandle);


// Get the compilation status.

final int[] compileStatus = new int[1];

GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);


// If the compilation failed, delete the shader.

if (compileStatus[0] == 0) 

{

GLES20.glDeleteShader(fragmentShaderHandle);

fragmentShaderHandle = 0;

}

}


if (fragmentShaderHandle == 0)

{

throw new RuntimeException("Error creating fragment shader.");

}

// Create a program object and store the handle to it.

int programHandle = GLES20.glCreateProgram();

if (programHandle != 0) 

{

// Bind the vertex shader to the program.

GLES20.glAttachShader(programHandle, vertexShaderHandle);


// Bind the fragment shader to the program.

GLES20.glAttachShader(programHandle, fragmentShaderHandle);

// Bind attributes

GLES20.glBindAttribLocation(programHandle, 0, "a_Position");

GLES20.glBindAttribLocation(programHandle, 1, "a_Color");

// Link the two shaders together into a program.

GLES20.glLinkProgram(programHandle);


// Get the link status.

final int[] linkStatus = new int[1];

GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);


// If the link failed, delete the program.

if (linkStatus[0] == 0) 

{

GLES20.glDeleteProgram(programHandle);

programHandle = 0;

}

}

if (programHandle == 0)

{

throw new RuntimeException("Error creating program.");

}

        

        // Set program handles. These will later be used to pass in values to the program.

        mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");        

        mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");

        mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");        

        

        // Tell OpenGL to use this program when rendering.

        GLES20.glUseProgram(programHandle);        

}

@Override

public void onSurfaceChanged(GL10 glUnused, int width, int height) 

{

// Set the OpenGL viewport to the same size as the surface.

GLES20.glViewport(0, 0, width, height);


// Create a new perspective projection matrix. The height will stay the same

// while the width will vary as per aspect ratio.

final float ratio = (float) width / height;

final float left = -ratio;

final float right = ratio;

final float bottom = -1.0f;

final float top = 1.0f;

final float near = 1.0f;

final float far = 10.0f;

Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);

}


@Override

public void onDrawFrame(GL10 glUnused) 

{

GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);         

                

        // Do a complete rotation every 10 seconds.

        long time = SystemClock.uptimeMillis() % 10000L;

        float angleInDegrees = (360.0f / 10000.0f) * ((int) time);

        

        // Draw the triangle facing straight on.

        Matrix.setIdentityM(mModelMatrix, 0);

        Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);        

        drawTriangle(mTriangle1Vertices);

        

        // Draw one translated a bit down and rotated to be flat on the ground.

        Matrix.setIdentityM(mModelMatrix, 0);

        Matrix.translateM(mModelMatrix, 0, 0.0f, -1.0f, 0.0f);

        Matrix.rotateM(mModelMatrix, 0, 90.0f, 1.0f, 0.0f, 0.0f);

        Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);        

        drawTriangle(mTriangle2Vertices);

        

        // Draw one translated a bit to the right and rotated to be facing to the left.

        Matrix.setIdentityM(mModelMatrix, 0);

        Matrix.translateM(mModelMatrix, 0, 1.0f, 0.0f, 0.0f);

        Matrix.rotateM(mModelMatrix, 0, 90.0f, 0.0f, 1.0f, 0.0f);

        Matrix.rotateM(mModelMatrix, 0, angleInDegrees, 0.0f, 0.0f, 1.0f);

        drawTriangle(mTriangle3Vertices);

}

/**

 * Draws a triangle from the given vertex data.

 * @param aTriangleBuffer The buffer containing the vertex data.

 */

private void drawTriangle(final FloatBuffer aTriangleBuffer)

{

// Pass in the position information

aTriangleBuffer.position(mPositionOffset);

        GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,

         mStrideBytes, aTriangleBuffer);        

                

        GLES20.glEnableVertexAttribArray(mPositionHandle);        

        

        // Pass in the color information

        aTriangleBuffer.position(mColorOffset);

        GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,

         mStrideBytes, aTriangleBuffer);        

        

        GLES20.glEnableVertexAttribArray(mColorHandle);

        

// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix

        // (which currently contains model * view).

        Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);

        

        // This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix

        // (which now contains model * view * projection).

        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);


        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0, 3);                               

}

}