1. UTF-8 to Unicode

  I referenced the site below:

  http://en.wikipedia.org/wiki/UTF-8

wstring Utf8ToUnicode(const string& source_str)
{
	int dest_len   = 0;
	int source_len = source_str.length();
	const unsigned char* source = (const unsigned char*)source_str.c_str();	// unsigned, so lead bytes >= 0x80 compare correctly
	wchar_t* dest  = new wchar_t[source_len];
	wstring value;

	for (int i = 0; i < source_len; i++, dest_len++)
	{
		// 1 byte (ASCII)
		if (source[i] <= 0x7F)
		{
			dest[dest_len] = source[i];
		}
		// 2 byte sequence (110xxxxx 10xxxxxx)
		else if ((source[i] & 0xE0) == 0xC0)
		{
			dest[dest_len] = ((source[i] & 0x1F) << 6) + (source[i+1] & 0x3F);
			i += 1;
		}
		// 3 byte sequence (1110xxxx 10xxxxxx 10xxxxxx)
		else if ((source[i] & 0xF0) == 0xE0)
		{
			dest[dest_len] = ((source[i] & 0x0F) << 12) + ((source[i+1] & 0x3F) << 6) + (source[i+2] & 0x3F);
			i += 2;
		}
		// 4 byte sequences are not handled
		else
		{
			Log::warning("Can't convert UTF-8 4 byte characters");
			delete[] dest;
			return value;
		}
	}
	value.assign(dest, dest_len);
	delete[] dest;
	return value;
}

 If you are on Windows, the UTF-8 string may start with a byte order mark (BOM, the bytes EF BB BF). In that case you have to remove the BOM first.
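A minimal sketch of stripping it before calling Utf8ToUnicode (StripUtf8Bom is an illustrative helper name, not part of the original code):

// Strip a UTF-8 BOM (EF BB BF) if present.
string StripUtf8Bom(const string& s)
{
	if (s.size() >= 3 &&
	    (unsigned char)s[0] == 0xEF &&
	    (unsigned char)s[1] == 0xBB &&
	    (unsigned char)s[2] == 0xBF)
		return s.substr(3);
	return s;
}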


2. Unicode to UTF-8

  This function doesn't support 4-byte sequences (code points above U+FFFF).

string UnicodeToUtf8(const wstring& source_str)
{
	string value;
	int dest_len 			= 0;
	int source_len 			= source_str.length();
	const wchar_t* source 	= source_str.c_str();
	char* dest 				= new char[source_len*3 + 1];

	for(int i = 0 ; i < source_len ; i++)
	{
		if(source[i]<= 0x7F)
		{
			dest[dest_len] = source[i];
			dest_len++;
		}
		else if(source[i] >= 0x80 && source[i] <=0x7FF)
		{
			wchar_t tmp = source[i];
			char first = 0, second = 0, third = 0;
			for(int j = 0; j < 3 ; j++)
			{
				wchar_t tmp_quota = tmp%16;
				switch(j)
				{
				case 0: third	 = tmp_quota; break;
				case 1: second	 = tmp_quota; break;
				case 2: first	 = tmp_quota; break;
				}
				tmp /= 16;
			}

			dest[dest_len] = 0xC0 + (first<<2) + (second>>2);
			dest[dest_len+1] = 0x80 + (((second%8)%4) << 4) + third;
			dest_len +=2;
		}
		else if(source[i] >= 0x800 && source[i] <=0xFFFF)
		{
			wchar_t tmp = source[i];
			char first = 0, second = 0, third = 0, fourth = 0;
			for(int j = 0; j < 4 ; j++)
			{
				wchar_t tmp_quota = tmp%16;
				switch(j)
				{
				case 0: fourth	 = tmp_quota; break;
				case 1: third	 = tmp_quota; break;
				case 2: second	 = tmp_quota; break;
				case 3: first	 = tmp_quota; break;
				}
				tmp /= 16;
			}

			dest[dest_len] = 0xE0 + first;
			dest[dest_len+1] = 0x80 + (second << 2) + (third >> 2);
			dest[dest_len+2] = 0x80 + (((third%8)%4) << 4) + fourth;
			dest_len +=3;
		}
		else
			return value;
	}
	dest[dest_len] = '\0';

	value.assign(dest, dest_len);
	delete[] dest;
	return value;
}
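
A quick usage sketch (the Korean sample text is three UTF-8 bytes per character; StripUtf8Bom is the helper sketched above):

// Round-trip check: "안녕" is U+C548 U+B155, i.e. EC 95 88 EB 85 95 in UTF-8.
string  utf8_text = StripUtf8Bom("\xEC\x95\x88\xEB\x85\x95");
wstring wide      = Utf8ToUnicode(utf8_text);   // two wchar_t code points
string  back      = UnicodeToUtf8(wide);        // identical to utf8_text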



1. android/log.h

  The NDK provides log functions whose output can be seen in LogCat. First you have to include <android/log.h>.

#include <android/log.h>

  You can use the two functions [__android_log_write] and [__android_log_vprint]. If you want to write a log like printf, use [__android_log_vprint]; it supports format specifiers like %d, %f, %s, etc., while [__android_log_write] only sends a plain message. Below is an example of each (ap is a va_list, as shown in the wrapper class later).

__android_log_write(ANDROID_LOG_INFO,	LOG_TAG, msg.c_str());
__android_log_vprint(ANDROID_LOG_INFO,	LOG_TAG, msg.c_str(), ap);


2. Priority

  Android logging defines the following priorities (the first argument of the log functions):

    ANDROID_LOG_UNKNOWN ,

    ANDROID_LOG_DEFAULT,

    ANDROID_LOG_VERBOSE,

    ANDROID_LOG_DEBUG,

    ANDROID_LOG_INFO,

    ANDROID_LOG_WARN,

    ANDROID_LOG_ERROR,

    ANDROID_LOG_FATAL,

    ANDROID_LOG_SILENT.

  We usually use INFO, WARN, ERROR, VERBOSE, and DEBUG. LogCat in the Eclipse IDE filters messages according to each priority.



3. Wrapping

  I recommend wrapping the log functions to make them clearer and more convenient.

class Log
{
public:
	static void info(string msg)
	{
#if defined(LOG) && !defined(PUBLISH)
		__android_log_write(ANDROID_LOG_INFO,	LOG_TAG, msg.c_str());
#endif
	}

	static void info_print(string msg, ...)
	{
#if defined(LOG) && !defined(PUBLISH)
		va_list ap;
		va_start(ap,msg);
		__android_log_vprint(ANDROID_LOG_INFO,	LOG_TAG, msg.c_str(), ap);
		va_end(ap);
#endif
	}

	static void debug(string msg)
	{
#if defined(LOG) && !defined(PUBLISH)
		__android_log_write(ANDROID_LOG_DEBUG,	LOG_TAG, msg.c_str());
#endif
	}

	static void debug_print(string msg, ...)
	{
#if defined(LOG) && !defined(PUBLISH)
		va_list ap;
		va_start(ap,msg);
		__android_log_vprint(ANDROID_LOG_DEBUG,	LOG_TAG, msg.c_str(), ap);
		va_end(ap);
#endif
	}


	static void warning(string msg)
	{
#if defined(LOG) && !defined(PUBLISH)
		__android_log_write(ANDROID_LOG_WARN,	LOG_TAG, msg.c_str());
#endif
	}

	static void warning_print(string msg, ...)
	{
#if defined(LOG) && !defined(PUBLISH)
		va_list ap;
		va_start(ap,msg);
		__android_log_vprint(ANDROID_LOG_WARN,	LOG_TAG, msg.c_str(), ap);
		va_end(ap);
#endif
	}

	static void error(string msg)
	{
#if defined(LOG) && !defined(PUBLISH)
		__android_log_write(ANDROID_LOG_ERROR,	LOG_TAG, msg.c_str());
#endif
	}

	static void error_print(string msg, ...)
	{
#if defined(LOG) && !defined(PUBLISH)
		va_list ap;
		va_start(ap,msg);
		__android_log_vprint(ANDROID_LOG_ERROR,	LOG_TAG, msg.c_str(), ap);
		va_end(ap);
#endif
	}

};

  I made four kinds of methods using the ERROR, WARN, INFO and DEBUG priorities, and in publish mode I don't write any log at all, to improve performance (a sketch of how the LOG/PUBLISH switches might be defined follows the usage example below). I used these methods like the code below.

Log::info_print("GL_EXTENSIONS : %s",glGetString(GL_EXTENSIONS));
Log::info_print("GL_VENDOR : %s",glGetString(GL_VENDOR));
Log::info_print("GL_RENDERER : %s",glGetString(GL_RENDERER));
Log::info_print("GL_VERSION : %s",glGetString(GL_VERSION));
Log::info("GLManager::jdGLInit end");

  In the OpenGL Diary app, you can see these logs.
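
For reference, a minimal sketch of how the LOG / PUBLISH switches and LOG_TAG used above could be defined. In my project they come from the build settings; the header below is only an illustration.

// log_config.h - illustration only; the names match the wrapper above.
#pragma once

#ifndef PUBLISH     // define PUBLISH (e.g. -DPUBLISH in LOCAL_CFLAGS) for release builds
#define LOG         // every other build keeps logging enabled
#endif

#define LOG_TAG "AndroidOpenGL"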







1. Typeface

  You can get typeface info with the [Typeface.createFromAsset] method. Don't call this method every frame, because it causes a memory leak problem; I recommend using a Map class to manage the font data. The code below shows how to use Typeface.createFromAsset.


Hashtable<String, Typeface> m_font_type_face = new Hashtable<String, Typeface>();
	
private ResourceManager()
{
	m_font_type_face.put("CORBEL.TTF", Typeface.createFromAsset(MainActivity.context.getAssets(), "CORBEL.TTF"));
};
	
public Typeface getTypeface(String key)
{
	if(m_font_type_face.containsKey(key))
		return m_font_type_face.get(key);
	return null;
}

※ Use TTF fonts. Some devices don't support TTC or other font formats.


2. Make Bitmap

  In Android, you can draw a string onto a Canvas using a predefined font. Through the Paint class you can set the size, color, etc. I use the code below to produce texture data for the NDK OpenGL ES code.

public static void decodeStringTexture(String str, String str_key, String font_name, float fontSize, int r, int g, int b, int a)
{
	Log.i("AndroidOpenGL", "getStringTextureData start, str : " + str + " size : " + fontSize);
	try
	{
		Rect rect = new Rect();
		Paint paint = new Paint(Paint.FILTER_BITMAP_FLAG);
		paint.setTypeface(ResourceManager.instance.getTypeface(font_name));
		paint.setTextSize(fontSize);
		paint.setAntiAlias(true);
		paint.setFilterBitmap(true);
		paint.setDither(true);
		paint.setTextAlign(Align.LEFT);
		paint.setARGB(a, r, g, b);
		paint.getTextBounds(str, 0, str.length(), rect);

		int width  = rect.right;
		int height = rect.height();
		int tex_width  = 8;
		int tex_height = 8;
		while(width > tex_width)   tex_width  *= 2;
		while(height > tex_height) tex_height *= 2;

		Bitmap bitmap = Bitmap.createBitmap(tex_width, tex_height, Config.ARGB_8888);
		Canvas canvas = new Canvas(bitmap);
		canvas.drawText(str, 0, 0 - rect.top, paint);

		ByteBuffer bb = ByteBuffer.allocate(tex_width*tex_height*4);
		bitmap.copyPixelsToBuffer(bb);
		bitmap.recycle();

		Log.i("AndroidOpenGL", "getStringTextureData end");
		Log.i("AndroidOpenGL", "Allocated allocate buffer capacity:" + bb.capacity());
		Log.i("AndroidOpenGL", "getStringTextureData end, bb width:" + tex_width + " height:" + tex_height);

		onDecodeStringTextureComplete(bb.array(), width, height, tex_width, tex_height, str_key);
	}
	catch(Exception e)
	{
		Log.e("AndroidOpenGL", "getStringTexture failed.\n" + e.toString());
	}
}

paint.setTypeface - sets the predefined font (here, one loaded from assets).

paint.setAntiAlias - makes the font smooth.

paint.getTextBounds - gets the string width and height.

canvas.drawText(str, 0, 0 - rect.top, paint) - draws the string at the top of the canvas.


glTexImage2D's width and height parameters must be powers of two (2^n), so I round the canvas size up to a power of two.

onDecodeStringTextureComplete is a JNI call. I send the string size and the canvas size so the texture coordinates can be calculated. For example:

m_tex_coord[0] = 0.0f;
m_tex_coord[1] = 0.0f;

m_tex_coord[2] = 0.0f;
m_tex_coord[3] = string_height/canvas_height;

m_tex_coord[4] = string_width/canvas_width;
m_tex_coord[5] = 0.0f;

m_tex_coord[6] = string_width/canvas_width;
m_tex_coord[7] = string_height/canvas_height;

 Lastly, you can upload the data with glTexImage2D.
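
A minimal sketch of the upload on the native side, assuming data, tex_width and tex_height are the values delivered through the JNI call above:

// Upload the pixels decoded in Java (ARGB_8888 bitmaps copy out in RGBA byte order).
GLuint tex_id = 0;
glGenTextures(1, &tex_id);
glBindTexture(GL_TEXTURE_2D, tex_id);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex_width, tex_height, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, data);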


3. Result Scene


<OpenGL Diary>



1. Load texture

  I used the Android BitmapFactory to decode PNG textures. If you want one source for multiple platforms, use libpng. This post shows an example of BitmapFactory decoding. First we have to load the image, and before that we have to decide on a resource folder.


  A. res/raw

    Many developers use the assets folder to manage resources, but I chose res/raw because the res folder is managed through R.java, so it can be used easily from the Android Java code.





  B. Load resource

    First, to manage loaded resources, I made a Texture class:

public class Texture 
{
	public String		mName;
	public ByteBuffer	mEncodeData;
}

    It has the texture name and a ByteBuffer that is used by the decoding functions.

    The loaded resource is saved into a Texture object:

public static boolean loadImage(String strName)
{
	Log.i("AndroidOpenGL", "loadImage start, name : " + strName);
	if(MainActivity.context == null)
	{
		Log.w("AndroidOpenGL", "Failed get Bitmap from Asset. context is null");
		return false;
	}
	if(strName == null)
		return false;

	try
	{
		Texture tex 	= new Texture();
		Resources res 	= MainActivity.context.getResources();
		int resId 	= res.getIdentifier("raw/"+strName, null, MainActivity.context.getPackageName());

		if(resId == 0)
		{
			Log.w("AndroidOpenGL", "Can't find resource : " + strName);
			return false;
		}

		BufferedInputStream is 		= new BufferedInputStream(res.openRawResource(resId));
		ReadableByteChannel channel	= Channels.newChannel(is);

		tex.mName	= strName;
		int fileSize	= (int) res.openRawResourceFd(res.getIdentifier("raw/"+strName, null, MainActivity.context.getPackageName())).getLength();
		tex.mEncodeData = ByteBuffer.allocate(fileSize);

		int r = 0;
		while (tex.mEncodeData.remaining() > 0 && r != -1)
			r = channel.read(tex.mEncodeData);
		ResourceManager.instance.addTexture(tex);

		if(is != null)
			is.close();

		return true;
	}
	catch (IOException e)
	{
		Log.w("AndroidOpenGL", e.toString());
		e.printStackTrace();
	}
	return false;
}

    I get the file size from openRawResourceFd().getLength() so I can allocate exactly that many bytes in the Texture object, and then read the stream into the buffer through a ReadableByteChannel.


2. Decode Texture

  If you use the libpng library, the code can be reused on other platforms like iOS, but libpng is a little hard to use and tiresome. So I used the Android API.

public static void decodeImage(String strName, boolean add_to_atlas)
{
	Log.i("AndroidOpenGL", "decodeImage start, name : " + strName);
	if(MainActivity.context == null)
	{
		Log.w("AndroidOpenGL", "Failed get Bitmap from Asset. context is null");
		return;
	}
	if(strName == null)
		return;

	Texture tex = ResourceManager.instance.getTexture(strName);
	if(tex == null)
		return;

	Bitmap bitmap = null;
	int width;
	int height;
	ByteBuffer bb = null;
	try
	{
		bitmap	= BitmapFactory.decodeByteArray(tex.mEncodeData.array(), 0, tex.mEncodeData.capacity());
		width	= bitmap.getWidth();
		height	= bitmap.getHeight();
		bb	= ByteBuffer.allocate(width*height*4);
		bitmap.copyPixelsToBuffer(bb);
		bitmap.recycle();
	}
	catch(Exception e)
	{
		Log.w("AndroidOpenGL", "OpenGL : Get Bitmap Failed");
		Log.w("AndroidOpenGL", e.toString());
		return;
	}

	Log.i("AndroidOpenGL", "Allocated direct-buffer capacity:" + bb.capacity());
	Log.i("AndroidOpenGL", "decodeImage end, bb width:" + width + " height:" + height);

	onDecodeImageComplete(bb.array(), width, height, strName, add_to_atlas);
}

  You can get the byte array using BitmapFactory. [onDecodeImageComplete] is a JNI function that sends the decoded resource to C++.

  One caution: your resource's width and height have to be powers of two (2^n), because the OpenGL glTexImage2D function only accepts power-of-two widths and heights.

  So I used images whose sizes are 32*32, 64*128, etc.
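
If you ever need to check this at runtime, a tiny helper like the following could round a size up to the next power of two (sketch only, not part of the original project):

// Round a size up to the next power of two.
static int NextPowerOfTwo(int n)
{
	int p = 1;
	while (p < n)
		p <<= 1;
	return p;
}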



3. Jni 

  After decoding, we have to send the data to C++. Below is the [onDecodeImageComplete] code (ignore the add_to_atlas parameter, it is not needed for this example).

JNIEXPORT void JNICALL Java_com_snj_opengles_MainNDK_onDecodeImageComplete(JNIEnv* a, jclass b, jbyteArray pixels, jint width, jint height, jstring name, jboolean add_to_atlas)
{
	Log::info_print("onDecodeImageComplete start");

	// Use direct buffer
	// GLubyte* data = (GLubyte*)a->GetDirectBufferAddress(pixels);

	// Use non-direct buffer
	int len = width*height*4;
	GLbyte* data = new GLbyte[len];
	a->GetByteArrayRegion(pixels, 0, len, data);

	len = a->GetStringUTFLength(name);
	char* texName = new char[len+1];
	a->GetStringUTFRegion(name, 0, len, texName);

	shared_ptr<Texture> tex     = make_shared<Texture>();
	tex->m_name                 = string(texName);
	tex->m_data                 = (GLubyte*)data;
	tex->m_width                = width;
	tex->m_height               = height;
	tex->m_tex_map_width        = width;
	tex->m_tex_map_height       = height;
	tex->m_tex_map_ratio_width  = tex->m_width/(float)tex->m_tex_map_width;
	tex->m_tex_map_ratio_height = tex->m_height/(float)tex->m_tex_map_height;
	tex->m_request_add_to_atlas = add_to_atlas;

	ResourceManager::getInstance()->addToTextureCompletList(tex);

	delete[] texName;
	Log::info("onDecodeImageComplete end");
}

  Don't use a direct buffer. When I used a direct buffer, I found that allocating it was very slow, so I use a non-direct buffer.

  Use GetByteArrayRegion; it is an optimized function for copying the data.

  When allocating the char array, the size should be GetStringUTFLength() + 1 to leave room for the terminating null; without the extra byte the buffer is overrun and the program crashed when I deleted the array.
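
An alternative that avoids the manual sizing is to let JNI manage the buffer (sketch only; GetStringUTFChars/ReleaseStringUTFChars are standard JNI calls):

// Copy the jstring into a std::string and release the JNI buffer immediately.
const char* utf = a->GetStringUTFChars(name, NULL);
string texName  = (utf != NULL) ? string(utf) : string();
a->ReleaseStringUTFChars(name, utf);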


4. Result

 That finishes decoding the texture. If you use OpenGL ES 2.0 and want to know about shaders, see http://shakddoo.tistory.com/9



<OpenGL Diary>



1. Outline

  Recently, many developers use OpenGL ES 2.0 to make 2D graphics games. But it is a little difficult because of the graphics-pipeline differences between OpenGL ES 1.0 and OpenGL ES 2.0. Especially when we meet GLSL shaders for the first time, they can be a bit intimidating. The following shows the shaders for someone who wants to make a 2D graphics game or app.


2. Vertex Shader

  The vertex shader handles per-vertex info such as position, vertex color, etc. In 2D graphics you don't need to set up lighting or fog.

  Before the shader runs, you have to send the position, color and texture-coordinate info. The code below is an example.

void OpenGLES_2_0::jdGLVertexPointer(GLint size, GLenum type, GLsizei stride, const GLvoid* pointer)
{
	glVertexAttribPointer(m_Attributes.Position,size,type, GL_FALSE, stride, pointer);
}
void OpenGLES_2_0::jdGLColorPointer(int size, unsigned int type, int stride, const void* pointer)
{
	glVertexAttribPointer(m_Attributes.Color, size,type, GL_FALSE, stride, pointer);
}
void OpenGLES_2_0::jdGLTexCoordPointer (GLint size, GLenum type, GLsizei stride, const GLvoid *pointer)
{
	glVertexAttribPointer(m_Attributes.TexCoord, size,type, GL_FALSE, stride, pointer);
}

  I wrapped the OpenGL ES 2.0 functions to look like the OpenGL ES 1.0 functions. If you want to know what each parameter means, see the reference page (OpenGL ES API). Using the code above and [glDrawElements(mode, count, type, indices)], the shader receives the per-vertex info such as position, color, etc. The following code shows the vertex shader.


attribute vec4 position;
attribute vec4 source_color;
attribute vec2 texture_coord;

uniform mat4 projection;
uniform mat4 modelview;

varying vec4 destination_color;
varying vec2 texture_coord_out;

void main(void)
{
	destination_color = source_color;
	gl_Position       = projection * modelview * position;
	texture_coord_out = texture_coord;
}

  In OpenGL ES 2.0, you have to set the projection and modelview matrices yourself. In other words, you have to build a projection matrix (like glOrtho or glFrustum) and a modelview matrix (like glTranslate, glRotate, glScale). This is easy to program using the man pages: search [man glOrtho] in Google and you can see the formula.
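
For example, a column-major glOrtho-style matrix built from the man page formula might look like this (sketch only; my engine actually keeps matrices in its own Matrix class):

// Column-major 4x4 orthographic projection, following the glOrtho man page.
void BuildOrtho(float l, float r, float b, float t, float n, float f, float out[16])
{
	for (int i = 0; i < 16; i++) out[i] = 0.0f;
	out[0]  =  2.0f / (r - l);
	out[5]  =  2.0f / (t - b);
	out[10] = -2.0f / (f - n);
	out[12] = -(r + l) / (r - l);
	out[13] = -(t + b) / (t - b);
	out[14] = -(f + n) / (f - n);
	out[15] =  1.0f;
}

The result can then be uploaded to the [projection] uniform with glUniformMatrix4fv.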

  Through the varying keyword, you can send info to the fragment shader.


3. Fragment Shader

  The fragment shader sets the pixel color. It runs once for every covered screen pixel, so be careful when programming it: it directly affects performance.

varying lowp vec4 destination_color;
varying mediump vec2 texture_coord_out;
uniform sampler2D sampler;

uniform int enable_texture;
lowp vec4 sample_color;
lowp vec4 tmp_color;
void main(void)
{
	if(enable_texture == 1)
	{
		sample_color	= texture2D(sampler, texture_coord_out);
		gl_FragColor	= sample_color*destination_color;
	}
	else
	{
		gl_FragColor	= destination_color;
	}
}

  I didn't handle glTexEnv, but if you want a glTexEnv effect you have to implement it directly in the fragment shader. A bool type exists in shaders, but there is no dedicated glUniform call for it, so I used an int value. The following code is my OpenGL ES 2.0 enable-state code.

void OpenGLES_2_0::jdGLEnable(GLenum cap)
{
	switch(cap)
	{
	case GL_TEXTURE_2D:
		glUniform1i(m_Uniforms.EnableTexture, 1);
		return;
	case GL_NORMALIZE:
		glUniform1i(m_Uniforms.EnableNomalize, 1);
		m_enable_normalize = true;
		return;
	case GL_LIGHTING:
		glUniform1i(m_Uniforms.EnableLighting, 1);
		return;
	case GL_ALPHA_TEST:
		glUniform1i(m_Uniforms.EnableAlphaTest, 1);
		return;
	case GL_FOG:
		glUniform1i(m_Uniforms.EnableFog, 1);
		return;
	case GL_LIGHT0:
	case GL_LIGHT1:
	case GL_LIGHT2:
	case GL_LIGHT3:
	case GL_LIGHT4:
	case GL_LIGHT5:
	case GL_LIGHT6:
	case GL_LIGHT7:
		{
			int index = cap ^ 0x4000;	// GL_LIGHT0 is 0x4000, so this yields 0..7
			Light& tmp_light = m_light_array[index];
			glUniform1i(tmp_light.enable, 1);
			return;
		}
	}
	glEnable(cap);
}

 OpenGL ES 2.0 doesn't have enable states like GL_TEXTURE_2D, GL_NORMALIZE, etc. But 2D graphics only needs GL_TEXTURE_2D; you don't need to set GL_LIGHTING or GL_FOG.
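
A matching jdGLDisable could mirror this by writing 0 to the same uniforms (sketch only, with just the texture case shown):

void OpenGLES_2_0::jdGLDisable(GLenum cap)
{
	switch(cap)
	{
	case GL_TEXTURE_2D:
		glUniform1i(m_Uniforms.EnableTexture, 0);	// turn texturing off in the shader
		return;
	}
	glDisable(cap);
}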


4. Result Scene


<OpenGL Diary app>




1. Outline

  OpenGL ES 2.0 doesn't support glLightf, so developers have to create lighting effects with GLSL shaders. The following doesn't explain OpenGL ES 2.0 functions such as glGetUniformLocation, glGetAttribLocation, etc.


2. Light formula

  The formula below is the standard expression of lighting in 3D graphics.


I = Global Ambient + Emission + Ambient + Diffuse + Specular

  = Ga + Me + (1/(a + b*D + c*D^2)) * (Ka*Ia + Kd*Id*(NㆍL) + Ks*Is*(RㆍV)^n)


Ga = Global Ambient

Me = Material Emission

D = light distance from vertex

a = constant attenuation

b = linear attenuation

c = quadratic attenuation

Ka  = Material Ambient

Ia = Light Ambient

Kd = Material Diffuse

Id = Light Diffuse

Ks = Material Specular

Is  = Light Specular



N = Normal Unit Vector of Vertex

L = Light Unit Vector (i.e. Light Position - Vertex Position)

R = Reflection Unit Vector

V = View point Unit Vector


Global Ambient, Ambient, Diffuse, Specular, Emission, Attenuation and Position values are set with OpenGL ES 2.0 functions such as glUniform4f and glUniform1f.

For example, I initialize the material values like this:

//init material
m_material.ambient	= glGetUniformLocation(program, "material.ambient");
m_material.diffuse 	= glGetUniformLocation(program, "material.diffuse");
m_material.specular 	= glGetUniformLocation(program, "material.specular");
m_material.emission 	= glGetUniformLocation(program, "material.emission");
m_material.shininess 	= glGetUniformLocation(program, "material.shininess");

glUniform4f(m_material.ambient, 0.2f, 0.2f, 0.2f, 1.0f);
glUniform4f(m_material.diffuse, 0.8f, 0.8f, 0.8f, 1.0f);
glUniform4f(m_material.specular, 0.0f, 0.0f, 0.0f, 1.0f);
glUniform4f(m_material.emission, 0.0f, 0.0f, 0.0f, 1.0f);
glUniform1f(m_material.shininess, 0.0f);


On the other hand, the Normal Vector, Reflection Vector, Eye Vector and Light Vector are computed in the shader.


Light Vector : Light Position - Vertex Position. The shader code below shows this.

gl_Position = modelview * position;
vec3 L = vec3(light_position.x - gl_Position.x,
              light_position.y - gl_Position.y,
              light_position.z - gl_Position.z);
D = sqrt(L.x*L.x + L.y*L.y + L.z*L.z);

'D' is the distance between the light and the vertex. It is used to apply the attenuation effect.


Normal Vector : this value is influenced by the modelview matrix. If you transform your model, the normal vector is transformed as well, so I build a normal matrix from the modelview matrix.

m_Uniforms.NormalMatrix = glGetUniformLocation(m_current_shader_program,"normal_matrix");
glUniformMatrix3fv(m_Uniforms.NormalMatrix, 1, 0, m_model_view_matrix_stack.top()->ToMat3().Inverse().Transposed().Pointer());

To get the normal matrix, I take the upper 3x3 of the modelview matrix, invert and transpose it, and finally send the matrix to the shader.

In the shader, you can then compute the normal vector. The shader code is below.

vec3 normalized_N 	= normalize(normal_matrix * normal);	//normal vector


Reflection Vector : this vector is the reflection of the light vector. You can get it from the light vector and the normal vector. The shader code is below.

vec3 normalized_R 	= normalize(dot(L, normalized_N)*2.0*normalized_N - L);		//reflection vector

Eye Vector : this vector is the direction of the eye. The default is vec3(0, 0, 1), and I use the default.

vec3 normalized_V 	= normalize(vec3(0.0, 0.0, 1.0));		//eye vector


Attenuation : light changes with distance. When the light and the vertex are close, the light intensity is high. In 3D graphics, attenuation is expressed as

1/(a + b*D + c*D^2).

attenuation = 1.0/(light[0].constant_attenuation + light[0].linear_attenuation*D + light[0].quadratic_attenuation*D*D);


3. Ambient + Diffuse + Specular

Ambient is the surrounding light. In 3D graphics this value is expressed as Ka*Ia, scaled by the attenuation term. [Ka] is the material ambient value, [Ia] is the light ambient, and D is the distance.

Diffuse is the spread of light across the surface: Kd*Id*(NㆍL).

Specular is the highlight: Ks*Is*((RㆍV)^n).

The shader code is below.

float df = max(0.0, dot(normalized_N, normalized_L));							//get diffuse constant
float sf = max(0.0, dot(normalized_R, normalized_V));							//get specular constant
				
float shininess = clamp(material.shininess, 0.0, 128.0);
if(material.shininess != 0.0)
	sf = pow(sf, material.shininess);

float attenuation = 1.0;
float spot_exponent_value = 1.0;
if(light[0].position.w != 0.0)
{
	attenuation = 1.0/(light[0].constant_attenuation + light[0].linear_attenuation*D + light[0].quadratic_attenuation*D*D);
	spot_exponent_value = pow(max(0.0,spotlight_dot),light[0].spot_exponent);
}
				
				
//get color
vec4 ambient_value = light[0].ambient*material.ambient*attenuation;
vec4 diffuse_value = df*light[0].diffuse*material.diffuse*attenuation*spot_exponent_value;
vec4 specualr_value = sf*light[0].specular*material.specular*attenuation*spot_exponent_value;   
				
sum_color = global_ambient + material.emission + ambient_value + diffuse_value + specualr_value;

df is the dot product of the normal vector and the light vector, used in the diffuse term. sf is (RㆍV)^n, used in the specular term.




4. Result Scene

This is the OpenGL test app (OpenGL Diary).






OpenGL ES 2.0 does not support glFog, so if you want a fog effect you have to code it in GLSL.


1. Calculate

  GL_FOG_MODE has 3 modes.


GL_LINEAR : f = (end - z)/(end - start)

GL_EXP :  f = exp(-density*z) 

GL_EXP2 : f = exp(-(density*z)*(density*z))

Color = f*Color_source + (1 - f)*Color_fog



  start - the fog start distance (usually 0.0f)

  end - the fog end distance (usually 1.0f)

  Color_source is the fragment color.

  Color_fog is the fog color set by GL_FOG_COLOR.


2. GLSL Code

  Below is the GLSL code. It is very simple.

tmp_color is the RGBA color produced by modulating the texture color and the source color.

enable_fog is set from the C++ side with glUniform1i. The default mode is GL_LINEAR.

To get the fragment's depth value, I used gl_FragCoord.z, which is in [0, 1].


void main(void)
{	
	if(enable_fog == 1)
	{
		float f = 0.0;
		float z = gl_FragCoord.z;
		float alpha = tmp_color.a;
		if(fog_mode == FOG_EXP)
		{
			f = clamp(exp(-fog_density*z),0.0, 1.0);
		}
		else if(fog_mode == FOG_EXP2)
		{
			f = clamp(exp(-(fog_density*z)*(fog_density*z)),0.0, 1.0);
		}
		else
		{
			float divide = fog_end - fog_start;
			if(divide != 0.0)
				f = clamp((fog_end - z)/(divide),0.0, 1.0);
		}
		tmp_color = f*tmp_color + (1.0 - f)*fog_color;
		tmp_color.a = alpha;
	}
	
	gl_FragColor = tmp_color;
}
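
On the C++ side, the fog state could be uploaded roughly like this (sketch only; the uniform names follow the shader above, while the program handle and the mode value are assumptions):

// Set the fog uniforms once when fog is enabled.
glUniform1i(glGetUniformLocation(program, "enable_fog"),  1);
glUniform1i(glGetUniformLocation(program, "fog_mode"),    0);		// anything other than FOG_EXP/FOG_EXP2 falls back to linear
glUniform1f(glGetUniformLocation(program, "fog_start"),   0.0f);
glUniform1f(glGetUniformLocation(program, "fog_end"),     1.0f);
glUniform1f(glGetUniformLocation(program, "fog_density"), 1.0f);
glUniform4f(glGetUniformLocation(program, "fog_color"),   0.5f, 0.5f, 0.5f, 1.0f);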


The result is shown below.

(OpenGL Diary)




I added an AdMob view to an Android FrameLayout, but I couldn't see the ad. I think the AdMob view was not actually gone, because when I tapped the ad's location, the activity changed to show the advertisement.





To resolve the problem, I applied a background color to the AdMob view. My code is below.


//-----------------------------------------------------------------------------------------------------------//
//for admob
adView = new AdView(this);
adView.setAdSize(AdSize.SMART_BANNER);
adView.setAdUnitId("ca-app-pub-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
layout.addView(adView, new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT,Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL));
        
AdRequest adRequest = new AdRequest.Builder()
.addTestDevice(AdRequest.DEVICE_ID_EMULATOR)
.addTestDevice("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")
.build();
adView.loadAd(adRequest);
adView.setBackgroundColor(Color.TRANSPARENT);
//-----------------------------------------------------------------------------------------------------------//


I added [adView.setBackgroundColor(Color.TRANSPARENT);]. After applying this, I could see the AdMob test image.






1. MainActivity

  As you can see in the code below, MainActivity itself does very little. setContentView sets MainGLSurfaceView as the view shown on screen. openGLVerstion is used inside the MainGLSurfaceView class to set the GL version.


public class MainActivity extends Activity {
	public static MainGLSurfaceView view;
	public static Context 			context = null;
	public static final int			openGLVerstion = 2;
        
	@Override
	protected void onCreate(Bundle savedInstanceState) 
	{
		super.onCreate(savedInstanceState);
		context = this;
		
		/*
		 * Setting OpenGL.
		 * Version : 2.0 
		 * Renderer : MainGLRenderer
		 */
		view = new MainGLSurfaceView(this);
		setContentView(view);
	}
}


2. GLSurfaceView 

  The class below inherits from GLSurfaceView as a custom class. You don't actually have to subclass it, but I did so because of event handling later (details in the post on event handling).

public class MainGLSurfaceView extends GLSurfaceView 
{
	MainGLRenderer mRenderer;
	public MainGLSurfaceView(Context con)
	{
		super(con);
		mRenderer = new MainGLRenderer();
		this.setEGLContextClientVersion(MainActivity.openGLVerstion);
		this.setPreserveEGLContextOnPause(MainActivity.saveGLSurface);
		this.setRenderer(mRenderer);
	}
}


  - setEGLContextClientVersion : sets the OpenGL ES version. To set the version you also have to modify AndroidManifest.xml and jni/Android.mk (see the AndroidManifest.xml example below).

  - setPreserveEGLContextOnPause : without this call, all OpenGL ES related data is destroyed on pause, so on resume you have to restore the GL data again, which is more tedious than it sounds. From API 11 you can use this function to keep the GL data from being destroyed.

  - setRenderer : sets the renderer object.



3. Renderer interface

  To use the Renderer interface, you have to implement the onDrawFrame, onSurfaceChanged and onSurfaceCreated methods. The class below is an example.


public class MainGLRenderer implements Renderer
{
	long preTime;
	long diffTime;
	boolean isFirst = true;
	float frameTime = 1000/30;

	@Override
	public void onDrawFrame(GL10 gl)
	{
		if(isFirst)
		{
			MainNDK.render(0);
			preTime = System.currentTimeMillis();
			isFirst = false;
			return;
		}
		diffTime = System.currentTimeMillis() - preTime;
		MainNDK.render(diffTime);
	}

	@Override
	public void onSurfaceChanged(GL10 gl, int width, int height)
	{
		Log.i("AndroidOpenGL", "onSurfaceChanged start");
		MainNDK.onResize(width, height);
		Log.i("AndroidOpenGL", "onSurfaceChanged end");
	}

	@Override
	public void onSurfaceCreated(GL10 gl, EGLConfig config)
	{
		Log.i("AndroidOpenGL", "onSurfaceCreated start");
		MainNDK.init();
		MainActivity.isGLCreated = true;
		Log.i("AndroidOpenGL", "onSurfaceCreated end");
	}
}


  - onDrawFrame : called every frame. It calls the C++ render function according to the frame rate I set (explained later in the JNI post).

  - onSurfaceChanged : called when the surface is created or its size changes, for example on rotation. Here I call the C++ onResize function.

  - onSurfaceCreated : called exactly once, when the GL view is created. Only after this call can C++ use GL functions; in short, this is when the GL context exists. Here I call the C++ init function.
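
For reference, the native counterparts of MainNDK.init / onResize / render would be declared roughly like this on the C++ side (sketch only; the signatures assume static native methods in com.snj.opengles.MainNDK):

#include <jni.h>

extern "C" {
JNIEXPORT void JNICALL Java_com_snj_opengles_MainNDK_init(JNIEnv* env, jclass clazz);
JNIEXPORT void JNICALL Java_com_snj_opengles_MainNDK_onResize(JNIEnv* env, jclass clazz, jint width, jint height);
JNIEXPORT void JNICALL Java_com_snj_opengles_MainNDK_render(JNIEnv* env, jclass clazz, jlong diff_time);
}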




4. The AndroidManifest.xml file


<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.snj.opengles"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="11"
        android:targetSdkVersion="18" />

    <uses-feature android:glEsVersion="0x00020000" android:required="true" />

    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme"
        android:hardwareAccelerated="true">

        <meta-data android:name="com.google.android.gms.version"
            android:value="@integer/google_play_services_version"/>

        <activity
            android:name="com.snj.opengles.MainActivity"
            android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
            android:label="@string/app_name"
            android:screenOrientation="landscape"
            android:configChanges="fontScale|keyboard|keyboardHidden|locale|mnc|mcc|navigation|orientation|screenLayout|screenSize|smallestScreenSize|uiMode|touchscreen" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>

        <activity android:name="com.google.android.gms.ads.AdActivity"
            android:configChanges="keyboard|keyboardHidden|orientation|screenLayout|uiMode|screenSize|smallestScreenSize"/>
    </application>

    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
    <uses-permission android:name="android.permission.INTERNET"/>
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>
    <uses-permission android:name="android.permission.BLUETOOTH" />

</manifest>





1. Creating an NDK project

  First, download the Android SDK, the NDK, and Eclipse.

  - Download Eclipse + the SDK

  - Download the NDK

  - Install Cygwin (on Windows)

  - Install the Eclipse ADT, CDT, etc.



2. Creating the C++ project

  - Create an empty Eclipse project.

  - Right-click the project -> Android Tools -> Add Native Support.

  - Once the jni folder and related files have been created, build the project once.

  - With that, the basic setup is finished, as shown in the picture below.





3. Additional settings

  - NDK debugging: in Project Properties -> C/C++ Build -> Build command, change the command to

    ndk-build NDK_DEBUG=1

    (more on this later).

  - In Project Properties -> C/C++ Build -> Behaviour, remove 'all' from the build target.

  - Check Clean.
