Porting iPhone Games to Android - Textures and Buffers

I wrote my game in C++. I managed to compile everything with the NDK, but I've run into a couple of snags.

How do I load textures into OpenGL from the NDK (since the loading code already exists in C++)? I used BitmapFactory to decode the image, then copied the pixels into a ByteBuffer that I allocated with a size of width * height * 4. I then passed the ByteBuffer's backing array to my native code and grabbed a pointer to it with Get/ReleasePrimitiveArrayCritical. I don't think this is working, because it crashes when I try it. On top of that, I can't help noticing that the garbage collector reports about 5.37 MB being allocated, when the image file is only 16 KB.
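Roughly, the native side of what I'm describing looks like this (a trimmed sketch with placeholder names, not my actual code):

#include <jni.h>
#include <GLES/gl.h>

// Hypothetical native method: receives the pixel array copied out of the
// Bitmap on the Java side and uploads it as an OpenGL ES 1.x texture.
extern "C" JNIEXPORT void JNICALL
Java_com_example_NativeLib_uploadTexture(JNIEnv *env, jclass clazz,
                                         jbyteArray pixels, jint width, jint height)
{
    // Pin (or copy) the Java byte[] to get a raw pointer to the RGBA data.
    void *data = env->GetPrimitiveArrayCritical(pixels, 0);
    if (data != 0) {
        GLuint tex;
        glGenTextures(1, &tex);
        glBindTexture(GL_TEXTURE_2D, tex);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
                     GL_RGBA, GL_UNSIGNED_BYTE, data);
        // Release without copying back; JNI_ABORT would also do, since we only read.
        env->ReleasePrimitiveArrayCritical(pixels, data, 0);
    }
}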

Framebuffers and renderbuffers are another matter. I can't tell whether I'm supposed to create framebuffers and renderbuffers myself. Does GLSurfaceView take care of that? And is it okay to use my own buffers, as in the sketch below?
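By "my own buffers" I mean a framebuffer object with a renderbuffer attached, created through the GLES 1.x OES extension, something like this (placeholder code, not what I currently have):

#include <GLES/gl.h>
#include <GLES/glext.h>

// Create an off-screen FBO backed by a 16-bit RGBA renderbuffer (illustrative only).
static GLuint createOffscreenFBO(int width, int height)
{
    GLuint fbo, rbo;
    glGenFramebuffersOES(1, &fbo);
    glGenRenderbuffersOES(1, &rbo);

    glBindRenderbufferOES(GL_RENDERBUFFER_OES, rbo);
    glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA4_OES, width, height);

    glBindFramebufferOES(GL_FRAMEBUFFER_OES, fbo);
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES,
                                 GL_RENDERBUFFER_OES, rbo);

    if (glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
        // Incomplete: this format/size combination isn't supported on the device.
    }
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0); // back to the on-screen buffer
    return fbo;
}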

+4
1 answer

OK, I figured it out. It turns out BitmapFactory was scaling my images so they were no longer power-of-two sizes, which is why the textures didn't work. GLSurfaceView sets up OpenGL ES 1.0 for you; to get something on screen, you just draw to it. I can use framebuffer objects, but I have to attach a texture to them and then draw that texture to the screen. I took an example and ported it to my native code, leaving out the depth buffer.

Here is the code:

Java:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLSurfaceView;
import android.util.Log;

public class NativeRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "NativeRenderer";

    static {
        System.loadLibrary("Test");
    }

    private static native void init();
    private static native void setTexture(byte[] data, int width, int height);
    private static native void resize(int width, int height);
    private static native void render();

    private Context mContext;

    public NativeRenderer(Context context) {
        mContext = context;
    }

    public void onDrawFrame(GL10 gl) {
        render();
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        resize(width, height);
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        init();

        ByteBuffer data;
        BitmapFactory.Options opts = new BitmapFactory.Options();
        opts.inPreferredConfig = Bitmap.Config.ARGB_8888; // ARGB_8888 is the default
        opts.inDensity = 240;  // needed so my images stay 512x512 (power of 2)
        opts.inScaled = false; // someone suggested adding this
        Bitmap bitmap = BitmapFactory.decodeResource(mContext.getResources(), R.drawable.image, opts);

        int width = bitmap.getWidth();
        int height = bitmap.getHeight();
        data = ByteBuffer.allocate(width * height * 4);
        Log.i(TAG, "allocate:" + (width * height * 4));
        data.order(ByteOrder.nativeOrder());
        bitmap.copyPixelsToBuffer(data);
        data.position(0);
        bitmap.recycle();

        Log.i(TAG, "data.hasArray():" + data.hasArray());
        setTexture(data.array(), width, height);
    }
}

C++:

 #include "your_company_NativeRenderer.h" #include <android/log.h> #include <GLES/gl.h> #include <GLES/glext.h> #include <string.h> #include "Functions.h" static GLuint texture[2]; static GLuint framebuffer; static GLuint renderbuffer; static GLuint texturebuffer; static int textureWidth; static int textureHeight; static int sWidth; static int sHeight; static float hheight; void viewportFramebuffer() { glViewport(0, 0, textureWidth, textureHeight); glMatrixMode(GL_PROJECTION); glLoadIdentity(); static float viewportFramebufferRight = 160.0f + (320.0 / (float)sWidth) * (textureWidth - sWidth); static float viewportFramebufferTop = hheight + ((2 * hheight) / (float)sHeight) * (textureHeight - sHeight); glOrthof(-160.0f, viewportFramebufferRight, -hheight, viewportFramebufferTop, -1.0f, 1.0f); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); } void viewportSurface() { glViewport(0, 0, sWidth, sHeight); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glOrthof(-160.0f, 160.0f, -hheight, hheight, -1.0f, 1.0f); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); } JNIEXPORT void JNICALL Java_your_company_NativeRenderer_init (JNIEnv *env, jclass obj) { __android_log_print(ANDROID_LOG_INFO, TAG, "init()"); glGenFramebuffersOES(1, &framebuffer); glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer); glEnable(GL_TEXTURE_2D); //glEnable(GL_BLEND); glGenTextures(2, texture); texturebuffer = texture[0]; } JNIEXPORT void JNICALL Java_your_company_NativeRenderer_setTexture (JNIEnv *env, jclass obj, jbyteArray array, jint width, jint height) { __android_log_print(ANDROID_LOG_INFO, TAG, "setTexture()"); void *image = env->GetPrimitiveArrayCritical(array, 0); unsigned int *imageData = static_cast<unsigned int *>(image); flipImageVertically(imageData, width, height); // flips the bytes vertically glBindTexture(GL_TEXTURE_2D, texture[1]); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image); env->ReleasePrimitiveArrayCritical(array, image, 0); } JNIEXPORT void JNICALL Java_your_company_NativeRenderer_resize (JNIEnv *env, jclass obj, jint width, jint height) { __android_log_print(ANDROID_LOG_INFO, TAG, "resize(width:%d, height:%d)", width, height); sWidth = width; sHeight = height; hheight = 240.0f + additionToScalePlane(width, height); // used to adjust the spect ratio // iPhone is 2 : 3, my Galaxy S is 5 : 3 __android_log_print(ANDROID_LOG_INFO, TAG, "hheight:%g", hheight); textureWidth = powerOf2Bigger(width); textureHeight = powerOf2Bigger(height); __android_log_print(ANDROID_LOG_INFO, TAG, "texture:{w:%d, h:%d}", textureWidth, textureHeight); glBindTexture(GL_TEXTURE_2D, texturebuffer); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); glColor4ub(0, 0, 0, 255); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textureWidth, textureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); int area[] = {0.0, 0.0, sWidth, sHeight}; // using draw_texture extension glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, area); glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer); glFramebufferTexture2DOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_TEXTURE_2D, texturebuffer, 0); int status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES); if(status != 
GL_FRAMEBUFFER_COMPLETE_OES) __android_log_print(ANDROID_LOG_ERROR, TAG, "Framebuffer is not complete: %x", status); glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer); } JNIEXPORT void JNICALL Java_your_company_NativeRenderer_render (JNIEnv *env, jclass obj) { static float texCoord[] = { 0.0, 0.0, 0.0, 480.0/512.0, 320.0/512.0, 0.0, 320.0/512.0, 480.0/512.0, }; static float vertex[] = { -160.0, -240.0, -160.0, 240.0, 160.0, -240.0, 160.0, 240.0, }; //off-screen viewportFramebuffer(); glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer); glClearColor(1.0, 0.0, 0.0, 1.0); glClear(GL_COLOR_BUFFER_BIT); glEnableClientState(GL_VERTEX_ARRAY); glEnableClientState(GL_TEXTURE_COORD_ARRAY); glBindTexture(GL_TEXTURE_2D, texture[1]); glVertexPointer(2, GL_FLOAT, 0, vertex); glTexCoordPointer(2, GL_FLOAT, 0, texCoord); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); glDisableClientState(GL_VERTEX_ARRAY); glDisableClientState(GL_TEXTURE_COORD_ARRAY); glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0); //on-screen viewportSurface(); glClearColor(0.0, 0.0, 1.0, 1.0); glClear(GL_COLOR_BUFFER_BIT); /*static float textureCoord[] = { 0.0, 0.0, 0.0, sHeight/(float)textureHeight, sWidth/(float)textureWidth, 0.0, sWidth/(float)textureWidth, sHeight/(float)textureHeight, }; static float textureVertex[] = { -160.0, -hheight, -160.0, hheight, 160.0, -hheight, 160.0, hheight, };*/ //if there isn't draw_texture extension glBindTexture(GL_TEXTURE_2D, texturebuffer); glTexEnvi(GL_TEXTURE_ENV,GL_TEXTURE_ENV_MODE, GL_REPLACE); glActiveTexture(GL_TEXTURE0); glDrawTexiOES(0, 0, 0, sWidth, sHeight); //glVertexPointer(2, GL_FLOAT, 0, textureVertex); //glTexCoordPointer(2, GL_FLOAT, 0, textureCoord); //glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); //glDisableClientState(GL_VERTEX_ARRAY); //glDisableClientState(GL_TEXTURE_COORD_ARRAY); glBindTexture(GL_TEXTURE_2D, 0); } 
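The helpers that come from Functions.h (powerOf2Bigger, flipImageVertically, additionToScalePlane) aren't shown above. Here is a plausible sketch of the first two, in case anyone wants to reproduce this (not my exact code, just what the call sites imply):

// Smallest power of two that is >= n (e.g. 480 -> 512).
static int powerOf2Bigger(int n)
{
    int p = 1;
    while (p < n)
        p <<= 1;
    return p;
}

// Flip an RGBA image upside down in place, since Bitmap rows go top-to-bottom
// while OpenGL texture coordinates start at the bottom.
static void flipImageVertically(unsigned int *pixels, int width, int height)
{
    for (int y = 0; y < height / 2; ++y) {
        unsigned int *top = pixels + y * width;
        unsigned int *bottom = pixels + (height - 1 - y) * width;
        for (int x = 0; x < width; ++x) {
            unsigned int tmp = top[x];
            top[x] = bottom[x];
            bottom[x] = tmp;
        }
    }
}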
+2

Source: https://habr.com/ru/post/1332854/

