Why does eglCreateImageKHR return EGL_NO_IMAGE_KHR (on Android)?

I need to create a texture in one thread and use it in another thread. I tried to use a shared context, but the texture does not seem to be visible from the other thread's context. I tried using eglCreateImageKHR following this example (code sent by Wiktor)

eglCreateImageKHR returns EGL_NO_IMAGE_KHR.

Also, when using the extensions, I needed to define #define EGL_EGLEXT_PROTOTYPES and #define GL_GLEXT_PROTOTYPES before including eglext.h and gl2ext.h in order to get the code to compile. Is that not right? Should they be defined in some other header?

If it helps, here is my code for the thread that creates the texture. I think I am making a mistake in passing the texture handle returned by OpenGL to eglCreateImageKHR by just casting it to EGLClientBuffer. (Also, I don't really know what reinterpret_cast does; I just copied it from the sample, but the sample is not very clear in this part.)

  GLuint framebuffer; GLuint depthRenderbuffer; GLuint texture; GLint texWidth = 256, texHeight = 256; const EGLint attribs[] = { EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8, EGL_ALPHA_SIZE, 8, EGL_DEPTH_SIZE, 8, EGL_STENCIL_SIZE, 0, EGL_NONE }; const EGLint pbuf_attribs[] = { EGL_WIDTH, 512, EGL_HEIGHT, 512, EGL_NONE}; EGLSurface psurface; EGLContext context; EGLConfig config; EGLint numConfigs; EGLDisplay display; const EGLint contextAttribs[] = {EGL_CONTEXT_CLIENT_VERSION,2, EGL_NONE }; if ((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY) { LOG_ERROR("Tex eglGetDisplay() returned error %d", eglGetError()); return false; } if (!eglChooseConfig(display, attribs, &config, 1, &numConfigs)) { LOG_ERROR("eglChooseConfig() returned error %d", eglGetError()); destroy(); return false; } if (!(context = eglCreateContext(display, config, 0, contextAttribs))) { LOG_ERROR("Tex eglCreateContext() returned error %d", eglGetError()); destroy(); return false; } if (!(psurface = eglCreatePbufferSurface(display, config, pbuf_attribs))) { LOG_ERROR("Tex eglCreatePbufferSurface() returned error %d", eglGetError()); destroy(); return false; } LOGI("About to make current. Display %p surface %p context %p",display, psurface, context); if (!eglMakeCurrent(display, psurface, psurface, context)) { LOG_ERROR("Tex eglMakeCurrent() returned error %d", eglGetError()); destroy(); return false; } checkGlError("make current"); glGenFramebuffers(1, &framebuffer); glGenRenderbuffers(1, &depthRenderbuffer); glGenTextures(1, &texture); glBindTexture(GL_TEXTURE_2D, texture); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texWidth, texHeight, 0, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, NULL); glBindTexture(GL_TEXTURE_2D, 0); //FIXME Should this be here?? 
Tried with and without EGLint imageAttributes[] = { EGL_GL_TEXTURE_LEVEL_KHR, 0, EGL_IMAGE_PRESERVED_KHR, EGL_FALSE, EGL_NONE }; LOGI("Before CreateImage display %p context %p texture %d",display, context, texture); _eglImage = eglCreateImageKHR(display, context, EGL_GL_TEXTURE_2D_KHR, reinterpret_cast<EGLClientBuffer>(texture), 0); if(_eglImage == EGL_NO_IMAGE_KHR){ LOGE("eglCreateImageKHR failed"); } 
+4
source share

Source: https://habr.com/ru/post/1487817/


All Articles