Problem with Android OpenGL Camera

I am working on an Android camera application that takes the camera feed and displays it on the screen. It works as intended on my DROID RAZR MAXX running 4.3 and on several other phones, but unfortunately it breaks on a few devices, and I can't track down the problem.

I have attached a screenshot showing what the problem is.

It is hard to say exactly what the green "artifacts" are, but they look almost like blocks of the camera image frozen from the moment the camera was first turned on. The colors flicker, but the shapes inside the blocks never change.

I have removed everything that isn't needed and cleaned up the code as much as possible, but I honestly don't know why this happens, especially since it works fine on some phones and not on others.

If more information is needed, just leave a comment and I will add it!

CameraActivity.java

    public class CameraActivity extends Activity {
        private MyGLSurfaceView glSurfaceView;
        private MyCamera mCamera;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mCamera = new MyCamera();
            glSurfaceView = new MyGLSurfaceView(this, mCamera);
            setContentView(glSurfaceView);
        }

        @Override
        protected void onPause() {
            super.onPause();
            mCamera.stop();
        }
    }

MyCamera.java

    public class MyCamera {
        private final static String LOG_TAG = "MyCamera";
        private Camera mCamera;
        private Parameters mCameraParams;
        private Boolean running = false;

        void start(SurfaceTexture surface) {
            Log.v(LOG_TAG, "Starting Camera");
            mCamera = Camera.open(0);
            mCameraParams = mCamera.getParameters();
            Log.v(LOG_TAG, mCameraParams.getPreviewSize().width + " x "
                    + mCameraParams.getPreviewSize().height);
            try {
                mCamera.setPreviewTexture(surface);
                mCamera.startPreview();
                running = true;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        void stop() {
            if (running) {
                Log.v(LOG_TAG, "Stopping Camera");
                mCamera.stopPreview();
                mCamera.release();
                running = false;
            }
        }
    }

MyGLSurfaceView.java

    class MyGLSurfaceView extends GLSurfaceView implements Renderer {
        private final static String LOG_TAG = "MyGLSurfaceView";
        private MyCamera mCamera;
        private SurfaceTexture mSurface;
        private DirectVideo mDirectVideo;

        public MyGLSurfaceView(Context context, MyCamera camera) {
            super(context);
            mCamera = camera;
            setEGLContextClientVersion(2);
            setRenderer(this);
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            float[] mtx = new float[16];
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mtx);
            mDirectVideo.draw();
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            Log.v(LOG_TAG, "Surface Changed");
            GLES20.glViewport(0, 0, width, height);
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            Log.v(LOG_TAG, "Surface Created");
            int texture = createTexture();
            mDirectVideo = new DirectVideo(texture);
            mSurface = new SurfaceTexture(texture);
            mCamera.start(mSurface);
        }

        private int createTexture() {
            int[] textures = new int[1];

            // generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, textures, 0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);

            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

            // Clamp to edge is only option.
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

            return textures[0];
        }

        public static int loadShader(int type, String shaderCode) {
            // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
            // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
            int shader = GLES20.glCreateShader(type);

            // add the source code to the shader and compile it
            GLES20.glShaderSource(shader, shaderCode);
            GLES20.glCompileShader(shader);

            return shader;
        }
    }

DirectVideo.java

    public class DirectVideo {
        private final String vertexShaderCode =
                "attribute vec4 vPosition;" +
                "attribute vec2 inputTextureCoordinate;" +
                "varying vec2 textureCoordinate;" +
                "void main()" +
                "{" +
                "gl_Position = vPosition;" +
                "textureCoordinate = inputTextureCoordinate;" +
                "}";

        private final String fragmentShaderCode =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;" +
                "varying vec2 textureCoordinate;\n" +
                "uniform samplerExternalOES s_texture;\n" +
                "void main() {" +
                "  gl_FragColor = texture2D( s_texture, textureCoordinate );\n" +
                "}";

        private FloatBuffer vertexBuffer, textureVerticesBuffer;
        private ShortBuffer drawListBuffer;
        private final int mProgram;
        private int mPositionHandle;
        private int mTextureCoordHandle;

        private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; // order to draw vertices

        // number of coordinates per vertex in this array
        private static final int COORDS_PER_VERTEX = 2;
        private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex

        static float squareCoords[] = {
                -1.0f,  1.0f,
                -1.0f, -1.0f,
                 1.0f, -1.0f,
                 1.0f,  1.0f,
        };

        static float textureVertices[] = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f,
                0.0f, 0.0f,
        };

        private int texture;

        public DirectVideo(int texture) {
            this.texture = texture;

            // initialize vertex byte buffer for shape coordinates
            ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
            bb.order(ByteOrder.nativeOrder());
            vertexBuffer = bb.asFloatBuffer();
            vertexBuffer.put(squareCoords);
            vertexBuffer.position(0);

            // initialize byte buffer for the draw list
            ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
            dlb.order(ByteOrder.nativeOrder());
            drawListBuffer = dlb.asShortBuffer();
            drawListBuffer.put(drawOrder);
            drawListBuffer.position(0);

            ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4);
            bb2.order(ByteOrder.nativeOrder());
            textureVerticesBuffer = bb2.asFloatBuffer();
            textureVerticesBuffer.put(textureVertices);
            textureVerticesBuffer.position(0);

            int vertexShader = MyGLSurfaceView.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
            int fragmentShader = MyGLSurfaceView.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

            mProgram = GLES20.glCreateProgram();             // create empty OpenGL ES Program
            GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
            GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
            GLES20.glLinkProgram(mProgram);                  // creates OpenGL ES program executables
        }

        public void draw() {
            GLES20.glUseProgram(mProgram);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);

            // get handle to vertex shader vPosition member
            mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");

            // Enable a handle to the triangle vertices
            GLES20.glEnableVertexAttribArray(mPositionHandle);

            // Prepare the square coordinate data
            GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
                    GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

            mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
            GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
            GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX,
                    GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);

            GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
                    GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

            // Disable vertex arrays
            GLES20.glDisableVertexAttribArray(mPositionHandle);
            GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
        }
    }
2 answers

In the onDrawFrame method you get the transformation matrix, but you don't use it. This matrix should be used to transform the texture coordinates. See the SurfaceTexture class documentation for more details.

Here is the fix:

  • Pass the matrix to the draw method:

    @Override
    public void onDrawFrame(GL10 gl) {
        float[] mtx = new float[16];
        mSurface.updateTexImage();
        mSurface.getTransformMatrix(mtx);
        mDirectVideo.draw(mtx);
    }
  • Add the following method to the DirectVideo class:

    private float[] transformTextureCoordinates(float[] coords, float[] matrix) {
        float[] result = new float[coords.length];
        float[] vt = new float[4];

        for (int i = 0; i < coords.length; i += 2) {
            float[] v = { coords[i], coords[i + 1], 0, 1 };
            Matrix.multiplyMV(vt, 0, matrix, 0, v, 0);
            result[i] = vt[0];
            result[i + 1] = vt[1];
        }
        return result;
    }
  • In the draw method, transform the textureVertices list before putting it into the buffer (you have to redo this conversion on every draw, as the matrix may change); a sketch of the fully assembled draw method follows this list:

    textureVerticesBuffer.clear();
    textureVerticesBuffer.put(transformTextureCoordinates(textureVertices, mtx));
    textureVerticesBuffer.position(0);
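
Putting the three steps together, the modified DirectVideo.draw method could look like the sketch below. It is assembled from the original draw() in the question plus the transform step above; nothing else in the class needs to change.

    // Sketch of the modified draw(), now taking the SurfaceTexture transform matrix.
    public void draw(float[] mtx) {
        GLES20.glUseProgram(mProgram);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);

        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

        // Re-transform the texture coordinates on every frame, because the matrix
        // returned by getTransformMatrix() may change between frames.
        textureVerticesBuffer.clear();
        textureVerticesBuffer.put(transformTextureCoordinates(textureVertices, mtx));
        textureVerticesBuffer.position(0);

        mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        GLES20.glEnableVertexAttribArray(mTextureCoordHandle);
        GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length,
                GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureCoordHandle);
    }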

An alternative solution is to pass the matrix to the shader.
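
For reference, here is a minimal sketch of that alternative (my own illustration, not part of the original answer): declare the texture matrix as a uniform, apply it in the vertex shader, and upload it in draw(). The uniform name uTexMatrix is an assumed name.

    // Vertex shader with the transform applied on the GPU. The attribute is
    // declared vec4 even though only 2 floats are supplied per vertex; GLES
    // fills the missing components with z = 0 and w = 1, which the translation
    // part of the matrix needs.
    private final String vertexShaderCode =
            "attribute vec4 vPosition;" +
            "attribute vec4 inputTextureCoordinate;" +
            "uniform mat4 uTexMatrix;" +
            "varying vec2 textureCoordinate;" +
            "void main() {" +
            "  gl_Position = vPosition;" +
            "  textureCoordinate = (uTexMatrix * inputTextureCoordinate).xy;" +
            "}";

    // In draw(float[] mtx), after glUseProgram(mProgram), upload the matrix:
    int texMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uTexMatrix");
    GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, mtx, 0);

With this approach the textureVerticesBuffer can stay untouched, since the per-frame transformation happens entirely in the shader.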

The second answer suggests making the view a SurfaceTexture.OnFrameAvailableListener and registering it on the SurfaceTexture:

    implements SurfaceTexture.OnFrameAvailableListener

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // TODO Auto-generated method stub
    }

    setOnFrameAvailableListener(this);
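
The answer does not show where these pieces go, so here is one way they could be wired into MyGLSurfaceView (a sketch under my own interpretation): render only when the camera actually delivers a new frame, by switching the view to on-demand rendering and calling requestRender() from the callback.

    // Sketch: MyGLSurfaceView rendering on demand, driven by camera frames.
    class MyGLSurfaceView extends GLSurfaceView
            implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {

        public MyGLSurfaceView(Context context, MyCamera camera) {
            super(context);
            mCamera = camera;
            setEGLContextClientVersion(2);
            setRenderer(this);
            // Only draw when requestRender() is called (must be set after setRenderer()).
            setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            int texture = createTexture();
            mDirectVideo = new DirectVideo(texture);
            mSurface = new SurfaceTexture(texture);
            mSurface.setOnFrameAvailableListener(this); // get notified of new camera frames
            mCamera.start(mSurface);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surfaceTexture) {
            // Called on an arbitrary thread; just schedule a redraw on the GL thread.
            requestRender();
        }

        // ... fields and remaining methods as in the question ...
    }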

Source: https://habr.com/ru/post/957648/

