Display gstreamer video stream in Google Cardboard SurfaceTexture

I use gstreamer to get a video stream (sent from RaspPi) and I need to display it in Google Cardboard.

I based my work on tutorial 3 of the gstreamer examples. I managed to display my video in a SurfaceView by providing my Surface (obtained from SurfaceView.SurfaceHolder.getSurface()), but now I need to connect it with Google Cardboard.

If I'm not mistaken, Google Cardboard relies on some SurfaceTexture. So I thought it would be easy to just get a Surface from a SurfaceTexture using the Surface(SurfaceTexture) constructor.

The problem is that it just doesn't work. My application is based on the Google Cardboard sample application, and I did not touch the OpenGL code, since I do not know anything about it.

During debugging, I found that there is (at least) one problem with the code that I use. It seems that the line

 GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES); GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture); 

causes me some problem, because GL_TEXTURE_EXTERNAL_OES is not in the range required by glActiveTexture (which requires a value from GL_TEXTURE0 up to GL_TEXTURE0 + GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS - 1). Here are my logs:

 GLConsumer W [unnamed-12520-0] bindTextureImage: clearing GL error: 0x500 Adreno-ES20 W <core_glActiveTexture:348>: GL_INVALID_ENUM 

So what is working now?

It seems that the video stream is received by gstreamer, which is trying to update the Surface (I get onFrameAvailable notifications telling me the SurfaceTexture has been updated, and the error logs above are emitted only then). However, the screen remains black, as if nothing is being updated.

Here are the most interesting parts of my code:

 @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); CardboardView cardboardView = (CardboardView) findViewById(R.id.cardboard_view); cardboardView.setRenderer(this); setCardboardView(cardboardView); // Initialize GStreamer and warn if it fails try { GStreamer.init(this); } catch (Exception e) { //Catch e... } mCamera = new float[16]; mView = new float[16]; mHeadView = new float[16]; //gstreamer stuff nativeInit(); } @Override public void onSurfaceCreated(EGLConfig eglConfig) { Log.d(TAG, "onSurfaceCreated start"); GLES20.glClearColor(0.5f, 0.1f, 0.1f, 0.5f); ByteBuffer bb = ByteBuffer.allocateDirect(squareVertices.length * 4); bb.order(ByteOrder.nativeOrder()); vertexBuffer = bb.asFloatBuffer(); vertexBuffer.put(squareVertices); vertexBuffer.position(0); ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2); dlb.order(ByteOrder.nativeOrder()); drawListBuffer = dlb.asShortBuffer(); drawListBuffer.put(drawOrder); drawListBuffer.position(0); ByteBuffer bb2 = ByteBuffer.allocateDirect(textureVertices.length * 4); bb2.order(ByteOrder.nativeOrder()); textureVerticesBuffer = bb2.asFloatBuffer(); textureVerticesBuffer.put(textureVertices); textureVerticesBuffer.position(0); int vertexShader = loadGLShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode); int fragmentShader = loadGLShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode); mProgram = GLES20.glCreateProgram(); // create empty OpenGL ES Program GLES20.glAttachShader(mProgram, vertexShader); // add the vertex shader to program GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program GLES20.glLinkProgram(mProgram); checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); Log.d(TAG, "Surface created"); texture = createTexture(); initSurface(texture); } static private int createTexture() { Log.d(TAG + "_cardboard", "createTexture"); int[] texture = new int[1]; 
GLES20.glGenTextures(1,texture, 0); checkGLError("GenTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture[0]); checkGLError("BindTextures Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR); GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR); GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE); GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE); checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); return texture[0]; } //Give the surface to gstreamer. private void initSurface(int texture) { mSurface = new SurfaceTexture(texture); mSurface.setOnFrameAvailableListener(this); Log.d(TAG, "OnFrameAvailableListener set"); Surface toto = new Surface(mSurface); nativeSurfaceInit(toto); toto.release(); } //When we need to render @Override public void onFrameAvailable(SurfaceTexture surfaceTexture) { Log.d(TAG, "onFrameAvailable"); this.getCardboardView().requestRender(); } //Display to cardboard @Override public void onNewFrame(HeadTransform headTransform) { headTransform.getHeadView(mHeadView, 0); // Build the camera matrix and apply it to the ModelView. 
Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, 0.01f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); float[] mtx = new float[16]; GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); mSurface.updateTexImage(); mSurface.getTransformMatrix(mtx); float[] test = new float[3]; headTransform.getEulerAngles(test, 0); //if(networkThread != null){ // networkThread.setRegValue(test); //} } @Override public void onDrawEye(Eye eye) { // Log.d(TAG, "onDrawEye"); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); GLES20.glUseProgram(mProgram); Log.d(TAG, "trying to access " + GL_TEXTURE_EXTERNAL_OES +" out of " + GLES20.GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS); GLES20.glActiveTexture(GL_TEXTURE_EXTERNAL_OES); // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture); // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); mPositionHandle = GLES20.glGetAttribLocation(mProgram, "position"); GLES20.glEnableVertexAttribArray(mPositionHandle); // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer); // checkGLError("Problem on line "+new Throwable().getStackTrace()[0].getLineNumber()); mTextureCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate"); GLES20.glEnableVertexAttribArray(mTextureCoordHandle); GLES20.glVertexAttribPointer(mTextureCoordHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, textureVerticesBuffer); mColorHandle = GLES20.glGetAttribLocation(mProgram, "s_texture"); GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer); // Disable vertex array GLES20.glDisableVertexAttribArray(mPositionHandle); GLES20.glDisableVertexAttribArray(mTextureCoordHandle); Matrix.multiplyMM(mView, 0, eye.getEyeView(), 0, 
mCamera, 0); } 

For more code, here is the gist with two main files: https://gist.github.com/MagicMicky/4caa3ac669215652e40f

Edit: When running the gstreamer camera example application, the same errors appear in logcat as the ones I described earlier, so they might not be significant after all...

+6
source share

Source: https://habr.com/ru/post/988399/


All Articles