Image Processing Pipeline Using the Android Camera2 API and OpenCV

I am new to the Camera2 API. I want to build an image processing pipeline on my Android phone:

Step 1: Use the Camera2 API to open the camera preview stream.

Step 2: Transfer the preview frames to OpenCV for processing.

Step 3: Display the processed result live on the screen.

I have currently finished Steps 1 and 2 using an ImageReader together with the C++ side of OpenCV. However, I do not know how to do Step 3: how do I display the processed image on the screen? (I want to show the normal preview image and overlay an icon on it when a predefined object is detected.)
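One direction I am considering for Step 3 is to convert the processed OpenCV Mat back to a Bitmap and push it into a view. A rough sketch of that idea (Utils.matToBitmap comes from the OpenCV Android SDK; mProcessedView is a hypothetical ImageView layered over the preview, and rgbaMat stands for whatever RGBA Mat the processing step produces):

// Sketch for Step 3 (needs org.opencv.android.Utils, org.opencv.core.Mat,
// android.graphics.Bitmap). rgbaMat must be CV_8UC4 to match ARGB_8888.
private void showProcessedFrame(final Mat rgbaMat) {
    final Bitmap bmp = Bitmap.createBitmap(rgbaMat.cols(), rgbaMat.rows(),
            Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgbaMat, bmp);         // copy the Mat pixels into the Bitmap
    runOnUiThread(new Runnable() {
        @Override
        public void run() {
            mProcessedView.setImageBitmap(bmp);  // views may only be touched on the UI thread
        }
    });
}

Drawing into a SurfaceView via SurfaceHolder.lockCanvas() would avoid re-invalidating an ImageView every frame, but the ImageView route looks like the simplest thing to get working first.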

Here is the key code:

protected void createCameraPreview() {
    try {
        SurfaceTexture texture = textureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
      //  Surface surface = new Surface(texture);
        Surface mImageSurface = mImageReader.getSurface();
        captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      //  captureRequestBuilder.addTarget(surface);

        captureRequestBuilder.addTarget(mImageSurface);

        cameraDevice.createCaptureSession(Arrays.asList(mImageSurface), new CameraCaptureSession.StateCallback(){
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        //The camera is already closed
                        if (null == cameraDevice) {
                            return;
                        }
                        cameraCaptureSessions = cameraCaptureSession;
                        updatePreview();
                    }
                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        Toast.makeText(MainActivity.this, "Configuration failed", Toast.LENGTH_SHORT).show();
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
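
For context, mImageReader is created before createCameraPreview() is called; a sketch of that setup (the YUV_420_888 format and the maxImages value are my assumptions about a typical configuration):

// Sketch: ImageReader setup, done before createCameraPreview() runs.
// maxImages = 2 lets one frame be processed while the camera fills the next.
mImageReader = ImageReader.newInstance(
        imageDimension.getWidth(), imageDimension.getHeight(),
        ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);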

protected void updatePreview() {
    if (null == cameraDevice) {
        Log.e(TAG, "updatePreview error, return");
        return;  // camera already closed; nothing to update
    }
    try {
        cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {

    @Override
    public void onImageAvailable(ImageReader reader) {
        Log.d(TAG, "onImageAvailable: " + count++);
        Image img = reader.acquireNextImage();
        if (img == null) {
            return;  // no frame available yet
        }
        try {
            // For YUV_420_888 images, planes[0] holds the Y (luminance) plane.
            ByteBuffer buffer = img.getPlanes()[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            int width = img.getWidth();
            int height = img.getHeight();

            // ****get the captured frame for display here (synchronous)

            // ****process the frame to detect the object here (asynchronous)

        } finally {
            img.close();  // always close the Image so the reader can reuse its buffer
        }
    }
};
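
To make the two **** markers above concrete: for a YUV_420_888 image, the byte[] copied from planes[0] is already an 8-bit grayscale frame, so OpenCV can work on it directly. Here is a sketch of how the detection and the icon overlay could slot in (detectObject() is a hypothetical stand-in for my native detection code, and the snippet assumes rowStride == width, which not every device guarantees):

// Sketch for the two marked steps (needs org.opencv.core.* and org.opencv.imgproc.Imgproc).
// Wrap the Y plane in a grayscale Mat (assumes rowStride == width).
Mat gray = new Mat(height, width, CvType.CV_8UC1);
gray.put(0, 0, data);

// Convert to RGBA so the overlay can be drawn in color.
Mat rgba = new Mat();
Imgproc.cvtColor(gray, rgba, Imgproc.COLOR_GRAY2RGBA);

if (detectObject(gray)) {                 // hypothetical native detection call
    // Placeholder for the icon overlay: mark where the object was found.
    Imgproc.circle(rgba, new Point(width / 2.0, height / 2.0), 40,
            new Scalar(255, 0, 0, 255), 4);
}
showProcessedFrame(rgba);                 // hand off to the display sketch above

Since the listener runs on mBackgroundHandler, a slow detectObject() would stall the camera; copying data out and handing it to a separate worker thread would keep the preview smooth.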