Play video using the OpenCV library with Kivy and python-for-android

I am trying to play a video using the OpenCV library with Kivy and python-for-android.

Here is my attempt:

    import os

    import cv2
    from kivy.app import App
    from kivy.clock import Clock
    from kivy.graphics.texture import Texture
    from kivy.uix.boxlayout import BoxLayout
    from kivy.uix.button import Button
    from kivy.uix.image import Image


    class KivyCamera(Image):
        def __init__(self, capture=None, fps=30, **kwargs):
            # fps defaults to 30 so the interval below cannot divide by zero
            super(KivyCamera, self).__init__(**kwargs)
            # self.capture = cv2.VideoCapture("/sdcard2/python-apk/2.mp4")
            print("file path exists: " + str(os.path.exists("/sdcard2/python-apk/1.mkv")))
            self.capture = cv2.VideoCapture("/sdcard2/python-apk/1.mkv")
            Clock.schedule_interval(self.update, 1.0 / fps)

        def update(self, dt):
            ret, frame = self.capture.read()
            print(str(os.listdir('/sdcard2/')))
            if ret:
                # flip the frame vertically and convert it to a texture
                buf1 = cv2.flip(frame, 0)
                buf = buf1.tostring()
                image_texture = Texture.create(
                    size=(frame.shape[1], frame.shape[0]), colorfmt='bgr')
                image_texture.blit_buffer(buf, colorfmt='bgr', bufferfmt='ubyte')
                # display the image from the texture
                self.texture = image_texture


    class CamApp(App):
        def build(self):
            self.my_camera = KivyCamera(fps=30)
            self.box = BoxLayout(orientation='vertical')
            btn1 = Button(text="Hello")
            self.box.add_widget(btn1)
            # l = Label(text=cv2.__version__, font_size=150)
            # self.box.add_widget(l)
            self.box.add_widget(self.my_camera)
            return self.box

        def on_stop(self):
            # without this, the app will not exit even if the window is closed
            # self.capture.release()
            pass

        def on_pause(self):
            return True


    if __name__ == '__main__':
        CamApp().run()

and my buildozer.spec file:

    # (list) Source files to include (leave empty to include all the files)
    source.include_exts = py,png,jpg,kv,atlas,zip,mp4

    # (list) Application requirements
    requirements = plyer,kivy,opencv,numpy,pyjnius,ffmpeg,sqlite3,openssl

Playing the video with the OpenCV method cv2.VideoCapture() works when the program runs on the desktop, but when I build an APK with buildozer and run the application on an Android phone, I just get a white screen. I have tried both the .mp4 and .mkv formats, and in both cases I get a white screen. What am I doing wrong, or where is my mistake?


Here is my log file:

Log file


1 answer

You cannot use Python OpenCV on Android, as it is built for Windows/Linux machines. If you want to manipulate video with OpenCV on Android, you can use the official Android OpenCV library, compile OpenCV for handheld devices yourself, or use JavaCV.

Here is an example that uses the official Android OpenCV library; it detects faces in the camera feed:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;
    import org.opencv.core.Core;
    import org.opencv.core.Mat;
    import org.opencv.core.MatOfRect;
    import org.opencv.core.Rect;
    import org.opencv.core.Scalar;
    import org.opencv.core.Size;
    import org.opencv.android.CameraBridgeViewBase;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
    import org.opencv.objdetect.CascadeClassifier;
    import org.opencv.imgproc.Imgproc;

    import android.app.Activity;
    import android.content.Context;
    import android.os.Bundle;
    import android.util.Log;
    import android.view.Menu;
    import android.view.MenuItem;
    import android.view.WindowManager;

    public class FdActivity extends Activity implements CvCameraViewListener2 {

        private static final String TAG = "OCVSample::Activity";
        private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
        public static final int JAVA_DETECTOR = 0;
        public static final int NATIVE_DETECTOR = 1;

        private MenuItem mItemFace50;
        private MenuItem mItemFace40;
        private MenuItem mItemFace30;
        private MenuItem mItemFace20;
        private MenuItem mItemType;

        private Mat mRgba;
        private Mat mGray;
        private File mCascadeFile;
        private CascadeClassifier mJavaDetector;
        private DetectionBasedTracker mNativeDetector;

        private int mDetectorType = JAVA_DETECTOR;
        private String[] mDetectorName;

        private float mRelativeFaceSize = 0.2f;
        private int mAbsoluteFaceSize = 0;

        private CameraBridgeViewBase mOpenCvCameraView;

        private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                switch (status) {
                    case LoaderCallbackInterface.SUCCESS: {
                        Log.i(TAG, "OpenCV loaded successfully");

                        // Load native library after(!) OpenCV initialization
                        System.loadLibrary("detection_based_tracker");

                        try {
                            // load cascade file from application resources
                            InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
                            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                            mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
                            FileOutputStream os = new FileOutputStream(mCascadeFile);

                            byte[] buffer = new byte[4096];
                            int bytesRead;
                            while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                            }
                            is.close();
                            os.close();

                            mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                            if (mJavaDetector.empty()) {
                                Log.e(TAG, "Failed to load cascade classifier");
                                mJavaDetector = null;
                            } else
                                Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

                            mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

                            cascadeDir.delete();
                        } catch (IOException e) {
                            e.printStackTrace();
                            Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
                        }

                        mOpenCvCameraView.enableView();
                    } break;
                    default: {
                        super.onManagerConnected(status);
                    } break;
                }
            }
        };

        public FdActivity() {
            mDetectorName = new String[2];
            mDetectorName[JAVA_DETECTOR] = "Java";
            mDetectorName[NATIVE_DETECTOR] = "Native (tracking)";

            Log.i(TAG, "Instantiated new " + this.getClass());
        }

        /** Called when the activity is first created. */
        @Override
        public void onCreate(Bundle savedInstanceState) {
            Log.i(TAG, "called onCreate");
            super.onCreate(savedInstanceState);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

            setContentView(R.layout.face_detect_surface_view);

            mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
            mOpenCvCameraView.setCvCameraViewListener(this);
        }

        @Override
        public void onPause() {
            super.onPause();
            if (mOpenCvCameraView != null)
                mOpenCvCameraView.disableView();
        }

        @Override
        public void onResume() {
            super.onResume();
            if (!OpenCVLoader.initDebug()) {
                Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
                OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
            } else {
                Log.d(TAG, "OpenCV library found inside package. Using it!");
                mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
            }
        }

        public void onDestroy() {
            super.onDestroy();
            mOpenCvCameraView.disableView();
        }

        public void onCameraViewStarted(int width, int height) {
            mGray = new Mat();
            mRgba = new Mat();
        }

        public void onCameraViewStopped() {
            mGray.release();
            mRgba.release();
        }

        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
            mRgba = inputFrame.rgba();
            mGray = inputFrame.gray();

            if (mAbsoluteFaceSize == 0) {
                int height = mGray.rows();
                if (Math.round(height * mRelativeFaceSize) > 0) {
                    mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
                }
                mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
            }

            MatOfRect faces = new MatOfRect();

            if (mDetectorType == JAVA_DETECTOR) {
                if (mJavaDetector != null)
                    mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            } else if (mDetectorType == NATIVE_DETECTOR) {
                if (mNativeDetector != null)
                    mNativeDetector.detect(mGray, faces);
            } else {
                Log.e(TAG, "Detection method is not selected!");
            }

            Rect[] facesArray = faces.toArray();
            for (int i = 0; i < facesArray.length; i++)
                Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

            return mRgba;
        }

        @Override
        public boolean onCreateOptionsMenu(Menu menu) {
            Log.i(TAG, "called onCreateOptionsMenu");
            mItemFace50 = menu.add("Face size 50%");
            mItemFace40 = menu.add("Face size 40%");
            mItemFace30 = menu.add("Face size 30%");
            mItemFace20 = menu.add("Face size 20%");
            mItemType = menu.add(mDetectorName[mDetectorType]);
            return true;
        }

        @Override
        public boolean onOptionsItemSelected(MenuItem item) {
            Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
            if (item == mItemFace50)
                setMinFaceSize(0.5f);
            else if (item == mItemFace40)
                setMinFaceSize(0.4f);
            else if (item == mItemFace30)
                setMinFaceSize(0.3f);
            else if (item == mItemFace20)
                setMinFaceSize(0.2f);
            else if (item == mItemType) {
                int tmpDetectorType = (mDetectorType + 1) % mDetectorName.length;
                item.setTitle(mDetectorName[tmpDetectorType]);
                setDetectorType(tmpDetectorType);
            }
            return true;
        }

        private void setMinFaceSize(float faceSize) {
            mRelativeFaceSize = faceSize;
            mAbsoluteFaceSize = 0;
        }

        private void setDetectorType(int type) {
            if (mDetectorType != type) {
                mDetectorType = type;

                if (type == NATIVE_DETECTOR) {
                    Log.i(TAG, "Detection Based Tracker enabled");
                    mNativeDetector.start();
                } else {
                    Log.i(TAG, "Cascade detector enabled");
                    mNativeDetector.stop();
                }
            }
        }

        // In the original OpenCV sample this is a separate top-level class;
        // it is nested statically here so the static native declarations compile.
        public static class DetectionBasedTracker {
            public DetectionBasedTracker(String cascadeName, int minFaceSize) {
                mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
            }

            public void start() {
                nativeStart(mNativeObj);
            }

            public void stop() {
                nativeStop(mNativeObj);
            }

            public void setMinFaceSize(int size) {
                nativeSetFaceSize(mNativeObj, size);
            }

            public void detect(Mat imageGray, MatOfRect faces) {
                nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
            }

            public void release() {
                nativeDestroyObject(mNativeObj);
                mNativeObj = 0;
            }

            private long mNativeObj = 0;

            private static native long nativeCreateObject(String cascadeName, int minFaceSize);
            private static native void nativeDestroyObject(long thiz);
            private static native void nativeStart(long thiz);
            private static native void nativeStop(long thiz);
            private static native void nativeSetFaceSize(long thiz, int size);
            private static native void nativeDetect(long thiz, long inputImage, long faces);
        }
    }

If you want to decode video from a file, you must use FFmpeg, since OpenCV for Android does not come with a video decoder; alternatively, you can use JavaCV, which ships with wrappers for both FFmpeg and OpenCV out of the box.
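If all you need is to decode and display a video file, a JavaCV-based approach might look like the sketch below. This is a minimal illustration under stated assumptions, not tested code: it assumes the JavaCV dependency is already added to the project, and the file path and target ImageView are hypothetical placeholders. FFmpegFrameGrabber decodes the file on a background thread and AndroidFrameConverter turns each decoded Frame into a Bitmap:

    import android.graphics.Bitmap;
    import android.widget.ImageView;

    import org.bytedeco.javacv.AndroidFrameConverter;
    import org.bytedeco.javacv.FFmpegFrameGrabber;
    import org.bytedeco.javacv.Frame;

    public class VideoFilePlayer {

        // Decodes the given file with FFmpeg and posts each frame to an ImageView.
        // The path and the ImageView are illustrative assumptions, not part of
        // the original answer.
        public void play(final String path, final ImageView view) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    AndroidFrameConverter converter = new AndroidFrameConverter();
                    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(path);
                    try {
                        grabber.start();
                        // crude pacing based on the container's reported frame rate
                        double fps = grabber.getFrameRate();
                        final long frameDelayMs = fps > 0 ? (long) (1000 / fps) : 33;
                        Frame frame;
                        // grabImage() returns null once the stream is exhausted
                        while ((frame = grabber.grabImage()) != null) {
                            final Bitmap bmp = converter.convert(frame);
                            // UI updates must happen on the main thread
                            view.post(new Runnable() {
                                @Override
                                public void run() {
                                    view.setImageBitmap(bmp);
                                }
                            });
                            Thread.sleep(frameDelayMs);
                        }
                        grabber.stop();
                        grabber.release();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }).start();
        }
    }

Note that decoding to Bitmaps in Java is much slower than a real video pipeline; for smooth playback alone, Android's own MediaPlayer is usually the better tool, but the sketch above keeps each frame available for further OpenCV-style processing.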


Source: https://habr.com/ru/post/1272520/

