I have a problem that I hope you can help me solve.
I want to create an Android app in Unity. The application consists of activating the device's camera and viewing it on the screen. For this, I want to base it on my own C++ code using OpenCV.
I have the generated code, but when I launch the application, I see the scene but not the image, and I have the feeling that this is because I am not using the OpenCV VideoCapture function correctly for Android. Could you help me? I am attaching the code:
C++:
// Opens the default camera and reports its capture resolution to the caller.
//
// Params:
//   widt / heigh - out-parameters; on success they receive the frame width
//                  and height. On failure (camera could not be opened) they
//                  are left untouched and the function returns early, so the
//                  caller can detect failure by initializing them to 0.
//
// extern "C" prevents C++ name mangling so the C# [DllImport] lookup of the
// plain symbol "iniciar" succeeds; without it the exported name is mangled
// and P/Invoke fails to find the entry point.
//
// NOTE(review): opening camera index 0 through cv::VideoCapture has no
// backend on stock Android builds of OpenCV -- frames normally have to come
// from the Java/Camera2 side. Verify this actually opens on the target device.
extern "C" __declspec(dllexport) void iniciar(int& widt, int& heigh) {
    camera.open(0);
    if (!camera.isOpened())
    {
        return; // leave widt/heigh unchanged so the caller sees the failure
    }
    // cv::CAP_PROP_* replaces the legacy CV_CAP_PROP_* constants, which were
    // removed from modern OpenCV (4.x) headers.
    widt = (int)camera.get(cv::CAP_PROP_FRAME_WIDTH);
    heigh = (int)camera.get(cv::CAP_PROP_FRAME_HEIGHT);
    // Initial tracking rectangle and helper state (globals defined elsewhere
    // in this file; presumably used by other native functions -- verify).
    trueRect.x = 5;
    trueRect.y = 5;
    trueRect.width = 100;
    trueRect.height = 100;
    midX = 1;
    midY = 1;
    wi = 0;
    he = 0;
}
// Grabs one frame from the global camera, mirrors it horizontally, converts
// it from OpenCV's BGR order to RGB, and copies the raw pixel bytes into the
// caller-supplied buffer.
//
// Params:
//   arr - destination buffer; must hold at least width*height*3 bytes
//         (RGB, 3 channels). NOTE(review): the C# side allocates
//         width*height*4 -- oversized but harmless; confirm intent.
//
// If no frame is available the function returns without touching arr, so the
// caller keeps the previous image.
extern "C" __declspec(dllexport)
void video(unsigned char* arr) {
    Mat frame;
    camera >> frame; // yields an empty Mat when capture fails
    if (frame.empty()) {
        return;
    }
    Mat dst;
    flip(frame, dst, 1); // flip code 1 = around the vertical axis (mirror)
    cv::cvtColor(dst, dst, COLOR_BGR2RGB); // Unity expects RGB byte order
    // datastart..dataend spans the whole pixel buffer; this assumes dst is
    // continuous, which holds for a freshly allocated flip/cvtColor output.
    std::copy(dst.datastart, dst.dataend, arr);
}
C#:
// Bridges the native OpenCV plugin to a Unity material: asks the plugin for
// the camera resolution once, then uploads one RGB frame per Update() into
// a Texture2D shown on this object's Renderer.
public class camara : MonoBehaviour {
    // Native plugin entry points (libNativoPrincipio.so / NativoPrincipio.dll).
    [DllImport("NativoPrincipio")]
    public static extern void video(byte[] img);
    [DllImport("NativoPrincipio")]
    public static extern void iniciar(ref int widt, ref int heigh);

    // NOTE(review): never played or read -- the native side does the capture.
    // Kept so the field layout matches the original; consider removing.
    WebCamTexture back;
    Texture2D textura;
    byte[] imgData;
    int width = 0;
    int height = 0;

    void Start () {
        back = new WebCamTexture();
        // On native-side failure width/height stay 0 and we skip allocation.
        iniciar(ref width, ref height);
        if (width > 0 && height > 0) {
            // RGB24 is 3 bytes per pixel (the native side writes RGB, not
            // RGBA, so *4 over-allocated). Allocate the buffer and texture
            // ONCE here instead of every frame, avoiding per-frame GC churn
            // and leaked Texture2D instances.
            imgData = new byte[width * height * 3];
            textura = new Texture2D(width, height, TextureFormat.RGB24, false);
            GetComponent<Renderer>().material.mainTexture = textura;
        }
    }

    void Update ()
    {
        if (textura == null) {
            return; // camera failed to open in Start()
        }
        video(imgData);                     // native side fills imgData in place
        textura.LoadRawTextureData(imgData);
        textura.Apply();                    // push the new pixels to the GPU
    }
}
source
share