The following code runs in CameraSource.java, inside the frame-processing loop:
// Build a detection Frame from the pending NV21 preview buffer, run the face
// detector, crop the first detected face out of the grayscale image, and hand
// the resulting bitmap to the activity's ImageView.
Frame outputFrame = new Frame.Builder()
        .setImageData(mPendingFrameData, mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.NV21)
        .setId(mPendingFrameId)
        .setTimestampMillis(mPendingTimeMillis)
        .setRotation(mRotation)
        .build();

int w = outputFrame.getMetadata().getWidth();
int h = outputFrame.getMetadata().getHeight();
SparseArray<Face> detectedFaces = mDetector.detect(outputFrame);

// Blank fallback bitmap, shown when no face is found or the crop fails.
Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);

if (detectedFaces.size() > 0) {
    // NOTE(review): ByteBuffer.array() assumes an array-backed buffer; the
    // Vision API returns one here, but a hasArray() guard would be safer.
    ByteBuffer byteBufferRaw = outputFrame.getGrayscaleImageData();
    byte[] byteBuffer = byteBufferRaw.array();
    YuvImage yuvimage = new YuvImage(byteBuffer, ImageFormat.NV21, w, h, null);

    Face face = detectedFaces.valueAt(0);
    // BUG FIX: the detector can report a bounding box partially outside the
    // frame (getPosition() may return negative coordinates near the edges,
    // and position + size may exceed the frame dimensions). A Rect outside
    // [0,w]x[0,h] makes YuvImage.compressToJpeg throw
    // IllegalArgumentException, so clamp the crop region to the frame.
    int left = Math.max(0, (int) face.getPosition().x);
    int top = Math.max(0, (int) face.getPosition().y);
    int right = Math.min(w, left + (int) face.getWidth());
    int bottom = Math.min(h, top + (int) face.getHeight());

    if (right > left && bottom > top) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvimage.compressToJpeg(new Rect(left, top, right, bottom), 80, baos);
        byte[] jpegArray = baos.toByteArray();
        // BUG FIX: decodeByteArray returns null on a failed decode; keep the
        // blank fallback bitmap instead of passing null to the ImageView.
        Bitmap decoded = BitmapFactory.decodeByteArray(jpegArray, 0, jpegArray.length);
        if (decoded != null) {
            bitmap = decoded;
        }
    }
}

// NOTE(review): assumes mContext is always the FaceTrackerActivity; a safer
// pattern would be a listener interface instead of a hard cast.
((FaceTrackerActivity) mContext).setBitmapToImageView(bitmap);
Android source share