I did the same thing with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method in Objective-C. It looks like this:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
    CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)attachments];
    if (attachments) CFRelease(attachments);

    // EXIF orientation (1-8) attached to the sample buffer.
    // (kCGImagePropertyOrientation is declared in <ImageIO/ImageIO.h>.)
    NSNumber *orientation = (__bridge NSNumber *)(CMGetAttachment(sampleBuffer, kCGImagePropertyOrientation, NULL));

    // Note: creating a CIDetector and a CIContext for every frame is expensive;
    // in practice you would cache them in properties.
    NSArray *features = [[CIDetector detectorOfType:CIDetectorTypeFace
                                             context:nil
                                             options:@{ CIDetectorAccuracy: CIDetectorAccuracyHigh }]
                         featuresInImage:ciImage
                                 options:@{ CIDetectorImageOrientation: orientation }];

    if (features.count == 1) {
        CIFaceFeature *faceFeature = [features firstObject];
        CGRect faceRect = faceFeature.bounds;
        CGImageRef tempImage = [[CIContext contextWithOptions:nil] createCGImage:ciImage fromRect:ciImage.extent];
        // Caution: orientation holds an EXIF value, while imageWithCGImage:scale:orientation:
        // expects a UIImageOrientation; see the conversion sketch further below.
        UIImage *image = [UIImage imageWithCGImage:tempImage scale:1.0 orientation:orientation.intValue];
        CGImageRelease(tempImage);
        UIImage *face = [image extractFace:faceRect];
    }
}
where extractFace: is a category method on UIImage:

- (UIImage *)extractFace:(CGRect)rect {
    // Scale the rect from points to pixels before cropping the backing CGImage.
    rect = CGRectMake(rect.origin.x * self.scale,
                      rect.origin.y * self.scale,
                      rect.size.width * self.scale,
                      rect.size.height * self.scale);
    // Note: CIFaceFeature.bounds is in Core Image coordinates (origin at the bottom-left),
    // so the rect may need a vertical flip depending on the image orientation.
    CGImageRef imageRef = CGImageCreateWithImageInRect(self.CGImage, rect);
    UIImage *result = [UIImage imageWithCGImage:imageRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(imageRef);
    return result;
}
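One caveat: the orientation attachment read from the sample buffer is an EXIF/kCGImagePropertyOrientation value (1-8), while imageWithCGImage:scale:orientation: expects a UIImageOrientation, and the two numberings do not match. A minimal conversion sketch (the helper name exifOrientationToUIImageOrientation is just an illustration) could look like this:

// Maps an EXIF/kCGImagePropertyOrientation value (1-8) to the corresponding UIImageOrientation.
static UIImageOrientation exifOrientationToUIImageOrientation(int exifOrientation) {
    switch (exifOrientation) {
        case 1: return UIImageOrientationUp;
        case 2: return UIImageOrientationUpMirrored;
        case 3: return UIImageOrientationDown;
        case 4: return UIImageOrientationDownMirrored;
        case 5: return UIImageOrientationLeftMirrored;
        case 6: return UIImageOrientationRight;
        case 7: return UIImageOrientationRightMirrored;
        case 8: return UIImageOrientationLeft;
        default: return UIImageOrientationUp;
    }
}

With that in place, the UIImage creation in the delegate would become [UIImage imageWithCGImage:tempImage scale:1.0 orientation:exifOrientationToUIImageOrientation(orientation.intValue)].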
Creating a video output:
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
videoOutput.alwaysDiscardsLateVideoFrames = YES;
self.videoOutputQueue = dispatch_queue_create("OutputQueue", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:self.videoOutputQueue];
if ([self.session canAddOutput:videoOutput]) {
    [self.session addOutput:videoOutput];
}
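This assumes self.session is an AVCaptureSession that already has a camera input. A rough sketch of that setup (device selection and error handling kept minimal) could be:

self.session = [[AVCaptureSession alloc] init];
self.session.sessionPreset = AVCaptureSessionPresetHigh;

NSError *error = nil;
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input && [self.session canAddInput:input]) {
    [self.session addInput:input];
}

// ... add the video data output as above, then:
[self.session startRunning];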