The didOutputSampleBuffer function in my code was not called. I do not know why this happened. Here is the code:
import UIKit
import AVFoundation
import Accelerate
class ViewController: UIViewController {
var captureSession: AVCaptureSession?
var dataOutput: AVCaptureVideoDataOutput?
var customPreviewLayer: AVCaptureVideoPreviewLayer?
@IBOutlet weak var camView: UIView!
override func viewWillAppear(animated: Bool) {
super.viewDidAppear(animated)
captureSession?.startRunning()
}
override func viewDidLoad() {
super.viewDidLoad()
setupCameraSession()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
func setupCameraSession() {
self.captureSession = AVCaptureSession()
captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080
let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
var deviceInput = AVCaptureDeviceInput()
do {
deviceInput = try AVCaptureDeviceInput(device: inputDevice)
} catch let error as NSError {
print(error)
}
if captureSession!.canAddInput(deviceInput) {
captureSession!.addInput(deviceInput)
}
self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.customPreviewLayer!.frame = camView.bounds
self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
camView.layer.addSublayer(self.customPreviewLayer!)
print("Cam layer added")
self.dataOutput = AVCaptureVideoDataOutput()
self.dataOutput!.videoSettings = [
String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
]
dataOutput!.alwaysDiscardsLateVideoFrames = true
if captureSession!.canAddOutput(dataOutput) {
captureSession!.addOutput(dataOutput)
}
captureSession!.commitConfiguration()
let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
let delegate = VideoDelegate()
dataOutput!.setSampleBufferDelegate(delegate, queue: queue)
}
func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
CVPixelBufferLockBaseAddress(imageBuffer, 0)
let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.NoneSkipFirst.rawValue)!
let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
dispatch_sync(dispatch_get_main_queue(), {() -> Void in
self.customPreviewLayer!.contents = dstImageFilter as AnyObject
})
}
}
And here is my VideoDelegate code:
import Foundation
import AVFoundation
import UIKit
class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
{
func captureOutput(captureOutput: AVCaptureOutput!,
didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
fromConnection connection: AVCaptureConnection!){
print("hihi")
}
func captureOutput(captureOutput: AVCaptureOutput!,
didDropSampleBuffer sampleBuffer: CMSampleBuffer!,
fromConnection connection: AVCaptureConnection!){
print("LOL")
}
}
Why is my delegate called and how to fix it? I checked a similar stack overflow question, but I cannot find a way to solve it. Please, help.
source
share