I want to process a video with a chromakey filter, and the output should be played on the screen.
Below is the code I developed that does not show any video, and I cannot understand why:
override func viewDidLoad() { super.viewDidLoad() // Do any additional setup after loading the view, typically from a nib. let aSelector : Selector = "start:" let tapGesture = UITapGestureRecognizer(target: self, action: aSelector) tapGesture.numberOfTapsRequired = 1 view.addGestureRecognizer(tapGesture) } override func didReceiveMemoryWarning() { super.didReceiveMemoryWarning() // Dispose of any resources that can be recreated. } @IBAction func start(sender: AnyObject) { NSLog("tap pressed") play() } func play(){ let path = NSBundle.mainBundle().pathForResource("glass_buster", ofType: "mp4") let player = AVPlayer() let pathURL = NSURL.fileURLWithPath(path!) NSLog(pathURL.absoluteString) let playerItem = AVPlayerItem(URL: pathURL) player.replaceCurrentItemWithPlayerItem(playerItem) let gpuMovie = GPUImageMovie(playerItem: playerItem) gpuMovie.playAtActualSpeed = true let filter = GPUImageChromaKeyFilter() gpuMovie.addTarget(filter) gpuMovie.playAtActualSpeed = true gpuMovie.startProcessing() player.play() }
source share