Color video from GPUImage on SCNPlane in ARKit

I am trying to play a video with transparency in an ARSCNView. An SCNPlane is used as the projection surface for the video, and I'd like to key out a specific color of the video with GPUImage.

I followed this example. Unfortunately, I have not found a way to get the filtered video back onto my videoSpriteKitNode: the filter renders into a GPUImageView, while the SKVideoNode takes an AVPlayer.

I'm not sure whether what I'm trying to do is even possible, so if someone can share their insight, I would be very grateful!

import UIKit
import ARKit
import GPUImage

class ARTransVC: UIViewController{

@IBOutlet weak var sceneView: ARSCNView!
let configuration = ARWorldTrackingConfiguration()
var movie: GPUImageMovie!
var filter: GPUImageChromaKeyBlendFilter!
var sourcePicture: GPUImagePicture!
var player = AVPlayer()
var gpuImageView: GPUImageView!


override func viewDidLoad() {
    super.viewDidLoad()
    self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    self.sceneView.session.run(configuration)

    self.gpuImageView = GPUImageView()
    self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false

    //a delay for ARKit to capture the surroundings
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

        // A SpriteKit scene to contain the SpriteKit video node
        let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
        spriteKitScene.scaleMode = .aspectFit

        // Create a video player, which will be responsible for the playback of the video material
        guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return }
        let playerItem = AVPlayerItem(url: url)
        self.player.replaceCurrentItem(with: playerItem)

        //trans
        self.filter = GPUImageChromaKeyBlendFilter()
        self.filter.thresholdSensitivity = 0.15
        self.filter.smoothing = 0.3
        self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831)

        self.movie = GPUImageMovie(playerItem: playerItem)
        self.movie.playAtActualSpeed = true
        self.movie.addTarget(self.filter)
        self.movie.startProcessing()

        let backgroundImage = UIImage(named: "transparent.png")
        self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)!
        self.sourcePicture.addTarget(self.filter)
        self.sourcePicture.processImage()

        ///HERE DON'T KNOW HOW TO CONTINUE ?
        self.filter.addTarget(self.gpuImageView)


        // To make the video loop
        self.player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(ARTransVC.playerItemDidReachEnd),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object:  self.player.currentItem)

        // Create the SpriteKit video node, containing the video player
        let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
        videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
        videoSpriteKitNode.size = spriteKitScene.size
        videoSpriteKitNode.yScale = -1.0
        videoSpriteKitNode.play()
        spriteKitScene.addChild(videoSpriteKitNode)

        // Create the SceneKit scene
        let scene = SCNScene()
        self.sceneView.scene = scene
        self.sceneView.isPlaying = true

        // Create a SceneKit plane and add the SpriteKit scene as its material
        let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
        background.firstMaterial?.diffuse.contents = spriteKitScene
        let backgroundNode = SCNNode(geometry: background)
        backgroundNode.geometry?.firstMaterial?.isDoubleSided = true

        backgroundNode.position = SCNVector3(0,0,-2.0)
        scene.rootNode.addChildNode(backgroundNode)
    }
}

@objc func playerItemDidReachEnd(notification: NSNotification) {
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
        playerItem.seek(to: kCMTimeZero, completionHandler: nil)
    }
}
}
2 answers

Try clearing the background and setting the scale mode:

backgroundColor = .clear 
scaleMode = .aspectFit
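
In the question's code, that means configuring the SKScene right after it is created, e.g.:

let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
spriteKitScene.scaleMode = .aspectFit
spriteKitScene.backgroundColor = .clear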

Here is how I eventually got the chroma-keyed, transparent video playing in ARSCNView.

I used ChromaKeyMaterial by Lësha Turkowski!

Here it is, with the values adjusted to key out my color:

import SceneKit

public class ChromaKeyMaterial: SCNMaterial {

public var backgroundColor: UIColor {
    didSet { didSetBackgroundColor() }
}

public var thresholdSensitivity: Float {
    didSet { didSetThresholdSensitivity() }
}

public var smoothing: Float  {
    didSet { didSetSmoothing() }
}

public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.15, smoothing: Float = 0.0) {

    self.backgroundColor = backgroundColor
    self.thresholdSensitivity = thresholdSensitivity
    self.smoothing = smoothing

    super.init()

    didSetBackgroundColor()
    didSetThresholdSensitivity()
    didSetSmoothing()

    // chroma key shader is based on GPUImage
    // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m

    let surfaceShader =
    """
    uniform vec3 c_colorToReplace;
    uniform float c_thresholdSensitivity;
    uniform float c_smoothing;

    #pragma transparent
    #pragma body

    vec3 textureColor = _surface.diffuse.rgb;

    float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
    float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
    float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

    float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
    float Cr = 0.7132 * (textureColor.r - Y);
    float Cb = 0.5647 * (textureColor.b - Y);

    float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
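    // blendValue is ~0.0 when the pixel's chroma distance to the key color is below
    // c_thresholdSensitivity (pixel keyed out) and ramps up to 1.0 over c_smoothing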

    float a = blendValue;
    _surface.transparent.a = a;
    """

    shaderModifiers = [
        .surface: surfaceShader,
    ]
}

required public init?(coder aDecoder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
}

//setting background color to be keyed out
private func didSetBackgroundColor() {
//getting pixel from background color
//let rgb = backgroundColor.cgColor.components!.map{Float($0)}
//let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2])
    let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663)
    setValue(vector, forKey: "c_colorToReplace")
}

private func didSetSmoothing() {
    setValue(smoothing, forKey: "c_smoothing")
}

private func didSetThresholdSensitivity() {
    setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
}
}
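As written above, didSetBackgroundColor ignores the backgroundColor property and hardcodes the key color. A minimal sketch of deriving it from the property instead (assuming the color can be converted to RGB via UIColor's getRed(_:green:blue:alpha:)):

private func didSetBackgroundColor() {
    var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
    // getRed returns false if the color cannot be converted to RGB components
    guard backgroundColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha) else { return }
    setValue(SCNVector3(Float(red), Float(green), Float(blue)), forKey: "c_colorToReplace")
}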

And here is the code that plays the keyed video on an SCNPlane in ARKit:

import UIKit
import ARKit

class ARTransVC: UIViewController{

@IBOutlet weak var arSceneView: ARSCNView!
let configuration = ARWorldTrackingConfiguration()

private var player: AVPlayer = {
    guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
    return AVPlayer(url: url)
}()

override func viewDidLoad() {
    super.viewDidLoad()
    self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    self.arSceneView.session.run(configuration)

    //a delay for ARKit to capture the surroundings
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

        // A SpriteKit scene to contain the SpriteKit video node
        let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height))
        spriteKitScene.scaleMode = .aspectFit
        spriteKitScene.backgroundColor = .clear

        //Create the SpriteKit video node, containing the video player
        let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
        videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
        videoSpriteKitNode.size = spriteKitScene.size
        videoSpriteKitNode.yScale = -1.0
        videoSpriteKitNode.play()
        spriteKitScene.addChild(videoSpriteKitNode)

        // To make the video loop
        self.player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(ARTransVC.playerItemDidReachEnd),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object:  self.player.currentItem)

        // Create the SceneKit scene
        let scene = SCNScene()
        self.arSceneView.scene = scene

        //Create a SceneKit plane and use the chroma key material, with the video player as its diffuse contents
        let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
        let chromaKeyMaterial = ChromaKeyMaterial()
        chromaKeyMaterial.diffuse.contents = self.player
        chromaKeyMaterial.isDoubleSided = true

        let backgroundNode = SCNNode(geometry: background)
        backgroundNode.geometry!.materials = [chromaKeyMaterial]

        backgroundNode.position = SCNVector3(0,0,-2.0)
        scene.rootNode.addChildNode(backgroundNode)

        //video does not start without delaying the player
        //playing the video before just results in [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            self.player.seek(to:CMTimeMakeWithSeconds(1, 1000))
            self.player.play()
        }
    }
}

@objc func playerItemDidReachEnd(notification: NSNotification) {
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
        playerItem.seek(to: kCMTimeZero, completionHandler: nil)
    }
}

Playing the video right away produced [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which has apparently been fixed in iOS 11.2; for now, the delayed start shown above works around it.
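
A possible alternative to the fixed delay (a sketch, not from the original answer): start playback only once the AVPlayerItem reports it is ready, using block-based KVO on its status:

private var statusObservation: NSKeyValueObservation?

private func playWhenReady() {
    statusObservation = player.currentItem?.observe(\.status, options: [.initial, .new]) { [weak self] item, _ in
        guard item.status == .readyToPlay else { return }
        DispatchQueue.main.async { self?.player.play() }
    }
}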

