我正在尝试在 ARKit 中对视频进行色度键控,我所做的与 @Felix 在这里所做的非常相似:SKScene 中的 GPUImageView 作为 SKNode 材质 - 在 ARKit 上播放透明视频
但是,当视频应该显示时(在本例中,当检测到 AR 参考图像时)我收到
[SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
错误,并且视频不再播放。在我实施 chromaKeyMaterial
之前它确实播放了。这是我的代码,从检测到 AR 参考图像后开始:
// AR reference image detected: build a video plane and attach it to the anchor node.
// NOTE(review): this is the ORIGINAL (problematic) version — defects are flagged inline.
DispatchQueue.main.async {
// Load the bundled video and create a player for it.
let filePath = Bundle.main.path(forResource: "wigz", ofType: "mp4")
let videoURL = NSURL(fileURLWithPath: filePath!)
let player = AVPlayer(url: videoURL as URL)
// SpriteKit scene that will host the video node as a texture.
let spriteKitScene = SKScene(size: CGSize(width: 640, height: 480))
let videoSpriteKitNode = SKVideoNode(avPlayer: player)
let videoNode = SCNNode()
// Plane sized to the physical size of the detected reference image.
videoNode.geometry = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width,
height: imageAnchor.referenceImage.physicalSize.height)
// Rotate the plane to lie flat on the detected image.
videoNode.eulerAngles = SCNVector3(-Float.pi/2, 0, 0)
// Use spritekit with videonode inside
spriteKitScene.scaleMode = .aspectFit
videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2,
y: spriteKitScene.size.height / 2)
videoSpriteKitNode.size = spriteKitScene.size
// SpriteKit's vertical axis is flipped relative to the SceneKit texture, hence the -1 scale.
videoSpriteKitNode.yScale = -1.0
// NOTE(review): playing immediately is what triggers
// "[SceneKit] Error: Cannot get pixel buffer" — see the corrected version below,
// which defers playback slightly.
videoSpriteKitNode.play()
// Loop video: rewind and replay whenever the item reaches its end.
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
player.seek(to: kCMTimeZero)
player.play()
}
spriteKitScene.addChild(videoSpriteKitNode)
// NOTE(review): the two assignments below are discarded a few lines later when
// `materials` is replaced with [chromaKeyMaterial] — the SpriteKit scene is never
// used as the texture, and isDoubleSided is lost.
videoNode.geometry?.firstMaterial?.diffuse.contents = spriteKitScene
videoNode.geometry?.firstMaterial?.isDoubleSided = true
let chromaKeyMaterial = ChromaKeyMaterial()
chromaKeyMaterial.diffuse.contents = player
videoNode.geometry!.materials = [chromaKeyMaterial]
node.addChildNode(videoNode)
self.imageDetectView.scene.rootNode.addChildNode(node)
}
在 ChromaKeyMaterial.swift 文件中,我已将这些行更改为:
float maskY = 0.0 * c_colorToReplace.r + 1.0 * c_colorToReplace.g + 0.0 * c_colorToReplace.b;
float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
float maskCb = 0.5647 * (c_colorToReplace.b - maskY);
float Y = 0.0 * textureColor.r + 1.0 * textureColor.g + 0.0 * textureColor.b;
float Cr = 0.7132 * (textureColor.r - Y);
float Cb = 0.5647 * (textureColor.b - Y);
努力抠出纯绿色,但我不确定这是否是正确的方法。
任何帮助将不胜感激!
弄清楚了这一点。我将颜色设置为不正确的抠出(甚至在错误的位置facepalm),并且似乎存在一个错误,该错误会阻止视频播放,除非您稍微延迟一下。该错误据说已修复,但似乎并非如此。
如果有人感兴趣的话,这是我更正和清理的代码(编辑包括来自@mnuages的提示):
// Get the bundled video URL and create an AVPlayer for it.
// NOTE(review): force-unwrap is only acceptable for a resource known to ship
// in the app bundle — a missing file there is a packaging bug.
let filePath = Bundle.main.path(forResource: "VIDEO_FILE_NAME", ofType: "VIDEO_FILE_EXTENSION")
let videoURL = URL(fileURLWithPath: filePath!)
let player = AVPlayer(url: videoURL)

// SceneKit node that will display the video.
let videoNode = SCNNode()

// Plane sized to the physical dimensions of the detected reference image,
// rotated -90° about X so it lies flat on top of the image.
videoNode.geometry = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width,
height: imageAnchor.referenceImage.physicalSize.height)
videoNode.eulerAngles = SCNVector3(-Float.pi/2, 0, 0)

// Chroma-key material: renders the video with the keyed color transparent.
// Fix: configure isDoubleSided on the material that is actually installed.
// The original set diffuse/isDoubleSided on geometry.firstMaterial and then
// replaced the whole `materials` array, discarding both settings.
let chromaKeyMaterial = ChromaKeyMaterial()
chromaKeyMaterial.diffuse.contents = player
chromaKeyMaterial.isDoubleSided = true
videoNode.geometry!.materials = [chromaKeyMaterial]

// The video does not start unless the player is delayed slightly; playing it
// immediately results in "[SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)".
DispatchQueue.main.asyncAfter(deadline: .now() + 0.001) {
player.seek(to: CMTimeMakeWithSeconds(1, 1000))
player.play()
}

// Loop the video: rewind and replay whenever the item reaches its end.
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
player.seek(to: kCMTimeZero)
player.play()
}

// Attach the video plane to the ARAnchor's node, then add that node to the scene.
node.addChildNode(videoNode)
self.imageDetectView.scene.rootNode.addChildNode(node)
这是色度键(chroma key)材质:
import SceneKit
/// An `SCNMaterial` that keys out `backgroundColor` from its diffuse contents,
/// making matching pixels transparent via a SceneKit surface shader modifier.
/// The shader is ported from GPUImage's chroma-key filter.
public class ChromaKeyMaterial: SCNMaterial {

    /// Color to key out of the diffuse contents. Defaults to pure green.
    public var backgroundColor: UIColor {
        didSet { didSetBackgroundColor() }
    }

    /// How closely a pixel's chroma must match `backgroundColor` to be keyed out.
    public var thresholdSensitivity: Float {
        didSet { didSetThresholdSensitivity() }
    }

    /// Width of the soft transition band between keyed and kept pixels.
    public var smoothing: Float {
        didSet { didSetSmoothing() }
    }

    public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.50, smoothing: Float = 0.001) {
        self.backgroundColor = backgroundColor
        self.thresholdSensitivity = thresholdSensitivity
        self.smoothing = smoothing
        super.init()
        // Push the initial values into the shader uniforms.
        didSetBackgroundColor()
        didSetThresholdSensitivity()
        didSetSmoothing()

        // chroma key shader is based on GPUImage
        // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m
        let surfaceShader =
        """
        uniform vec3 c_colorToReplace;
        uniform float c_thresholdSensitivity;
        uniform float c_smoothing;
        #pragma transparent
        #pragma body
        vec3 textureColor = _surface.diffuse.rgb;
        float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
        float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
        float maskCb = 0.5647 * (c_colorToReplace.b - maskY);
        float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
        float Cr = 0.7132 * (textureColor.r - Y);
        float Cb = 0.5647 * (textureColor.b - Y);
        float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
        float a = blendValue;
        _surface.transparent.a = a;
        """

        shaderModifiers = [
            .surface: surfaceShader,
        ]
    }

    required public init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Pushes the keyed-out color to the shader uniform.
    /// Fix: the original hard-coded pure green (0, 1, 0) here, so setting
    /// `backgroundColor` (or the init parameter) had no effect on the key color.
    /// Read the actual color's RGB components instead.
    private func didSetBackgroundColor() {
        var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
        // getRed(_:green:blue:alpha:) converts grayscale and extended-range colors
        // too, unlike cgColor.components which may hold fewer than 3 components.
        backgroundColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
        setValue(SCNVector3(x: Float(red), y: Float(green), z: Float(blue)), forKey: "c_colorToReplace")
    }

    /// Pushes the smoothing value to the shader uniform.
    private func didSetSmoothing() {
        setValue(smoothing, forKey: "c_smoothing")
    }

    /// Pushes the threshold sensitivity to the shader uniform.
    private func didSetThresholdSensitivity() {
        setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
    }
}
我相信使用 RealityKit,您需要使用 Metal 来创建色度键着色器。
我对 Metal 还不太了解,无法说明如何创建它,但我找到了另一种使用 RealityKit 在 AR 中播放色度键视频的方法。
从 iOS14 开始,可以使用视频素材作为
ModelEntity
的纹理。
对于色度,需要一些额外的步骤:
首先,导入 Yu Ao 开发的这个非常出色的软件包:
https://github.com/MetalPetal/MetalPetal/issues/289
不要忘记导入包:
import MetalPetal
这是代码:
// In the view model: build an MTIVideoComposition that chroma-keys each frame,
// then wrap it in an AVPlayerItem so the player can be used as a video texture.
let context = try! MTIContext(device: MTLCreateSystemDefaultDevice()!)
let chromaKeyBlendFilter = MTIChromaKeyBlendFilter()
// Color to key out (here: near-magenta, from the original answer's footage).
let color = MTIColor(red: 0.998, green: 0.0, blue: 0.996, alpha: 1)
//let backgroundColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)
// Fully transparent background composited behind the keyed-out pixels.
let backgroundColor = MTIColor(red: 0.0, green: 0.0, blue: 0, alpha: 0)
chromaKeyBlendFilter.color = color
chromaKeyBlendFilter.smoothing = 0.001
chromaKeyBlendFilter.thresholdSensitivity = 0.4//0.475
chromaKeyBlendFilter.inputBackgroundImage = MTIImage(color: backgroundColor, sRGB: false, size: videoSize)
// Per-frame callback: route each source frame through the chroma-key filter.
// NOTE(review): `videoSize` and `asset` are defined elsewhere in the view model.
let composition = MTIVideoComposition(asset: asset, context: context, queue: DispatchQueue.main, filter: { request in
guard let sourceImage = request.anySourceImage else {
// No source frame available for this request: emit a transparent frame.
return MTIImage(color: backgroundColor, sRGB: false, size: videoSize)
}
// MetalPetal FilterGraph DSL: `=>` wires images into filter ports.
return FilterGraph.makeImage(builder: { output in
sourceImage => chromaKeyBlendFilter.inputPorts.inputImage
chromaKeyBlendFilter => output
})!
})
videoPlayerItem = AVPlayerItem(asset: asset)
videoPlayerItem.videoComposition = composition.makeAVVideoComposition()
let player = AVPlayer(playerItem: videoPlayerItem)
player.volume = 0.5
// player.play()
我们可以使用 RealityKit 2.0(Xcode 12 和 iOS 14)中的视频纹理。 请参阅 Andy Jazz 的回答,了解如何设置
我正在遵循您提供的相同代码,但它只允许删除绿色背景。这里有设置自定义颜色并删除背景的选项吗?颜色和视频 URL 来自 API。请让我知道您的反馈。