我是ARKit的新手。我正在使用图像跟踪来检测图像,并在该图像周围显示内容:在图像的左侧显示图像的相关信息,在右侧显示Web View。我还想在图像正上方播放一段视频。你们能帮我在图像上方显示视频吗?下面附上了我用来显示Info和WebView的代码,希望能以类似方式显示视频。
/// Shows the detail (info) panel and animates it out to the left of the tracked image.
/// - Parameters:
///   - rootNode: The node anchored to the detected image; the panel is added as its child.
///   - xOffset: The physical width of the tracked image, used to size and position the panel.
func displayDetailView(on rootNode: SCNNode, xOffset: CGFloat) {
    let panelGeometry = SCNPlane(width: xOffset, height: xOffset * 1.4)
    panelGeometry.cornerRadius = 0.25

    let panelNode = SCNNode(geometry: panelGeometry)
    panelNode.geometry?.firstMaterial?.diffuse.contents = SKScene(fileNamed: "DetailScene")
    // SpriteKit's y-origin differs from SceneKit texture space, so the SKScene
    // would render upside down; flip it vertically via the contents transform.
    panelNode.geometry?.firstMaterial?.diffuse.contentsTransform =
        SCNMatrix4Translate(SCNMatrix4MakeScale(1, -1, 1), 0, 1, 0)
    panelNode.position.z -= 0.5
    panelNode.opacity = 0

    rootNode.addChildNode(panelNode)

    // After a short delay: fade in, then slide the panel out to the left.
    let reveal = SCNAction.sequence([
        .wait(duration: 1.0),
        .fadeOpacity(to: 1.0, duration: 1.5),
        .moveBy(x: xOffset * -1.1, y: 0, z: -0.05, duration: 1.5),
        .moveBy(x: 0, y: 0, z: -0.05, duration: 0.2),
    ])
    panelNode.runAction(reveal)
}
/// Shows a web page on a plane and animates it out to the right of the tracked image.
/// - Parameters:
///   - rootNode: The node anchored to the detected image; the web panel is added as its child.
///   - xOffset: The physical width of the tracked image, used to size and position the panel.
func displayWebView(on rootNode: SCNNode, xOffset: CGFloat) {
    // Xcode warns about UIWebView being deprecated in iOS 12.0, but a current bug
    // prevents using a WKWebView as a texture for the web view node.
    // Note that UIWebViews must only be instantiated on the main thread!
    DispatchQueue.main.async {
        let browser = UIWebView(frame: CGRect(x: 0, y: 0, width: 400, height: 672))
        browser.loadRequest(URLRequest(url: URL(string: "https://www.youtube.com/watch?v=QvzVCOiC-qs")!))

        let browserGeometry = SCNPlane(width: xOffset, height: xOffset * 1.4)
        browserGeometry.cornerRadius = 0.25

        let browserNode = SCNNode(geometry: browserGeometry)
        browserNode.geometry?.firstMaterial?.diffuse.contents = browser
        browserNode.position.z -= 0.5
        browserNode.opacity = 0

        rootNode.addChildNode(browserNode)

        // After a delay: fade in, then slide the panel out to the right.
        browserNode.runAction(.sequence([
            .wait(duration: 3.0),
            .fadeOpacity(to: 1.0, duration: 1.5),
            .moveBy(x: xOffset * 1.1, y: 0, z: -0.05, duration: 1.5),
            .moveBy(x: 0, y: 0, z: -0.05, duration: 0.2),
        ]))
    }
}
我在下面的函数中调用了上述方法:
/// ARSCNView delegate callback: fired when a node is added for a newly detected anchor.
/// Builds an invisible plane tracking the reference image and, once the detection
/// highlight animation finishes, attaches the detail panel and the web view.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    // Only image anchors are of interest here.
    guard let imageAnchor = anchor as? ARImageAnchor else { return }

    updateQueue.async {
        let imageWidth = imageAnchor.referenceImage.physicalSize.width
        let imageHeight = imageAnchor.referenceImage.physicalSize.height

        // Plane geometry matching the physical footprint of the detected image.
        let trackedPlane = SCNPlane(width: imageWidth, height: imageHeight)
        trackedPlane.firstMaterial?.colorBufferWriteMask = .alpha

        // Root node that keeps the virtual UI glued to the tracked image.
        let trackedNode = SCNNode(geometry: trackedPlane)
        trackedNode.eulerAngles.x = -.pi / 2
        trackedNode.renderingOrder = -1
        trackedNode.opacity = 1
        node.addChildNode(trackedNode)

        // Briefly highlight the detected image so users see the app responded,
        // then introduce the virtual content.
        self.highlightDetection(on: trackedNode, width: imageWidth, height: imageHeight, completionHandler: {
            self.displayDetailView(on: trackedNode, xOffset: imageWidth)
            // Animate the WebView to the right.
            self.displayWebView(on: trackedNode, xOffset: imageWidth)
        })
    }
}
感谢任何帮助..
您想要实现的效果是在SCNNode上播放视频。为此,您需要在渲染器(renderer)函数中创建一个AVPlayer,并用它来创建SKVideoNode。然后创建一个SKScene并将SKVideoNode添加进去。接下来,把平面(plane)的纹理设置为该SKScene。
因此,在您上面的代码中大致是这样的:
/// ARSCNView delegate callback for a newly detected image anchor. In addition to
/// the detail panel and web view, plays a bundled video on top of the tracked
/// image by using an SKVideoNode-backed SKScene as the plane's texture.
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    guard let imageAnchor = anchor as? ARImageAnchor else { return }
    updateQueue.async {
        let physicalWidth = imageAnchor.referenceImage.physicalSize.width
        let physicalHeight = imageAnchor.referenceImage.physicalSize.height
        // Create a plane geometry to visualize the initial position of the detected image
        let mainPlane = SCNPlane(width: physicalWidth, height: physicalHeight)
        mainPlane.firstMaterial?.colorBufferWriteMask = .alpha
        // Create a SceneKit root node with the plane geometry to attach to the scene graph
        // This node will hold the virtual UI in place
        let mainNode = SCNNode(geometry: mainPlane)
        mainNode.eulerAngles.x = -.pi / 2
        mainNode.renderingOrder = -1
        mainNode.opacity = 1
        // Add the plane visualization to the scene
        node.addChildNode(mainNode)
        // Perform a quick animation to visualize the plane on which the image was detected.
        // We want to let our users know that the app is responding to the tracked image.
        self.highlightDetection(on: mainNode, width: physicalWidth, height: physicalHeight, completionHandler: {
            // Introduce virtual content
            self.displayDetailView(on: mainNode, xOffset: physicalWidth)
            // Animate the WebView to the right
            self.displayWebView(on: mainNode, xOffset: physicalWidth)

            // Set up the AVPlayer. Avoid force-unwrapping the bundle lookup:
            // if the video asset is missing we skip the video instead of crashing.
            guard let videoPath = Bundle.main.path(forResource: videoAssetName, ofType: videoAssetExtension) else {
                if self.debug { NSLog("video asset not found in bundle") }
                return
            }
            let player = AVPlayer(url: URL(fileURLWithPath: videoPath))
            player.actionAtItemEnd = .none
            videoPlayerNode = SKVideoNode(avPlayer: player)
            // SKScene that hosts the SKVideoNode, sized to the physical image,
            // with the video node centered and filling the scene.
            let skSceneSize = CGSize(width: physicalWidth, height: physicalHeight)
            let skScene = SKScene(size: skSceneSize)
            skScene.addChild(videoPlayerNode)
            videoPlayerNode.position = CGPoint(x: skScene.size.width / 2, y: skScene.size.height / 2)
            videoPlayerNode.size = skScene.size
            // Set the SKScene as the texture for the main plane.
            mainPlane.firstMaterial?.diffuse.contents = skScene
            // SpriteKit's y-axis is inverted relative to SceneKit texture space,
            // so flip the texture vertically — the same fix displayDetailView
            // applies to its SKScene material.
            mainPlane.firstMaterial?.diffuse.contentsTransform =
                SCNMatrix4Translate(SCNMatrix4MakeScale(1, -1, 1), 0, 1, 0)
            mainPlane.firstMaterial?.isDoubleSided = true
            // React when playback reaches the end (observer is scoped to this player item).
            NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { (_) in
                DispatchQueue.main.async {
                    if self.debug { NSLog("video completed") }
                    // do something when the video ends
                }
            })
            // Start playback of the video node.
            videoPlayerNode.play()
        })
    }
}