我正在尝试 AR 应用程序。我正在尝试执行以下操作:
iOS 设备通过
ARView
显示真实场景,ARView
创建网格。我不确定我当前的代码是否正确,但它似乎执行了我上面描述的操作。如果我移动设备,显示的网格就会更新。当找到新的最近顶点时,它会切换 MainView 中的状态变量 refreshToggle,我预计更新后的网格会与虚拟对象一起显示。但是虚拟物体没有显示,我不明白为什么。
这是我的代码。很抱歉这么长,但我不知道该省略什么。 欢迎任何帮助!
struct MainView : View {
    /// Flipped by the session delegate whenever a new closest anchor is found,
    /// intended to make SwiftUI re-evaluate the body.
    @State private var refreshToggle = false

    var body: some View {
        ARViewContainer(refreshToggle: $refreshToggle)
            .edgesIgnoringSafeArea(.all)
    }
}
struct ARViewContainer: UIViewRepresentable {
    @Binding var refreshToggle: Bool

    func makeUIView(context: Context) -> ARView {
        let arView = ARView(frame: .zero)
        arView.environment.sceneUnderstanding.options = []
        arView.environment.sceneUnderstanding.options.insert(.occlusion) // Turn on occlusion from the scene reconstruction's mesh.
        arView.environment.sceneUnderstanding.options.insert(.physics) // Turn on physics for the scene reconstruction's mesh.
        arView.debugOptions.insert(.showSceneUnderstanding) // Display a debug visualization of the mesh.
        arView.renderOptions = [.disablePersonOcclusion, .disableDepthOfField, .disableMotionBlur] // Disable not required render options
        // BUG FIX: the coordinator must be able to reach the view, because
        // updateUIView(_:context:) is empty — toggling SwiftUI state alone
        // never touches the scene graph, so the marker was never rendered.
        context.coordinator.arView = arView
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {}

    func makeCoordinator() -> Coordinator {
        Coordinator($refreshToggle)
    }

    class Coordinator: NSObject, ARSessionDelegate {
        @Binding var refreshToggle: Bool
        var model: ModelEntity
        /// Set by makeUIView; weak to avoid a retain cycle with the view.
        weak var arView: ARView?
        /// The marker anchor currently installed in the scene. Kept across
        /// delegate callbacks so the previous marker can actually be removed
        /// (the original built a fresh local AnchorEntity each time and never
        /// added it to — or removed it from — the scene).
        private var markerAnchor: AnchorEntity?

        init(_ refreshToggle: Binding<Bool>) {
            self._refreshToggle = refreshToggle
            // Create a cube model used as the visual marker.
            let mesh = MeshResource.generateBox(size: 0.1, cornerRadius: 0.005)
            let material = SimpleMaterial(color: .gray, roughness: 0.15, isMetallic: true)
            model = ModelEntity(mesh: mesh, materials: [material])
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            guard let arView = arView else { return }
            var closestAnchor: ARAnchor? = nil
            // BUG FIX: the candidate vertex must survive the whole anchor loop;
            // the original reset it per anchor, breaking the cross-anchor search.
            var closestVertex = SIMD3<Float>(x: 0, y: .infinity, z: 0)
            for anchor in anchors {
                guard let meshAnchor = anchor as? ARMeshAnchor else { continue }
                let meshGeometry = meshAnchor.geometry
                // Search for the vertex closest to the camera (smallest y,
                // per the original's criterion) and remember its anchor.
                for i in 0 ..< meshGeometry.vertices.count {
                    let nextVertex = meshGeometry.vertex(at: UInt32(i))
                    if nextVertex.y < closestVertex.y {
                        closestVertex = nextVertex
                        closestAnchor = meshAnchor
                    }
                }
            }
            // If a closest anchor was found, move the virtual marker onto it.
            if let closestAnchor = closestAnchor {
                // Remove the previously installed marker, then install the new one
                // directly into the view's scene — this is what actually renders it.
                markerAnchor?.removeFromParent()
                let anchorEntity = AnchorEntity(anchor: closestAnchor)
                anchorEntity.addChild(model)
                arView.scene.anchors.append(anchorEntity)
                markerAnchor = anchorEntity
                // Still flip the binding so observers of MainView's state update.
                refreshToggle = !refreshToggle
            }
        }
    }
}
extension ARMeshGeometry {
    /// Reads the vertex at `index` straight out of the underlying Metal buffer.
    /// See https://developer.apple.com/documentation/arkit/armeshgeometry/3516924-vertices
    func vertex(at index: UInt32) -> SIMD3<Float> {
        assert(vertices.format == MTLVertexFormat.float3, "Expected three floats (twelve bytes) per vertex.")
        // Byte offset of this vertex inside the buffer: base offset + index * stride.
        let byteOffset = vertices.offset + (vertices.stride * Int(index))
        let pointer = vertices.buffer.contents().advanced(by: byteOffset)
        return pointer.assumingMemoryBound(to: SIMD3<Float>.self).pointee
    }
}
编辑:下面代码的解释
我很高兴该解决方案对您有用!
只是为了向可能不熟悉这段代码的人澄清一下,它正在做的是使用 Apple 的 ARKit、RealityKit 和 SwiftUI 生成增强现实视图。ARKit 是 Apple 用于创建增强现实体验的框架,RealityKit 是一个基于 Swift 的、用于创建增强现实 3D 内容的框架,而 SwiftUI 是 Apple 的声明式用户界面构建框架。
这里的主要任务是在所有检测到的 ARAnchor 中找到"最近的" ARAnchor(增强现实空间中的 3D 点)。最近的 ARAnchor 是拥有 z 坐标最大的顶点(3D 空间中的点)的那个锚点(因为在作为 ARKit 基础的 SceneKit 坐标系中,相机沿 −z 方向观察场景,所以离相机越近的点 z 值越大)。
一旦找到最近的 ARAnchor,它就会被用来创建一个 AnchorEntity,这是一个连接虚拟的RealityKit实体 对现实世界的物体或位置感到满意。 ARAnchor 用于 将虚拟模型实体(在本例中为立方体)放置在增强模型中 现实空间。
变换矩阵(simd_float4x4) AnchoringComponent(.world(transform:transform)) 用于定位 并将 ModelEntity 相对于 AnchorEntity 定向。具体的 该变换矩阵中的值将影响位置和 AR 场景中模型的方向。
因此,当您在具有 AR 功能的设备上运行此代码时 并四处走动,您应该会看到一个小立方体出现在您的 环境,位于“最近”(最前面)的表面上 ARKit 能够检测到。
第 1 部分
SwiftUI 不会像常规 SwiftUI 视图那样刷新 UIViewRepresentable 类型。换句话说,当状态切换时,SwiftUI 会重新渲染其主体,但它不会对 UIViewRepresentable 类型执行相同的操作。这就是你的 ARView 不刷新的原因。
您需要直接在 ARView 实例中处理更改,并在必要时手动触发更新。
...
// NOTE: this is an illustrative sketch, not a complete, compilable method —
// the `// existing code...` placeholders stand for the question's original
// logic, and `closestAnchor` / `arView` come from that surrounding code.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
// existing code...
// Create a single AnchorEntity instance
var anchorEntity: AnchorEntity?
for anchor in anchors {
// existing code...
if let closestAnchor = closestAnchor {
// If an anchorEntity already exists, remove it from the ARView's scene
if let existingAnchor = anchorEntity {
existingAnchor.removeFromParent()
}
// Create a new AnchorEntity and attach the model to it
anchorEntity = AnchorEntity(anchor: closestAnchor)
anchorEntity?.children.append(model)
// Add the anchorEntity to the ARView's scene
// (scene mutation is dispatched to the main queue)
DispatchQueue.main.async {
arView.scene.anchors.append(anchorEntity!)
}
}
}
}
现在,只要找到新的最接近的锚点,我们就会直接更新 ARView。它不会尝试刷新整个
ARViewContainer
,而是只更新 AR 场景的相关部分,这样效率更高,也避免了 SwiftUI 不刷新 UIViewRepresentable
类型的潜在问题。
问题解决了,虽然我不明白(没有计算机图形学经验)。
找到最近的网格锚点后,必须创建一个 AnchorEntity,并通过用 .world(transform:) 初始化的 AnchoringComponent 来设置其 anchoring(即 AnchoringComponent.Target)属性。只有这样,虚拟对象才会在场景中渲染。
以下代码对我有用,并且基于一些有价值的信息,KFDoom(+1)的答案,Ethan Saadia的博客,以及Ralf Ebert的教程。
这是更新的代码,以防有人想玩它。
.world(transform: transform)
中的变换取自不同版本,结果证明非常有用。
import ARKit
import RealityKit
import SwiftUI
struct MainView: View {
    /// Root view: a full-screen AR container that manages its own scene updates.
    var body: some View {
        ARViewContainer().edgesIgnoringSafeArea(.all)
    }
}
struct ARViewContainer: UIViewRepresentable {
    func makeUIView(context: Context) -> ARView {
        let arView = ARView()
        // Configure the ARView to generate a mesh
        arView.environment.sceneUnderstanding.options = []
        // Turn on occlusion from the scene reconstruction's mesh.
        arView.environment.sceneUnderstanding.options.insert(.occlusion)
        // Turn on physics for the scene reconstruction's mesh.
        arView.environment.sceneUnderstanding.options.insert(.physics)
        // Display a debug visualization of the mesh.
        arView.debugOptions.insert(.showSceneUnderstanding)
        // For performance, disable render options that are not required for this app.
        arView.renderOptions = [.disablePersonOcclusion, .disableDepthOfField, .disableMotionBlur]
        // Give the coordinator the view so the session delegate can mutate the
        // scene directly, and register it as delegate exactly once (the original
        // assigned arView.session.delegate twice).
        context.coordinator.arView = arView
        arView.session.delegate = context.coordinator
        return arView
    }

    func updateUIView(_ uiView: ARView, context: Context) {}

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    class Coordinator: NSObject, ARSessionDelegate {
        var model: ModelEntity
        weak var arView: ARView?
        /// The marker anchor currently installed in the scene, kept across
        /// delegate callbacks. BUG FIX: the original used a local variable,
        /// so its removeFromParent() branch could never fire (the local was
        /// still nil inside the vertex loop) and a new anchor entity was
        /// appended to the scene on every update, accumulating cubes.
        private var markerAnchor: AnchorEntity?

        override init() {
            // Create a cube model
            let boxMesh = MeshResource.generateBox(size: 0.1, cornerRadius: 0.005)
            let material = SimpleMaterial(color: .gray, roughness: 0.15, isMetallic: true)
            model = ModelEntity(mesh: boxMesh, materials: [material])
            super.init()
        }

        func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
            guard let arView = arView else { return }
            var closestAnchor: ARAnchor? = nil
            // The frontmost vertex has the largest z value, see
            // https://developer.apple.com/documentation/scenekit/organizing_a_scene_with_nodes
            // BUG FIX: start at -infinity so vertices with z <= 0 can also win;
            // the original started at z = 0 and silently excluded them. The
            // candidate also survives the whole anchor loop instead of being
            // reset per anchor.
            var closestVertex = SIMD3<Float>(x: 0, y: 0, z: -.infinity)
            for anchor in anchors {
                guard let meshAnchor = anchor as? ARMeshAnchor else { continue }
                let meshGeometry = meshAnchor.geometry
                // For debugging, we search for the vertex closest to the camera
                // and place a virtual marker object there.
                for i in 0 ..< meshGeometry.vertices.count {
                    let nextVertex = meshGeometry.vertex(at: UInt32(i))
                    if nextVertex.z > closestVertex.z {
                        closestVertex = nextVertex
                        closestAnchor = meshAnchor
                    }
                }
            }
            // If a closest anchor was found, (re)attach the virtual object to it.
            if let closestAnchor = closestAnchor {
                // Remove the previously installed marker before adding a new one.
                markerAnchor?.removeFromParent()
                let anchorEntity = AnchorEntity(anchor: closestAnchor)
                // Explicit world-transform anchoring; without setting an
                // AnchoringComponent.Target the entity is not rendered.
                let transform = simd_float4x4([[0.96475136, 0.0, 0.26316252, 0.0], [0.0, 1.0, 0.0, 0.0], [-0.26316252, 0.0, 0.9647514, 0.0], [0.16189954, -0.25364277, -0.22894737, 1.0]])
                anchorEntity.anchoring = AnchoringComponent(.world(transform: transform))
                anchorEntity.addChild(model)
                arView.scene.anchors.append(anchorEntity)
                markerAnchor = anchorEntity
            }
        }
    }
}
extension ARMeshGeometry { // See https://developer.apple.com/documentation/arkit/armeshgeometry/3516924-vertices
// Reads one vertex directly out of the mesh's Metal vertex buffer.
func vertex(at index: UInt32) -> SIMD3<Float> {
assert(vertices.format == MTLVertexFormat.float3, "Expected three floats (twelve bytes) per vertex.")
// Byte offset of vertex `index`: the buffer's base offset plus index * stride.
let vertexPointer = vertices.buffer.contents().advanced(by: vertices.offset + (vertices.stride * Int(index)))
let vertex = vertexPointer.assumingMemoryBound(to: SIMD3<Float>.self).pointee
return vertex
}
}
// Xcode canvas preview for the root view.
#Preview {
MainView()
}