How to know the user's position in the surrounding space in visionOS

Question · 0 votes · 1 answer

I'm trying to track the user's position in visionOS inside an immersive space. Any insights or tips on how to approach this? So far, the documentation seems elusive. I searched and found queryPose, but Xcode throws an error.

import SwiftUI
import RealityKit
import RealityKitContent
import ARKit

struct ImmersiveView : View {
    private let attachmentID = "viewID"

    var body: some View {
        RealityView { content, attachments in
            if let fixedScene = try? await Entity(named: "ImmersiveScene",
                                                  in: realityKitContentBundle) {
                // Created but never run, so they produce no tracking data
                let wtp = WorldTrackingProvider()
                let session = ARKitSession()

                // Pin the scene to the user's head with continuous tracking
                let anchor = AnchorEntity(.head)
                anchor.anchoring.trackingMode = .continuous
                fixedScene.setParent(anchor)
                content.add(anchor)

                if let sceneAttachment = attachments.entity(for: attachmentID) {
                    fixedScene.addChild(sceneAttachment)
                }

                // Image-based lighting from the "Directional" environment resource
                guard let env = try? await EnvironmentResource(named: "Directional")
                else { return }
                let iblComponent = ImageBasedLightComponent(source: .single(env),
                                                            intensityExponent: 10)
                fixedScene.components[ImageBasedLightComponent.self] = iblComponent
                fixedScene.components.set(ImageBasedLightReceiverComponent(imageBasedLight: fixedScene))

                // Offset the scene relative to the head anchor
                fixedScene.transform.translation.z = -1.0
                fixedScene.transform.translation.y = 0.35
                fixedScene.transform.translation.x = 0.25
                anchor.name = "Attachments"
            }
        } attachments: {
            Attachment(id: attachmentID) {
            }
        }
    }
}
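
Note that the WorldTrackingProvider and ARKitSession created in the make closure above are never started, so they produce no data. A minimal sketch of running them (world tracking only delivers data while an immersive space is open):

// Start the session with the provider created above
Task {
    do {
        try await session.run([wtp])
    } catch {
        print("Failed to start ARKitSession: \(error)")
    }
}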
swiftui arkit realitykit visionos
1 Answer

0 votes

Run an ARKitSession object, create a DeviceAnchor, and then use its originFromAnchorTransform instance property to get the transform from the device pose to the origin coordinate system.

import SwiftUI
import RealityKit
import ARKit
import QuartzCore   // for CACurrentMediaTime()

@main struct PoseXApp : App {
    var body: some Scene {
        ImmersiveSpace(id: "ImmersiveSpace") {
            ContentView()
        }
        .immersionStyle(selection: .constant(.mixed), in: .mixed)
    }
}

@Observable class VisionProPose {
    let session = ARKitSession()
    let worldTracking = WorldTrackingProvider()

    func runArSession() async {
        do {
            try await session.run([worldTracking])
        } catch {
            print("Failed to run ARKitSession: \(error)")
        }
    }

    func getTransform() async -> simd_float4x4? {
        // Query the device pose at the current time; a fixed timestamp
        // such as `1` does not return useful data
        guard worldTracking.state == .running,
              let deviceAnchor = worldTracking.queryDeviceAnchor(atTimestamp: CACurrentMediaTime())
        else { return nil }
        return deviceAnchor.originFromAnchorTransform
    }
}

struct ContentView : View {
    @State var visionProPose = VisionProPose()
    let box = ModelEntity(mesh: .generateBox(size: 0.2))

    var body: some View {
        RealityView { content in
            Task {
                await visionProPose.runArSession()
            }
            // A row of spheres receding into the scene as depth references
            for i in 1...5 {
                let model = ModelEntity(mesh: .generateSphere(radius: 0.15))
                model.position.z -= Float(i)
                content.add(model)
            }
            content.add(box)
        }
        .onAppear {
            // Poll the device pose ten times a second and keep the box
            // one meter in front of the user's head
            Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { _ in
                Task {
                    guard let transform = await visionProPose.getTransform()
                    else { return }
                    let translation = transform.columns.3
                    print(translation.z)
                    box.position = [translation.x, translation.y, translation.z - 1.0]
                }
            }
        }
    }
}
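
The device's world-space position lives in the fourth column of originFromAnchorTransform. If you read it in several places, a small convenience extension keeps call sites tidy (a sketch; the translation property name is my own, not a RealityKit API):

import simd

extension simd_float4x4 {
    // World-space position stored in the last column of a rigid transform
    var translation: SIMD3<Float> {
        SIMD3<Float>(columns.3.x, columns.3.y, columns.3.z)
    }
}

With that, the timer body reduces to box.position = transform.translation - [0, 0, 1].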