项目正在构建,没有任何错误或警告,当我运行应用程序时,我能够看到相机视图。但是当我点击开始扫描按钮时,相机视图冻结并且没有任何反应。当我点击开始扫描按钮时,它应该开始在环境中绘制红点,当我点击停止扫描按钮时,它应该会引导我到另一个屏幕来显示点云本身。以下是我项目中的不同文件。请让我知道我做错了什么。
ContentView.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Foundation
struct ContentView: View {
@StateObject var scanner = ScannerViewController()
var body: some View {
NavigationView {
VStack {
ARViewContainer().edgesIgnoringSafeArea(.all)
HStack {
Spacer()
Button(action: {
scanner.startScanning()
}, label: {
Text("Start Scanning")
})
.padding()
Spacer()
NavigationLink(destination: PointCloudView(pointCloud: scanner.pointCloud, pointSize: 5.0)) {
Text("Stop Scanning")
}
.padding()
Spacer()
}
}
.navigationBarTitle(Text("Scan an Object"))
}
}
struct ARViewContainer: UIViewRepresentable {
func makeUIView(context: Context) -> ARView {
let arView = ARView(frame: .zero)
return arView
}
func updateUIView(_ uiView: ARView, context: Context) {
}
}
}
PointCloudView.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Foundation
struct PointCloudView: View {
var pointCloud: [SIMD3<Float>]
var pointSize: CGFloat
var body: some View {
ZStack {
ForEach(pointCloud, id: \.self) { point in
Sphere()
.frame(width: pointSize, height: pointSize, alignment: .center)
.position(CGPoint(x: CGFloat(point.x), y: CGFloat(point.y)))
.foregroundColor(.red)
}
}
}
}
struct Sphere: Shape {
func path(in rect: CGRect) -> Path {
let center = CGPoint(x: rect.width / 2, y: rect.height / 2)
let radius = min(rect.width, rect.height) / 2
let path = Path { p in
p.addArc(center: center, radius: radius, startAngle: .zero, endAngle: .degrees(360), clockwise: true)
}
return path
}
}
ScannerViewController.swift
import AVFoundation
import ARKit
import RealityKit
import SceneKit
import SwiftUI
import Combine
import Foundation
class ScannerViewController: NSObject, ObservableObject {
private var cancellables = Set<AnyCancellable>()
@Published var isScanning = false
@Published var pointCloud: [SIMD3<Float>] = []
private let arSession = ARSession()
private var arConfiguration: ARConfiguration {
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
configuration.environmentTexturing = .automatic
return configuration
}
func startScanning() {
print("started scanning")
isScanning = true
arSession.run(arConfiguration)
}
func stopScanning() {
print("stopped scanning")
isScanning = false
arSession.pause()
}
func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
print("continusoyl called")
guard isScanning else { return }
// Get the current ARFrame
guard let currentFrame = self.arSession.currentFrame else { return }
// Generate the point cloud data from the ARFrame
guard let pointCloud = currentFrame.rawFeaturePoints?.points else { return }
// Convert the point cloud data to a format that can be displayed in the PointCloudView
let convertedPointCloud = pointCloud.map { point in
SIMD3<Float>(point.x, point.y, point.z)
}
// Pass the converted point cloud data to the PointCloudView
DispatchQueue.main.async {
self.pointCloud = convertedPointCloud
}
}
}
我试过同时使用 SceneKit 和 Metal 来渲染点云并参考了苹果的文档:https://developer.apple.com/documentation/arkit/environmental_analysis/displaying_a_point_cloud_using_scene_depth
我也试过构建故事板项目,但故事板和 SceneKit 一直在制造很多错误。使用 metal 和 swiftui 构建至少没有错误,但仍然无法运行。