我希望在工作表视图中显示相机视图,以便能够扫描条形码。 由于某种原因,创建的相机图层不想出现在工作表视图中,即使绿点出现在 iPhone 上,或者日志显示一切正常。
// MainView.swift
// Controls presentation of the scanner sheet.
@State private var showScanSheet = false
var body: some View {
NavigationStack {
VStack {
...
// NOTE(review): the camera preview fails to appear inside this sheet until
// CameraView.updateUIView propagates the late-arriving sheet size — the
// sheet is first laid out at zero size, then resized (see CameraView.swift).
}.sheet(isPresented: $showScanSheet) {
ScannerView()
}
}
}
// ScannerView.swift
import SwiftUI
import AVKit
/// A sheet-presentable barcode scanner: shows a live camera preview framed by
/// four red corner markers and recognizes UPC-E / EAN-8 / EAN-13 codes.
struct ScannerView: View {
    /// True while a scan is in progress (currently unused by `body`).
    @State private var isScanning: Bool = false
    /// Capture session shared with the `CameraView` preview layer.
    @State private var session: AVCaptureSession = .init()
    /// Camera authorization state (`Permission` is declared elsewhere in the project).
    @State private var cameraPermission: Permission = .idle
    /// Metadata output that delivers recognized barcode objects.
    @State private var barcodeOutput: AVCaptureMetadataOutput = .init()
    /// Message displayed by the error alert.
    @State private var errorMessage: String = ""
    /// Drives presentation of the error alert.
    @State private var showError: Bool = false
    @Environment(\.openURL) private var openURL
    /// Receives `AVCaptureMetadataOutput` delegate callbacks.
    @StateObject private var barcodeDelegate = BarcodeScannerDelegate()

    var body: some View {
        GeometryReader {
            let size = $0.size
            ZStack {
                // Live preview; height is fixed at 200 to match the marker frame below.
                CameraView(frameSize: CGSize(width: size.width, height: 200), session: $session)
                    .scaleEffect(0.97)
                // Four trimmed rounded-rectangle strokes form the red corner markers.
                RoundedRectangle(cornerRadius: 10, style: .circular)
                    .trim(from: 0.55, to: 0.60)
                    .stroke(Color.red, style: StrokeStyle(lineWidth: 5, lineCap: .round, lineJoin: .round))
                    .padding()
                RoundedRectangle(cornerRadius: 10, style: .circular)
                    .trim(from: 0.55, to: 0.60)
                    .stroke(Color.red, style: StrokeStyle(lineWidth: 5, lineCap: .round, lineJoin: .round))
                    .rotationEffect(.init(degrees: 180))
                    .padding()
                RoundedRectangle(cornerRadius: 10, style: .circular)
                    .trim(from: 0.40, to: 0.45)
                    .stroke(Color.red, style: StrokeStyle(lineWidth: 5, lineCap: .round, lineJoin: .round))
                    .padding()
                RoundedRectangle(cornerRadius: 10, style: .circular)
                    .trim(from: 0.40, to: 0.45)
                    .stroke(Color.red, style: StrokeStyle(lineWidth: 5, lineCap: .round, lineJoin: .round))
                    .rotationEffect(.init(degrees: 180))
                    .padding()
            }
            .frame(width: size.width, height: 200)
            .frame(maxWidth: .infinity, maxHeight: .infinity)
        }
        .onAppear(perform: checkCameraPermission)
        .alert(errorMessage, isPresented: $showError) {
            if cameraPermission == .denied {
                Button("Settings") {
                    let settingsString = UIApplication.openSettingsURLString
                    if let settingsURL = URL(string: settingsString) {
                        openURL(settingsURL)
                    }
                }
                Button("Cancel", role: .cancel) {}
            }
        }
    }

    /// Checks (and, when undetermined, requests) camera authorization, then
    /// configures the capture session on success.
    func checkCameraPermission() {
        print("Checking camera permission")
        // Fix: hop to the main actor — @State must only be mutated on the main
        // thread, and a plain Task from a non-isolated method does not guarantee that.
        Task { @MainActor in
            switch AVCaptureDevice.authorizationStatus(for: .video) {
            case .authorized:
                cameraPermission = .approved
                setupCamera()
            case .notDetermined, .denied, .restricted:
                // requestAccess only prompts the user for .notDetermined; for
                // .denied / .restricted it returns false immediately, which
                // routes the user to the Settings button in the alert above.
                if await AVCaptureDevice.requestAccess(for: .video) {
                    cameraPermission = .approved
                    setupCamera()
                } else {
                    cameraPermission = .denied
                    presentError("Please provide access to the camera for scanning barcodes.")
                }
            // Fix: @unknown default keeps exhaustiveness checking if
            // AVAuthorizationStatus gains new cases in a future SDK.
            @unknown default: break
            }
            print(cameraPermission)
        }
    }

    /// Attaches the back wide-angle (or ultra-wide) camera and the barcode
    /// metadata output to `session`, then starts the session off the main thread.
    func setupCamera() {
        do {
            // Prefer the wide-angle back camera; fall back to the ultra-wide one.
            guard let device = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInUltraWideCamera], mediaType: .video, position: .back).devices.first else {
                presentError("Unknown error.")
                return
            }
            let input = try AVCaptureDeviceInput(device: device)
            guard session.canAddInput(input), session.canAddOutput(barcodeOutput) else {
                presentError("Unknown error.")
                return
            }
            // Batch all topology changes into a single configuration transaction.
            session.beginConfiguration()
            session.addInput(input)
            session.addOutput(barcodeOutput)
            // metadataObjectTypes must be set after the output joins the session.
            barcodeOutput.metadataObjectTypes = [.upce, .ean8, .ean13]
            barcodeOutput.setMetadataObjectsDelegate(barcodeDelegate, queue: .main)
            session.commitConfiguration()
            // startRunning() blocks until capture starts, so keep it off the main thread.
            DispatchQueue.global(qos: .background).async {
                session.startRunning()
            }
        } catch {
            presentError(error.localizedDescription)
        }
    }

    /// Records `message` and presents the error alert.
    func presentError(_ message: String) {
        errorMessage = message
        // Fix: assign true instead of toggle() — toggling would *dismiss* the
        // alert if an error were already on screen when a second one arrived.
        showError = true
    }
}
// BarcodeScannerDelegate.swift
import Foundation
import AVKit
/// Observable delegate that receives metadata callbacks from an
/// `AVCaptureMetadataOutput` and logs the decoded barcode string.
class BarcodeScannerDelegate: NSObject, ObservableObject, AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Only the first recognized object is considered; bail out unless it is
        // a machine-readable code carrying a decodable string payload.
        guard
            let readable = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
            let code = readable.stringValue
        else { return }
        print(code)
    }
}
// CameraView.swift
import SwiftUI
import AVKit
/// Hosts an `AVCaptureVideoPreviewLayer` for `session` inside SwiftUI.
struct CameraView: UIViewRepresentable {
    /// Desired size of the preview; may arrive as .zero on first layout in a sheet.
    var frameSize: CGSize
    /// The capture session whose video feed is previewed.
    @Binding var session: AVCaptureSession

    func makeUIView(context: Context) -> UIView {
        let view = UIViewType(frame: CGRect(origin: .zero, size: frameSize))
        view.backgroundColor = .clear
        let cameraLayer = AVCaptureVideoPreviewLayer(session: session)
        cameraLayer.frame = CGRect(origin: .zero, size: frameSize)
        cameraLayer.videoGravity = .resizeAspectFill
        cameraLayer.masksToBounds = true
        view.layer.addSublayer(cameraLayer)
        return view
    }

    func updateUIView(_ uiView: UIViewType, context: Context) {
        // Fix: when presented in a .sheet, SwiftUI first measures this view at
        // zero size and only later delivers the real frameSize. Without this
        // update the preview layer keeps its initial zero frame and the camera
        // never becomes visible, even though the session is running.
        guard uiView.frame.size != frameSize else { return }
        uiView.frame.size = frameSize
        // The preview layer was added first in makeUIView, so sublayers?.first
        // retrieves it; resize it to match the new view size.
        if let cameraLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
            cameraLayer.frame = CGRect(origin: .zero, size: frameSize)
        }
    }
}
但是,当我在 MainView.swift 中使用
navigationDestination
而不是使用 sheet
或 fullScreenCover
时,应用程序完全按预期工作。
谢谢您的帮助。
我遇到了同样的问题,我想我们都关注了 Kavsoft :)。经过一番调查,我发现了根本原因。
问题在于 CameraView 结构中相机层大小的初始化。相机层最初创建时大小为零:
cameraLayer.frame = .init(origin: .zero, size: frameSize)
呈现工作表视图时,frameSize 初始化为零,然后更新为实际大小。但是,此更新不会传播到相机层,导致其无法正确显示。
一个快速解决方法是提供一个固定的 frameSize,例如 CGSize(width: 300, height: 300),这使得相机视图可见。然而,这种方法并不是最佳的。
为了正确解决这个问题,我们需要确保每当视图尺寸发生变化时相机层都会更新其尺寸。这可以通过实现 updateUIView 方法来实现。方法如下:
func updateUIView(_ uiView: UIViewType, context: Context) {
// Check if the view's size has changed (sheets report .zero first, then the real size)
guard uiView.frame.size != frameSize else { return }
// Update the view's size to the newly reported frameSize
uiView.frame.size = frameSize
// Update the camera layer's frame accordingly; the preview layer was added
// first in makeUIView, so sublayers?.first retrieves it
if let cameraLayer = uiView.layer.sublayers?.first as? AVCaptureVideoPreviewLayer {
cameraLayer.frame = CGRect(origin: .zero, size: frameSize)
}
}
通过将此代码片段合并到 CameraView 结构中,相机层将动态调整其大小,确保其在工作表视图中正确显示。
希望有帮助。