尝试在二维码上显示边框

问题描述 投票:0回答:1

我尝试在检测到的二维码周围显示边界框,但该框未显示。认为这可能是坐标系不匹配的原因,但不完全确定。

import SwiftUI
import AVFoundation
import Combine

struct ContentView: View {
    @StateObject private var viewModel = QRCodeScannerViewModel()

    var body: some View {
        VStack {
            if let url = viewModel.detectedURL {
                Text(url)
                    .foregroundColor(.blue)
                    .padding()
            } else {
                Text("Scanning for QR Codes...")
            }
            CameraPreview(session: viewModel.session, detectedBoundingBox: $viewModel.detectedBoundingBox)
                .onAppear {
                    viewModel.startScanning()
                }
                .onDisappear {
                    viewModel.stopScanning()
                }
        }
    }
}

struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession
    @Binding var detectedBoundingBox: CGRect?

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: UIScreen.main.bounds)
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.bounds
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        guard let boundingBox = detectedBoundingBox else { return }
        
        // Transform the bounding box to the UIView's coordinate system
        let viewSize = uiView.bounds.size
        let scaleX = viewSize.width
        let scaleY = viewSize.height
        let transformedBox = CGRect(
            x: boundingBox.origin.y * scaleX,
            y: boundingBox.origin.x * scaleY,
            width: boundingBox.size.height * scaleX,
            height: boundingBox.size.width * scaleY
        )

        // Update or create the bounding box layer
        if let boundingBoxLayer = uiView.layer.sublayers?.first(where: { $0 is CAShapeLayer }) as? CAShapeLayer {
            boundingBoxLayer.frame = transformedBox
        } else {
            let boundingBoxLayer = CAShapeLayer()
            boundingBoxLayer.frame = transformedBox
            boundingBoxLayer.borderColor = UIColor.red.cgColor
            boundingBoxLayer.borderWidth = 2
            uiView.layer.addSublayer(boundingBoxLayer)
        }
    }
}


class QRCodeScannerViewModel: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    @Published var detectedURL: String?
    @Published var detectedBoundingBox: CGRect?

    let session = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()

    override init() {
        super.init()
        setupCaptureSession()
    }

    private func setupCaptureSession() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device),
              session.canAddInput(input),
              session.canAddOutput(videoDataOutput) else {
            return
        }

        session.addInput(input)
        videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sampleBufferQueue"))
        session.addOutput(videoDataOutput)
    }

    func startScanning() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.session.startRunning()
        }
    }

    func stopScanning() {
        session.stopRunning()
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        
        let ciImage = CIImage(cvImageBuffer: pixelBuffer)
        let context = CIContext()
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: ciImage) as? [CIQRCodeFeature]

        DispatchQueue.main.async {
            if let feature = features?.first {
                self.detectedURL = feature.messageString
                self.detectedBoundingBox = feature.bounds
            }
        }
    }
}

更新:有它......有点工作吗?

import SwiftUI
import AVFoundation
import Combine

struct ContentView: View {
    @StateObject private var viewModel = QRCodeScannerViewModel()

    var body: some View {
        VStack {
            if let url = viewModel.detectedURL {
                Text(url)
                    .foregroundColor(.blue)
                    .padding()
            } else {
                Text("Scanning for QR Codes...")
            }
            CameraPreview(session: viewModel.session, boundingBox: $viewModel.boundingBox, updateBoundingBox: viewModel.updateBoundingBox)
                .onAppear {
                    viewModel.startScanning()
                }
                .onDisappear {
                    viewModel.stopScanning()
                }
        }
    }
}

struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession
    @Binding var boundingBox: CGRect
    let updateBoundingBox: (CGRect) -> Void

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: UIScreen.main.bounds)
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.frame
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        context.coordinator.previewLayer = previewLayer
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {
        // Instead of updating the bounding box directly in the view, use a callback
        if let layer = uiView.layer.sublayers?.first(where: { $0 is CAShapeLayer }) as? CAShapeLayer {
            layer.path = UIBezierPath(rect: boundingBox).cgPath
        } else {
            let shapeLayer = CAShapeLayer()
            shapeLayer.path = UIBezierPath(rect: boundingBox).cgPath
            shapeLayer.strokeColor = UIColor.red.cgColor
            shapeLayer.fillColor = UIColor.clear.cgColor
            shapeLayer.lineWidth = 2
            uiView.layer.addSublayer(shapeLayer)
        }
    }

    func makeCoordinator() -> Coordinator {
        Coordinator(boundingBox: $boundingBox, updateBoundingBox: updateBoundingBox)
    }

    class Coordinator: NSObject {
        var previewLayer: AVCaptureVideoPreviewLayer?
        @Binding var boundingBox: CGRect
        let updateBoundingBox: (CGRect) -> Void

        init(boundingBox: Binding<CGRect>, updateBoundingBox: @escaping (CGRect) -> Void) {
            _boundingBox = boundingBox
            self.updateBoundingBox = updateBoundingBox
        }

        var boundingBoxLayer: CAShapeLayer?

     
    }
}

class QRCodeScannerViewModel: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    @Published var detectedURL: String?
    @Published var boundingBox: CGRect = .zero

    let session = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()

    override init() {
        super.init()
        setupCaptureSession()
    }

    private func setupCaptureSession() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device),
              session.canAddInput(input),
              session.canAddOutput(videoDataOutput) else {
            return
        }

        session.addInput(input)
        videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sampleBufferQueue"))
        session.addOutput(videoDataOutput)
    }

    func startScanning() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.session.startRunning()
        }
    }

    func stopScanning() {
        session.stopRunning()
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        
        let ciImage = CIImage(cvImageBuffer: pixelBuffer)
        let context = CIContext()
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: ciImage) as? [CIQRCodeFeature]

        DispatchQueue.main.async {
            if let feature = features?.first {
                self.detectedURL = feature.messageString
                // Convert feature bounds to view coordinates
                let boundingBox = feature.bounds
                let imageSize = ciImage.extent.size
                let viewSize = UIScreen.main.bounds.size
                let scaleX = viewSize.width / imageSize.width
                let scaleY = viewSize.height / imageSize.height
                let transformedBoundingBox = CGRect(
                    x: boundingBox.origin.x * scaleX,
                    y: viewSize.height - (boundingBox.origin.y + boundingBox.height) * scaleY,
                    width: boundingBox.width * scaleX,
                    height: boundingBox.height * scaleY
                )
                self.boundingBox = transformedBoundingBox
                // Call the callback to update the bounding box in CameraPreview
                self.updateBoundingBox(self.boundingBox)
            } else {
                self.detectedURL = nil
                self.boundingBox = .zero
                // Call the callback to update the bounding box in CameraPreview
                self.updateBoundingBox(self.boundingBox)
            }
        }
    }

    func updateBoundingBox(_ boundingBox: CGRect) {
        DispatchQueue.main.async {
            self.boundingBox = boundingBox
        }
    }
}

更新2 仅使用静态图像时获得相同的结果

struct ContentView: View {
    @StateObject private var viewModel = QRCodeScannerViewModel()
    @State private var useCamera: Bool = true
    @State private var testImage: UIImage? = UIImage(named: "testImage.jpg")
    @State private var processedImage: UIImage?

    var body: some View {
        VStack {
            Toggle("Use Camera", isOn: $useCamera)
                .padding()

            if useCamera {
                if let url = viewModel.detectedURL {
                    Text(url)
                        .foregroundColor(.blue)
                        .padding()
                } else {
                    Text("Scanning for QR Codes...")
                }
                CameraPreview(session: viewModel.session)
                    .onAppear {
                        viewModel.startScanning()
                    }
                    .onDisappear {
                        viewModel.stopScanning()
                    }
            } else {
                if let image = processedImage {
                    Image(uiImage: image)
                        .resizable()
                        .scaledToFit()
                        .padding()
                }
            }
        }
        .onChange(of: useCamera) { usingCamera in
            if usingCamera {
                viewModel.startScanning()
                processedImage = nil
            } else {
                viewModel.stopScanning()
                if let image = testImage {
                    processedImage = viewModel.processImageForQRCode(image)
                }
            }
        }
    }
}



struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession

    func makeUIView(context: Context) -> UIView {
        let view = UIView(frame: UIScreen.main.bounds)
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = view.frame
        previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(previewLayer)
        return view
    }

    func updateUIView(_ uiView: UIView, context: Context) {}
}

class QRCodeScannerViewModel: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    @Published var detectedURL: String?
    let session = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()

    override init() {
        super.init()
        setupCaptureSession()
    }

    private func setupCaptureSession() {
        guard let device = AVCaptureDevice.default(for: .video),
              let input = try? AVCaptureDeviceInput(device: device),
              session.canAddInput(input),
              session.canAddOutput(videoDataOutput) else {
            return
        }

        session.addInput(input)
        videoDataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sampleBufferQueue"))
        session.addOutput(videoDataOutput)
    }

    func startScanning() {
        if session.isRunning {
            return
        }
        DispatchQueue.global(qos: .userInitiated).async {
            self.session.startRunning()
        }
    }

    func stopScanning() {
        if session.isRunning {
            session.stopRunning()
        }
    }

    func processImageForQRCode(_ image: UIImage) -> UIImage {
        let ciImage = CIImage(image: image)!
        let context = CIContext()
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: ciImage) as? [CIQRCodeFeature]

        UIGraphicsBeginImageContext(image.size)
        image.draw(at: CGPoint.zero)

        if let features = features {
            for feature in features {
                drawBoundingBox(around: feature)
            }
        }

        let processedImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()

        return processedImage ?? image
    }

    private func drawBoundingBox(around feature: CIQRCodeFeature) {
        guard let context = UIGraphicsGetCurrentContext() else { return }

        context.setStrokeColor(UIColor.red.cgColor)
        context.setLineWidth(10)
        context.addRect(feature.bounds)
        context.strokePath()
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        
        let ciImage = CIImage(cvImageBuffer: pixelBuffer)
        let context = CIContext()
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return }
        let uiImage = UIImage(cgImage: cgImage)

        let newCIImage = CIImage(image: uiImage)
        
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: context, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: newCIImage ?? ciImage) as? [CIQRCodeFeature]

        DispatchQueue.main.async {
            self.detectedURL = features?.first?.messageString
        }
    }
}

ios swiftui avfoundation core-image
1个回答
0
投票

应用此变换最终成为我需要做的:

let transform = CGAffineTransform(scaleX: 1, y: -1).translatedBy(x: 0, y: -image.size.height)
                drawBoundingBox(around: feature.bounds.applying(transform))
© www.soinside.com 2019 - 2024. All rights reserved.