CoreML: `Failed to get the home directory when checking model path` error

Problem description

I am using this code to extract text from an image. It runs perfectly the first time, and then it starts giving this error message:

[coreml] Failed to get the home directory when checking model path

Here is the code I use to extract text from the image. It is the same code I copied from the Apple documentation:
func requestORC(image: UIImage) {
    guard let cgImage = image.cgImage else { return }

    // Create a new image-request handler.
    let requestHandler = VNImageRequestHandler(cgImage: cgImage)

    // Create a new request to recognize text.
    let request = VNRecognizeTextRequest(completionHandler: recognizeTextHandler)

    do {
        // Perform the text-recognition request.
        try requestHandler.perform([request])
    } catch {
        print("Unable to perform the requests: \(error).")
    }
}
    
func recognizeTextHandler(request: VNRequest, error: Error?) {
    guard let observations = request.results as? [VNRecognizedTextObservation] else {
        return
    }
    let recognizedStrings = observations.compactMap { observation in
        // Return the string of the top VNRecognizedText instance.
        return observation.topCandidates(1).first?.string
    }
    // Process the recognized strings.
    self.recognizedStrings = recognizedStrings
}
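As an aside, `VNImageRequestHandler.perform(_:)` is synchronous, and Apple's Vision sample code runs it off the main thread. A sketch of the same two methods restructured that way; `TextScanner` is a hypothetical `ObservableObject` of my own, not part of the question, and the queue handling is my addition:

import SwiftUI
import UIKit
import Vision

// Hypothetical wrapper type; owns the recognizedStrings the question's code assigns to.
final class TextScanner: ObservableObject {
    @Published var recognizedStrings = [String]()

    func requestOCR(image: UIImage) {
        guard let cgImage = image.cgImage else { return }

        // perform(_:) blocks, so run it on a background queue.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            let requestHandler = VNImageRequestHandler(cgImage: cgImage)

            let request = VNRecognizeTextRequest { request, _ in
                guard let observations = request.results as? [VNRecognizedTextObservation] else { return }
                let strings = observations.compactMap { $0.topCandidates(1).first?.string }
                // Publish UI-facing state back on the main thread.
                DispatchQueue.main.async { self?.recognizedStrings = strings }
            }

            do {
                try requestHandler.perform([request])
            } catch {
                print("Unable to perform the requests: \(error).")
            }
        }
    }
}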
Tags: xcode, swiftui, coreml, text-recognition, apple-vision
1 Answer

Getting rid of the error in the Simulator...

This may sound strange, but the `Failed to get the home directory when checking model path` error appears in the Xcode Simulator only when you run a text-recognition app with the request's recognition level left at its default `.accurate` case. Change the value to `.fast` and the error disappears.
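In isolation, the change is a single property on the request. A minimal extract of the fix (the complete listing follows below); the comment is my reading, not something the answer states: Apple documents `.accurate` as the neural-network recognition path, so it presumably triggers the Core ML model-path check that `.fast`, the lighter character-detection path, avoids.

let request = VNRecognizeTextRequest(completionHandler: recognizeTextHandler)
#if targetEnvironment(simulator)
// Default .accurate presumably loads a Core ML model; .fast sidesteps it.
request.recognitionLevel = .fast
#endif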

It should also be said that the error does not appear at all when running the code on a real device.

Here is the code:

import SwiftUI
import Vision
import CoreML

struct ContentView : View {
    @State var recognizedStrings = [String]()
    @State var interpolatedString = ""
    
    var body: some View {
        ZStack {
            Color.yellow.ignoresSafeArea()

            Text(interpolatedString)
                .multilineTextAlignment(.center)
                .font(.largeTitle)
                .padding(.horizontal, 100)
        }
        .onAppear {
            self.opticalCharacterRecognition(.init(named: "make.png")!)
            
            if recognizedStrings.count > 0 {
                for i in 0 ..< recognizedStrings.count {
                    interpolatedString += recognizedStrings[i] + " "
                }
            }
        }
    }
}

extension ContentView {

    func opticalCharacterRecognition(_ image: UIImage) {
        guard let pngData = image.pngData() else { return }
        let requestHandler = VNImageRequestHandler(data: pngData)
        
        let request = VNRecognizeTextRequest(completionHandler: textHandler)
        
#if targetEnvironment(simulator)
        print("No more errors...")
        request.recognitionLevel = .fast      // Here it is
#endif
        
        do {
            try requestHandler.perform([request])
        } catch {
            print("Unable to perform the requests: \(error).")
        }
    }
    
    func textHandler(request: VNRequest, error: Error?) {
        guard let observations = request.results as? [VNRecognizedTextObservation]
        else { return }
        let recognizedStrings = observations.compactMap { observation in
            return observation.topCandidates(1).first?.string
        }
        self.recognizedStrings = recognizedStrings
    }
}
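One design note on the flow above: `VNImageRequestHandler.perform(_:)` runs synchronously and invokes the request's completion handler before it returns, which is why `recognizedStrings` is already populated by the time the `if recognizedStrings.count > 0` loop in `.onAppear` executes.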
