OpenCV "vector subscript out of range" error in Java


I am trying to write an age and gender guessing program in Java using OpenCV. I am running into a problem when running the DNN to detect faces, and I have started adding code to debug my program.

package AgeGenderCV;

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Point;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.dnn.Dnn;
import org.opencv.dnn.Net;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;

public class AgeGenderCV {
    private static final String[] GENDER_LIST = {"Male", "Female"};
    private static final String[] AGE_LIST = {"(0-2)", "(4-6)", "(8-12)", "(15-20)", "(25-32)", "(38-43)", "(48-53)", "(60-100)"};
    private static final double[] MODEL_MEAN_VALUES = {78.4263377603, 87.7689143744, 114.895847746};
    private static final double CONFIDENCE_THRESHOLD = 0.7;

    public void ProcessImage(){
        // Load networks
        Net faceNet = Dnn.readNetFromTensorflow("opencv_face_detector_uint8.pb", "opencv_face_detector.pbtxt");
        Net ageNet = Dnn.readNetFromCaffe("age_deploy.prototxt", "age_net.caffemodel");
        Net genderNet = Dnn.readNetFromCaffe("gender_deploy.prototxt", "gender_net.caffemodel");
        
        
        // Open a video file or an image file or a camera stream
        VideoCapture cap;
        //if (args.length > 0) {
        //    cap = new VideoCapture(args[0]);
        //} else {
            cap = new VideoCapture(0);
        //}

        while (true) {
            // Read frame
            Mat frame = new Mat();
            cap.read(frame);
            
            // Get face bounding boxes
            Mat frameFace = frame.clone();
            //Imgcodecs.imwrite("frame.jpg", frameFace);
            
            List<Rect> bboxes = new ArrayList<>();
            Size size = new Size(300,300);
            System.out.println("Size : "+size);
            
            Scalar scalar = new Scalar(104, 117, 123);
            System.out.println("Scalar : "+scalar);
            
            Mat blob = Dnn.blobFromImage(frameFace, 1.0, size, scalar, false, false);
            System.out.println("blob : "+blob);
            
            faceNet.setInput(blob);
            System.out.println("FaceNet : "+faceNet);
            
            Mat detections = faceNet.forward();
            System.out.println("detections : "+detections);
            
            for (int i = 0; i < detections.size().height; i++) {
                System.out.println("i : "+i);
                //double confidence = detections.get(0, 0, i, 2)[0];
                double confidence;
                confidence = detections.get(i, 2)[0];
                if (confidence > CONFIDENCE_THRESHOLD) {
                    int x1 = (int) (detections.get(i, 3)[0] * frame.cols());
                    int y1 = (int) (detections.get(i, 4)[0] * frame.rows());
                    int x2 = (int) (detections.get(i, 5)[0] * frame.cols());
                    int y2 = (int) (detections.get(i, 6)[0] * frame.rows());
                    bboxes.add(new Rect(x1, y1, x2 - x1, y2 - y1));
                    Imgproc.rectangle(frameFace, new org.opencv.core.Point(x1, y1), new org.opencv.core.Point(x2, y2), new org.opencv.core.Scalar(0, 255, 0), (int) Math.round(frame.rows() / 150), 8, 0);
                }
            }


            if (bboxes.isEmpty()) {
                System.out.println("No face Detected, Checking next frame");
                continue;
            }

            for (Rect bbox : bboxes) {
                Mat face = new Mat(frame, bbox);
                Size sizeb = new Size(227,227);
                Scalar scalarb = new Scalar(MODEL_MEAN_VALUES);
                blob = Dnn.blobFromImage(face, 1.0, sizeb, scalarb, false);
                genderNet.setInput(blob);
                Mat genderPreds = genderNet.forward();
                String gender = GENDER_LIST[(int) genderPreds.get(0, 0)[0]];
                //System.out.println("Gender Output : " + genderPreds);
                System.out.println("Gender : " + gender + ", conf = " + genderPreds.get(0, 0)[0]);

                ageNet.setInput(blob);
                Mat agePreds = ageNet.forward();
                agePreds = agePreds.reshape(1, 1); // reshape to a 2D matrix with one row
                //Point maxLoc = new Point(); // !!! not assigned !!!
                //Core.minMaxLoc(agePreds);
                //int ageIdx = (int) maxLoc.x; // !!! uninitialized !!!
                Core.MinMaxLocResult mm = Core.minMaxLoc(agePreds);
                int ageIdx = (int) mm.maxLoc.x;

                //Imgcodecs.imwrite("age-gender-out-"+args[0]+".jpg", frameFace);
                //Imgcodecs.imwrite("age-gender-out-.jpg", frameFace);
                System.out.println("Age : " + AGE_LIST[ageIdx] + ", conf = " + agePreds.get(0, ageIdx)[0]);
            }
            cap.release();
        }
    }
    
    public static void main(String args[])
    {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        System.loadLibrary("opencv_java470");
        
        AgeGenderCV example = new AgeGenderCV();
        example.ProcessImage();
    }
}

Output - AgeGenderCV (run):

run:
[ INFO:...] global tf_importer.cpp:3014 cv::dnn::dnn4_v20221220::`anonymous-namespace'::TFImporter::populateNet DNN/TF: parsing model (N/A version info). Number of nodes = 570
[ INFO:...] global tf_importer.cpp:3021 cv::dnn::dnn4_v20221220::`anonymous-namespace'::TFImporter::populateNet DNN/TF: parsing config (N/A version info). Number of nodes = 145
Attempting to upgrade input file specified using deprecated V1LayerParameter: age_deploy.prototxt
Successfully upgraded file specified using deprecated V1LayerParameter
Attempting to upgrade input file specified using deprecated V1LayerParameter: age_net.caffemodel
Successfully upgraded file specified using deprecated V1LayerParameter
Attempting to upgrade input file specified using deprecated V1LayerParameter: gender_deploy.prototxt
Successfully upgraded file specified using deprecated V1LayerParameter
Attempting to upgrade input file specified using deprecated V1LayerParameter: gender_net.caffemodel
Successfully upgraded file specified using deprecated V1LayerParameter
Size : 300x300
Scalar : [104.0, 117.0, 123.0, 0.0]
blob : Mat [ 1*3*300*300*CV_32FC1, isCont=true, isSubmat=false, nativeObj=0x1ef8ecf6270, dataAddr=0x1ef9a76c080 ]
FaceNet : org.opencv.dnn.Net@b81eda8
C:\Users
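The crash is most likely caused by reading the detector output as if it were a 2-D Mat: faceNet.forward() returns a 4-D blob (typically 1 x 1 x N x 7 for this SSD face detector), so detections.size().height and detections.get(i, 2) do not address individual detections. Below is a minimal sketch of the detection loop under that assumption, reusing the names from ProcessImage above (faceNet, frame, frameFace, bboxes, CONFIDENCE_THRESHOLD); it reshapes the output to an N x 7 matrix before reading it:

// Minimal sketch (assumption: detector output has shape 1 x 1 x N x 7, with one
// row per detection: [imageId, classId, confidence, x1, y1, x2, y2]).
Mat detections = faceNet.forward();
Mat detectionMat = detections.reshape(1, (int) (detections.total() / 7));

for (int i = 0; i < detectionMat.rows(); i++) {
    double confidence = detectionMat.get(i, 2)[0];
    if (confidence > CONFIDENCE_THRESHOLD) {
        // Coordinates are relative, so scale them to the frame size
        int x1 = (int) (detectionMat.get(i, 3)[0] * frame.cols());
        int y1 = (int) (detectionMat.get(i, 4)[0] * frame.rows());
        int x2 = (int) (detectionMat.get(i, 5)[0] * frame.cols());
        int y2 = (int) (detectionMat.get(i, 6)[0] * frame.rows());
        bboxes.add(new Rect(x1, y1, x2 - x1, y2 - y1));
        Imgproc.rectangle(frameFace, new Point(x1, y1), new Point(x2, y2),
                new Scalar(0, 255, 0), Math.max(1, frame.rows() / 150), 8, 0);
    }
}

Separately, GENDER_LIST[(int) genderPreds.get(0, 0)[0]] indexes the list with a probability cast to int (almost always 0); taking the arg-max of genderPreds via Core.minMaxLoc, as is already done for agePreds, would be the safer choice. It is also worth clamping each Rect to the frame bounds before new Mat(frame, bbox), since the detector can report coordinates slightly outside the image.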
java tensorflow opencv dotnetnuke caffe