JNI UnsatisfiedLinkError - Android

Question (1 vote, 1 answer)

I am a beginner with JNI and I am trying to load a native library, but I keep getting an UnsatisfiedLinkError in the log. I have checked all the files several times and the error is still the same.

Android.mk

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)


LOCAL_MODULE    := native_sample
LOCAL_SRC_FILES := myFirstApp.cpp
LOCAL_LDLIBS +=  -llog -ldl

include $(BUILD_SHARED_LIBRARY)

myFirstApp.cpp

#include <jni.h>
#include <opencv2/core/core.hpp>
#include "opencv2/highgui/highgui.hpp"
#include <vector>
#include <math.h>
#include <android/log.h>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/legacy/legacy.hpp>
#include "opencv2/video/tracking.hpp"
#include <time.h>
#include <math.h>
#include <string.h>

#define APPNAME "myFirstApp"
using namespace std;
using namespace cv;

extern "C" {

static int numTemplate = 24;
static int tWidth = 256;
static int tHight = 256;
static vector<Mat> tmplts;
static vector<vector<KeyPoint> > keyPointsTmplts;
static vector<Mat> descriptorsTmplts;
static vector<Mat> trainDescriptors;
// find squares vars
static vector<vector<Point> > squares;
static vector<Point2f> squaresCenters;
static vector<int> squaresAbsAreas;
static vector<int> clustersAreas;
static double scaleFactor = 1.5;
static double MARKER_RATIO = 0.03;
//clustering vars
static vector<Point2f> clusterCenters;
static vector<vector<Point> > clusterBoundaries;
static int CLUSTERTHRESHOLD = 25;
//tracking variables
static Mat prevFrame;
static vector<Point2f> oldPoints;
static TermCriteria termcrit(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03);
static Size winSize(31, 31);
static Size subPixWinSize(10, 10);
//Recognition
static std::vector<int> matchingResults;
static vector<Mat> hos;
static Mat warpedImg;
static Mat cropped;
static vector<vector<KeyPoint> > candidatesKeypoints;
vector<Mat> candidates;

// DETECTION
static FastFeatureDetector detector(16);
//static int MAX_KEYPOINTS_COUNT=100;
//static GoodFeaturesToTrackDetector detector(MAX_KEYPOINTS_COUNT, 0.01, 10, 3, false, 0.04);

// DESCRIPTOR
static FREAK extractor;

// MATCHER
static BFMatcher matcher(NORM_HAMMING, true);


double diffclock(clock_t clock1, clock_t clock2);



// for int arrays
static Mat points2Mat(const Point* p) {
    Mat srcP(4, 2, CV_32FC1);
    for (int i = 0; i < 4; i++) {
        srcP.at<float>(i, 0) = p[i].x;
        srcP.at<float>(i, 1) = p[i].y;
    }
    return srcP;
}

// for float arrays
static Mat points2MatF(const Point2f* p) {
    Mat srcP(4, 2, CV_32FC1);
    for (int i = 0; i < 4; i++) {
        srcP.at<float>(i, 0) = p[i].x;
        srcP.at<float>(i, 1) = p[i].y;
    }
    return srcP;
}

static Mat prepareWarpDstMat(const Point* p) {
    Mat dstP = cvCreateMat(4, 2, CV_32FC1);
    dstP.at<float>(0, 0) = p[0].x;
    dstP.at<float>(0, 1) = p[0].y;
    dstP.at<float>(1, 0) = p[0].x + tWidth;
    dstP.at<float>(1, 1) = p[0].y;
    dstP.at<float>(2, 0) = p[0].x + tWidth;
    dstP.at<float>(2, 1) = p[0].y + tHight;
    dstP.at<float>(3, 0) = p[0].x;
    dstP.at<float>(3, 1) = p[0].y + tHight;
    return dstP;
}

//-----------------------------Find Squares-------------------------------------------

// helper function:
// finds a cosine of angle between vectors
// from pt0->pt1 and from pt0->pt2
static double angle(Point pt1, Point pt2, Point pt0) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x;
    double dy2 = pt2.y - pt0.y;
    return (dx1 * dx2 + dy1 * dy2)
            / sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
}

static void clearVectors() {
    // clear all vectors from data
    squares.clear();
    matchingResults.clear();
    squaresCenters.clear();
    squaresAbsAreas.clear();
    clusterCenters.clear();
    clusterBoundaries.clear();
    clustersAreas.clear();
    candidatesKeypoints.clear();
    candidates.clear();

}

// the sequence is stored in the specified memory storage
static void findSquares(const Mat& grayImg) {
    clock_t begin = clock();
    Mat timg, gray, scaledImg;

    resize(grayImg, scaledImg, Size(0, 0), 1 / scaleFactor, 1 / scaleFactor,
            CV_INTER_CUBIC);

    __android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "scaledImage %i %i",
            scaledImg.cols,scaledImg.rows);
    // Gaussian blurring better than pyr up and down
    GaussianBlur(scaledImg, timg, Size(5, 5), 0, 0, BORDER_DEFAULT); // t11

    vector<vector<Point> > contours;
    // find squares in every color plane of the image

    Canny(timg, gray, 50, 200, 5); //t3

    dilate(gray, gray, Mat(), Point(-1, -1));

    // find contours and store them all as a list
    findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

    vector<Point> approx;

    // test each contour
    for (int i = 0; i < contours.size(); i++) {
        // approximate contour with accuracy proportional
        // to the contour perimeter
        approxPolyDP(Mat(contours[i]), approx,
                arcLength(Mat(contours[i]), true) * 0.02, true);

        // square contours should have 4 vertices after approximation
        // relatively large area (to filter out noisy contours)
        // and be convex.
        // Note: absolute value of an area is used because
        // area may be positive or negative - in accordance with the
        // contour orientation
        int imgArea = gray.cols * gray.rows;
        int absArea = fabs(contourArea(Mat(approx)));
        if (approx.size() == 4 && absArea > 1000 && isContourConvex(Mat(approx))
                && absArea < 0.8 * imgArea) {
            double maxCosine = 0;
            Point a, b, c, d;

            for (int j = 2; j < 5; j++) {
                // find the maximum cosine of the angle between joint edges
                a = approx[j % 4];
                b = approx[j - 2];
                c = approx[j - 1];
                double cosine = fabs(angle(a, b, c));
                maxCosine = MAX(maxCosine, cosine);
            }

            // if cosines of all angles are small
            // (all angles are ~90 degree) then write quandrange
            // vertices to resultant sequence
            if (maxCosine < 0.3) {

                // restore scaling
                Point* p0 = (Point*) &approx[0];
                Point* p1 = (Point*) &approx[1];
                Point* p2 = (Point*) &approx[2];
                Point* p3 = (Point*) &approx[3];

                p0->x = p0->x * scaleFactor;
                p0->y = p0->y * scaleFactor;
                p1->x = p1->x * scaleFactor;
                p1->y = p1->y * scaleFactor;
                p2->x = p2->x * scaleFactor;
                p2->y = p2->y * scaleFactor;
                p3->x = p3->x * scaleFactor;
                p3->y = p3->y * scaleFactor;

                Point2f center = (*p0 + *p1 + *p2 + *p3) * (0.25);
                //  //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "squareCenter %G,%G",center.x,center.y);
                squares.push_back(approx);
                squaresCenters.push_back(center);
                squaresAbsAreas.push_back(absArea);
            }

        }
    }

}

//--------------------------------Cluster Rectangles-------------------------------------

static void updateCluster(int pNum, int* clusters, int n) {
    for (int i = 0; i < n; i++) {
        if (clusters[pNum] != clusters[i]) {
            Point2f p0 = (Point2f) squaresCenters[pNum];
            Point2f p1 = (Point2f) squaresCenters[i];
//          //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "norm %G",
//                  norm(p0-p1));
            if (norm(p0 - p1) < CLUSTERTHRESHOLD) {
                clusters[i] = clusters[pNum];
                updateCluster(i, clusters, n);
            }
        }
    }

}
static int TRACKED_THRESHOLD = 100;
static bool inTrackingList(Point2f clusterCenter) {
//  int tracklistSize = trackedMarkersCenters.size();
//  Point2f trackedCenter;
//  for (int i = 0; i < tracklistSize; i++) {
//      trackedCenter = (Point2f) trackedMarkersCenters[i][0];
////        //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "dist %G",
////                norm(clusterCenter - trackedCenter));
//      if (norm(clusterCenter - trackedCenter) < TRACKED_THRESHOLD)
//          return true;
//  }
    return false;
}



static void mergeRectangles() {
    int n = squaresCenters.size();
    int clusters[n];
    int clusterCounter = 0;

    for (int i = 0; i < n; i++)
        clusters[i] = -1;
    for (int i = 0; i < n; i++)
        if (clusters[i] == -1) {
            clusters[i] = clusterCounter;
            clusterCounter++;
            updateCluster(i, clusters, n);
        }
    //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "clusters %i",
    //clusterCounter);
    int members, maxArea;

    for (int i = 0; i < clusterCounter; i++) {
        members = 0;
        Point2f center(0, 0);
        maxArea = -1;
        vector<Point> maxSquare;
        for (int j = 0; j < n; j++) {
            if (clusters[j] == i) {
                center += (Point2f) squaresCenters[j];
                members++;

                if (maxArea < (int) squaresAbsAreas[j]) {
                    maxArea = (int) squaresAbsAreas[j];
                    maxSquare = squares[j];
                }
            }
        }
        center *= (1.0 / members);

        if (!inTrackingList(center)) {
            clusterCenters.push_back(center);
            clusterBoundaries.push_back(maxSquare);
            clustersAreas.push_back(maxArea);
            matchingResults.push_back(-3);
            const Point* floPoin = &maxSquare[0];
            Mat scene = points2Mat(floPoin);
        }

    }

}

//------------------------------Process Filtered squares--------------------------------------

static int imageArea;
// crop squares
static void cropAndWarpCandidate(Mat& grayImg, const Point* p, int i) {

    //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "Area %i %i %G %G", i,
    //      clustersAreas[i], imageArea, clustersAreas[i] * 1.0 / imageArea);
//
//  if (clustersAreas[i] < MARKER_RATIO * imageArea) {
//
//      matchingResults[i] = -2;
//      return;
//  }

    Mat srcPMat = points2Mat(p);
    Mat dstPMat = prepareWarpDstMat(p);
    Mat ho = findHomography(srcPMat, dstPMat, 0);



    warpPerspective(grayImg, warpedImg, ho,
            Size(grayImg.cols + tWidth, grayImg.rows + tHight));

    cropped = Mat(warpedImg, Rect(p[0].x, p[0].y, tWidth, tHight));

//  int templateIndex = matchCandidate(cropped);
//
//  matchingResults[i] = templateIndex;
    candidates.push_back(cropped);

    srcPMat.release();
    dstPMat.release();
    warpedImg.release();
    cropped.release();

}

// the function draws all the squares in the image
static void processFilteredSquares(Mat& grayImg) {
    imageArea = grayImg.cols * grayImg.rows;

    int squaresSize = clusterBoundaries.size();

    //__android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "filtered %d",
    //      squaresSize);
    for (int i = 0; i < squaresSize; i++) {
        const Point* p = &clusterBoundaries[i][0];
        cropAndWarpCandidate(grayImg, p, i);

    }

}

//------------------Drawing ---------------------------------------


static void drawFilteredSquaresWithoutMarker(Mat& rgbImg) {
    int squaresSize = clusterBoundaries.size();
    __android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "squares %i",
            squaresSize);
    int n = 4;
    for (int i = 0; i < squaresSize; i++) {
        const Point* p = &clusterBoundaries[i][0];
        Point center = clusterCenters[i];
        polylines(rgbImg, &p, &n, 1, true, Scalar(0, 255, 0, 255), 3, CV_AA);
        circle(rgbImg, center, 10, Scalar(0, 255, 0, 255));
    }

}


// calc time helper
double diffclock(clock_t clock1, clock_t clock2) {
    double diffticks = clock1 - clock2;
    double diffms = (diffticks * 1000) / CLOCKS_PER_SEC;
    return diffms;
}

JNIEXPORT jint JNICALL Java_com_example_myfirstapp_RegisterMarkerMain_findMarkersNative(
        JNIEnv* env, jobject, jlong addrRgba) {
    //clock_t begin = clock();
    Mat& mRgb = *(Mat*) addrRgba;
    Mat mgray(mRgb.rows, mRgb.cols, CV_8UC1);
    cvtColor(mRgb, mgray, CV_RGBA2GRAY, 1); // the working one

    clearVectors();

    findSquares(mgray);
    mergeRectangles();

    processFilteredSquares(mgray);

    drawFilteredSquaresWithoutMarker(mRgb);
    __android_log_print(ANDROID_LOG_VERBOSE, APPNAME, "Candidates %i",candidates.size());
    return clusterBoundaries.size();
//  clock_t end = clock();

//  mgray.release();
}

JNIEXPORT void JNICALL Java_com_example_myfirstapp_RegisterMarkerMain_loadCand(
        JNIEnv* env, jobject, jlong addrRgba, jlong descriptorAdd, jint i) {
    vector<KeyPoint> keyPoints;
    Mat nativeM = candidates[i];
    Mat& mRgb = *(Mat*) addrRgba;
    Mat& descriptor = *(Mat*) descriptorAdd;
    nativeM.copyTo(mRgb);

    Mat descriptorUnFiltered;
    detector.detect(nativeM, keyPoints);
    if(keyPoints.size()==0)
        return;
    extractor.compute(nativeM, keyPoints, descriptorUnFiltered);
    vector<vector<DMatch> > matches;
    if(descriptorUnFiltered.rows==0)
        return;
    matcher.radiusMatch(descriptorUnFiltered, descriptorUnFiltered, matches,
            50);



    descriptor = descriptorUnFiltered.row(0);

    std::vector<DMatch> mat;
    for (int j = 1; j < matches.size(); j++) {

        mat = matches[j];
        // if no matches neglect
        if (mat.size() >= 2) {
            DMatch m = mat[1];

            if (m.trainIdx < m.queryIdx)
                continue;

            else
                vconcat(descriptor, descriptorUnFiltered.row(m.queryIdx),
                        descriptor);

        } else {
            DMatch m0 = mat[0];
            vconcat(descriptor, descriptorUnFiltered.row(m0.queryIdx),
                    descriptor);

        }
    }

}

}

This is where I call loadLibrary in my activity:

private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            Log.i("loading libs", "OpenCV loading status " + status);
            switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i("loading libs", "OpenCV loaded successfully");

                // Load native library after(!) OpenCV initialization
                System.loadLibrary("native_sample");

            }
                break;
            default: {
                super.onManagerConnected(status);
            }
                break;
            }
        }
    };
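
The exported JNI symbols in myFirstApp.cpp (Java_com_example_myfirstapp_RegisterMarkerMain_findMarkersNative and Java_com_example_myfirstapp_RegisterMarkerMain_loadCand) have to match native method declarations in a com.example.myfirstapp.RegisterMarkerMain class; a mismatch in package, class, or method name also surfaces as an UnsatisfiedLinkError, though at call time rather than at loadLibrary time. For reference, a minimal sketch of the matching declarations, assuming the names implied by the symbols (the real activity may declare them differently):

package com.example.myfirstapp;

public class RegisterMarkerMain {

    // must match Java_com_example_myfirstapp_RegisterMarkerMain_findMarkersNative
    // (JNIEnv*, jobject, jlong) -> jint, i.e. an instance method taking a long
    public native int findMarkersNative(long addrRgba);

    // must match Java_com_example_myfirstapp_RegisterMarkerMain_loadCand
    public native void loadCand(long addrRgba, long descriptorAdd, int i);
}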

Any help would be greatly appreciated.

android java-native-interface unsatisfiedlinkerror
1 Answer
0 votes

An UnsatisfiedLinkError is thrown when an application tries to load a native library (a .so on Linux, a .dll on Windows, or a .dylib on macOS) and that library does not exist. Mine threw this to the console:

java.lang.UnsatisfiedLinkError: dalvik.system.PathClassLoader[DexPathList[[zip file "/system/framework/org.apache.http.legacy.boot.jar", zip file "/data/app/com.imaniac.myo-QS9EJbxzOjKhre3FebKwoA==/base.apk"], nativeLibraryDirectories=[/data/app/com.imaniac.myo-QS9EJbxzOjKhre3FebKwoA==/lib/arm64, /system/lib64]]] couldn't find "libgesture-classifier.so"
    at java.lang.Runtime.loadLibrary0(Runtime.java:1012)
    at java.lang.System.loadLibrary(System.java:1669)

Well, it worked for me after adding [this][1] to projectfolder\src\main

[1]:https://mega.nz/#!HsVijIxa!CLbeM1BhpEd5sUrErFglP7R8BaHPKaYTG3CkCkaoXpk

Try adding the library to the path I mentioned above (projectfolder\src\main).
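
As an additional check, the library file must be named libnative_sample.so (for System.loadLibrary("native_sample")) and packaged for an ABI the device actually supports. A minimal sketch of catching the error and logging the device ABIs (not from the original post; Build.SUPPORTED_ABIS requires API 21+):

try {
    // System.loadLibrary("native_sample") looks for libnative_sample.so
    // in the APK's nativeLibraryDirectories for the device's ABI
    System.loadLibrary("native_sample");
} catch (UnsatisfiedLinkError e) {
    // compare these ABIs with the lib/<abi> folders actually packaged
    // (e.g. arm64-v8a vs. armeabi-v7a)
    Log.e("loading libs", "Supported ABIs: "
            + java.util.Arrays.toString(android.os.Build.SUPPORTED_ABIS), e);
    throw e;
}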
