Recognizing images using SURF with OpenCV on Android

I am trying to create a simple leaf recognition application with Android and OpenCV. My database consists of only 3 records (3 images, one for each of 3 types of leaves), and I would like to know whether one of these database pictures appears inside an image captured by the smartphone. I use SURF to extract key points from the database images and then compare them with the key points extracted from the captured image, looking for matches. My problem is that the result looks more like "color matching" than feature matching: when I compare a database image with the captured one, the number of matches is roughly the same for all 3 database entries, and so I get the wrong match.

This is one of the images from the database (note that it has no background):

database image

And this is a screenshot of the matching result:

Match Screenshot

As you can see, there are plenty of matches, but they look essentially random rather than connecting corresponding parts of the leaf.

Here is my code:

// load the captured photo and convert it to grayscale
Mat orig = Highgui.imread(photoPathwithoutFile);
Mat origBW = new Mat();
Imgproc.cvtColor(orig, origBW, Imgproc.COLOR_RGB2GRAY);

// detect key points, compute their descriptors and look for the best match
MatOfKeyPoint kpOrigin = createSURFdetector(origBW);
Mat descOrig = extractDescription(kpOrigin, origBW);
Leaf result = findMatches(descOrig);

// draw the matches against the maple reference image
Mat imageOut = orig.clone();
Features2d.drawMatches(orig, kpOrigin, maple, keypointsMaple, resultMaple, imageOut);


public MatOfKeyPoint createSURFdetector (Mat origBW) {
    FeatureDetector surf = FeatureDetector.create(FeatureDetector.FAST);

    MatOfKeyPoint keypointsOrig = new MatOfKeyPoint();

    surf.detect(origBW, keypointsOrig);

    return keypointsOrig;
}

public Mat extractDescription (MatOfKeyPoint kpOrig, Mat origBW) {
    DescriptorExtractor surfExtractor = DescriptorExtractor.create(FeatureDetector.SURF);

    Mat origDesc = new Mat();

    surfExtractor.compute(origBW, kpOrig, origDesc);

    return origDesc;
}

public Leaf findMatches (Mat descriptors) {
    DescriptorMatcher m = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
    MatOfDMatch max = new MatOfDMatch();
    resultMaple = new MatOfDMatch();
    resultChestnut = new MatOfDMatch();
    resultSwedish = new MatOfDMatch();
    Leaf match = null;

    m.match(descriptors, mapleDescriptors, resultMaple);
    Log.d("Origin", resultMaple.toList().size()+" matches with Maples");
    if (resultMaple.toList().size() > max.toList().size()) { max = resultMaple; match = Leaf.MAPLE; }
    m.match(descriptors, chestnutDescriptors, resultChestnut);
    Log.d("Origin", resultChestnut.toList().size()+" matches with Chestnut");
    if (resultChestnut.toList().size() > max.toList().size()) { max = resultChestnut; match = Leaf.CHESTNUT; }
    m.match(descriptors, swedishDescriptors, resultSwedish);
    Log.d("Origin", resultSwedish.toList().size()+" matches with Swedish");
    if (resultSwedish.toList().size() > max.toList().size()) { max = resultSwedish; match = Leaf.SWEDISH; }

    // return the leaf type that collected the most matches
    return match;
}

What am I doing wrong, and how can I fix it?


First of all, SURF is not the best choice for this task. SURF descriptors capture the local texture around each key point, and a leaf surface has very little distinctive texture, so most descriptors end up looking very similar. On top of that, the brute-force matcher returns a "best" match for every single descriptor no matter how poor it is, which is why you get roughly the same number of matches for all three database images and why the result feels like "color matching" rather than feature matching.

Second, your database images have no background, while the photo from the phone does, so many key points are detected on things that have nothing to do with the leaf (grass, soil, shadows and so on).
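One way to soften that problem (a minimal sketch only, not part of the original answer: it assumes the same Python 2 / OpenCV 2.4 environment as the snippet in Edit2 below, and that the leaf is darker than a fairly uniform background) is to threshold the photo and pass the resulting mask to the detector, so key points are extracted only on the leaf itself:

import cv2

# hypothetical helper: restrict key-point detection to the leaf region
def detect_on_leaf(path):
    img = cv2.imread(path)
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    # Otsu threshold separates the leaf from a roughly uniform background;
    # THRESH_BINARY_INV assumes the leaf is darker than the background
    _, mask = cv2.threshold(gray, 0, 255,
                            cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)

    # detect and describe key points only inside the mask
    surf = cv2.SURF()
    kp, desc = surf.detectAndCompute(gray, mask)
    return kp, desc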

If your real goal is to tell leaf species apart, the shape of the leaf carries far more information than its texture. A better approach is to segment the leaf from the background and describe its silhouette, for example with HOG computed over the whole (resized) leaf or with contour-based shape descriptors, and then train a simple classifier on many examples of each species. Three reference images are not enough for that; you would want dozens of samples per class.
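To illustrate the shape-based idea (again just a sketch under assumptions that are not in the original answer: Python 2 / OpenCV 2.4 as in Edit2, hypothetical file names, leaves already segmented from the background, and illustrative HOG parameters), one HOG vector per leaf image can be compared directly:

import cv2
import numpy as np

# HOG over a fixed-size window; all parameters here are illustrative
hog = cv2.HOGDescriptor((64, 64), (16, 16), (8, 8), (8, 8), 9)

def leaf_hog(path):
    gray = cv2.cvtColor(cv2.imread(path), cv2.COLOR_BGR2GRAY)
    gray = cv2.resize(gray, (64, 64))   # resize to the HOG window size
    return hog.compute(gray).ravel()    # one descriptor vector per image

# hypothetical database: one segmented image per species
db = {'maple': leaf_hog(r'C:\Temp\maple.jpg'),
      'chestnut': leaf_hog(r'C:\Temp\chestnut.jpg'),
      'swedish': leaf_hog(r'C:\Temp\swedish.jpg')}

query = leaf_hog(r'C:\Temp\photo.jpg')

# nearest neighbour by Euclidean distance between HOG vectors
best = min(db, key=lambda name: np.linalg.norm(db[name] - query))
print 'best match =', best

With a real training set you would replace the nearest-neighbour step with a proper classifier such as an SVM trained on many such vectors per species.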

If you still want to stick with key-point matching, note that SIFT usually gives better results than SURF on this kind of data (hence the comment in the code below), and that the raw matches have to be filtered, because the matcher will always return something even when the correspondence is meaningless.

In any case, do not use the raw number of matches as a similarity score.

Edit: to filter the matches, first apply Lowe's ratio test, and then run findHomography with RANSAC on the surviving matches; it returns a mask telling which matches are consistent with a single perspective transformation. The number of inliers in that mask is a much more meaningful score than the raw match count.

Edit2: here is some example code (sorry, I'm not fluent with the Java bindings, so it is in Python):

import cv2
import numpy as np

# read input
a = cv2.imread(r'C:\Temp\leaf1.jpg')
b = cv2.imread(r'C:\Temp\leaf2.jpg')

# convert to gray
agray = cv2.cvtColor(a, cv2.COLOR_BGR2GRAY)
bgray = cv2.cvtColor(b, cv2.COLOR_BGR2GRAY)

# detect features and compute descriptors
surf = cv2.SURF() # better use SIFT instead
kp1, d1 = surf.detectAndCompute(agray,None)
kp2, d2 = surf.detectAndCompute(bgray,None)
print 'numFeatures1 =', len(kp1)
print 'numFeatures2 =', len(kp2)

# use KNN matcher
bf = cv2.BFMatcher()
matches = bf.knnMatch(d1,d2, k=2)

# Apply Lowe ratio test
good = []
for m,n in matches:
    if m.distance < 0.75*n.distance:
        good.append(m)

print 'numMatches =', len(matches)
print 'numGoodMatches =', len(good)

# if we have enough matches, try to estimate a homography to discard matches
# that don't fit the perspective transformation model
if len(good)>10:
    # convert matches into correct format (python-specific)
    src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
    dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)

    M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0)
    print 'numMatches =', sum(mask.ravel().tolist()) # calc number of 1s in mask

else:
    print "not enough good matches are found"

Output with SURF:

numFeatures1 = 685
numFeatures2 = 1566
numMatches = 685
numGoodMatches = 52
numMatches = 11

, "" . , , numMatches , . , , , , - . , .


Source: https://habr.com/ru/post/1619912/

