feat: 切换后端至PaddleOCR-NCNN,切换工程为CMake
1.项目后端整体迁移至PaddleOCR-NCNN算法,已通过基本的兼容性测试 2.工程改为使用CMake组织,后续为了更好地兼容第三方库,不再提供QMake工程 3.重整权利声明文件,重整代码工程,确保最小化侵权风险 Log: 切换后端至PaddleOCR-NCNN,切换工程为CMake Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
This commit is contained in:
@ -0,0 +1,89 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.core.Range;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
|
||||
class PanoramaStitchingRotatingCameraRun {
|
||||
void basicPanoramaStitching (String[] args) {
|
||||
String img1path = args[0], img2path = args[1];
|
||||
Mat img1 = new Mat(), img2 = new Mat();
|
||||
img1 = Imgcodecs.imread(img1path);
|
||||
img2 = Imgcodecs.imread(img2path);
|
||||
|
||||
//! [camera-pose-from-Blender-at-location-1]
|
||||
Mat c1Mo = new Mat( 4, 4, CvType.CV_64FC1 );
|
||||
c1Mo.put(0 ,0 ,0.9659258723258972, 0.2588190734386444, 0.0, 1.5529145002365112,
|
||||
0.08852133899927139, -0.3303661346435547, -0.9396926164627075, -0.10281121730804443,
|
||||
-0.24321036040782928, 0.9076734185218811, -0.342020183801651, 6.130080699920654,
|
||||
0, 0, 0, 1 );
|
||||
//! [camera-pose-from-Blender-at-location-1]
|
||||
|
||||
//! [camera-pose-from-Blender-at-location-2]
|
||||
Mat c2Mo = new Mat( 4, 4, CvType.CV_64FC1 );
|
||||
c2Mo.put(0, 0, 0.9659258723258972, -0.2588190734386444, 0.0, -1.5529145002365112,
|
||||
-0.08852133899927139, -0.3303661346435547, -0.9396926164627075, -0.10281121730804443,
|
||||
0.24321036040782928, 0.9076734185218811, -0.342020183801651, 6.130080699920654,
|
||||
0, 0, 0, 1);
|
||||
//! [camera-pose-from-Blender-at-location-2]
|
||||
|
||||
//! [camera-intrinsics-from-Blender]
|
||||
Mat cameraMatrix = new Mat(3, 3, CvType.CV_64FC1);
|
||||
cameraMatrix.put(0, 0, 700.0, 0.0, 320.0, 0.0, 700.0, 240.0, 0, 0, 1 );
|
||||
//! [camera-intrinsics-from-Blender]
|
||||
|
||||
//! [extract-rotation]
|
||||
Range rowRange = new Range(0,3);
|
||||
Range colRange = new Range(0,3);
|
||||
//! [extract-rotation]
|
||||
|
||||
//! [compute-rotation-displacement]
|
||||
//c1Mo * oMc2
|
||||
Mat R1 = new Mat(c1Mo, rowRange, colRange);
|
||||
Mat R2 = new Mat(c2Mo, rowRange, colRange);
|
||||
Mat R_2to1 = new Mat();
|
||||
Core.gemm(R1, R2.t(), 1, new Mat(), 0, R_2to1 );
|
||||
//! [compute-rotation-displacement]
|
||||
|
||||
//! [compute-homography]
|
||||
Mat tmp = new Mat(), H = new Mat();
|
||||
Core.gemm(cameraMatrix, R_2to1, 1, new Mat(), 0, tmp);
|
||||
Core.gemm(tmp, cameraMatrix.inv(), 1, new Mat(), 0, H);
|
||||
Scalar s = new Scalar(H.get(2, 2)[0]);
|
||||
Core.divide(H, s, H);
|
||||
System.out.println(H.dump());
|
||||
//! [compute-homography]
|
||||
|
||||
//! [stitch]
|
||||
Mat img_stitch = new Mat();
|
||||
Imgproc.warpPerspective(img2, img_stitch, H, new Size(img2.cols()*2, img2.rows()) );
|
||||
Mat half = new Mat();
|
||||
half = new Mat(img_stitch, new Rect(0, 0, img1.cols(), img1.rows()));
|
||||
img1.copyTo(half);
|
||||
//! [stitch]
|
||||
|
||||
Mat img_compare = new Mat();
|
||||
Mat img_space = Mat.zeros(new Size(50, img1.rows()), CvType.CV_8UC3);
|
||||
List<Mat>list = new ArrayList<>();
|
||||
list.add(img1);
|
||||
list.add(img_space);
|
||||
list.add(img2);
|
||||
Core.hconcat(list, img_compare);
|
||||
|
||||
HighGui.imshow("Compare Images", img_compare);
|
||||
HighGui.imshow("Panorama Stitching", img_stitch);
|
||||
HighGui.waitKey(0);
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class PanoramaStitchingRotatingCamera {
|
||||
public static void main(String[] args) {
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new PanoramaStitchingRotatingCameraRun().basicPanoramaStitching(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,89 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.calib3d.Calib3d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
|
||||
class PerspectiveCorrectionRun {
|
||||
void perspectiveCorrection (String[] args) {
|
||||
String img1Path = args[0], img2Path = args[1];
|
||||
Mat img1 = Imgcodecs.imread(img1Path);
|
||||
Mat img2 = Imgcodecs.imread(img2Path);
|
||||
|
||||
//! [find-corners]
|
||||
MatOfPoint2f corners1 = new MatOfPoint2f(), corners2 = new MatOfPoint2f();
|
||||
boolean found1 = Calib3d.findChessboardCorners(img1, new Size(9, 6), corners1 );
|
||||
boolean found2 = Calib3d.findChessboardCorners(img2, new Size(9, 6), corners2 );
|
||||
//! [find-corners]
|
||||
|
||||
if (!found1 || !found2) {
|
||||
System.out.println("Error, cannot find the chessboard corners in both images.");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
//! [estimate-homography]
|
||||
Mat H = new Mat();
|
||||
H = Calib3d.findHomography(corners1, corners2);
|
||||
System.out.println(H.dump());
|
||||
//! [estimate-homography]
|
||||
|
||||
//! [warp-chessboard]
|
||||
Mat img1_warp = new Mat();
|
||||
Imgproc.warpPerspective(img1, img1_warp, H, img1.size());
|
||||
//! [warp-chessboard]
|
||||
|
||||
Mat img_draw_warp = new Mat();
|
||||
List<Mat> list1 = new ArrayList<>(), list2 = new ArrayList<>() ;
|
||||
list1.add(img2);
|
||||
list1.add(img1_warp);
|
||||
Core.hconcat(list1, img_draw_warp);
|
||||
HighGui.imshow("Desired chessboard view / Warped source chessboard view", img_draw_warp);
|
||||
|
||||
//! [compute-transformed-corners]
|
||||
Mat img_draw_matches = new Mat();
|
||||
list2.add(img1);
|
||||
list2.add(img2);
|
||||
Core.hconcat(list2, img_draw_matches);
|
||||
Point []corners1Arr = corners1.toArray();
|
||||
|
||||
for (int i = 0 ; i < corners1Arr.length; i++) {
|
||||
Mat pt1 = new Mat(3, 1, CvType.CV_64FC1), pt2 = new Mat();
|
||||
pt1.put(0, 0, corners1Arr[i].x, corners1Arr[i].y, 1 );
|
||||
|
||||
Core.gemm(H, pt1, 1, new Mat(), 0, pt2);
|
||||
double[] data = pt2.get(2, 0);
|
||||
Core.divide(pt2, new Scalar(data[0]), pt2);
|
||||
|
||||
double[] data1 =pt2.get(0, 0);
|
||||
double[] data2 = pt2.get(1, 0);
|
||||
Point end = new Point((int)(img1.cols()+ data1[0]), (int)data2[0]);
|
||||
Imgproc.line(img_draw_matches, corners1Arr[i], end, RandomColor(), 2);
|
||||
}
|
||||
|
||||
HighGui.imshow("Draw matches", img_draw_matches);
|
||||
HighGui.waitKey(0);
|
||||
//! [compute-transformed-corners]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Scalar RandomColor () {
|
||||
Random rng = new Random();
|
||||
int r = rng.nextInt(256);
|
||||
int g = rng.nextInt(256);
|
||||
int b = rng.nextInt(256);
|
||||
return new Scalar(r, g, b);
|
||||
}
|
||||
}
|
||||
|
||||
public class PerspectiveCorrection {
|
||||
public static void main (String[] args) {
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new PerspectiveCorrectionRun().perspectiveCorrection(args);
|
||||
}
|
||||
}
|
163
3rdparty/opencv-4.5.4/samples/java/tutorial_code/features2D/akaze_matching/AKAZEMatchDemo.java
vendored
Normal file
163
3rdparty/opencv-4.5.4/samples/java/tutorial_code/features2D/akaze_matching/AKAZEMatchDemo.java
vendored
Normal file
@ -0,0 +1,163 @@
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.DMatch;
|
||||
import org.opencv.core.KeyPoint;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfDMatch;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.features2d.AKAZE;
|
||||
import org.opencv.features2d.DescriptorMatcher;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.w3c.dom.Document;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
class AKAZEMatch {
|
||||
public void run(String[] args) {
|
||||
//! [load]
|
||||
String filename1 = args.length > 2 ? args[0] : "../data/graf1.png";
|
||||
String filename2 = args.length > 2 ? args[1] : "../data/graf3.png";
|
||||
String filename3 = args.length > 2 ? args[2] : "../data/H1to3p.xml";
|
||||
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
if (img1.empty() || img2.empty()) {
|
||||
System.err.println("Cannot read images!");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
File file = new File(filename3);
|
||||
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
|
||||
DocumentBuilder documentBuilder;
|
||||
Document document;
|
||||
Mat homography = new Mat(3, 3, CvType.CV_64F);
|
||||
double[] homographyData = new double[(int) (homography.total()*homography.channels())];
|
||||
try {
|
||||
documentBuilder = documentBuilderFactory.newDocumentBuilder();
|
||||
document = documentBuilder.parse(file);
|
||||
String homographyStr = document.getElementsByTagName("data").item(0).getTextContent();
|
||||
String[] splited = homographyStr.split("\\s+");
|
||||
int idx = 0;
|
||||
for (String s : splited) {
|
||||
if (!s.isEmpty()) {
|
||||
homographyData[idx] = Double.parseDouble(s);
|
||||
idx++;
|
||||
}
|
||||
}
|
||||
} catch (ParserConfigurationException e) {
|
||||
e.printStackTrace();
|
||||
System.exit(0);
|
||||
} catch (SAXException e) {
|
||||
e.printStackTrace();
|
||||
System.exit(0);
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
System.exit(0);
|
||||
}
|
||||
homography.put(0, 0, homographyData);
|
||||
//! [load]
|
||||
|
||||
//! [AKAZE]
|
||||
AKAZE akaze = AKAZE.create();
|
||||
MatOfKeyPoint kpts1 = new MatOfKeyPoint(), kpts2 = new MatOfKeyPoint();
|
||||
Mat desc1 = new Mat(), desc2 = new Mat();
|
||||
akaze.detectAndCompute(img1, new Mat(), kpts1, desc1);
|
||||
akaze.detectAndCompute(img2, new Mat(), kpts2, desc2);
|
||||
//! [AKAZE]
|
||||
|
||||
//! [2-nn matching]
|
||||
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
|
||||
List<MatOfDMatch> knnMatches = new ArrayList<>();
|
||||
matcher.knnMatch(desc1, desc2, knnMatches, 2);
|
||||
//! [2-nn matching]
|
||||
|
||||
//! [ratio test filtering]
|
||||
float ratioThreshold = 0.8f; // Nearest neighbor matching ratio
|
||||
List<KeyPoint> listOfMatched1 = new ArrayList<>();
|
||||
List<KeyPoint> listOfMatched2 = new ArrayList<>();
|
||||
List<KeyPoint> listOfKeypoints1 = kpts1.toList();
|
||||
List<KeyPoint> listOfKeypoints2 = kpts2.toList();
|
||||
for (int i = 0; i < knnMatches.size(); i++) {
|
||||
DMatch[] matches = knnMatches.get(i).toArray();
|
||||
float dist1 = matches[0].distance;
|
||||
float dist2 = matches[1].distance;
|
||||
if (dist1 < ratioThreshold * dist2) {
|
||||
listOfMatched1.add(listOfKeypoints1.get(matches[0].queryIdx));
|
||||
listOfMatched2.add(listOfKeypoints2.get(matches[0].trainIdx));
|
||||
}
|
||||
}
|
||||
//! [ratio test filtering]
|
||||
|
||||
//! [homography check]
|
||||
double inlierThreshold = 2.5; // Distance threshold to identify inliers with homography check
|
||||
List<KeyPoint> listOfInliers1 = new ArrayList<>();
|
||||
List<KeyPoint> listOfInliers2 = new ArrayList<>();
|
||||
List<DMatch> listOfGoodMatches = new ArrayList<>();
|
||||
for (int i = 0; i < listOfMatched1.size(); i++) {
|
||||
Mat col = new Mat(3, 1, CvType.CV_64F);
|
||||
double[] colData = new double[(int) (col.total() * col.channels())];
|
||||
colData[0] = listOfMatched1.get(i).pt.x;
|
||||
colData[1] = listOfMatched1.get(i).pt.y;
|
||||
colData[2] = 1.0;
|
||||
col.put(0, 0, colData);
|
||||
|
||||
Mat colRes = new Mat();
|
||||
Core.gemm(homography, col, 1.0, new Mat(), 0.0, colRes);
|
||||
colRes.get(0, 0, colData);
|
||||
Core.multiply(colRes, new Scalar(1.0 / colData[2]), col);
|
||||
col.get(0, 0, colData);
|
||||
|
||||
double dist = Math.sqrt(Math.pow(colData[0] - listOfMatched2.get(i).pt.x, 2) +
|
||||
Math.pow(colData[1] - listOfMatched2.get(i).pt.y, 2));
|
||||
|
||||
if (dist < inlierThreshold) {
|
||||
listOfGoodMatches.add(new DMatch(listOfInliers1.size(), listOfInliers2.size(), 0));
|
||||
listOfInliers1.add(listOfMatched1.get(i));
|
||||
listOfInliers2.add(listOfMatched2.get(i));
|
||||
}
|
||||
}
|
||||
//! [homography check]
|
||||
|
||||
//! [draw final matches]
|
||||
Mat res = new Mat();
|
||||
MatOfKeyPoint inliers1 = new MatOfKeyPoint(listOfInliers1.toArray(new KeyPoint[listOfInliers1.size()]));
|
||||
MatOfKeyPoint inliers2 = new MatOfKeyPoint(listOfInliers2.toArray(new KeyPoint[listOfInliers2.size()]));
|
||||
MatOfDMatch goodMatches = new MatOfDMatch(listOfGoodMatches.toArray(new DMatch[listOfGoodMatches.size()]));
|
||||
Features2d.drawMatches(img1, inliers1, img2, inliers2, goodMatches, res);
|
||||
Imgcodecs.imwrite("akaze_result.png", res);
|
||||
|
||||
double inlierRatio = listOfInliers1.size() / (double) listOfMatched1.size();
|
||||
System.out.println("A-KAZE Matching Results");
|
||||
System.out.println("*******************************");
|
||||
System.out.println("# Keypoints 1: \t" + listOfKeypoints1.size());
|
||||
System.out.println("# Keypoints 2: \t" + listOfKeypoints2.size());
|
||||
System.out.println("# Matches: \t" + listOfMatched1.size());
|
||||
System.out.println("# Inliers: \t" + listOfInliers1.size());
|
||||
System.out.println("# Inliers Ratio: \t" + inlierRatio);
|
||||
|
||||
HighGui.imshow("result", res);
|
||||
HighGui.waitKey();
|
||||
//! [draw final matches]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class AKAZEMatchDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new AKAZEMatch().run(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,56 @@
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfDMatch;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.features2d.DescriptorMatcher;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.xfeatures2d.SURF;
|
||||
|
||||
class SURFMatching {
|
||||
public void run(String[] args) {
|
||||
String filename1 = args.length > 1 ? args[0] : "../data/box.png";
|
||||
String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
|
||||
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
if (img1.empty() || img2.empty()) {
|
||||
System.err.println("Cannot read images!");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
//-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
|
||||
double hessianThreshold = 400;
|
||||
int nOctaves = 4, nOctaveLayers = 3;
|
||||
boolean extended = false, upright = false;
|
||||
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
|
||||
MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
|
||||
Mat descriptors1 = new Mat(), descriptors2 = new Mat();
|
||||
detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
|
||||
detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);
|
||||
|
||||
//-- Step 2: Matching descriptor vectors with a brute force matcher
|
||||
// Since SURF is a floating-point descriptor NORM_L2 is used
|
||||
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
|
||||
MatOfDMatch matches = new MatOfDMatch();
|
||||
matcher.match(descriptors1, descriptors2, matches);
|
||||
|
||||
//-- Draw matches
|
||||
Mat imgMatches = new Mat();
|
||||
Features2d.drawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
|
||||
|
||||
HighGui.imshow("Matches", imgMatches);
|
||||
HighGui.waitKey(0);
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class SURFMatchingDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new SURFMatching().run(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,44 @@
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.xfeatures2d.SURF;
|
||||
|
||||
class SURFDetection {
|
||||
public void run(String[] args) {
|
||||
String filename = args.length > 0 ? args[0] : "../data/box.png";
|
||||
Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
//-- Step 1: Detect the keypoints using SURF Detector
|
||||
double hessianThreshold = 400;
|
||||
int nOctaves = 4, nOctaveLayers = 3;
|
||||
boolean extended = false, upright = false;
|
||||
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
|
||||
MatOfKeyPoint keypoints = new MatOfKeyPoint();
|
||||
detector.detect(src, keypoints);
|
||||
|
||||
//-- Draw keypoints
|
||||
Features2d.drawKeypoints(src, keypoints, src);
|
||||
|
||||
//-- Show detected (drawn) keypoints
|
||||
HighGui.imshow("SURF Keypoints", src);
|
||||
HighGui.waitKey(0);
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class SURFDetectionDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new SURFDetection().run(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,78 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.DMatch;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfByte;
|
||||
import org.opencv.core.MatOfDMatch;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.features2d.DescriptorMatcher;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.xfeatures2d.SURF;
|
||||
|
||||
class SURFFLANNMatching {
|
||||
public void run(String[] args) {
|
||||
String filename1 = args.length > 1 ? args[0] : "../data/box.png";
|
||||
String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
|
||||
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
if (img1.empty() || img2.empty()) {
|
||||
System.err.println("Cannot read images!");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
//-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
|
||||
double hessianThreshold = 400;
|
||||
int nOctaves = 4, nOctaveLayers = 3;
|
||||
boolean extended = false, upright = false;
|
||||
SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
|
||||
MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
|
||||
Mat descriptors1 = new Mat(), descriptors2 = new Mat();
|
||||
detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
|
||||
detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);
|
||||
|
||||
//-- Step 2: Matching descriptor vectors with a FLANN based matcher
|
||||
// Since SURF is a floating-point descriptor NORM_L2 is used
|
||||
DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
|
||||
List<MatOfDMatch> knnMatches = new ArrayList<>();
|
||||
matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2);
|
||||
|
||||
//-- Filter matches using the Lowe's ratio test
|
||||
float ratioThresh = 0.7f;
|
||||
List<DMatch> listOfGoodMatches = new ArrayList<>();
|
||||
for (int i = 0; i < knnMatches.size(); i++) {
|
||||
if (knnMatches.get(i).rows() > 1) {
|
||||
DMatch[] matches = knnMatches.get(i).toArray();
|
||||
if (matches[0].distance < ratioThresh * matches[1].distance) {
|
||||
listOfGoodMatches.add(matches[0]);
|
||||
}
|
||||
}
|
||||
}
|
||||
MatOfDMatch goodMatches = new MatOfDMatch();
|
||||
goodMatches.fromList(listOfGoodMatches);
|
||||
|
||||
//-- Draw matches
|
||||
Mat imgMatches = new Mat();
|
||||
Features2d.drawMatches(img1, keypoints1, img2, keypoints2, goodMatches, imgMatches, Scalar.all(-1),
|
||||
Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);
|
||||
|
||||
//-- Show detected matches
|
||||
HighGui.imshow("Good Matches", imgMatches);
|
||||
HighGui.waitKey(0);
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class SURFFLANNMatchingDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new SURFFLANNMatching().run(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,130 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.calib3d.Calib3d;
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.DMatch;
|
||||
import org.opencv.core.KeyPoint;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfByte;
|
||||
import org.opencv.core.MatOfDMatch;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.core.MatOfPoint2f;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.features2d.DescriptorMatcher;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
import org.opencv.xfeatures2d.SURF;
|
||||
|
||||
class SURFFLANNMatchingHomography {
    /**
     * Finds a known object in a cluttered scene: SURF keypoints + FLANN matching,
     * Lowe's ratio filtering, RANSAC homography estimation, then projection of
     * the object's corners into the scene image to draw its bounding quad.
     *
     * @param args optional: args[0] = object image, args[1] = scene image
     *             (both read as grayscale; defaults used when fewer than 2 args)
     */
    public void run(String[] args) {
        String filenameObject = args.length > 1 ? args[0] : "../data/box.png";
        String filenameScene = args.length > 1 ? args[1] : "../data/box_in_scene.png";
        Mat imgObject = Imgcodecs.imread(filenameObject, Imgcodecs.IMREAD_GRAYSCALE);
        Mat imgScene = Imgcodecs.imread(filenameScene, Imgcodecs.IMREAD_GRAYSCALE);
        if (imgObject.empty() || imgScene.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
        double hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
        MatOfKeyPoint keypointsObject = new MatOfKeyPoint(), keypointsScene = new MatOfKeyPoint();
        Mat descriptorsObject = new Mat(), descriptorsScene = new Mat();
        detector.detectAndCompute(imgObject, new Mat(), keypointsObject, descriptorsObject);
        detector.detectAndCompute(imgScene, new Mat(), keypointsScene, descriptorsScene);

        //-- Step 2: Matching descriptor vectors with a FLANN based matcher
        // Since SURF is a floating-point descriptor NORM_L2 is used
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
        List<MatOfDMatch> knnMatches = new ArrayList<>();
        matcher.knnMatch(descriptorsObject, descriptorsScene, knnMatches, 2);

        //-- Filter matches using the Lowe's ratio test
        float ratioThresh = 0.75f;
        List<DMatch> listOfGoodMatches = new ArrayList<>();
        for (int i = 0; i < knnMatches.size(); i++) {
            // rows() > 1 guards keypoints with fewer than 2 returned neighbours.
            if (knnMatches.get(i).rows() > 1) {
                DMatch[] matches = knnMatches.get(i).toArray();
                if (matches[0].distance < ratioThresh * matches[1].distance) {
                    listOfGoodMatches.add(matches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat imgMatches = new Mat();
        Features2d.drawMatches(imgObject, keypointsObject, imgScene, keypointsScene, goodMatches, imgMatches, Scalar.all(-1),
                Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);

        //-- Localize the object
        List<Point> obj = new ArrayList<>();
        List<Point> scene = new ArrayList<>();

        List<KeyPoint> listOfKeypointsObject = keypointsObject.toList();
        List<KeyPoint> listOfKeypointsScene = keypointsScene.toList();
        for (int i = 0; i < listOfGoodMatches.size(); i++) {
            //-- Get the keypoints from the good matches
            obj.add(listOfKeypointsObject.get(listOfGoodMatches.get(i).queryIdx).pt);
            scene.add(listOfKeypointsScene.get(listOfGoodMatches.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMat = new MatOfPoint2f(), sceneMat = new MatOfPoint2f();
        objMat.fromList(obj);
        sceneMat.fromList(scene);
        double ransacReprojThreshold = 3.0;
        // RANSAC rejects remaining outlier correspondences while estimating H.
        Mat H = Calib3d.findHomography( objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold );

        //-- Get the corners from the image_1 ( the object to be "detected" )
        // 4x1 two-channel Mat; data is interleaved (x, y) float pairs in the
        // order (0,0), (cols,0), (cols,rows), (0,rows).
        Mat objCorners = new Mat(4, 1, CvType.CV_32FC2), sceneCorners = new Mat();
        float[] objCornersData = new float[(int) (objCorners.total() * objCorners.channels())];
        objCorners.get(0, 0, objCornersData);
        objCornersData[0] = 0;
        objCornersData[1] = 0;
        objCornersData[2] = imgObject.cols();
        objCornersData[3] = 0;
        objCornersData[4] = imgObject.cols();
        objCornersData[5] = imgObject.rows();
        objCornersData[6] = 0;
        objCornersData[7] = imgObject.rows();
        objCorners.put(0, 0, objCornersData);

        // Map the object's corners into scene coordinates via the homography.
        Core.perspectiveTransform(objCorners, sceneCorners, H);
        float[] sceneCornersData = new float[(int) (sceneCorners.total() * sceneCorners.channels())];
        sceneCorners.get(0, 0, sceneCornersData);

        //-- Draw lines between the corners (the mapped object in the scene - image_2 )
        // x coordinates are shifted right by imgObject.cols() because imgMatches
        // shows object|scene side by side.
        Imgproc.line(imgMatches, new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]),
                new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]),
                new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]),
                new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]),
                new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);

        //-- Show detected matches
        HighGui.imshow("Good Matches & Object detection", imgMatches);
        HighGui.waitKey(0);

        System.exit(0);
    }
}
|
||||
|
||||
public class SURFFLANNMatchingHomographyDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new SURFFLANNMatchingHomography().run(args);
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user