feat: 切换后端至PaddleOCR-NCNN,切换工程为CMake
1.项目后端整体迁移至PaddleOCR-NCNN算法,已通过基本的兼容性测试 2.工程改为使用CMake组织,后续为了更好地兼容第三方库,不再提供QMake工程 3.重整权利声明文件,重整代码工程,确保最小化侵权风险 Log: 切换后端至PaddleOCR-NCNN,切换工程为CMake Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
This commit is contained in:
13
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/VID5.xml
vendored
Normal file
13
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/VID5.xml
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0"?>
|
||||
<opencv_storage>
|
||||
<images>
|
||||
images/CameraCalibraation/VID5/xx1.jpg
|
||||
images/CameraCalibraation/VID5/xx2.jpg
|
||||
images/CameraCalibraation/VID5/xx3.jpg
|
||||
images/CameraCalibraation/VID5/xx4.jpg
|
||||
images/CameraCalibraation/VID5/xx5.jpg
|
||||
images/CameraCalibraation/VID5/xx6.jpg
|
||||
images/CameraCalibraation/VID5/xx7.jpg
|
||||
images/CameraCalibraation/VID5/xx8.jpg
|
||||
</images>
|
||||
</opencv_storage>
|
751
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/camera_calibration.cpp
vendored
Normal file
751
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/camera_calibration.cpp
vendored
Normal file
@ -0,0 +1,751 @@
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <ctime>
|
||||
#include <cstdio>
|
||||
|
||||
#include <opencv2/core.hpp>
|
||||
#include <opencv2/core/utility.hpp>
|
||||
#include <opencv2/imgproc.hpp>
|
||||
#include <opencv2/calib3d.hpp>
|
||||
#include <opencv2/imgcodecs.hpp>
|
||||
#include <opencv2/videoio.hpp>
|
||||
#include <opencv2/highgui.hpp>
|
||||
|
||||
using namespace cv;
|
||||
using namespace std;
|
||||
|
||||
class Settings
|
||||
{
|
||||
public:
|
||||
Settings() : goodInput(false) {}
|
||||
enum Pattern { NOT_EXISTING, CHESSBOARD, CIRCLES_GRID, ASYMMETRIC_CIRCLES_GRID };
|
||||
enum InputType { INVALID, CAMERA, VIDEO_FILE, IMAGE_LIST };
|
||||
|
||||
void write(FileStorage& fs) const //Write serialization for this class
|
||||
{
|
||||
fs << "{"
|
||||
<< "BoardSize_Width" << boardSize.width
|
||||
<< "BoardSize_Height" << boardSize.height
|
||||
<< "Square_Size" << squareSize
|
||||
<< "Calibrate_Pattern" << patternToUse
|
||||
<< "Calibrate_NrOfFrameToUse" << nrFrames
|
||||
<< "Calibrate_FixAspectRatio" << aspectRatio
|
||||
<< "Calibrate_AssumeZeroTangentialDistortion" << calibZeroTangentDist
|
||||
<< "Calibrate_FixPrincipalPointAtTheCenter" << calibFixPrincipalPoint
|
||||
|
||||
<< "Write_DetectedFeaturePoints" << writePoints
|
||||
<< "Write_extrinsicParameters" << writeExtrinsics
|
||||
<< "Write_gridPoints" << writeGrid
|
||||
<< "Write_outputFileName" << outputFileName
|
||||
|
||||
<< "Show_UndistortedImage" << showUndistorted
|
||||
|
||||
<< "Input_FlipAroundHorizontalAxis" << flipVertical
|
||||
<< "Input_Delay" << delay
|
||||
<< "Input" << input
|
||||
<< "}";
|
||||
}
|
||||
void read(const FileNode& node) //Read serialization for this class
|
||||
{
|
||||
node["BoardSize_Width" ] >> boardSize.width;
|
||||
node["BoardSize_Height"] >> boardSize.height;
|
||||
node["Calibrate_Pattern"] >> patternToUse;
|
||||
node["Square_Size"] >> squareSize;
|
||||
node["Calibrate_NrOfFrameToUse"] >> nrFrames;
|
||||
node["Calibrate_FixAspectRatio"] >> aspectRatio;
|
||||
node["Write_DetectedFeaturePoints"] >> writePoints;
|
||||
node["Write_extrinsicParameters"] >> writeExtrinsics;
|
||||
node["Write_gridPoints"] >> writeGrid;
|
||||
node["Write_outputFileName"] >> outputFileName;
|
||||
node["Calibrate_AssumeZeroTangentialDistortion"] >> calibZeroTangentDist;
|
||||
node["Calibrate_FixPrincipalPointAtTheCenter"] >> calibFixPrincipalPoint;
|
||||
node["Calibrate_UseFisheyeModel"] >> useFisheye;
|
||||
node["Input_FlipAroundHorizontalAxis"] >> flipVertical;
|
||||
node["Show_UndistortedImage"] >> showUndistorted;
|
||||
node["Input"] >> input;
|
||||
node["Input_Delay"] >> delay;
|
||||
node["Fix_K1"] >> fixK1;
|
||||
node["Fix_K2"] >> fixK2;
|
||||
node["Fix_K3"] >> fixK3;
|
||||
node["Fix_K4"] >> fixK4;
|
||||
node["Fix_K5"] >> fixK5;
|
||||
|
||||
validate();
|
||||
}
|
||||
void validate()
|
||||
{
|
||||
goodInput = true;
|
||||
if (boardSize.width <= 0 || boardSize.height <= 0)
|
||||
{
|
||||
cerr << "Invalid Board size: " << boardSize.width << " " << boardSize.height << endl;
|
||||
goodInput = false;
|
||||
}
|
||||
if (squareSize <= 10e-6)
|
||||
{
|
||||
cerr << "Invalid square size " << squareSize << endl;
|
||||
goodInput = false;
|
||||
}
|
||||
if (nrFrames <= 0)
|
||||
{
|
||||
cerr << "Invalid number of frames " << nrFrames << endl;
|
||||
goodInput = false;
|
||||
}
|
||||
|
||||
if (input.empty()) // Check for valid input
|
||||
inputType = INVALID;
|
||||
else
|
||||
{
|
||||
if (input[0] >= '0' && input[0] <= '9')
|
||||
{
|
||||
stringstream ss(input);
|
||||
ss >> cameraID;
|
||||
inputType = CAMERA;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (isListOfImages(input) && readStringList(input, imageList))
|
||||
{
|
||||
inputType = IMAGE_LIST;
|
||||
nrFrames = (nrFrames < (int)imageList.size()) ? nrFrames : (int)imageList.size();
|
||||
}
|
||||
else
|
||||
inputType = VIDEO_FILE;
|
||||
}
|
||||
if (inputType == CAMERA)
|
||||
inputCapture.open(cameraID);
|
||||
if (inputType == VIDEO_FILE)
|
||||
inputCapture.open(input);
|
||||
if (inputType != IMAGE_LIST && !inputCapture.isOpened())
|
||||
inputType = INVALID;
|
||||
}
|
||||
if (inputType == INVALID)
|
||||
{
|
||||
cerr << " Input does not exist: " << input;
|
||||
goodInput = false;
|
||||
}
|
||||
|
||||
flag = 0;
|
||||
if(calibFixPrincipalPoint) flag |= CALIB_FIX_PRINCIPAL_POINT;
|
||||
if(calibZeroTangentDist) flag |= CALIB_ZERO_TANGENT_DIST;
|
||||
if(aspectRatio) flag |= CALIB_FIX_ASPECT_RATIO;
|
||||
if(fixK1) flag |= CALIB_FIX_K1;
|
||||
if(fixK2) flag |= CALIB_FIX_K2;
|
||||
if(fixK3) flag |= CALIB_FIX_K3;
|
||||
if(fixK4) flag |= CALIB_FIX_K4;
|
||||
if(fixK5) flag |= CALIB_FIX_K5;
|
||||
|
||||
if (useFisheye) {
|
||||
// the fisheye model has its own enum, so overwrite the flags
|
||||
flag = fisheye::CALIB_FIX_SKEW | fisheye::CALIB_RECOMPUTE_EXTRINSIC;
|
||||
if(fixK1) flag |= fisheye::CALIB_FIX_K1;
|
||||
if(fixK2) flag |= fisheye::CALIB_FIX_K2;
|
||||
if(fixK3) flag |= fisheye::CALIB_FIX_K3;
|
||||
if(fixK4) flag |= fisheye::CALIB_FIX_K4;
|
||||
if (calibFixPrincipalPoint) flag |= fisheye::CALIB_FIX_PRINCIPAL_POINT;
|
||||
}
|
||||
|
||||
calibrationPattern = NOT_EXISTING;
|
||||
if (!patternToUse.compare("CHESSBOARD")) calibrationPattern = CHESSBOARD;
|
||||
if (!patternToUse.compare("CIRCLES_GRID")) calibrationPattern = CIRCLES_GRID;
|
||||
if (!patternToUse.compare("ASYMMETRIC_CIRCLES_GRID")) calibrationPattern = ASYMMETRIC_CIRCLES_GRID;
|
||||
if (calibrationPattern == NOT_EXISTING)
|
||||
{
|
||||
cerr << " Camera calibration mode does not exist: " << patternToUse << endl;
|
||||
goodInput = false;
|
||||
}
|
||||
atImageList = 0;
|
||||
|
||||
}
|
||||
Mat nextImage()
|
||||
{
|
||||
Mat result;
|
||||
if( inputCapture.isOpened() )
|
||||
{
|
||||
Mat view0;
|
||||
inputCapture >> view0;
|
||||
view0.copyTo(result);
|
||||
}
|
||||
else if( atImageList < imageList.size() )
|
||||
result = imread(imageList[atImageList++], IMREAD_COLOR);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static bool readStringList( const string& filename, vector<string>& l )
|
||||
{
|
||||
l.clear();
|
||||
FileStorage fs(filename, FileStorage::READ);
|
||||
if( !fs.isOpened() )
|
||||
return false;
|
||||
FileNode n = fs.getFirstTopLevelNode();
|
||||
if( n.type() != FileNode::SEQ )
|
||||
return false;
|
||||
FileNodeIterator it = n.begin(), it_end = n.end();
|
||||
for( ; it != it_end; ++it )
|
||||
l.push_back((string)*it);
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool isListOfImages( const string& filename)
|
||||
{
|
||||
string s(filename);
|
||||
// Look for file extension
|
||||
if( s.find(".xml") == string::npos && s.find(".yaml") == string::npos && s.find(".yml") == string::npos )
|
||||
return false;
|
||||
else
|
||||
return true;
|
||||
}
|
||||
public:
|
||||
Size boardSize; // The size of the board -> Number of items by width and height
|
||||
Pattern calibrationPattern; // One of the Chessboard, circles, or asymmetric circle pattern
|
||||
float squareSize; // The size of a square in your defined unit (point, millimeter,etc).
|
||||
int nrFrames; // The number of frames to use from the input for calibration
|
||||
float aspectRatio; // The aspect ratio
|
||||
int delay; // In case of a video input
|
||||
bool writePoints; // Write detected feature points
|
||||
bool writeExtrinsics; // Write extrinsic parameters
|
||||
bool writeGrid; // Write refined 3D target grid points
|
||||
bool calibZeroTangentDist; // Assume zero tangential distortion
|
||||
bool calibFixPrincipalPoint; // Fix the principal point at the center
|
||||
bool flipVertical; // Flip the captured images around the horizontal axis
|
||||
string outputFileName; // The name of the file where to write
|
||||
bool showUndistorted; // Show undistorted images after calibration
|
||||
string input; // The input ->
|
||||
bool useFisheye; // use fisheye camera model for calibration
|
||||
bool fixK1; // fix K1 distortion coefficient
|
||||
bool fixK2; // fix K2 distortion coefficient
|
||||
bool fixK3; // fix K3 distortion coefficient
|
||||
bool fixK4; // fix K4 distortion coefficient
|
||||
bool fixK5; // fix K5 distortion coefficient
|
||||
|
||||
int cameraID;
|
||||
vector<string> imageList;
|
||||
size_t atImageList;
|
||||
VideoCapture inputCapture;
|
||||
InputType inputType;
|
||||
bool goodInput;
|
||||
int flag;
|
||||
|
||||
private:
|
||||
string patternToUse;
|
||||
|
||||
|
||||
};
|
||||
|
||||
static inline void read(const FileNode& node, Settings& x, const Settings& default_value = Settings())
|
||||
{
|
||||
if(node.empty())
|
||||
x = default_value;
|
||||
else
|
||||
x.read(node);
|
||||
}
|
||||
|
||||
enum { DETECTION = 0, CAPTURING = 1, CALIBRATED = 2 };
|
||||
|
||||
bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
|
||||
vector<vector<Point2f> > imagePoints, float grid_width, bool release_object);
|
||||
|
||||
int main(int argc, char* argv[])
|
||||
{
|
||||
const String keys
|
||||
= "{help h usage ? | | print this message }"
|
||||
"{@settings |default.xml| input setting file }"
|
||||
"{d | | actual distance between top-left and top-right corners of "
|
||||
"the calibration grid }"
|
||||
"{winSize | 11 | Half of search window for cornerSubPix }";
|
||||
CommandLineParser parser(argc, argv, keys);
|
||||
parser.about("This is a camera calibration sample.\n"
|
||||
"Usage: camera_calibration [configuration_file -- default ./default.xml]\n"
|
||||
"Near the sample file you'll find the configuration file, which has detailed help of "
|
||||
"how to edit it. It may be any OpenCV supported file format XML/YAML.");
|
||||
if (!parser.check()) {
|
||||
parser.printErrors();
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (parser.has("help")) {
|
||||
parser.printMessage();
|
||||
return 0;
|
||||
}
|
||||
|
||||
//! [file_read]
|
||||
Settings s;
|
||||
const string inputSettingsFile = parser.get<string>(0);
|
||||
FileStorage fs(inputSettingsFile, FileStorage::READ); // Read the settings
|
||||
if (!fs.isOpened())
|
||||
{
|
||||
cout << "Could not open the configuration file: \"" << inputSettingsFile << "\"" << endl;
|
||||
parser.printMessage();
|
||||
return -1;
|
||||
}
|
||||
fs["Settings"] >> s;
|
||||
fs.release(); // close Settings file
|
||||
//! [file_read]
|
||||
|
||||
//FileStorage fout("settings.yml", FileStorage::WRITE); // write config as YAML
|
||||
//fout << "Settings" << s;
|
||||
|
||||
if (!s.goodInput)
|
||||
{
|
||||
cout << "Invalid input detected. Application stopping. " << endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
int winSize = parser.get<int>("winSize");
|
||||
|
||||
float grid_width = s.squareSize * (s.boardSize.width - 1);
|
||||
bool release_object = false;
|
||||
if (parser.has("d")) {
|
||||
grid_width = parser.get<float>("d");
|
||||
release_object = true;
|
||||
}
|
||||
|
||||
vector<vector<Point2f> > imagePoints;
|
||||
Mat cameraMatrix, distCoeffs;
|
||||
Size imageSize;
|
||||
int mode = s.inputType == Settings::IMAGE_LIST ? CAPTURING : DETECTION;
|
||||
clock_t prevTimestamp = 0;
|
||||
const Scalar RED(0,0,255), GREEN(0,255,0);
|
||||
const char ESC_KEY = 27;
|
||||
|
||||
//! [get_input]
|
||||
for(;;)
|
||||
{
|
||||
Mat view;
|
||||
bool blinkOutput = false;
|
||||
|
||||
view = s.nextImage();
|
||||
|
||||
//----- If no more image, or got enough, then stop calibration and show result -------------
|
||||
if( mode == CAPTURING && imagePoints.size() >= (size_t)s.nrFrames )
|
||||
{
|
||||
if(runCalibrationAndSave(s, imageSize, cameraMatrix, distCoeffs, imagePoints, grid_width,
|
||||
release_object))
|
||||
mode = CALIBRATED;
|
||||
else
|
||||
mode = DETECTION;
|
||||
}
|
||||
if(view.empty()) // If there are no more images stop the loop
|
||||
{
|
||||
// if calibration threshold was not reached yet, calibrate now
|
||||
if( mode != CALIBRATED && !imagePoints.empty() )
|
||||
runCalibrationAndSave(s, imageSize, cameraMatrix, distCoeffs, imagePoints, grid_width,
|
||||
release_object);
|
||||
break;
|
||||
}
|
||||
//! [get_input]
|
||||
|
||||
imageSize = view.size(); // Format input image.
|
||||
if( s.flipVertical ) flip( view, view, 0 );
|
||||
|
||||
//! [find_pattern]
|
||||
vector<Point2f> pointBuf;
|
||||
|
||||
bool found;
|
||||
|
||||
int chessBoardFlags = CALIB_CB_ADAPTIVE_THRESH | CALIB_CB_NORMALIZE_IMAGE;
|
||||
|
||||
if(!s.useFisheye) {
|
||||
// fast check erroneously fails with high distortions like fisheye
|
||||
chessBoardFlags |= CALIB_CB_FAST_CHECK;
|
||||
}
|
||||
|
||||
switch( s.calibrationPattern ) // Find feature points on the input format
|
||||
{
|
||||
case Settings::CHESSBOARD:
|
||||
found = findChessboardCorners( view, s.boardSize, pointBuf, chessBoardFlags);
|
||||
break;
|
||||
case Settings::CIRCLES_GRID:
|
||||
found = findCirclesGrid( view, s.boardSize, pointBuf );
|
||||
break;
|
||||
case Settings::ASYMMETRIC_CIRCLES_GRID:
|
||||
found = findCirclesGrid( view, s.boardSize, pointBuf, CALIB_CB_ASYMMETRIC_GRID );
|
||||
break;
|
||||
default:
|
||||
found = false;
|
||||
break;
|
||||
}
|
||||
//! [find_pattern]
|
||||
//! [pattern_found]
|
||||
if ( found) // If done with success,
|
||||
{
|
||||
// improve the found corners' coordinate accuracy for chessboard
|
||||
if( s.calibrationPattern == Settings::CHESSBOARD)
|
||||
{
|
||||
Mat viewGray;
|
||||
cvtColor(view, viewGray, COLOR_BGR2GRAY);
|
||||
cornerSubPix( viewGray, pointBuf, Size(winSize,winSize),
|
||||
Size(-1,-1), TermCriteria( TermCriteria::EPS+TermCriteria::COUNT, 30, 0.0001 ));
|
||||
}
|
||||
|
||||
if( mode == CAPTURING && // For camera only take new samples after delay time
|
||||
(!s.inputCapture.isOpened() || clock() - prevTimestamp > s.delay*1e-3*CLOCKS_PER_SEC) )
|
||||
{
|
||||
imagePoints.push_back(pointBuf);
|
||||
prevTimestamp = clock();
|
||||
blinkOutput = s.inputCapture.isOpened();
|
||||
}
|
||||
|
||||
// Draw the corners.
|
||||
drawChessboardCorners( view, s.boardSize, Mat(pointBuf), found );
|
||||
}
|
||||
//! [pattern_found]
|
||||
//----------------------------- Output Text ------------------------------------------------
|
||||
//! [output_text]
|
||||
string msg = (mode == CAPTURING) ? "100/100" :
|
||||
mode == CALIBRATED ? "Calibrated" : "Press 'g' to start";
|
||||
int baseLine = 0;
|
||||
Size textSize = getTextSize(msg, 1, 1, 1, &baseLine);
|
||||
Point textOrigin(view.cols - 2*textSize.width - 10, view.rows - 2*baseLine - 10);
|
||||
|
||||
if( mode == CAPTURING )
|
||||
{
|
||||
if(s.showUndistorted)
|
||||
msg = cv::format( "%d/%d Undist", (int)imagePoints.size(), s.nrFrames );
|
||||
else
|
||||
msg = cv::format( "%d/%d", (int)imagePoints.size(), s.nrFrames );
|
||||
}
|
||||
|
||||
putText( view, msg, textOrigin, 1, 1, mode == CALIBRATED ? GREEN : RED);
|
||||
|
||||
if( blinkOutput )
|
||||
bitwise_not(view, view);
|
||||
//! [output_text]
|
||||
//------------------------- Video capture output undistorted ------------------------------
|
||||
//! [output_undistorted]
|
||||
if( mode == CALIBRATED && s.showUndistorted )
|
||||
{
|
||||
Mat temp = view.clone();
|
||||
if (s.useFisheye)
|
||||
{
|
||||
Mat newCamMat;
|
||||
fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
|
||||
Matx33d::eye(), newCamMat, 1);
|
||||
cv::fisheye::undistortImage(temp, view, cameraMatrix, distCoeffs, newCamMat);
|
||||
}
|
||||
else
|
||||
undistort(temp, view, cameraMatrix, distCoeffs);
|
||||
}
|
||||
//! [output_undistorted]
|
||||
//------------------------------ Show image and check for input commands -------------------
|
||||
//! [await_input]
|
||||
imshow("Image View", view);
|
||||
char key = (char)waitKey(s.inputCapture.isOpened() ? 50 : s.delay);
|
||||
|
||||
if( key == ESC_KEY )
|
||||
break;
|
||||
|
||||
if( key == 'u' && mode == CALIBRATED )
|
||||
s.showUndistorted = !s.showUndistorted;
|
||||
|
||||
if( s.inputCapture.isOpened() && key == 'g' )
|
||||
{
|
||||
mode = CAPTURING;
|
||||
imagePoints.clear();
|
||||
}
|
||||
//! [await_input]
|
||||
}
|
||||
|
||||
// -----------------------Show the undistorted image for the image list ------------------------
|
||||
//! [show_results]
|
||||
if( s.inputType == Settings::IMAGE_LIST && s.showUndistorted && !cameraMatrix.empty())
|
||||
{
|
||||
Mat view, rview, map1, map2;
|
||||
|
||||
if (s.useFisheye)
|
||||
{
|
||||
Mat newCamMat;
|
||||
fisheye::estimateNewCameraMatrixForUndistortRectify(cameraMatrix, distCoeffs, imageSize,
|
||||
Matx33d::eye(), newCamMat, 1);
|
||||
fisheye::initUndistortRectifyMap(cameraMatrix, distCoeffs, Matx33d::eye(), newCamMat, imageSize,
|
||||
CV_16SC2, map1, map2);
|
||||
}
|
||||
else
|
||||
{
|
||||
initUndistortRectifyMap(
|
||||
cameraMatrix, distCoeffs, Mat(),
|
||||
getOptimalNewCameraMatrix(cameraMatrix, distCoeffs, imageSize, 1, imageSize, 0), imageSize,
|
||||
CV_16SC2, map1, map2);
|
||||
}
|
||||
|
||||
for(size_t i = 0; i < s.imageList.size(); i++ )
|
||||
{
|
||||
view = imread(s.imageList[i], IMREAD_COLOR);
|
||||
if(view.empty())
|
||||
continue;
|
||||
remap(view, rview, map1, map2, INTER_LINEAR);
|
||||
imshow("Image View", rview);
|
||||
char c = (char)waitKey();
|
||||
if( c == ESC_KEY || c == 'q' || c == 'Q' )
|
||||
break;
|
||||
}
|
||||
}
|
||||
//! [show_results]
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
//! [compute_errors]
|
||||
static double computeReprojectionErrors( const vector<vector<Point3f> >& objectPoints,
|
||||
const vector<vector<Point2f> >& imagePoints,
|
||||
const vector<Mat>& rvecs, const vector<Mat>& tvecs,
|
||||
const Mat& cameraMatrix , const Mat& distCoeffs,
|
||||
vector<float>& perViewErrors, bool fisheye)
|
||||
{
|
||||
vector<Point2f> imagePoints2;
|
||||
size_t totalPoints = 0;
|
||||
double totalErr = 0, err;
|
||||
perViewErrors.resize(objectPoints.size());
|
||||
|
||||
for(size_t i = 0; i < objectPoints.size(); ++i )
|
||||
{
|
||||
if (fisheye)
|
||||
{
|
||||
fisheye::projectPoints(objectPoints[i], imagePoints2, rvecs[i], tvecs[i], cameraMatrix,
|
||||
distCoeffs);
|
||||
}
|
||||
else
|
||||
{
|
||||
projectPoints(objectPoints[i], rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
|
||||
}
|
||||
err = norm(imagePoints[i], imagePoints2, NORM_L2);
|
||||
|
||||
size_t n = objectPoints[i].size();
|
||||
perViewErrors[i] = (float) std::sqrt(err*err/n);
|
||||
totalErr += err*err;
|
||||
totalPoints += n;
|
||||
}
|
||||
|
||||
return std::sqrt(totalErr/totalPoints);
|
||||
}
|
||||
//! [compute_errors]
|
||||
//! [board_corners]
|
||||
static void calcBoardCornerPositions(Size boardSize, float squareSize, vector<Point3f>& corners,
|
||||
Settings::Pattern patternType /*= Settings::CHESSBOARD*/)
|
||||
{
|
||||
corners.clear();
|
||||
|
||||
switch(patternType)
|
||||
{
|
||||
case Settings::CHESSBOARD:
|
||||
case Settings::CIRCLES_GRID:
|
||||
for( int i = 0; i < boardSize.height; ++i )
|
||||
for( int j = 0; j < boardSize.width; ++j )
|
||||
corners.push_back(Point3f(j*squareSize, i*squareSize, 0));
|
||||
break;
|
||||
|
||||
case Settings::ASYMMETRIC_CIRCLES_GRID:
|
||||
for( int i = 0; i < boardSize.height; i++ )
|
||||
for( int j = 0; j < boardSize.width; j++ )
|
||||
corners.push_back(Point3f((2*j + i % 2)*squareSize, i*squareSize, 0));
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
//! [board_corners]
|
||||
static bool runCalibration( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
|
||||
vector<vector<Point2f> > imagePoints, vector<Mat>& rvecs, vector<Mat>& tvecs,
|
||||
vector<float>& reprojErrs, double& totalAvgErr, vector<Point3f>& newObjPoints,
|
||||
float grid_width, bool release_object)
|
||||
{
|
||||
//! [fixed_aspect]
|
||||
cameraMatrix = Mat::eye(3, 3, CV_64F);
|
||||
if( !s.useFisheye && s.flag & CALIB_FIX_ASPECT_RATIO )
|
||||
cameraMatrix.at<double>(0,0) = s.aspectRatio;
|
||||
//! [fixed_aspect]
|
||||
if (s.useFisheye) {
|
||||
distCoeffs = Mat::zeros(4, 1, CV_64F);
|
||||
} else {
|
||||
distCoeffs = Mat::zeros(8, 1, CV_64F);
|
||||
}
|
||||
|
||||
vector<vector<Point3f> > objectPoints(1);
|
||||
calcBoardCornerPositions(s.boardSize, s.squareSize, objectPoints[0], s.calibrationPattern);
|
||||
objectPoints[0][s.boardSize.width - 1].x = objectPoints[0][0].x + grid_width;
|
||||
newObjPoints = objectPoints[0];
|
||||
|
||||
objectPoints.resize(imagePoints.size(),objectPoints[0]);
|
||||
|
||||
//Find intrinsic and extrinsic camera parameters
|
||||
double rms;
|
||||
|
||||
if (s.useFisheye) {
|
||||
Mat _rvecs, _tvecs;
|
||||
rms = fisheye::calibrate(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, _rvecs,
|
||||
_tvecs, s.flag);
|
||||
|
||||
rvecs.reserve(_rvecs.rows);
|
||||
tvecs.reserve(_tvecs.rows);
|
||||
for(int i = 0; i < int(objectPoints.size()); i++){
|
||||
rvecs.push_back(_rvecs.row(i));
|
||||
tvecs.push_back(_tvecs.row(i));
|
||||
}
|
||||
} else {
|
||||
int iFixedPoint = -1;
|
||||
if (release_object)
|
||||
iFixedPoint = s.boardSize.width - 1;
|
||||
rms = calibrateCameraRO(objectPoints, imagePoints, imageSize, iFixedPoint,
|
||||
cameraMatrix, distCoeffs, rvecs, tvecs, newObjPoints,
|
||||
s.flag | CALIB_USE_LU);
|
||||
}
|
||||
|
||||
if (release_object) {
|
||||
cout << "New board corners: " << endl;
|
||||
cout << newObjPoints[0] << endl;
|
||||
cout << newObjPoints[s.boardSize.width - 1] << endl;
|
||||
cout << newObjPoints[s.boardSize.width * (s.boardSize.height - 1)] << endl;
|
||||
cout << newObjPoints.back() << endl;
|
||||
}
|
||||
|
||||
cout << "Re-projection error reported by calibrateCamera: "<< rms << endl;
|
||||
|
||||
bool ok = checkRange(cameraMatrix) && checkRange(distCoeffs);
|
||||
|
||||
objectPoints.clear();
|
||||
objectPoints.resize(imagePoints.size(), newObjPoints);
|
||||
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix,
|
||||
distCoeffs, reprojErrs, s.useFisheye);
|
||||
|
||||
return ok;
|
||||
}
|
||||
|
||||
// Print camera parameters to the output file
|
||||
static void saveCameraParams( Settings& s, Size& imageSize, Mat& cameraMatrix, Mat& distCoeffs,
|
||||
const vector<Mat>& rvecs, const vector<Mat>& tvecs,
|
||||
const vector<float>& reprojErrs, const vector<vector<Point2f> >& imagePoints,
|
||||
double totalAvgErr, const vector<Point3f>& newObjPoints )
|
||||
{
|
||||
FileStorage fs( s.outputFileName, FileStorage::WRITE );
|
||||
|
||||
time_t tm;
|
||||
time( &tm );
|
||||
struct tm *t2 = localtime( &tm );
|
||||
char buf[1024];
|
||||
strftime( buf, sizeof(buf), "%c", t2 );
|
||||
|
||||
fs << "calibration_time" << buf;
|
||||
|
||||
if( !rvecs.empty() || !reprojErrs.empty() )
|
||||
fs << "nr_of_frames" << (int)std::max(rvecs.size(), reprojErrs.size());
|
||||
fs << "image_width" << imageSize.width;
|
||||
fs << "image_height" << imageSize.height;
|
||||
fs << "board_width" << s.boardSize.width;
|
||||
fs << "board_height" << s.boardSize.height;
|
||||
fs << "square_size" << s.squareSize;
|
||||
|
||||
if( !s.useFisheye && s.flag & CALIB_FIX_ASPECT_RATIO )
|
||||
fs << "fix_aspect_ratio" << s.aspectRatio;
|
||||
|
||||
if (s.flag)
|
||||
{
|
||||
std::stringstream flagsStringStream;
|
||||
if (s.useFisheye)
|
||||
{
|
||||
flagsStringStream << "flags:"
|
||||
<< (s.flag & fisheye::CALIB_FIX_SKEW ? " +fix_skew" : "")
|
||||
<< (s.flag & fisheye::CALIB_FIX_K1 ? " +fix_k1" : "")
|
||||
<< (s.flag & fisheye::CALIB_FIX_K2 ? " +fix_k2" : "")
|
||||
<< (s.flag & fisheye::CALIB_FIX_K3 ? " +fix_k3" : "")
|
||||
<< (s.flag & fisheye::CALIB_FIX_K4 ? " +fix_k4" : "")
|
||||
<< (s.flag & fisheye::CALIB_RECOMPUTE_EXTRINSIC ? " +recompute_extrinsic" : "");
|
||||
}
|
||||
else
|
||||
{
|
||||
flagsStringStream << "flags:"
|
||||
<< (s.flag & CALIB_USE_INTRINSIC_GUESS ? " +use_intrinsic_guess" : "")
|
||||
<< (s.flag & CALIB_FIX_ASPECT_RATIO ? " +fix_aspectRatio" : "")
|
||||
<< (s.flag & CALIB_FIX_PRINCIPAL_POINT ? " +fix_principal_point" : "")
|
||||
<< (s.flag & CALIB_ZERO_TANGENT_DIST ? " +zero_tangent_dist" : "")
|
||||
<< (s.flag & CALIB_FIX_K1 ? " +fix_k1" : "")
|
||||
<< (s.flag & CALIB_FIX_K2 ? " +fix_k2" : "")
|
||||
<< (s.flag & CALIB_FIX_K3 ? " +fix_k3" : "")
|
||||
<< (s.flag & CALIB_FIX_K4 ? " +fix_k4" : "")
|
||||
<< (s.flag & CALIB_FIX_K5 ? " +fix_k5" : "");
|
||||
}
|
||||
fs.writeComment(flagsStringStream.str());
|
||||
}
|
||||
|
||||
fs << "flags" << s.flag;
|
||||
|
||||
fs << "fisheye_model" << s.useFisheye;
|
||||
|
||||
fs << "camera_matrix" << cameraMatrix;
|
||||
fs << "distortion_coefficients" << distCoeffs;
|
||||
|
||||
fs << "avg_reprojection_error" << totalAvgErr;
|
||||
if (s.writeExtrinsics && !reprojErrs.empty())
|
||||
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
|
||||
|
||||
if(s.writeExtrinsics && !rvecs.empty() && !tvecs.empty() )
|
||||
{
|
||||
CV_Assert(rvecs[0].type() == tvecs[0].type());
|
||||
Mat bigmat((int)rvecs.size(), 6, CV_MAKETYPE(rvecs[0].type(), 1));
|
||||
bool needReshapeR = rvecs[0].depth() != 1 ? true : false;
|
||||
bool needReshapeT = tvecs[0].depth() != 1 ? true : false;
|
||||
|
||||
for( size_t i = 0; i < rvecs.size(); i++ )
|
||||
{
|
||||
Mat r = bigmat(Range(int(i), int(i+1)), Range(0,3));
|
||||
Mat t = bigmat(Range(int(i), int(i+1)), Range(3,6));
|
||||
|
||||
if(needReshapeR)
|
||||
rvecs[i].reshape(1, 1).copyTo(r);
|
||||
else
|
||||
{
|
||||
//*.t() is MatExpr (not Mat) so we can use assignment operator
|
||||
CV_Assert(rvecs[i].rows == 3 && rvecs[i].cols == 1);
|
||||
r = rvecs[i].t();
|
||||
}
|
||||
|
||||
if(needReshapeT)
|
||||
tvecs[i].reshape(1, 1).copyTo(t);
|
||||
else
|
||||
{
|
||||
CV_Assert(tvecs[i].rows == 3 && tvecs[i].cols == 1);
|
||||
t = tvecs[i].t();
|
||||
}
|
||||
}
|
||||
fs.writeComment("a set of 6-tuples (rotation vector + translation vector) for each view");
|
||||
fs << "extrinsic_parameters" << bigmat;
|
||||
}
|
||||
|
||||
if(s.writePoints && !imagePoints.empty() )
|
||||
{
|
||||
Mat imagePtMat((int)imagePoints.size(), (int)imagePoints[0].size(), CV_32FC2);
|
||||
for( size_t i = 0; i < imagePoints.size(); i++ )
|
||||
{
|
||||
Mat r = imagePtMat.row(int(i)).reshape(2, imagePtMat.cols);
|
||||
Mat imgpti(imagePoints[i]);
|
||||
imgpti.copyTo(r);
|
||||
}
|
||||
fs << "image_points" << imagePtMat;
|
||||
}
|
||||
|
||||
if( s.writeGrid && !newObjPoints.empty() )
|
||||
{
|
||||
fs << "grid_points" << newObjPoints;
|
||||
}
|
||||
}
|
||||
|
||||
//! [run_and_save]
|
||||
bool runCalibrationAndSave(Settings& s, Size imageSize, Mat& cameraMatrix, Mat& distCoeffs,
|
||||
vector<vector<Point2f> > imagePoints, float grid_width, bool release_object)
|
||||
{
|
||||
vector<Mat> rvecs, tvecs;
|
||||
vector<float> reprojErrs;
|
||||
double totalAvgErr = 0;
|
||||
vector<Point3f> newObjPoints;
|
||||
|
||||
bool ok = runCalibration(s, imageSize, cameraMatrix, distCoeffs, imagePoints, rvecs, tvecs, reprojErrs,
|
||||
totalAvgErr, newObjPoints, grid_width, release_object);
|
||||
cout << (ok ? "Calibration succeeded" : "Calibration failed")
|
||||
<< ". avg re projection error = " << totalAvgErr << endl;
|
||||
|
||||
if (ok)
|
||||
saveCameraParams(s, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, reprojErrs, imagePoints,
|
||||
totalAvgErr, newObjPoints);
|
||||
return ok;
|
||||
}
|
||||
//! [run_and_save]
|
59
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/in_VID5.xml
vendored
Normal file
59
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/in_VID5.xml
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
<?xml version="1.0"?>
|
||||
<opencv_storage>
|
||||
<Settings>
|
||||
<!-- Number of inner corners per a item row and column. (square, circle) -->
|
||||
<BoardSize_Width> 9</BoardSize_Width>
|
||||
<BoardSize_Height>6</BoardSize_Height>
|
||||
|
||||
<!-- The size of a square in some user defined metric system (pixel, millimeter)-->
|
||||
<Square_Size>50</Square_Size>
|
||||
|
||||
<!-- The type of input used for camera calibration. One of: CHESSBOARD CIRCLES_GRID ASYMMETRIC_CIRCLES_GRID -->
|
||||
<Calibrate_Pattern>"CHESSBOARD"</Calibrate_Pattern>
|
||||
|
||||
<!-- The input to use for calibration.
|
||||
To use an input camera -> give the ID of the camera, like "1"
|
||||
To use an input video -> give the path of the input video, like "/tmp/x.avi"
|
||||
To use an image list -> give the path to the XML or YAML file containing the list of the images, like "/tmp/circles_list.xml"
|
||||
-->
|
||||
<Input>"images/CameraCalibration/VID5/VID5.xml"</Input>
|
||||
<!-- If true (non-zero) we flip the input images around the horizontal axis.-->
|
||||
<Input_FlipAroundHorizontalAxis>0</Input_FlipAroundHorizontalAxis>
|
||||
|
||||
<!-- Time delay between frames in case of camera. -->
|
||||
<Input_Delay>100</Input_Delay>
|
||||
|
||||
<!-- How many frames to use, for calibration. -->
|
||||
<Calibrate_NrOfFrameToUse>25</Calibrate_NrOfFrameToUse>
|
||||
<!-- Consider only fy as a free parameter, the ratio fx/fy stays the same as in the input cameraMatrix.
|
||||
Use or not setting. 0 - False Non-Zero - True-->
|
||||
<Calibrate_FixAspectRatio> 1 </Calibrate_FixAspectRatio>
|
||||
<!-- If true (non-zero) tangential distortion coefficients are set to zeros and stay zero.-->
|
||||
<Calibrate_AssumeZeroTangentialDistortion>1</Calibrate_AssumeZeroTangentialDistortion>
|
||||
<!-- If true (non-zero) the principal point is not changed during the global optimization.-->
|
||||
<Calibrate_FixPrincipalPointAtTheCenter> 1 </Calibrate_FixPrincipalPointAtTheCenter>
|
||||
|
||||
<!-- The name of the output log file. -->
|
||||
<Write_outputFileName>"out_camera_data.xml"</Write_outputFileName>
|
||||
<!-- If true (non-zero) we write to the output file the feature points.-->
|
||||
<Write_DetectedFeaturePoints>1</Write_DetectedFeaturePoints>
|
||||
<!-- If true (non-zero) we write to the output file the extrinsic camera parameters.-->
|
||||
<Write_extrinsicParameters>1</Write_extrinsicParameters>
|
||||
<!-- If true (non-zero) we write to the output file the refined 3D target grid points.-->
|
||||
<Write_gridPoints>1</Write_gridPoints>
|
||||
<!-- If true (non-zero) we show after calibration the undistorted images.-->
|
||||
<Show_UndistortedImage>1</Show_UndistortedImage>
|
||||
<!-- If true (non-zero) will be used fisheye camera model.-->
|
||||
<Calibrate_UseFisheyeModel>0</Calibrate_UseFisheyeModel>
|
||||
<!-- If true (non-zero) distortion coefficient k1 will be equals to zero.-->
|
||||
<Fix_K1>0</Fix_K1>
|
||||
<!-- If true (non-zero) distortion coefficient k2 will be equals to zero.-->
|
||||
<Fix_K2>0</Fix_K2>
|
||||
<!-- If true (non-zero) distortion coefficient k3 will be equals to zero.-->
|
||||
<Fix_K3>0</Fix_K3>
|
||||
<!-- If true (non-zero) distortion coefficient k4 will be equals to zero.-->
|
||||
<Fix_K4>1</Fix_K4>
|
||||
<!-- If true (non-zero) distortion coefficient k5 will be equals to zero.-->
|
||||
<Fix_K5>1</Fix_K5>
|
||||
</Settings>
|
||||
</opencv_storage>
|
352
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/out_camera_data.yml
vendored
Normal file
352
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/camera_calibration/out_camera_data.yml
vendored
Normal file
@ -0,0 +1,352 @@
|
||||
%YAML:1.0
|
||||
calibration_Time: "08/19/11 20:44:38"
|
||||
nrOfFrames: 8
|
||||
image_Width: 640
|
||||
image_Height: 480
|
||||
board_Width: 9
|
||||
board_Height: 6
|
||||
square_Size: 50.
|
||||
FixAspectRatio: 1.
|
||||
# flags: +fix_aspectRatio +fix_principal_point +zero_tangent_dist
|
||||
flagValue: 14
|
||||
Camera_Matrix: !!opencv-matrix
|
||||
rows: 3
|
||||
cols: 3
|
||||
dt: d
|
||||
data: [ 6.5746697810243404e+002, 0., 3.1950000000000000e+002, 0.,
|
||||
6.5746697810243404e+002, 2.3950000000000000e+002, 0., 0., 1. ]
|
||||
Distortion_Coefficients: !!opencv-matrix
|
||||
rows: 5
|
||||
cols: 1
|
||||
dt: d
|
||||
data: [ -4.1802327018241026e-001, 5.0715243805833121e-001, 0., 0.,
|
||||
-5.7843596847939704e-001 ]
|
||||
Avg_Reprojection_Error: 3.8441346462381665e-001
|
||||
Per_View_Reprojection_Errors: !!opencv-matrix
|
||||
rows: 8
|
||||
cols: 1
|
||||
dt: f
|
||||
data: [ 5.04357755e-001, 4.85754758e-001, 3.99563968e-001,
|
||||
4.13829178e-001, 3.53570908e-001, 3.21116358e-001,
|
||||
2.74473161e-001, 2.39761785e-001 ]
|
||||
# a set of 6-tuples (rotation vector + translation vector) for each view
|
||||
Extrinsic_Parameters: !!opencv-matrix
|
||||
rows: 8
|
||||
cols: 6
|
||||
dt: d
|
||||
data: [ -7.8704123655486097e-002, -1.5922384772614945e-001,
|
||||
3.1166227207451498e+000, 2.4224388101960471e+002,
|
||||
1.1795590397660339e+002, 6.2576484126093249e+002,
|
||||
-1.4117480285164308e-001, -1.7917415443804836e-002,
|
||||
3.1333182268743949e+000, 2.5943034781849354e+002,
|
||||
1.4039780562976958e+002, 6.3848706527260981e+002,
|
||||
7.2230525186138789e-002, -7.5445981266787754e-002,
|
||||
1.5712860749221762e+000, 1.7426560451795339e+002,
|
||||
-1.9309240362258871e+002, 7.0891416556762647e+002,
|
||||
2.0367310600105853e-002, 6.8565520026996951e-002,
|
||||
-5.4313033031644169e-004, -2.0146314940404827e+002,
|
||||
-1.3305643514116997e+002, 7.4933554744027231e+002,
|
||||
-3.4468530027734055e-002, 2.1921265175331925e-002,
|
||||
-1.5731053528054522e+000, -1.1155718744299284e+002,
|
||||
2.0307615364261443e+002, 8.4915903914333899e+002,
|
||||
3.7425562109513817e-002, 7.4883169379022230e-002,
|
||||
-3.6031632305130512e-002, -2.0094505419395196e+002,
|
||||
-1.1627359108310560e+002, 9.2021583518760133e+002,
|
||||
6.8105689976949157e-002, 6.4426739692440949e-002,
|
||||
-7.0967130057087435e-002, -1.9233852871740035e+002,
|
||||
-1.0334652096641923e+002, 1.0755293563503658e+003,
|
||||
-5.8017546499862287e-002, -1.6909812666033443e-003,
|
||||
-1.5876137659782963e+000, -1.0242234847115104e+002,
|
||||
2.2583088401423066e+002, 1.1125972190244058e+003 ]
|
||||
Image_points: !!opencv-matrix
|
||||
rows: 8
|
||||
cols: 54
|
||||
dt: "2f"
|
||||
data: [ 5.58494690e+002, 3.55650085e+002, 5.13314697e+002,
|
||||
3.59107666e+002, 4.65728333e+002, 3.62133911e+002,
|
||||
4.15701111e+002, 3.65026459e+002, 3.64399353e+002,
|
||||
3.67339203e+002, 3.12101196e+002, 3.69211914e+002,
|
||||
2.59208405e+002, 3.70413513e+002, 2.07456192e+002,
|
||||
3.71175995e+002, 1.56619507e+002, 3.72176544e+002,
|
||||
5.60868713e+002, 3.08104828e+002, 5.15191772e+002,
|
||||
3.10485626e+002, 4.67032959e+002, 3.12660004e+002,
|
||||
4.16112152e+002, 3.14887177e+002, 3.64010712e+002,
|
||||
3.16825775e+002, 3.10712372e+002, 3.18640808e+002,
|
||||
2.56853943e+002, 3.20017365e+002, 2.04168182e+002,
|
||||
3.20908417e+002, 1.52469528e+002, 3.22105377e+002,
|
||||
5.62328369e+002, 2.58646881e+002, 5.16396301e+002,
|
||||
2.59919281e+002, 4.67907654e+002, 2.61257874e+002,
|
||||
4.16463440e+002, 2.62675537e+002, 3.63546570e+002,
|
||||
2.64064117e+002, 3.09528137e+002, 2.65489990e+002,
|
||||
2.54765533e+002, 2.66862030e+002, 2.01299225e+002,
|
||||
2.67997345e+002, 1.48913437e+002, 2.69627167e+002,
|
||||
5.63098022e+002, 2.08423523e+002, 5.16782654e+002,
|
||||
2.08424667e+002, 4.68059296e+002, 2.08661697e+002,
|
||||
4.16216431e+002, 2.09268982e+002, 3.62888763e+002,
|
||||
2.10013397e+002, 3.08458557e+002, 2.11074738e+002,
|
||||
2.53267990e+002, 2.12496582e+002, 1.99121384e+002,
|
||||
2.14005814e+002, 1.46551376e+002, 2.15851318e+002,
|
||||
5.62997437e+002, 1.57966492e+002, 5.16406494e+002,
|
||||
1.56580688e+002, 4.67334900e+002, 1.55756500e+002,
|
||||
4.15378235e+002, 1.55492874e+002, 3.62096710e+002,
|
||||
1.55498734e+002, 3.07522827e+002, 1.56133240e+002,
|
||||
2.52235214e+002, 1.57516571e+002, 1.97876328e+002,
|
||||
1.59318787e+002, 1.45078247e+002, 1.61638428e+002,
|
||||
5.62097168e+002, 1.07469536e+002, 5.15766846e+002,
|
||||
1.04902527e+002, 4.66562866e+002, 1.03045807e+002,
|
||||
4.14651459e+002, 1.01924713e+002, 3.61240662e+002,
|
||||
1.01357826e+002, 3.06746613e+002, 1.01582802e+002,
|
||||
2.51568024e+002, 1.02920105e+002, 1.97343307e+002,
|
||||
1.04941299e+002, 1.44756821e+002, 1.07737488e+002,
|
||||
5.68062500e+002, 3.73591125e+002, 5.25272644e+002,
|
||||
3.77019318e+002, 4.79870941e+002, 3.80086578e+002,
|
||||
4.31823730e+002, 3.83036652e+002, 3.81995758e+002,
|
||||
3.85271759e+002, 3.30728729e+002, 3.86998779e+002,
|
||||
2.78071167e+002, 3.88151031e+002, 2.26231567e+002,
|
||||
3.88669586e+002, 1.74855331e+002, 3.89197998e+002,
|
||||
5.69792542e+002, 3.27097382e+002, 5.26866028e+002,
|
||||
3.29362366e+002, 4.81278229e+002, 3.31532928e+002,
|
||||
4.32783203e+002, 3.33552185e+002, 3.82408234e+002,
|
||||
3.35186554e+002, 3.30427399e+002, 3.36404053e+002,
|
||||
2.77138519e+002, 3.37450958e+002, 2.24525131e+002,
|
||||
3.37957092e+002, 1.72285507e+002, 3.38503540e+002,
|
||||
5.70942749e+002, 2.79243713e+002, 5.27789307e+002,
|
||||
2.80073486e+002, 4.82146576e+002, 2.81226410e+002,
|
||||
4.33247375e+002, 2.82237427e+002, 3.82503662e+002,
|
||||
2.83062286e+002, 3.30138885e+002, 2.83794434e+002,
|
||||
2.76433228e+002, 2.84549286e+002, 2.23158783e+002,
|
||||
2.84981049e+002, 1.70520218e+002, 2.85720886e+002,
|
||||
5.71001953e+002, 2.30928329e+002, 5.27846863e+002,
|
||||
2.30519928e+002, 4.82114563e+002, 2.30268906e+002,
|
||||
4.33114563e+002, 2.30243515e+002, 3.82384857e+002,
|
||||
2.30311340e+002, 3.29870392e+002, 2.30454620e+002,
|
||||
2.76012634e+002, 2.30882156e+002, 2.22529434e+002,
|
||||
2.31355362e+002, 1.69742065e+002, 2.32063004e+002,
|
||||
5.70199036e+002, 1.82609772e+002, 5.27030884e+002,
|
||||
1.80973267e+002, 4.81193573e+002, 1.79573792e+002,
|
||||
4.32409821e+002, 1.78475616e+002, 3.81855530e+002,
|
||||
1.77680283e+002, 3.29641937e+002, 1.77092087e+002,
|
||||
2.75895782e+002, 1.77155502e+002, 2.22438889e+002,
|
||||
1.77605667e+002, 1.69884583e+002, 1.78365585e+002,
|
||||
5.69026245e+002, 1.34654831e+002, 5.26171570e+002,
|
||||
1.31798691e+002, 4.80653503e+002, 1.29171509e+002,
|
||||
4.31869904e+002, 1.27280067e+002, 3.81419739e+002,
|
||||
1.25591202e+002, 3.29466644e+002, 1.24407089e+002,
|
||||
2.76225342e+002, 1.24174736e+002, 2.23024109e+002,
|
||||
1.24463333e+002, 1.70838898e+002, 1.25398903e+002,
|
||||
4.73812897e+002, 6.94673386e+001, 4.74245453e+002,
|
||||
1.12387466e+002, 4.74243347e+002, 1.56034164e+002,
|
||||
4.73834778e+002, 2.00523651e+002, 4.72891602e+002,
|
||||
2.44457306e+002, 4.71412811e+002, 2.87981171e+002,
|
||||
4.69708252e+002, 3.30783173e+002, 4.67558228e+002,
|
||||
3.71818420e+002, 4.65495667e+002, 4.11996979e+002,
|
||||
4.31027649e+002, 6.75546722e+001, 4.31269440e+002,
|
||||
1.10960022e+002, 4.31185486e+002, 1.55113556e+002,
|
||||
4.30830139e+002, 2.00061066e+002, 4.30168427e+002,
|
||||
2.44456863e+002, 4.29107544e+002, 2.88479645e+002,
|
||||
4.27829071e+002, 3.31813507e+002, 4.26131653e+002,
|
||||
3.73071228e+002, 4.24718811e+002, 4.13476563e+002,
|
||||
3.86868805e+002, 6.61982269e+001, 3.86895416e+002,
|
||||
1.09904411e+002, 3.86690216e+002, 1.54396423e+002,
|
||||
3.86368591e+002, 1.99800369e+002, 3.85792206e+002,
|
||||
2.44538574e+002, 3.85117279e+002, 2.88826447e+002,
|
||||
3.84405273e+002, 3.32408020e+002, 3.83303772e+002,
|
||||
3.74074097e+002, 3.82477448e+002, 4.14638977e+002,
|
||||
3.41941437e+002, 6.54462357e+001, 3.41628204e+002,
|
||||
1.09383698e+002, 3.41402344e+002, 1.54105545e+002,
|
||||
3.41168854e+002, 1.99655045e+002, 3.40816681e+002,
|
||||
2.44469910e+002, 3.40516937e+002, 2.88975800e+002,
|
||||
3.40365662e+002, 3.32670990e+002, 3.39935211e+002,
|
||||
3.74465759e+002, 3.39847626e+002, 4.14742279e+002,
|
||||
2.96694000e+002, 6.56859589e+001, 2.96075226e+002,
|
||||
1.09505333e+002, 2.95704895e+002, 1.54202652e+002,
|
||||
2.95653107e+002, 1.99734131e+002, 2.95589661e+002,
|
||||
2.44549530e+002, 2.95629547e+002, 2.88889496e+002,
|
||||
2.96138733e+002, 3.32610931e+002, 2.96520905e+002,
|
||||
3.74608551e+002, 2.96987091e+002, 4.14774902e+002,
|
||||
2.51414978e+002, 6.65755463e+001, 2.50681854e+002,
|
||||
1.10189331e+002, 2.50183380e+002, 1.54658005e+002,
|
||||
2.50331161e+002, 2.00073761e+002, 2.50590790e+002,
|
||||
2.44719513e+002, 2.51083817e+002, 2.88868286e+002,
|
||||
2.52134262e+002, 3.32266937e+002, 2.53097809e+002,
|
||||
3.74022491e+002, 2.54404007e+002, 4.14018066e+002,
|
||||
1.49524078e+002, 1.27699501e+002, 1.89511658e+002,
|
||||
1.25816605e+002, 2.31050888e+002, 1.24260918e+002,
|
||||
2.74076721e+002, 1.23023209e+002, 3.17643005e+002,
|
||||
1.22288109e+002, 3.61785889e+002, 1.22105164e+002,
|
||||
4.06142670e+002, 1.22401566e+002, 4.49623962e+002,
|
||||
1.23246025e+002, 4.92677216e+002, 1.24087708e+002,
|
||||
1.48706085e+002, 1.69077423e+002, 1.88827805e+002,
|
||||
1.67750443e+002, 2.30439865e+002, 1.66769333e+002,
|
||||
2.73830933e+002, 1.65871170e+002, 3.17596741e+002,
|
||||
1.65410919e+002, 3.61983459e+002, 1.65327866e+002,
|
||||
4.06748322e+002, 1.65463974e+002, 4.50450226e+002,
|
||||
1.66126526e+002, 4.93614655e+002, 1.66970413e+002,
|
||||
1.48312607e+002, 2.11499451e+002, 1.88574097e+002,
|
||||
2.10860214e+002, 2.30130676e+002, 2.10261612e+002,
|
||||
2.73557709e+002, 2.09837143e+002, 3.17542572e+002,
|
||||
2.09633057e+002, 3.62091248e+002, 2.09732620e+002,
|
||||
4.06934570e+002, 2.09926758e+002, 4.50914612e+002,
|
||||
2.10320221e+002, 4.94044495e+002, 2.10900925e+002,
|
||||
1.48613831e+002, 2.53997177e+002, 1.88797791e+002,
|
||||
2.53912842e+002, 2.30240204e+002, 2.53975067e+002,
|
||||
2.73746704e+002, 2.54010208e+002, 3.17718262e+002,
|
||||
2.54106003e+002, 3.62188965e+002, 2.54205475e+002,
|
||||
4.06908783e+002, 2.54317505e+002, 4.50824951e+002,
|
||||
2.54539490e+002, 4.93825714e+002, 2.54753876e+002,
|
||||
1.49541687e+002, 2.96404175e+002, 1.89357727e+002,
|
||||
2.97117523e+002, 2.30807007e+002, 2.97805603e+002,
|
||||
2.74325470e+002, 2.97966522e+002, 3.18042206e+002,
|
||||
2.98304535e+002, 3.62105774e+002, 2.98552643e+002,
|
||||
4.06672272e+002, 2.98572418e+002, 4.50363068e+002,
|
||||
2.98569550e+002, 4.93109894e+002, 2.98516205e+002,
|
||||
1.50883698e+002, 3.38493195e+002, 1.90633621e+002,
|
||||
3.39862610e+002, 2.31920990e+002, 3.40869415e+002,
|
||||
2.74971252e+002, 3.41453766e+002, 3.18235229e+002,
|
||||
3.41952637e+002, 3.62063477e+002, 3.42314026e+002,
|
||||
4.06098938e+002, 3.42221802e+002, 4.49477386e+002,
|
||||
3.42063812e+002, 4.91864716e+002, 3.41727600e+002,
|
||||
2.36129852e+002, 3.92798004e+002, 2.34999939e+002,
|
||||
3.56118683e+002, 2.34376099e+002, 3.18607025e+002,
|
||||
2.33822159e+002, 2.80400696e+002, 2.33565445e+002,
|
||||
2.42213104e+002, 2.33583069e+002, 2.03937286e+002,
|
||||
2.34028824e+002, 1.65756607e+002, 2.34613373e+002,
|
||||
1.28586639e+002, 2.35190308e+002, 9.18279037e+001,
|
||||
2.73031616e+002, 3.93267242e+002, 2.72295166e+002,
|
||||
3.56342743e+002, 2.71799347e+002, 3.18847412e+002,
|
||||
2.71418854e+002, 2.80287872e+002, 2.71161469e+002,
|
||||
2.41881134e+002, 2.71248962e+002, 2.03348145e+002,
|
||||
2.71379303e+002, 1.64895874e+002, 2.71946045e+002,
|
||||
1.27450935e+002, 2.72322418e+002, 9.06900787e+001,
|
||||
3.10670715e+002, 3.93568848e+002, 3.10389160e+002,
|
||||
3.56545959e+002, 3.10084625e+002, 3.18814514e+002,
|
||||
3.09801544e+002, 2.80242737e+002, 3.09678711e+002,
|
||||
2.41574814e+002, 3.09779663e+002, 2.02989838e+002,
|
||||
3.09842712e+002, 1.64338043e+002, 3.10076782e+002,
|
||||
1.26870911e+002, 3.10243286e+002, 8.98413315e+001,
|
||||
3.48618134e+002, 3.93563202e+002, 3.48617065e+002,
|
||||
3.56472382e+002, 3.48608795e+002, 3.18855621e+002,
|
||||
3.48544556e+002, 2.80011017e+002, 3.48556396e+002,
|
||||
2.41388168e+002, 3.48585388e+002, 2.02692429e+002,
|
||||
3.48435089e+002, 1.64099731e+002, 3.48442902e+002,
|
||||
1.26549957e+002, 3.48338043e+002, 8.98002014e+001,
|
||||
3.86625610e+002, 3.93188599e+002, 3.87047729e+002,
|
||||
3.56377594e+002, 3.87306274e+002, 3.18714752e+002,
|
||||
3.87337799e+002, 2.79868896e+002, 3.87402740e+002,
|
||||
2.41228760e+002, 3.87295166e+002, 2.02695313e+002,
|
||||
3.87030273e+002, 1.64203415e+002, 3.86741211e+002,
|
||||
1.26606262e+002, 3.86337311e+002, 8.99655075e+001,
|
||||
4.24534088e+002, 3.92702545e+002, 4.25310822e+002,
|
||||
3.55900452e+002, 4.25869019e+002, 3.18160614e+002,
|
||||
4.25909790e+002, 2.79615753e+002, 4.25977295e+002,
|
||||
2.41165100e+002, 4.25826477e+002, 2.02876389e+002,
|
||||
4.25331665e+002, 1.64527618e+002, 4.24775787e+002,
|
||||
1.27097328e+002, 4.23985138e+002, 9.08176651e+001,
|
||||
1.79142670e+002, 1.58573654e+002, 2.12791580e+002,
|
||||
1.56291031e+002, 2.47140106e+002, 1.54265656e+002,
|
||||
2.82607300e+002, 1.52373688e+002, 3.18175507e+002,
|
||||
1.50692184e+002, 3.54185852e+002, 1.49404175e+002,
|
||||
3.90455200e+002, 1.48229370e+002, 4.26106689e+002,
|
||||
1.47507843e+002, 4.61576141e+002, 1.46712479e+002,
|
||||
1.80388336e+002, 1.93027603e+002, 2.14026459e+002,
|
||||
1.91128204e+002, 2.48376541e+002, 1.89414978e+002,
|
||||
2.83795807e+002, 1.87720856e+002, 3.19472473e+002,
|
||||
1.86192383e+002, 3.55483826e+002, 1.84929199e+002,
|
||||
3.91970764e+002, 1.83747040e+002, 4.27654572e+002,
|
||||
1.82931534e+002, 4.63295227e+002, 1.81977234e+002,
|
||||
1.81914261e+002, 2.27955460e+002, 2.15291260e+002,
|
||||
2.26512482e+002, 2.49628265e+002, 2.25067520e+002,
|
||||
2.85066406e+002, 2.23593185e+002, 3.20846680e+002,
|
||||
2.22337708e+002, 3.56862885e+002, 2.21191040e+002,
|
||||
3.93279907e+002, 2.19905640e+002, 4.29202271e+002,
|
||||
2.18870361e+002, 4.64728424e+002, 2.17972977e+002,
|
||||
1.83496948e+002, 2.62963226e+002, 2.16930527e+002,
|
||||
2.61755219e+002, 2.51115829e+002, 2.60777222e+002,
|
||||
2.86553406e+002, 2.59500336e+002, 3.22299896e+002,
|
||||
2.58380737e+002, 3.58307648e+002, 2.57236694e+002,
|
||||
3.94551819e+002, 2.56009125e+002, 4.30358948e+002,
|
||||
2.54925797e+002, 4.65684998e+002, 2.54021484e+002,
|
||||
1.85461685e+002, 2.97687378e+002, 2.18712234e+002,
|
||||
2.96999207e+002, 2.52770218e+002, 2.96270752e+002,
|
||||
2.88213776e+002, 2.95168213e+002, 3.23698334e+002,
|
||||
2.94233032e+002, 3.59477722e+002, 2.93170715e+002,
|
||||
3.95647766e+002, 2.91897400e+002, 4.31309845e+002,
|
||||
2.90856995e+002, 4.66494110e+002, 2.89726410e+002,
|
||||
1.87661331e+002, 3.32186188e+002, 2.20767746e+002,
|
||||
3.31906250e+002, 2.54839096e+002, 3.31398651e+002,
|
||||
2.89963745e+002, 3.30524139e+002, 3.25207642e+002,
|
||||
3.29771820e+002, 3.60686035e+002, 3.28762695e+002,
|
||||
3.96576447e+002, 3.27542206e+002, 4.31994415e+002,
|
||||
3.26294189e+002, 4.66894653e+002, 3.24949921e+002,
|
||||
2.03543015e+002, 1.77473557e+002, 2.32777847e+002,
|
||||
1.74712509e+002, 2.62628723e+002, 1.72331970e+002,
|
||||
2.93045898e+002, 1.69686768e+002, 3.23527618e+002,
|
||||
1.67496246e+002, 3.54206787e+002, 1.65446075e+002,
|
||||
3.85180176e+002, 1.63360580e+002, 4.15484253e+002,
|
||||
1.61536423e+002, 4.45720947e+002, 1.59896164e+002,
|
||||
2.05864395e+002, 2.07228104e+002, 2.35242096e+002,
|
||||
2.04699326e+002, 2.64853973e+002, 2.02407455e+002,
|
||||
2.95353882e+002, 1.99972321e+002, 3.25811890e+002,
|
||||
1.97671921e+002, 3.56471252e+002, 1.95763168e+002,
|
||||
3.87280548e+002, 1.93597977e+002, 4.17615814e+002,
|
||||
1.91867371e+002, 4.48018677e+002, 1.90067413e+002,
|
||||
2.08421249e+002, 2.37166977e+002, 2.37513824e+002,
|
||||
2.34982773e+002, 2.67261261e+002, 2.32802841e+002,
|
||||
2.97555817e+002, 2.30466080e+002, 3.28118103e+002,
|
||||
2.28462463e+002, 3.58699707e+002, 2.26417038e+002,
|
||||
3.89468842e+002, 2.24356827e+002, 4.19895996e+002,
|
||||
2.22421921e+002, 4.50077850e+002, 2.20683517e+002,
|
||||
2.11095444e+002, 2.66940186e+002, 2.40241348e+002,
|
||||
2.64970093e+002, 2.69563019e+002, 2.63153290e+002,
|
||||
2.99863464e+002, 2.60983551e+002, 3.30282440e+002,
|
||||
2.58911560e+002, 3.60724792e+002, 2.56935730e+002,
|
||||
3.91487915e+002, 2.54799423e+002, 4.21789093e+002,
|
||||
2.52929688e+002, 4.51818481e+002, 2.51059357e+002,
|
||||
2.13829117e+002, 2.96591217e+002, 2.42742859e+002,
|
||||
2.94884583e+002, 2.72209076e+002, 2.93215668e+002,
|
||||
3.02402985e+002, 2.91230591e+002, 3.32536072e+002,
|
||||
2.89165192e+002, 3.62860901e+002, 2.87413605e+002,
|
||||
3.93481842e+002, 2.85199615e+002, 4.23728851e+002,
|
||||
2.83277496e+002, 4.53453094e+002, 2.81229309e+002,
|
||||
2.16799316e+002, 3.25975220e+002, 2.45605515e+002,
|
||||
3.24619904e+002, 2.74777344e+002, 3.22958679e+002,
|
||||
3.04762817e+002, 3.21008057e+002, 3.34797150e+002,
|
||||
3.19291443e+002, 3.65005798e+002, 3.17295044e+002,
|
||||
3.95311981e+002, 3.15296021e+002, 4.25312592e+002,
|
||||
3.13086945e+002, 4.54931152e+002, 3.11027130e+002,
|
||||
2.60550232e+002, 3.70739563e+002, 2.59674011e+002,
|
||||
3.42115936e+002, 2.58910492e+002, 3.13278015e+002,
|
||||
2.58195618e+002, 2.84013580e+002, 2.57727173e+002,
|
||||
2.55017166e+002, 2.57326263e+002, 2.25760986e+002,
|
||||
2.57096619e+002, 1.96577972e+002, 2.57031860e+002,
|
||||
1.68026199e+002, 2.56873383e+002, 1.39550308e+002,
|
||||
2.89019318e+002, 3.70481354e+002, 2.88355560e+002,
|
||||
3.41833252e+002, 2.87601471e+002, 3.12872925e+002,
|
||||
2.87057190e+002, 2.83485535e+002, 2.86599762e+002,
|
||||
2.54255096e+002, 2.86174438e+002, 2.25023285e+002,
|
||||
2.85775940e+002, 1.95715347e+002, 2.85577087e+002,
|
||||
1.66989502e+002, 2.85395477e+002, 1.38597382e+002,
|
||||
3.18072754e+002, 3.70118317e+002, 3.17432709e+002,
|
||||
3.41398743e+002, 3.16917267e+002, 3.12476044e+002,
|
||||
3.16284363e+002, 2.83001587e+002, 3.15799072e+002,
|
||||
2.53725845e+002, 3.15411957e+002, 2.24337708e+002,
|
||||
3.15070374e+002, 1.95034119e+002, 3.14736847e+002,
|
||||
1.66195313e+002, 3.14439789e+002, 1.37797058e+002,
|
||||
3.47083588e+002, 3.69678101e+002, 3.46717987e+002,
|
||||
3.40949524e+002, 3.46185303e+002, 3.12009857e+002,
|
||||
3.45728088e+002, 2.82454071e+002, 3.45226624e+002,
|
||||
2.53109863e+002, 3.44883606e+002, 2.23839539e+002,
|
||||
3.44373535e+002, 1.94399933e+002, 3.43879852e+002,
|
||||
1.65690643e+002, 3.43438629e+002, 1.37252930e+002,
|
||||
3.76341522e+002, 3.68972321e+002, 3.76086884e+002,
|
||||
3.40412842e+002, 3.75708893e+002, 3.11398376e+002,
|
||||
3.75143494e+002, 2.81901520e+002, 3.74762970e+002,
|
||||
2.52577988e+002, 3.74223969e+002, 2.23348221e+002,
|
||||
3.73600891e+002, 1.93979538e+002, 3.72983917e+002,
|
||||
1.65201294e+002, 3.72517273e+002, 1.36871033e+002,
|
||||
4.05512115e+002, 3.68243225e+002, 4.05366333e+002,
|
||||
3.39678650e+002, 4.05090027e+002, 3.10679108e+002,
|
||||
4.04612366e+002, 2.81203522e+002, 4.04152649e+002,
|
||||
2.52051605e+002, 4.03539703e+002, 2.22930420e+002,
|
||||
4.02903351e+002, 1.93625381e+002, 4.02272827e+002,
|
||||
1.65004440e+002, 4.01353333e+002, 1.36796814e+002 ]
|
21
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/CMakeLists.txt
vendored
Normal file
21
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/CMakeLists.txt
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
set(sample_dir ${CMAKE_CURRENT_SOURCE_DIR}/tutorial_code/calib3d/real_time_pose_estimation/src/)
|
||||
set(target example_tutorial_)
|
||||
|
||||
set(sample_pnplib
|
||||
${sample_dir}CsvReader.cpp
|
||||
${sample_dir}CsvWriter.cpp
|
||||
${sample_dir}ModelRegistration.cpp
|
||||
${sample_dir}Mesh.cpp
|
||||
${sample_dir}Model.cpp
|
||||
${sample_dir}PnPProblem.cpp
|
||||
${sample_dir}Utils.cpp
|
||||
${sample_dir}RobustMatcher.cpp
|
||||
)
|
||||
|
||||
ocv_include_modules_recurse(${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
|
||||
|
||||
add_executable( ${target}pnp_registration ${sample_dir}main_registration.cpp ${sample_pnplib} )
|
||||
add_executable( ${target}pnp_detection ${sample_dir}main_detection.cpp ${sample_pnplib} )
|
||||
|
||||
ocv_target_link_libraries(${target}pnp_registration PRIVATE ${OPENCV_LINKER_LIBS} ${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
|
||||
ocv_target_link_libraries(${target}pnp_detection PRIVATE ${OPENCV_LINKER_LIBS} ${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
|
BIN
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.mp4
vendored
Normal file
BIN
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.mp4
vendored
Normal file
Binary file not shown.
31
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.ply
vendored
Normal file
31
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.ply
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
ply
|
||||
format ascii 1.0
|
||||
comment made by anonymous
|
||||
comment this file is a cube
|
||||
element vertex 8
|
||||
property float32 x
|
||||
property float32 y
|
||||
property float32 z
|
||||
element face 12
|
||||
property list uint8 int32 vertex_index
|
||||
end_header
|
||||
0 0 0
|
||||
0 25.8 0
|
||||
18.9 0 0
|
||||
18.9 25.8 0
|
||||
0 0 7.5
|
||||
0 25.8 7.5
|
||||
18.9 0 7.5
|
||||
18.9 25.8 7.5
|
||||
3 5 1 0
|
||||
3 5 4 0
|
||||
3 4 0 2
|
||||
3 4 6 2
|
||||
3 7 5 4
|
||||
3 7 6 4
|
||||
3 3 2 1
|
||||
3 1 2 0
|
||||
3 5 7 1
|
||||
3 7 1 3
|
||||
3 7 6 3
|
||||
3 6 3 2
|
31793
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/cookies_ORB.yml
vendored
Normal file
31793
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/cookies_ORB.yml
vendored
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
After Width: | Height: | Size: 102 KiB |
@ -0,0 +1,79 @@
#include "CsvReader.h"

/** The default constructor of the CSV reader Class */
CsvReader::CsvReader(const string &path, char separator){
    _file.open(path.c_str(), ifstream::in);
    _separator = separator;
}

/* Read a plain text file with .ply format */
void CsvReader::readPLY(vector<Point3f> &list_vertex, vector<vector<int> > &list_triangles)
{
    std::string line, tmp_str, n;
    int num_vertex = 0, num_triangles = 0;
    int count = 0;
    bool end_header = false;
    bool end_vertex = false;

    // Read the whole *.ply file
    while (getline(_file, line)) {
        stringstream liness(line);

        // read header
        if(!end_header)
        {
            getline(liness, tmp_str, _separator);
            if( tmp_str == "element" )
            {
                getline(liness, tmp_str, _separator);
                getline(liness, n);
                if(tmp_str == "vertex") num_vertex = StringToInt(n);
                if(tmp_str == "face") num_triangles = StringToInt(n);
            }
            if(tmp_str == "end_header") end_header = true;
        }

        // read file content
        else if(end_header)
        {
            // read vertex and add into 'list_vertex'
            if(!end_vertex && count < num_vertex)
            {
                string x, y, z;
                getline(liness, x, _separator);
                getline(liness, y, _separator);
                getline(liness, z);

                cv::Point3f tmp_p;
                tmp_p.x = (float)StringToInt(x);
                tmp_p.y = (float)StringToInt(y);
                tmp_p.z = (float)StringToInt(z);
                list_vertex.push_back(tmp_p);

                count++;
                if(count == num_vertex)
                {
                    count = 0;
                    end_vertex = !end_vertex;
                }
            }
            // read faces and add into 'list_triangles'
            else if(end_vertex && count < num_triangles)
            {
                string num_pts_per_face, id0, id1, id2;
                getline(liness, num_pts_per_face, _separator);
                getline(liness, id0, _separator);
                getline(liness, id1, _separator);
                getline(liness, id2);

                std::vector<int> tmp_triangle(3);
                tmp_triangle[0] = StringToInt(id0);
                tmp_triangle[1] = StringToInt(id1);
                tmp_triangle[2] = StringToInt(id2);
                list_triangles.push_back(tmp_triangle);

                count++;
            }
        }
    }
}
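The reader above is only ever fed the .ply data shipped with the sample; a minimal, self-contained usage sketch (not part of this commit, relative file path assumed) could look like this:

// Illustrative sketch only: parse box.ply with the CsvReader shown above.
#include <iostream>
#include "CsvReader.h"

int main()
{
    std::vector<cv::Point3f> vertices;
    std::vector<std::vector<int> > triangles;

    CsvReader reader("Data/box.ply");     // default separator is ' '
    reader.readPLY(vertices, triangles);  // fills 8 vertices and 12 faces for the cube above

    std::cout << vertices.size() << " vertices, " << triangles.size() << " triangles" << std::endl;
    return 0;
}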
@ -0,0 +1,40 @@
#ifndef CSVREADER_H
#define CSVREADER_H

#include <iostream>
#include <fstream>
#include <opencv2/core/core.hpp>
#include "Utils.h"

using namespace std;
using namespace cv;

class CsvReader {
public:
    /**
     * The default constructor of the CSV reader Class.
     * The default separator is ' ' (empty space)
     *
     * @param path - The path of the file to read
     * @param separator - The separator character between words per line
     * @return
     */
    CsvReader(const string &path, char separator = ' ');

    /**
     * Read a plain text file with .ply format
     *
     * @param list_vertex - The container of the vertices list of the mesh
     * @param list_triangles - The container of the triangles list of the mesh
     * @return
     */
    void readPLY(vector<Point3f> &list_vertex, vector<vector<int> > &list_triangles);

private:
    /** The current stream file for the reader */
    ifstream _file;
    /** The separator character between words for each line */
    char _separator;
};

#endif
@ -0,0 +1,45 @@
#include "CsvWriter.h"

CsvWriter::CsvWriter(const string &path, const string &separator){
    _file.open(path.c_str(), ofstream::out);
    _isFirstTerm = true;
    _separator = separator;
}

CsvWriter::~CsvWriter() {
    _file.flush();
    _file.close();
}

void CsvWriter::writeXYZ(const vector<Point3f> &list_points3d)
{
    for(size_t i = 0; i < list_points3d.size(); ++i)
    {
        string x = FloatToString(list_points3d[i].x);
        string y = FloatToString(list_points3d[i].y);
        string z = FloatToString(list_points3d[i].z);

        _file << x << _separator << y << _separator << z << std::endl;
    }
}

void CsvWriter::writeUVXYZ(const vector<Point3f> &list_points3d, const vector<Point2f> &list_points2d, const Mat &descriptors)
{
    for(size_t i = 0; i < list_points3d.size(); ++i)
    {
        string u = FloatToString(list_points2d[i].x);
        string v = FloatToString(list_points2d[i].y);
        string x = FloatToString(list_points3d[i].x);
        string y = FloatToString(list_points3d[i].y);
        string z = FloatToString(list_points3d[i].z);

        _file << u << _separator << v << _separator << x << _separator << y << _separator << z;

        for(int j = 0; j < 32; ++j)
        {
            string descriptor_str = FloatToString(descriptors.at<float>((int)i,j));
            _file << _separator << descriptor_str;
        }
        _file << std::endl;
    }
}
@ -0,0 +1,25 @@
#ifndef CSVWRITER_H
#define CSVWRITER_H

#include <iostream>
#include <fstream>
#include <opencv2/core.hpp>
#include "Utils.h"

using namespace std;
using namespace cv;

class CsvWriter {
public:
    CsvWriter(const string &path, const string &separator = " ");
    ~CsvWriter();
    void writeXYZ(const vector<Point3f> &list_points3d);
    void writeUVXYZ(const vector<Point3f> &list_points3d, const vector<Point2f> &list_points2d, const Mat &descriptors);

private:
    ofstream _file;
    string _separator;
    bool _isFirstTerm;
};

#endif
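For reference, a minimal sketch (not part of this commit; the output file name is made up) of how the writer above is meant to be used:

// Illustrative sketch only: dump a few 3D points to disk with the CsvWriter above.
#include "CsvWriter.h"

int main()
{
    std::vector<cv::Point3f> pts;
    pts.push_back(cv::Point3f(0.f, 0.f, 0.f));
    pts.push_back(cv::Point3f(18.9f, 25.8f, 7.5f));

    CsvWriter writer("points.txt");   // default separator is a single space
    writer.writeXYZ(pts);             // one "x y z" line per point; the file is flushed in the destructor
    return 0;
}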
78
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Mesh.cpp
vendored
Normal file
78
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Mesh.cpp
vendored
Normal file
@ -0,0 +1,78 @@
/*
 * Mesh.cpp
 *
 *  Created on: Apr 9, 2014
 *      Author: edgar
 */

#include "Mesh.h"
#include "CsvReader.h"


// --------------------------------------------------- //
//                  TRIANGLE CLASS                      //
// --------------------------------------------------- //

/** The custom constructor of the Triangle Class */
Triangle::Triangle(const cv::Point3f& V0, const cv::Point3f& V1, const cv::Point3f& V2) :
    v0_(V0), v1_(V1), v2_(V2)
{
}

/** The default destructor of the Class */
Triangle::~Triangle()
{
    // TODO Auto-generated destructor stub
}


// --------------------------------------------------- //
//                     RAY CLASS                        //
// --------------------------------------------------- //

/** The custom constructor of the Ray Class */
Ray::Ray(const cv::Point3f& P0, const cv::Point3f& P1) :
    p0_(P0), p1_(P1)
{
}

/** The default destructor of the Class */
Ray::~Ray()
{
    // TODO Auto-generated destructor stub
}


// --------------------------------------------------- //
//                 OBJECT MESH CLASS                    //
// --------------------------------------------------- //

/** The default constructor of the ObjectMesh Class */
Mesh::Mesh() : num_vertices_(0), num_triangles_(0),
    list_vertex_(0) , list_triangles_(0)
{
}

/** The default destructor of the ObjectMesh Class */
Mesh::~Mesh()
{
    // TODO Auto-generated destructor stub
}

/** Load a CSV with *.ply format **/
void Mesh::load(const std::string& path)
{
    // Create the reader
    CsvReader csvReader(path);

    // Clear previous data
    list_vertex_.clear();
    list_triangles_.clear();

    // Read from .ply file
    csvReader.readPLY(list_vertex_, list_triangles_);

    // Update mesh attributes
    num_vertices_ = (int)list_vertex_.size();
    num_triangles_ = (int)list_triangles_.size();
}
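A short usage sketch for the Mesh API above (illustrative only; the relative Data/box.ply path is an assumption about the working directory):

// Illustrative sketch only: load the cube mesh and walk its triangles via the Mesh API above.
#include <iostream>
#include "Mesh.h"

int main()
{
    Mesh mesh;
    mesh.load("Data/box.ply");   // delegates to CsvReader::readPLY internally

    std::vector<std::vector<int> > triangles = mesh.getTrianglesList();
    for (size_t i = 0; i < triangles.size(); ++i)
    {
        cv::Point3f v0 = mesh.getVertex(triangles[i][0]);   // indices into the .ply vertex list
        std::cout << "triangle " << i << " starts at " << v0 << std::endl;
    }
    return 0;
}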
82
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Mesh.h
vendored
Normal file
82
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Mesh.h
vendored
Normal file
@ -0,0 +1,82 @@
/*
 * Mesh.h
 *
 *  Created on: Apr 9, 2014
 *      Author: edgar
 */

#ifndef MESH_H_
#define MESH_H_

#include <iostream>
#include <opencv2/core/core.hpp>


// --------------------------------------------------- //
//                  TRIANGLE CLASS                      //
// --------------------------------------------------- //

class Triangle {
public:
    explicit Triangle(const cv::Point3f& V0, const cv::Point3f& V1, const cv::Point3f& V2);
    virtual ~Triangle();

    cv::Point3f getV0() const { return v0_; }
    cv::Point3f getV1() const { return v1_; }
    cv::Point3f getV2() const { return v2_; }

private:
    /** The three vertices that define the triangle */
    cv::Point3f v0_, v1_, v2_;
};


// --------------------------------------------------- //
//                     RAY CLASS                        //
// --------------------------------------------------- //

class Ray {
public:
    explicit Ray(const cv::Point3f& P0, const cv::Point3f& P1);
    virtual ~Ray();

    cv::Point3f getP0() { return p0_; }
    cv::Point3f getP1() { return p1_; }

private:
    /** The two points that define the ray */
    cv::Point3f p0_, p1_;
};


// --------------------------------------------------- //
//                 OBJECT MESH CLASS                    //
// --------------------------------------------------- //

class Mesh
{
public:
    Mesh();
    virtual ~Mesh();

    std::vector<std::vector<int> > getTrianglesList() const { return list_triangles_; }
    cv::Point3f getVertex(int pos) const { return list_vertex_[pos]; }
    int getNumVertices() const { return num_vertices_; }

    void load(const std::string& path_file);

private:
    /** The current number of vertices in the mesh */
    int num_vertices_;
    /** The current number of triangles in the mesh */
    int num_triangles_;
    /** The list of vertices of the mesh */
    std::vector<cv::Point3f> list_vertex_;
    /** The list of triangles of the mesh */
    std::vector<std::vector<int> > list_triangles_;
};

#endif /* OBJECTMESH_H_ */
83
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Model.cpp
vendored
Normal file
83
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Model.cpp
vendored
Normal file
@ -0,0 +1,83 @@
/*
 * Model.cpp
 *
 *  Created on: Apr 9, 2014
 *      Author: edgar
 */

#include "Model.h"
#include "CsvWriter.h"

Model::Model() : n_correspondences_(0), list_points2d_in_(0), list_points2d_out_(0), list_points3d_in_(0), training_img_path_()
{
}

Model::~Model()
{
    // TODO Auto-generated destructor stub
}

void Model::add_correspondence(const cv::Point2f &point2d, const cv::Point3f &point3d)
{
    list_points2d_in_.push_back(point2d);
    list_points3d_in_.push_back(point3d);
    n_correspondences_++;
}

void Model::add_outlier(const cv::Point2f &point2d)
{
    list_points2d_out_.push_back(point2d);
}

void Model::add_descriptor(const cv::Mat &descriptor)
{
    descriptors_.push_back(descriptor);
}

void Model::add_keypoint(const cv::KeyPoint &kp)
{
    list_keypoints_.push_back(kp);
}

void Model::set_trainingImagePath(const std::string &path)
{
    training_img_path_ = path;
}

/** Save the model (2D/3D points, keypoints and descriptors) to a YAML file */
void Model::save(const std::string &path)
{
    cv::Mat points3dmatrix = cv::Mat(list_points3d_in_);
    cv::Mat points2dmatrix = cv::Mat(list_points2d_in_);

    cv::FileStorage storage(path, cv::FileStorage::WRITE);
    storage << "points_3d" << points3dmatrix;
    storage << "points_2d" << points2dmatrix;
    storage << "keypoints" << list_keypoints_;
    storage << "descriptors" << descriptors_;
    storage << "training_image_path" << training_img_path_;

    storage.release();
}

/** Load a YAML file using OpenCV functions **/
void Model::load(const std::string &path)
{
    cv::Mat points3d_mat;

    cv::FileStorage storage(path, cv::FileStorage::READ);
    storage["points_3d"] >> points3d_mat;
    storage["descriptors"] >> descriptors_;
    if (!storage["keypoints"].empty())
    {
        storage["keypoints"] >> list_keypoints_;
    }
    if (!storage["training_image_path"].empty())
    {
        storage["training_image_path"] >> training_img_path_;
    }

    points3d_mat.copyTo(list_points3d_in_);

    storage.release();
}
55
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Model.h
vendored
Normal file
55
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Model.h
vendored
Normal file
@ -0,0 +1,55 @@
/*
 * Model.h
 *
 *  Created on: Apr 9, 2014
 *      Author: edgar
 */

#ifndef MODEL_H_
#define MODEL_H_

#include <iostream>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>

class Model
{
public:
    Model();
    virtual ~Model();

    std::vector<cv::Point2f> get_points2d_in() const { return list_points2d_in_; }
    std::vector<cv::Point2f> get_points2d_out() const { return list_points2d_out_; }
    std::vector<cv::Point3f> get_points3d() const { return list_points3d_in_; }
    std::vector<cv::KeyPoint> get_keypoints() const { return list_keypoints_; }
    cv::Mat get_descriptors() const { return descriptors_; }
    int get_numDescriptors() const { return descriptors_.rows; }
    std::string get_trainingImagePath() const { return training_img_path_; }

    void add_correspondence(const cv::Point2f &point2d, const cv::Point3f &point3d);
    void add_outlier(const cv::Point2f &point2d);
    void add_descriptor(const cv::Mat &descriptor);
    void add_keypoint(const cv::KeyPoint &kp);
    void set_trainingImagePath(const std::string &path);

    void save(const std::string &path);
    void load(const std::string &path);

private:
    /** The current number of correspondences */
    int n_correspondences_;
    /** The list of keypoints detected on the model surface */
    std::vector<cv::KeyPoint> list_keypoints_;
    /** The list of 2D points on the model surface */
    std::vector<cv::Point2f> list_points2d_in_;
    /** The list of 2D points outside the model surface */
    std::vector<cv::Point2f> list_points2d_out_;
    /** The list of 3D points on the model surface */
    std::vector<cv::Point3f> list_points3d_in_;
    /** The list of 2D point descriptors */
    cv::Mat descriptors_;
    /** Path to the training image */
    std::string training_img_path_;
};

#endif /* OBJECTMODEL_H_ */
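An illustrative round trip through Model::save()/Model::load() above (not part of this commit; file names and values are placeholders):

// Illustrative sketch only: build a tiny Model, save it as YAML and reload it.
#include <iostream>
#include "Model.h"

int main()
{
    Model model;
    model.add_correspondence(cv::Point2f(100.f, 120.f), cv::Point3f(0.f, 0.f, 7.5f));
    model.add_keypoint(cv::KeyPoint(100.f, 120.f, 31.f));
    model.add_descriptor(cv::Mat::zeros(1, 32, CV_8U));   // one ORB-sized descriptor row, dummy values
    model.set_trainingImagePath("training_image.jpg");    // hypothetical file name

    model.save("my_model.yml");                           // writes points_3d, descriptors, ...

    Model loaded;
    loaded.load("my_model.yml");
    std::cout << loaded.get_points3d().size() << " 3D points restored" << std::endl;
    return 0;
}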
@ -0,0 +1,34 @@
/*
 * ModelRegistration.cpp
 *
 *  Created on: Apr 18, 2014
 *      Author: edgar
 */

#include "ModelRegistration.h"

ModelRegistration::ModelRegistration() : n_registrations_(0), max_registrations_(0),
    list_points2d_(), list_points3d_()
{
}

ModelRegistration::~ModelRegistration()
{
    // TODO Auto-generated destructor stub
}

void ModelRegistration::registerPoint(const cv::Point2f &point2d, const cv::Point3f &point3d)
{
    // add correspondence at the end of the vector
    list_points2d_.push_back(point2d);
    list_points3d_.push_back(point3d);
    n_registrations_++;
}

void ModelRegistration::reset()
{
    n_registrations_ = 0;
    max_registrations_ = 0;
    list_points2d_.clear();
    list_points3d_.clear();
}
@ -0,0 +1,42 @@
/*
 * ModelRegistration.h
 *
 *  Created on: Apr 18, 2014
 *      Author: edgar
 */

#ifndef MODELREGISTRATION_H_
#define MODELREGISTRATION_H_

#include <iostream>
#include <opencv2/core.hpp>

class ModelRegistration
{
public:
    ModelRegistration();
    virtual ~ModelRegistration();

    void setNumMax(int n) { max_registrations_ = n; }

    std::vector<cv::Point2f> get_points2d() const { return list_points2d_; }
    std::vector<cv::Point3f> get_points3d() const { return list_points3d_; }
    int getNumMax() const { return max_registrations_; }
    int getNumRegist() const { return n_registrations_; }

    bool is_registrable() const { return (n_registrations_ < max_registrations_); }
    void registerPoint(const cv::Point2f &point2d, const cv::Point3f &point3d);
    void reset();

private:
    /** The current number of registered points */
    int n_registrations_;
    /** The total number of points to register */
    int max_registrations_;
    /** The list of 2D points to register the model */
    std::vector<cv::Point2f> list_points2d_;
    /** The list of 3D points to register the model */
    std::vector<cv::Point3f> list_points3d_;
};

#endif /* MODELREGISTRATION_H_ */
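A small sketch of the registration flow this class drives (illustrative only; in the tutorial the 2D points come from mouse clicks on the training image, here they are hard-coded placeholders):

// Illustrative sketch only: register a few 2D-3D correspondences with the class above.
#include "ModelRegistration.h"

int main()
{
    ModelRegistration registration;
    registration.setNumMax(8);   // e.g. the 8 corners of the box mesh

    // Hypothetical clicked 2D positions paired with known 3D corners:
    if (registration.is_registrable())
        registration.registerPoint(cv::Point2f(320.f, 240.f), cv::Point3f(0.f, 0.f, 0.f));
    if (registration.is_registrable())
        registration.registerPoint(cv::Point2f(400.f, 250.f), cv::Point3f(18.9f, 0.f, 0.f));

    // get_points2d()/get_points3d() would then be handed to a pose estimator.
    return registration.getNumRegist();
}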
307
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp
vendored
Normal file
307
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp
vendored
Normal file
@ -0,0 +1,307 @@
/*
 * PnPProblem.cpp
 *
 *  Created on: Mar 28, 2014
 *      Author: Edgar Riba
 */

#include <iostream>
#include <sstream>

#include "PnPProblem.h"
#include "Mesh.h"

#include <opencv2/calib3d/calib3d.hpp>

/* Functions for Möller-Trumbore intersection algorithm */
static cv::Point3f CROSS(cv::Point3f v1, cv::Point3f v2)
{
    cv::Point3f tmp_p;
    tmp_p.x = v1.y*v2.z - v1.z*v2.y;
    tmp_p.y = v1.z*v2.x - v1.x*v2.z;
    tmp_p.z = v1.x*v2.y - v1.y*v2.x;
    return tmp_p;
}

static double DOT(cv::Point3f v1, cv::Point3f v2)
{
    return v1.x*v2.x + v1.y*v2.y + v1.z*v2.z;
}

static cv::Point3f SUB(cv::Point3f v1, cv::Point3f v2)
{
    cv::Point3f tmp_p;
    tmp_p.x = v1.x - v2.x;
    tmp_p.y = v1.y - v2.y;
    tmp_p.z = v1.z - v2.z;
    return tmp_p;
}

/* End functions for Möller-Trumbore intersection algorithm */

// Function to get the nearest 3D point to the Ray origin
static cv::Point3f get_nearest_3D_point(std::vector<cv::Point3f> &points_list, cv::Point3f origin)
{
    cv::Point3f p1 = points_list[0];
    cv::Point3f p2 = points_list[1];

    double d1 = std::sqrt( std::pow(p1.x-origin.x, 2) + std::pow(p1.y-origin.y, 2) + std::pow(p1.z-origin.z, 2) );
    double d2 = std::sqrt( std::pow(p2.x-origin.x, 2) + std::pow(p2.y-origin.y, 2) + std::pow(p2.z-origin.z, 2) );

    if(d1 < d2)
    {
        return p1;
    }
    else
    {
        return p2;
    }
}

// Custom constructor given the intrinsic camera parameters
PnPProblem::PnPProblem(const double params[])
{
    A_matrix_ = cv::Mat::zeros(3, 3, CV_64FC1);   // intrinsic camera parameters
    A_matrix_.at<double>(0, 0) = params[0];       //  [ fx   0  cx ]
    A_matrix_.at<double>(1, 1) = params[1];       //  [  0  fy  cy ]
    A_matrix_.at<double>(0, 2) = params[2];       //  [  0   0   1 ]
    A_matrix_.at<double>(1, 2) = params[3];
    A_matrix_.at<double>(2, 2) = 1;
    R_matrix_ = cv::Mat::zeros(3, 3, CV_64FC1);   // rotation matrix
    t_matrix_ = cv::Mat::zeros(3, 1, CV_64FC1);   // translation matrix
    P_matrix_ = cv::Mat::zeros(3, 4, CV_64FC1);   // rotation-translation matrix
}

PnPProblem::~PnPProblem()
{
    // TODO Auto-generated destructor stub
}

void PnPProblem::set_P_matrix( const cv::Mat &R_matrix, const cv::Mat &t_matrix)
{
    // Rotation-Translation Matrix Definition
    P_matrix_.at<double>(0,0) = R_matrix.at<double>(0,0);
    P_matrix_.at<double>(0,1) = R_matrix.at<double>(0,1);
    P_matrix_.at<double>(0,2) = R_matrix.at<double>(0,2);
    P_matrix_.at<double>(1,0) = R_matrix.at<double>(1,0);
    P_matrix_.at<double>(1,1) = R_matrix.at<double>(1,1);
    P_matrix_.at<double>(1,2) = R_matrix.at<double>(1,2);
    P_matrix_.at<double>(2,0) = R_matrix.at<double>(2,0);
    P_matrix_.at<double>(2,1) = R_matrix.at<double>(2,1);
    P_matrix_.at<double>(2,2) = R_matrix.at<double>(2,2);
    P_matrix_.at<double>(0,3) = t_matrix.at<double>(0);
    P_matrix_.at<double>(1,3) = t_matrix.at<double>(1);
    P_matrix_.at<double>(2,3) = t_matrix.at<double>(2);
}

// Estimate the pose given a list of 2D/3D correspondences and the method to use
bool PnPProblem::estimatePose( const std::vector<cv::Point3f> &list_points3d,
                               const std::vector<cv::Point2f> &list_points2d,
                               int flags)
{
    cv::Mat distCoeffs = cv::Mat::zeros(4, 1, CV_64FC1);
    cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1);
    cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1);

    bool useExtrinsicGuess = false;

    // Pose estimation
    bool correspondence = cv::solvePnP( list_points3d, list_points2d, A_matrix_, distCoeffs, rvec, tvec,
                                        useExtrinsicGuess, flags);

    // Transforms Rotation Vector to Matrix
    Rodrigues(rvec, R_matrix_);
    t_matrix_ = tvec;

    // Set projection matrix
    this->set_P_matrix(R_matrix_, t_matrix_);

    return correspondence;
}

// Estimate the pose given a list of 2D/3D correspondences with RANSAC and the method to use
void PnPProblem::estimatePoseRANSAC( const std::vector<cv::Point3f> &list_points3d,     // list with model 3D coordinates
                                     const std::vector<cv::Point2f> &list_points2d,     // list with scene 2D coordinates
                                     int flags, cv::Mat &inliers, int iterationsCount,  // PnP method; inliers container
                                     float reprojectionError, double confidence )       // RANSAC parameters
{
    cv::Mat distCoeffs = cv::Mat::zeros(4, 1, CV_64FC1);  // vector of distortion coefficients
    cv::Mat rvec = cv::Mat::zeros(3, 1, CV_64FC1);        // output rotation vector
    cv::Mat tvec = cv::Mat::zeros(3, 1, CV_64FC1);        // output translation vector

    bool useExtrinsicGuess = false;   // if true the function uses the provided rvec and tvec values as
                                      // initial approximations of the rotation and translation vectors

    cv::solvePnPRansac( list_points3d, list_points2d, A_matrix_, distCoeffs, rvec, tvec,
                        useExtrinsicGuess, iterationsCount, reprojectionError, confidence,
                        inliers, flags );

    Rodrigues(rvec, R_matrix_);   // converts Rotation Vector to Matrix
    t_matrix_ = tvec;             // set translation matrix

    this->set_P_matrix(R_matrix_, t_matrix_);   // set rotation-translation matrix
}

// Given the mesh, backproject the 3D points to 2D to verify the pose estimation
std::vector<cv::Point2f> PnPProblem::verify_points(Mesh *mesh)
{
    std::vector<cv::Point2f> verified_points_2d;
    for( int i = 0; i < mesh->getNumVertices(); i++)
    {
        cv::Point3f point3d = mesh->getVertex(i);
        cv::Point2f point2d = this->backproject3DPoint(point3d);
        verified_points_2d.push_back(point2d);
    }

    return verified_points_2d;
}

// Backproject a 3D point to 2D using the estimated pose parameters
cv::Point2f PnPProblem::backproject3DPoint(const cv::Point3f &point3d)
{
    // 3D point vector [x y z 1]'
    cv::Mat point3d_vec = cv::Mat(4, 1, CV_64FC1);
    point3d_vec.at<double>(0) = point3d.x;
    point3d_vec.at<double>(1) = point3d.y;
    point3d_vec.at<double>(2) = point3d.z;
    point3d_vec.at<double>(3) = 1;

    // 2D point vector [u v 1]'
    cv::Mat point2d_vec = cv::Mat(4, 1, CV_64FC1);
    point2d_vec = A_matrix_ * P_matrix_ * point3d_vec;

    // Normalization of [u v]'
    cv::Point2f point2d;
    point2d.x = (float)(point2d_vec.at<double>(0) / point2d_vec.at<double>(2));
    point2d.y = (float)(point2d_vec.at<double>(1) / point2d_vec.at<double>(2));

    return point2d;
}

// Back project a 2D point to 3D and returns if it's on the object surface
bool PnPProblem::backproject2DPoint(const Mesh *mesh, const cv::Point2f &point2d, cv::Point3f &point3d)
{
    // Triangles list of the object mesh
    std::vector<std::vector<int> > triangles_list = mesh->getTrianglesList();

    double lambda = 8;
    double u = point2d.x;
    double v = point2d.y;

    // Point in vector form
    cv::Mat point2d_vec = cv::Mat::ones(3, 1, CV_64F); // 3x1
    point2d_vec.at<double>(0) = u * lambda;
    point2d_vec.at<double>(1) = v * lambda;
    point2d_vec.at<double>(2) = lambda;

    // Point in camera coordinates
    cv::Mat X_c = A_matrix_.inv() * point2d_vec ; // 3x1

    // Point in world coordinates
    cv::Mat X_w = R_matrix_.inv() * ( X_c - t_matrix_ ); // 3x1

    // Center of projection
    cv::Mat C_op = cv::Mat(R_matrix_.inv()).mul(-1) * t_matrix_; // 3x1

    // Ray direction vector
    cv::Mat ray = X_w - C_op; // 3x1
    ray = ray / cv::norm(ray); // 3x1

    // Set up Ray
    Ray R((cv::Point3f)C_op, (cv::Point3f)ray);

    // A vector to store the intersections found
    std::vector<cv::Point3f> intersections_list;

    // Loop for all the triangles and check the intersection
    for (unsigned int i = 0; i < triangles_list.size(); i++)
    {
        cv::Point3f V0 = mesh->getVertex(triangles_list[i][0]);
        cv::Point3f V1 = mesh->getVertex(triangles_list[i][1]);
        cv::Point3f V2 = mesh->getVertex(triangles_list[i][2]);

        Triangle T(V0, V1, V2);

        double out;
        if(this->intersect_MollerTrumbore(R, T, &out))
        {
            cv::Point3f tmp_pt = R.getP0() + out*R.getP1(); // P = O + t*D
            intersections_list.push_back(tmp_pt);
        }
    }

    // If there are intersections, find the nearest one
    if (!intersections_list.empty())
    {
        point3d = get_nearest_3D_point(intersections_list, R.getP0());
        return true;
    }
    else
    {
        return false;
    }
}

// Möller-Trumbore intersection algorithm
bool PnPProblem::intersect_MollerTrumbore(Ray &Ray, Triangle &Triangle, double *out)
{
    const double EPSILON = 0.000001;

    cv::Point3f e1, e2;
    cv::Point3f P, Q, T;
    double det, inv_det, u, v;
    double t;

    cv::Point3f V1 = Triangle.getV0();  // Triangle vertices
    cv::Point3f V2 = Triangle.getV1();
    cv::Point3f V3 = Triangle.getV2();

    cv::Point3f O = Ray.getP0(); // Ray origin
    cv::Point3f D = Ray.getP1(); // Ray direction

    //Find vectors for two edges sharing V1
    e1 = SUB(V2, V1);
    e2 = SUB(V3, V1);

    // Begin calculating determinant - also used to calculate U parameter
    P = CROSS(D, e2);

    // If determinant is near zero, ray lies in plane of triangle
    det = DOT(e1, P);

    //NOT CULLING
    if(det > -EPSILON && det < EPSILON) return false;
    inv_det = 1.f / det;

    //calculate distance from V1 to ray origin
    T = SUB(O, V1);

    //Calculate u parameter and test bound
    u = DOT(T, P) * inv_det;

    //The intersection lies outside of the triangle
    if(u < 0.f || u > 1.f) return false;

    //Prepare to test v parameter
    Q = CROSS(T, e1);

    //Calculate V parameter and test bound
    v = DOT(D, Q) * inv_det;

    //The intersection lies outside of the triangle
    if(v < 0.f || u + v > 1.f) return false;

    t = DOT(e2, Q) * inv_det;

    if(t > EPSILON) { //ray intersection
        *out = t;
        return true;
    }

    // No hit, no win
    return false;
}
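To make the call sequence concrete, a self-contained sketch (illustrative only, not part of this commit) that synthesizes correspondences for the box corners and recovers the pose with estimatePoseRANSAC:

// Illustrative sketch only: recover a known pose with the PnPProblem class above.
// The webcam intrinsics match the ones used later in main_detection.cpp; the
// ground-truth rvec/tvec values are arbitrary.
#include <opencv2/calib3d.hpp>
#include "PnPProblem.h"

int main()
{
    const double f = 55, sx = 22.3, sy = 14.9, w = 640, h = 480;
    const double params[] = { w*f/sx, h*f/sy, w/2, h/2 };   // fx, fy, cx, cy
    PnPProblem pnp(params);

    // The 8 corners of the box mesh (same values as Data/box.ply)
    std::vector<cv::Point3f> pts3d = {
        {0,0,0}, {0,25.8f,0}, {18.9f,0,0}, {18.9f,25.8f,0},
        {0,0,7.5f}, {0,25.8f,7.5f}, {18.9f,0,7.5f}, {18.9f,25.8f,7.5f} };

    // Synthesize the 2D observations from an arbitrary ground-truth pose
    cv::Mat rvec = (cv::Mat_<double>(3,1) << 0.1, -0.2, 0.05);
    cv::Mat tvec = (cv::Mat_<double>(3,1) << -10, -10, 100);
    std::vector<cv::Point2f> pts2d;
    cv::projectPoints(pts3d, rvec, tvec, pnp.get_A_matrix(), cv::noArray(), pts2d);

    cv::Mat inliers;
    pnp.estimatePoseRANSAC(pts3d, pts2d, cv::SOLVEPNP_ITERATIVE, inliers,
                           500, 6.0f, 0.99);   // iterations, reprojection error, confidence

    // pnp.get_R_matrix() / get_t_matrix() should now reproduce rvec/tvec (up to noise)
    return 0;
}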
@ -0,0 +1,52 @@
/*
 * PnPProblem.h
 *
 *  Created on: Mar 28, 2014
 *      Author: Edgar Riba
 */

#ifndef PNPPROBLEM_H_
#define PNPPROBLEM_H_

#include <iostream>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>

#include "Mesh.h"
#include "ModelRegistration.h"

class PnPProblem
{
public:
    explicit PnPProblem(const double param[]);  // custom constructor
    virtual ~PnPProblem();

    bool backproject2DPoint(const Mesh *mesh, const cv::Point2f &point2d, cv::Point3f &point3d);
    bool intersect_MollerTrumbore(Ray &R, Triangle &T, double *out);
    std::vector<cv::Point2f> verify_points(Mesh *mesh);
    cv::Point2f backproject3DPoint(const cv::Point3f &point3d);
    bool estimatePose(const std::vector<cv::Point3f> &list_points3d, const std::vector<cv::Point2f> &list_points2d, int flags);
    void estimatePoseRANSAC( const std::vector<cv::Point3f> &list_points3d, const std::vector<cv::Point2f> &list_points2d,
                             int flags, cv::Mat &inliers,
                             int iterationsCount, float reprojectionError, double confidence );

    cv::Mat get_A_matrix() const { return A_matrix_; }
    cv::Mat get_R_matrix() const { return R_matrix_; }
    cv::Mat get_t_matrix() const { return t_matrix_; }
    cv::Mat get_P_matrix() const { return P_matrix_; }

    void set_P_matrix( const cv::Mat &R_matrix, const cv::Mat &t_matrix);

private:
    /** The calibration matrix */
    cv::Mat A_matrix_;
    /** The computed rotation matrix */
    cv::Mat R_matrix_;
    /** The computed translation matrix */
    cv::Mat t_matrix_;
    /** The computed projection matrix */
    cv::Mat P_matrix_;
};

#endif /* PNPPROBLEM_H_ */
@ -0,0 +1,154 @@
/*
 * RobustMatcher.cpp
 *
 *  Created on: Jun 4, 2014
 *      Author: eriba
 */

#include "RobustMatcher.h"
#include <time.h>

#include <opencv2/features2d/features2d.hpp>

RobustMatcher::~RobustMatcher()
{
    // TODO Auto-generated destructor stub
}

void RobustMatcher::computeKeyPoints( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints)
{
    detector_->detect(image, keypoints);
}

void RobustMatcher::computeDescriptors( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors)
{
    extractor_->compute(image, keypoints, descriptors);
}

int RobustMatcher::ratioTest(std::vector<std::vector<cv::DMatch> > &matches)
{
    int removed = 0;
    // for all matches
    for ( std::vector<std::vector<cv::DMatch> >::iterator
          matchIterator= matches.begin(); matchIterator!= matches.end(); ++matchIterator)
    {
        // if 2 NN have been identified
        if (matchIterator->size() > 1)
        {
            // check distance ratio
            if ((*matchIterator)[0].distance / (*matchIterator)[1].distance > ratio_)
            {
                matchIterator->clear(); // remove match
                removed++;
            }
        }
        else
        { // does not have 2 neighbours
            matchIterator->clear(); // remove match
            removed++;
        }
    }
    return removed;
}

void RobustMatcher::symmetryTest( const std::vector<std::vector<cv::DMatch> >& matches1,
                                  const std::vector<std::vector<cv::DMatch> >& matches2,
                                  std::vector<cv::DMatch>& symMatches )
{
    // for all matches image 1 -> image 2
    for (std::vector<std::vector<cv::DMatch> >::const_iterator
         matchIterator1 = matches1.begin(); matchIterator1 != matches1.end(); ++matchIterator1)
    {
        // ignore deleted matches
        if (matchIterator1->empty() || matchIterator1->size() < 2)
            continue;

        // for all matches image 2 -> image 1
        for (std::vector<std::vector<cv::DMatch> >::const_iterator
             matchIterator2 = matches2.begin(); matchIterator2 != matches2.end(); ++matchIterator2)
        {
            // ignore deleted matches
            if (matchIterator2->empty() || matchIterator2->size() < 2)
                continue;

            // Match symmetry test
            if ((*matchIterator1)[0].queryIdx == (*matchIterator2)[0].trainIdx &&
                (*matchIterator2)[0].queryIdx == (*matchIterator1)[0].trainIdx)
            {
                // add symmetrical match
                symMatches.push_back(cv::DMatch((*matchIterator1)[0].queryIdx,
                                                (*matchIterator1)[0].trainIdx, (*matchIterator1)[0].distance));
                break; // next match in image 1 -> image 2
            }
        }
    }
}

void RobustMatcher::robustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
                                 std::vector<cv::KeyPoint>& keypoints_frame, const cv::Mat& descriptors_model,
                                 const std::vector<cv::KeyPoint>& keypoints_model)
{
    // 1a. Detection of the ORB features
    this->computeKeyPoints(frame, keypoints_frame);

    // 1b. Extraction of the ORB descriptors
    cv::Mat descriptors_frame;
    this->computeDescriptors(frame, keypoints_frame, descriptors_frame);

    // 2. Match the two image descriptors
    std::vector<std::vector<cv::DMatch> > matches12, matches21;

    // 2a. From image 1 to image 2
    matcher_->knnMatch(descriptors_frame, descriptors_model, matches12, 2); // return 2 nearest neighbours

    // 2b. From image 2 to image 1
    matcher_->knnMatch(descriptors_model, descriptors_frame, matches21, 2); // return 2 nearest neighbours

    // 3. Remove matches for which NN ratio is > than threshold
    // clean image 1 -> image 2 matches
    ratioTest(matches12);
    // clean image 2 -> image 1 matches
    ratioTest(matches21);

    // 4. Remove non-symmetrical matches
    symmetryTest(matches12, matches21, good_matches);

    if (!training_img_.empty() && !keypoints_model.empty())
    {
        cv::drawMatches(frame, keypoints_frame, training_img_, keypoints_model, good_matches, img_matching_);
    }
}

void RobustMatcher::fastRobustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
                                     std::vector<cv::KeyPoint>& keypoints_frame,
                                     const cv::Mat& descriptors_model,
                                     const std::vector<cv::KeyPoint>& keypoints_model)
{
    good_matches.clear();

    // 1a. Detection of the ORB features
    this->computeKeyPoints(frame, keypoints_frame);

    // 1b. Extraction of the ORB descriptors
    cv::Mat descriptors_frame;
    this->computeDescriptors(frame, keypoints_frame, descriptors_frame);

    // 2. Match the two image descriptors
    std::vector<std::vector<cv::DMatch> > matches;
    matcher_->knnMatch(descriptors_frame, descriptors_model, matches, 2);

    // 3. Remove matches for which NN ratio is > than threshold
    ratioTest(matches);

    // 4. Fill good matches container
    for ( std::vector<std::vector<cv::DMatch> >::iterator
          matchIterator= matches.begin(); matchIterator!= matches.end(); ++matchIterator)
    {
        if (!matchIterator->empty()) good_matches.push_back((*matchIterator)[0]);
    }

    if (!training_img_.empty() && !keypoints_model.empty())
    {
        cv::drawMatches(frame, keypoints_frame, training_img_, keypoints_model, good_matches, img_matching_);
    }
}
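A minimal wiring sketch for the matcher above (illustrative only; the frame path and the empty model containers are placeholders that would normally come from Model):

// Illustrative sketch only: configure the RobustMatcher with ORB and match a frame
// against previously trained model descriptors.
#include <opencv2/features2d.hpp>
#include <opencv2/imgcodecs.hpp>
#include "RobustMatcher.h"

int main()
{
    RobustMatcher rmatcher;
    cv::Ptr<cv::ORB> orb = cv::ORB::create(2000);
    rmatcher.setFeatureDetector(orb);
    rmatcher.setDescriptorExtractor(orb);
    rmatcher.setDescriptorMatcher(cv::DescriptorMatcher::create("BruteForce-Hamming"));
    rmatcher.setRatio(0.70f);

    cv::Mat frame = cv::imread("frame.png");       // placeholder input image
    cv::Mat descriptors_model;                     // would come from Model::get_descriptors()
    std::vector<cv::KeyPoint> keypoints_model;     // would come from Model::get_keypoints()

    std::vector<cv::DMatch> good_matches;
    std::vector<cv::KeyPoint> keypoints_frame;
    rmatcher.robustMatch(frame, good_matches, keypoints_frame,
                         descriptors_model, keypoints_model);
    return (int)good_matches.size();
}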
@ -0,0 +1,92 @@
/*
 * RobustMatcher.h
 *
 *  Created on: Jun 4, 2014
 *      Author: eriba
 */

#ifndef ROBUSTMATCHER_H_
#define ROBUSTMATCHER_H_

#include <iostream>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/features2d/features2d.hpp>

class RobustMatcher {
public:
    RobustMatcher() : detector_(), extractor_(), matcher_(),
        ratio_(0.8f), training_img_(), img_matching_()
    {
        // ORB is the default feature
        detector_ = cv::ORB::create();
        extractor_ = cv::ORB::create();

        // BruteForce matcher with Norm Hamming is the default matcher
        matcher_ = cv::makePtr<cv::BFMatcher>((int)cv::NORM_HAMMING, false);
    }
    virtual ~RobustMatcher();

    // Set the feature detector
    void setFeatureDetector(const cv::Ptr<cv::FeatureDetector>& detect) { detector_ = detect; }

    // Set the descriptor extractor
    void setDescriptorExtractor(const cv::Ptr<cv::DescriptorExtractor>& desc) { extractor_ = desc; }

    // Set the matcher
    void setDescriptorMatcher(const cv::Ptr<cv::DescriptorMatcher>& match) { matcher_ = match; }

    // Compute the keypoints of an image
    void computeKeyPoints( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints);

    // Compute the descriptors of an image given its keypoints
    void computeDescriptors( const cv::Mat& image, std::vector<cv::KeyPoint>& keypoints, cv::Mat& descriptors);

    cv::Mat getImageMatching() const { return img_matching_; }

    // Set ratio parameter for the ratio test
    void setRatio( float rat) { ratio_ = rat; }

    void setTrainingImage(const cv::Mat &img) { training_img_ = img; }

    // Clear matches for which NN ratio is > than threshold
    // return the number of removed points
    // (corresponding entries being cleared,
    // i.e. size will be 0)
    int ratioTest(std::vector<std::vector<cv::DMatch> > &matches);

    // Insert symmetrical matches in symMatches vector
    void symmetryTest( const std::vector<std::vector<cv::DMatch> >& matches1,
                       const std::vector<std::vector<cv::DMatch> >& matches2,
                       std::vector<cv::DMatch>& symMatches );

    // Match feature points using ratio and symmetry test
    void robustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
                      std::vector<cv::KeyPoint>& keypoints_frame,
                      const cv::Mat& descriptors_model,
                      const std::vector<cv::KeyPoint>& keypoints_model);

    // Match feature points using ratio test
    void fastRobustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
                          std::vector<cv::KeyPoint>& keypoints_frame,
                          const cv::Mat& descriptors_model,
                          const std::vector<cv::KeyPoint>& keypoints_model);

private:
    // pointer to the feature point detector object
    cv::Ptr<cv::FeatureDetector> detector_;
    // pointer to the feature descriptor extractor object
    cv::Ptr<cv::DescriptorExtractor> extractor_;
    // pointer to the matcher object
    cv::Ptr<cv::DescriptorMatcher> matcher_;
    // max ratio between 1st and 2nd NN
    float ratio_;
    // training image
    cv::Mat training_img_;
    // matching image
    cv::Mat img_matching_;
};

#endif /* ROBUSTMATCHER_H_ */
394
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp
vendored
Normal file
394
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp
vendored
Normal file
@ -0,0 +1,394 @@
/*
 * Utils.cpp
 *
 *  Created on: Mar 28, 2014
 *      Author: Edgar Riba
 */

#include <iostream>

#include "PnPProblem.h"
#include "ModelRegistration.h"
#include "Utils.h"

#include <opencv2/opencv_modules.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/flann.hpp>
#if defined (HAVE_OPENCV_XFEATURES2D)
#include <opencv2/xfeatures2d.hpp>
#endif

// For text
const int fontFace = cv::FONT_ITALIC;
const double fontScale = 0.75;
const int thickness_font = 2;

// For circles
const int lineType = 8;
const int radius = 4;

// Draw a text with the question point
void drawQuestion(cv::Mat image, cv::Point3f point, cv::Scalar color)
{
    std::string x = IntToString((int)point.x);
    std::string y = IntToString((int)point.y);
    std::string z = IntToString((int)point.z);

    std::string text = " Where is point (" + x + "," + y + "," + z + ") ?";
    cv::putText(image, text, cv::Point(25,50), fontFace, fontScale, color, thickness_font, 8);
}

// Draw a text with the number of entered points
void drawText(cv::Mat image, std::string text, cv::Scalar color)
{
    cv::putText(image, text, cv::Point(25,50), fontFace, fontScale, color, thickness_font, 8);
}

// Draw a text with the number of entered points
void drawText2(cv::Mat image, std::string text, cv::Scalar color)
{
    cv::putText(image, text, cv::Point(25,75), fontFace, fontScale, color, thickness_font, 8);
}

// Draw a text with the frame rate
void drawFPS(cv::Mat image, double fps, cv::Scalar color)
{
    std::string fps_str = cv::format("%.2f FPS", fps);
    cv::putText(image, fps_str, cv::Point(500,50), fontFace, fontScale, color, thickness_font, 8);
}

// Draw a text with the estimated confidence
void drawConfidence(cv::Mat image, double confidence, cv::Scalar color)
{
    std::string conf_str = IntToString((int)confidence);
    std::string text = conf_str + " %";
    cv::putText(image, text, cv::Point(500,75), fontFace, fontScale, color, thickness_font, 8);
}

// Draw a text with the number of entered points
void drawCounter(cv::Mat image, int n, int n_max, cv::Scalar color)
{
    std::string n_str = IntToString(n);
    std::string n_max_str = IntToString(n_max);
    std::string text = n_str + " of " + n_max_str + " points";
    cv::putText(image, text, cv::Point(500,50), fontFace, fontScale, color, thickness_font, 8);
}

// Draw the points and the coordinates
void drawPoints(cv::Mat image, std::vector<cv::Point2f> &list_points_2d, std::vector<cv::Point3f> &list_points_3d, cv::Scalar color)
{
    for (unsigned int i = 0; i < list_points_2d.size(); ++i)
    {
        cv::Point2f point_2d = list_points_2d[i];
        cv::Point3f point_3d = list_points_3d[i];

        // Draw Selected points
        cv::circle(image, point_2d, radius, color, -1, lineType );

        std::string idx = IntToString(i+1);
        std::string x = IntToString((int)point_3d.x);
        std::string y = IntToString((int)point_3d.y);
        std::string z = IntToString((int)point_3d.z);
        std::string text = "P" + idx + " (" + x + "," + y + "," + z +")";

        point_2d.x = point_2d.x + 10;
        point_2d.y = point_2d.y - 10;
        cv::putText(image, text, point_2d, fontFace, fontScale*0.5, color, thickness_font, 8);
    }
}

// Draw only the 2D points
void draw2DPoints(cv::Mat image, std::vector<cv::Point2f> &list_points, cv::Scalar color)
{
    for( size_t i = 0; i < list_points.size(); i++)
    {
        cv::Point2f point_2d = list_points[i];

        // Draw Selected points
        cv::circle(image, point_2d, radius, color, -1, lineType );
    }
}

// Draw an arrow into the image
void drawArrow(cv::Mat image, cv::Point2i p, cv::Point2i q, cv::Scalar color, int arrowMagnitude, int thickness, int line_type, int shift)
{
    //Draw the principal line
    cv::line(image, p, q, color, thickness, line_type, shift);
    const double PI = CV_PI;
    //compute the angle alpha
    double angle = atan2((double)p.y-q.y, (double)p.x-q.x);
    //compute the coordinates of the first segment
    p.x = (int) ( q.x + arrowMagnitude * cos(angle + PI/4));
    p.y = (int) ( q.y + arrowMagnitude * sin(angle + PI/4));
    //Draw the first segment
    cv::line(image, p, q, color, thickness, line_type, shift);
    //compute the coordinates of the second segment
    p.x = (int) ( q.x + arrowMagnitude * cos(angle - PI/4));
    p.y = (int) ( q.y + arrowMagnitude * sin(angle - PI/4));
    //Draw the second segment
    cv::line(image, p, q, color, thickness, line_type, shift);
}

// Draw the 3D coordinate axes
void draw3DCoordinateAxes(cv::Mat image, const std::vector<cv::Point2f> &list_points2d)
{
    cv::Scalar red(0, 0, 255);
    cv::Scalar green(0,255,0);
    cv::Scalar blue(255,0,0);
    cv::Scalar black(0,0,0);

    cv::Point2i origin = list_points2d[0];
    cv::Point2i pointX = list_points2d[1];
    cv::Point2i pointY = list_points2d[2];
    cv::Point2i pointZ = list_points2d[3];

    drawArrow(image, origin, pointX, red, 9, 2);
    drawArrow(image, origin, pointY, green, 9, 2);
    drawArrow(image, origin, pointZ, blue, 9, 2);
    cv::circle(image, origin, radius/2, black, -1, lineType );
}

// Draw the object mesh
void drawObjectMesh(cv::Mat image, const Mesh *mesh, PnPProblem *pnpProblem, cv::Scalar color)
{
    std::vector<std::vector<int> > list_triangles = mesh->getTrianglesList();
    for( size_t i = 0; i < list_triangles.size(); i++)
    {
        std::vector<int> tmp_triangle = list_triangles.at(i);

        cv::Point3f point_3d_0 = mesh->getVertex(tmp_triangle[0]);
        cv::Point3f point_3d_1 = mesh->getVertex(tmp_triangle[1]);
        cv::Point3f point_3d_2 = mesh->getVertex(tmp_triangle[2]);

        cv::Point2f point_2d_0 = pnpProblem->backproject3DPoint(point_3d_0);
        cv::Point2f point_2d_1 = pnpProblem->backproject3DPoint(point_3d_1);
        cv::Point2f point_2d_2 = pnpProblem->backproject3DPoint(point_3d_2);

        cv::line(image, point_2d_0, point_2d_1, color, 1);
        cv::line(image, point_2d_1, point_2d_2, color, 1);
        cv::line(image, point_2d_2, point_2d_0, color, 1);
    }
}

// Computes the norm of the translation error
double get_translation_error(const cv::Mat &t_true, const cv::Mat &t)
{
    return cv::norm( t_true - t );
}

// Computes the norm of the rotation error
double get_rotation_error(const cv::Mat &R_true, const cv::Mat &R)
{
    cv::Mat error_vec, error_mat;
    error_mat = -R_true * R.t();
    cv::Rodrigues(error_mat, error_vec);

    return cv::norm(error_vec);
}

// Converts a given Rotation Matrix to Euler angles
// Convention used is Y-Z-X Tait-Bryan angles
// Reference code implementation:
// https://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToEuler/index.htm
cv::Mat rot2euler(const cv::Mat & rotationMatrix)
{
    cv::Mat euler(3,1,CV_64F);

    double m00 = rotationMatrix.at<double>(0,0);
    double m02 = rotationMatrix.at<double>(0,2);
    double m10 = rotationMatrix.at<double>(1,0);
    double m11 = rotationMatrix.at<double>(1,1);
    double m12 = rotationMatrix.at<double>(1,2);
    double m20 = rotationMatrix.at<double>(2,0);
    double m22 = rotationMatrix.at<double>(2,2);

    double bank, attitude, heading;

    // Assuming the angles are in radians.
    if (m10 > 0.998) { // singularity at north pole
        bank = 0;
        attitude = CV_PI/2;
        heading = atan2(m02,m22);
    }
    else if (m10 < -0.998) { // singularity at south pole
        bank = 0;
        attitude = -CV_PI/2;
        heading = atan2(m02,m22);
    }
    else
    {
        bank = atan2(-m12,m11);
        attitude = asin(m10);
        heading = atan2(-m20,m00);
    }

    euler.at<double>(0) = bank;
    euler.at<double>(1) = attitude;
    euler.at<double>(2) = heading;

    return euler;
}

// Converts a given Euler angles to Rotation Matrix
// Convention used is Y-Z-X Tait-Bryan angles
// Reference:
// https://www.euclideanspace.com/maths/geometry/rotations/conversions/eulerToMatrix/index.htm
cv::Mat euler2rot(const cv::Mat & euler)
{
    cv::Mat rotationMatrix(3,3,CV_64F);

    double bank = euler.at<double>(0);
    double attitude = euler.at<double>(1);
    double heading = euler.at<double>(2);

    // Assuming the angles are in radians.
    double ch = cos(heading);
    double sh = sin(heading);
    double ca = cos(attitude);
    double sa = sin(attitude);
    double cb = cos(bank);
    double sb = sin(bank);

    double m00, m01, m02, m10, m11, m12, m20, m21, m22;

    m00 = ch * ca;
    m01 = sh*sb - ch*sa*cb;
    m02 = ch*sa*sb + sh*cb;
    m10 = sa;
    m11 = ca*cb;
    m12 = -ca*sb;
    m20 = -sh*ca;
    m21 = sh*sa*cb + ch*sb;
    m22 = -sh*sa*sb + ch*cb;

    rotationMatrix.at<double>(0,0) = m00;
    rotationMatrix.at<double>(0,1) = m01;
    rotationMatrix.at<double>(0,2) = m02;
    rotationMatrix.at<double>(1,0) = m10;
    rotationMatrix.at<double>(1,1) = m11;
    rotationMatrix.at<double>(1,2) = m12;
    rotationMatrix.at<double>(2,0) = m20;
    rotationMatrix.at<double>(2,1) = m21;
    rotationMatrix.at<double>(2,2) = m22;

    return rotationMatrix;
}

// Converts a given string to an integer
int StringToInt ( const std::string &Text )
{
    std::istringstream ss(Text);
    int result;
    return ss >> result ? result : 0;
}

// Converts a given float to a string
std::string FloatToString ( float Number )
{
    std::ostringstream ss;
    ss << Number;
    return ss.str();
}

// Converts a given integer to a string
std::string IntToString ( int Number )
{
    std::ostringstream ss;
    ss << Number;
    return ss.str();
}

void createFeatures(const std::string &featureName, int numKeypoints, cv::Ptr<cv::Feature2D> &detector, cv::Ptr<cv::Feature2D> &descriptor)
{
    if (featureName == "ORB")
    {
        detector = cv::ORB::create(numKeypoints);
        descriptor = cv::ORB::create(numKeypoints);
    }
    else if (featureName == "KAZE")
    {
        detector = cv::KAZE::create();
        descriptor = cv::KAZE::create();
    }
    else if (featureName == "AKAZE")
    {
        detector = cv::AKAZE::create();
        descriptor = cv::AKAZE::create();
    }
    else if (featureName == "BRISK")
    {
        detector = cv::BRISK::create();
        descriptor = cv::BRISK::create();
    }
    else if (featureName == "SIFT")
    {
        detector = cv::SIFT::create();
        descriptor = cv::SIFT::create();
    }
    else if (featureName == "SURF")
    {
#if defined (OPENCV_ENABLE_NONFREE) && defined (HAVE_OPENCV_XFEATURES2D)
        detector = cv::xfeatures2d::SURF::create(100, 4, 3, true);   //extended=true
        descriptor = cv::xfeatures2d::SURF::create(100, 4, 3, true); //extended=true
#else
        std::cout << "xfeatures2d module is not available or nonfree is not enabled." << std::endl;
        std::cout << "Default to ORB." << std::endl;
        detector = cv::ORB::create(numKeypoints);
        descriptor = cv::ORB::create(numKeypoints);
#endif
    }
    else if (featureName == "BINBOOST")
    {
#if defined (HAVE_OPENCV_XFEATURES2D)
        detector = cv::KAZE::create();
        descriptor = cv::xfeatures2d::BoostDesc::create();
#else
        std::cout << "xfeatures2d module is not available." << std::endl;
        std::cout << "Default to ORB." << std::endl;
        detector = cv::ORB::create(numKeypoints);
        descriptor = cv::ORB::create(numKeypoints);
#endif
    }
    else if (featureName == "VGG")
    {
#if defined (HAVE_OPENCV_XFEATURES2D)
        detector = cv::KAZE::create();
        descriptor = cv::xfeatures2d::VGG::create();
#else
        std::cout << "xfeatures2d module is not available." << std::endl;
        std::cout << "Default to ORB." << std::endl;
        detector = cv::ORB::create(numKeypoints);
        descriptor = cv::ORB::create(numKeypoints);
#endif
    }
}

cv::Ptr<cv::DescriptorMatcher> createMatcher(const std::string &featureName, bool useFLANN)
{
    if (featureName == "ORB" || featureName == "BRISK" || featureName == "AKAZE" || featureName == "BINBOOST")
    {
        if (useFLANN)
        {
            cv::Ptr<cv::flann::IndexParams> indexParams = cv::makePtr<cv::flann::LshIndexParams>(6, 12, 1); // instantiate LSH index parameters
            cv::Ptr<cv::flann::SearchParams> searchParams = cv::makePtr<cv::flann::SearchParams>(50);       // instantiate flann search parameters
            return cv::makePtr<cv::FlannBasedMatcher>(indexParams, searchParams);
        }
        else
        {
            return cv::DescriptorMatcher::create("BruteForce-Hamming");
        }
    }
    else
    {
        if (useFLANN)
        {
            return cv::DescriptorMatcher::create("FlannBased");
        }
        else
        {
            return cv::DescriptorMatcher::create("BruteForce");
        }
    }
}
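A brief, self-contained sketch (illustrative only, not part of this commit) exercising the conversion and factory helpers above:

// Illustrative sketch only: Euler <-> rotation round trip and the feature/matcher factories.
#include <iostream>
#include "Utils.h"

int main()
{
    // Euler angles to rotation matrix and back (Y-Z-X Tait-Bryan, radians)
    cv::Mat euler = (cv::Mat_<double>(3,1) << 0.1, 0.2, 0.3);   // bank, attitude, heading
    cv::Mat R = euler2rot(euler);
    std::cout << rot2euler(R) << std::endl;                     // prints approximately [0.1; 0.2; 0.3]

    // Feature and matcher factories used by the detection program
    cv::Ptr<cv::Feature2D> detector, descriptor;
    createFeatures("ORB", 2000, detector, descriptor);
    cv::Ptr<cv::DescriptorMatcher> matcher = createMatcher("ORB", false);   // brute-force Hamming
    return 0;
}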
74
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.h
vendored
Normal file
74
3rdparty/opencv-4.5.4/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.h
vendored
Normal file
@ -0,0 +1,74 @@
/*
 * Utils.h
 *
 *  Created on: Mar 28, 2014
 *      Author: Edgar Riba
 */

#ifndef UTILS_H_
#define UTILS_H_

#include <iostream>

#include <opencv2/features2d.hpp>
#include "PnPProblem.h"

// Draw a text with the question point
void drawQuestion(cv::Mat image, cv::Point3f point, cv::Scalar color);

// Draw a text with the number of entered points
void drawText(cv::Mat image, std::string text, cv::Scalar color);

// Draw a text with the number of entered points
void drawText2(cv::Mat image, std::string text, cv::Scalar color);

// Draw a text with the frame rate
void drawFPS(cv::Mat image, double fps, cv::Scalar color);

// Draw a text with the estimated confidence
void drawConfidence(cv::Mat image, double confidence, cv::Scalar color);

// Draw a text with the number of entered points
void drawCounter(cv::Mat image, int n, int n_max, cv::Scalar color);

// Draw the points and the coordinates
void drawPoints(cv::Mat image, std::vector<cv::Point2f> &list_points_2d, std::vector<cv::Point3f> &list_points_3d, cv::Scalar color);

// Draw only the 2D points
void draw2DPoints(cv::Mat image, std::vector<cv::Point2f> &list_points, cv::Scalar color);

// Draw an arrow into the image
void drawArrow(cv::Mat image, cv::Point2i p, cv::Point2i q, cv::Scalar color, int arrowMagnitude = 9, int thickness=1, int line_type=8, int shift=0);

// Draw the 3D coordinate axes
void draw3DCoordinateAxes(cv::Mat image, const std::vector<cv::Point2f> &list_points2d);

// Draw the object mesh
void drawObjectMesh(cv::Mat image, const Mesh *mesh, PnPProblem *pnpProblem, cv::Scalar color);

// Computes the norm of the translation error
double get_translation_error(const cv::Mat &t_true, const cv::Mat &t);

// Computes the norm of the rotation error
double get_rotation_error(const cv::Mat &R_true, const cv::Mat &R);

// Converts a given Rotation Matrix to Euler angles
cv::Mat rot2euler(const cv::Mat & rotationMatrix);

// Converts a given Euler angles to Rotation Matrix
cv::Mat euler2rot(const cv::Mat & euler);

// Converts a given string to an integer
int StringToInt ( const std::string &Text );

// Converts a given float to a string
std::string FloatToString ( float Number );

// Converts a given integer to a string
std::string IntToString ( int Number );

void createFeatures(const std::string &featureName, int numKeypoints, cv::Ptr<cv::Feature2D> &detector, cv::Ptr<cv::Feature2D> &descriptor);

cv::Ptr<cv::DescriptorMatcher> createMatcher(const std::string &featureName, bool useFLANN);

#endif /* UTILS_H_ */
@ -0,0 +1,501 @@
// C++
#include <iostream>
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/core/utils/filesystem.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/video/tracking.hpp>
// PnP Tutorial
#include "Mesh.h"
#include "Model.h"
#include "PnPProblem.h"
#include "RobustMatcher.h"
#include "ModelRegistration.h"
#include "Utils.h"

/** GLOBAL VARIABLES **/

using namespace cv;
using namespace std;

/** Functions headers **/
void help();
void initKalmanFilter( KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt);
void predictKalmanFilter( KalmanFilter &KF, Mat &translation_predicted, Mat &rotation_predicted );
void updateKalmanFilter( KalmanFilter &KF, Mat &measurements,
                         Mat &translation_estimated, Mat &rotation_estimated );
void fillMeasurements( Mat &measurements,
                       const Mat &translation_measured, const Mat &rotation_measured);

/** Main program **/
int main(int argc, char *argv[])
{
    help();

    const String keys =
            "{help h | | print this message }"
            "{video v | | path to recorded video }"
            "{model | | path to yml model }"
            "{mesh | | path to ply mesh }"
            "{keypoints k |2000 | number of keypoints to detect }"
            "{ratio r |0.7 | threshold for ratio test }"
            "{iterations it |500 | RANSAC maximum iterations count }"
            "{error e |6.0 | RANSAC reprojection error }"
            "{confidence c |0.99 | RANSAC confidence }"
            "{inliers in |30 | minimum inliers for Kalman update }"
            "{method pnp |0 | PnP method: (0) ITERATIVE - (1) EPNP - (2) P3P - (3) DLS - (5) AP3P}"
            "{fast f |true | use of robust fast match }"
            "{feature |ORB | feature name (ORB, KAZE, AKAZE, BRISK, SIFT, SURF, BINBOOST, VGG) }"
            "{FLANN |false | use FLANN library for descriptors matching }"
            "{save | | path to the directory where to save the image results }"
            "{displayFiltered |false | display filtered pose (from Kalman filter) }"
            ;
    CommandLineParser parser(argc, argv, keys);
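
    // An illustrative invocation (paths are examples, not shipped defaults; with no
    // arguments the sample falls back to the bundled Data/ files resolved below):
    //   ./cpp-tutorial-pnp_detection --video=Data/box.mp4 --model=Data/cookies_ORB.yml
    //                                --mesh=Data/box.ply --feature=ORB --FLANN=false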

    string video_read_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.mp4");       // recorded video
    string yml_read_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/cookies_ORB.yml"); // 3dpts + descriptors
    string ply_read_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.ply");         // mesh

    // Intrinsic camera parameters: UVC WEBCAM
    double f = 55;                    // focal length in mm
    double sx = 22.3, sy = 14.9;      // sensor size
    double width = 640, height = 480; // image size

    double params_WEBCAM[] = { width*f/sx,   // fx
                               height*f/sy,  // fy
                               width/2,      // cx
                               height/2};    // cy
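
    // Note: the entries above convert the focal length from millimetres to pixels,
    // fx = width*f/sx and fy = height*f/sy, with the principal point assumed at the
    // image centre. Presumably PnPProblem assembles them into the usual 3x3 intrinsic
    // matrix (a sketch of the assumed layout, not taken from this file):
    //     [ fx   0  cx ]
    //     [  0  fy  cy ]
    //     [  0   0   1 ]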

    // Some basic colors
    Scalar red(0, 0, 255);
    Scalar green(0,255,0);
    Scalar blue(255,0,0);
    Scalar yellow(0,255,255);

    // Robust Matcher parameters
    int numKeyPoints = 2000;  // number of detected keypoints
    float ratioTest = 0.70f;  // ratio test
    bool fast_match = true;   // fastRobustMatch() or robustMatch()

    // RANSAC parameters
    int iterationsCount = 500;      // number of Ransac iterations.
    float reprojectionError = 6.0;  // maximum allowed distance to consider it an inlier.
    double confidence = 0.99;       // RANSAC successful confidence.

    // Kalman Filter parameters
    int minInliersKalman = 30;  // Kalman threshold updating

    // PnP parameters
    int pnpMethod = SOLVEPNP_ITERATIVE;
    string featureName = "ORB";
    bool useFLANN = false;

    // Save results
    string saveDirectory = "";
    Mat frameSave;
    int frameCount = 0;

    bool displayFilteredPose = false;

    if (parser.has("help"))
    {
        parser.printMessage();
        return 0;
    }
    else
    {
        video_read_path = parser.get<string>("video").size() > 0 ? parser.get<string>("video") : video_read_path;
        yml_read_path = parser.get<string>("model").size() > 0 ? parser.get<string>("model") : yml_read_path;
        ply_read_path = parser.get<string>("mesh").size() > 0 ? parser.get<string>("mesh") : ply_read_path;
        numKeyPoints = parser.has("keypoints") ? parser.get<int>("keypoints") : numKeyPoints;
        ratioTest = parser.has("ratio") ? parser.get<float>("ratio") : ratioTest;
        fast_match = parser.has("fast") ? parser.get<bool>("fast") : fast_match;
        iterationsCount = parser.has("iterations") ? parser.get<int>("iterations") : iterationsCount;
        reprojectionError = parser.has("error") ? parser.get<float>("error") : reprojectionError;
        confidence = parser.has("confidence") ? parser.get<float>("confidence") : confidence;
        minInliersKalman = parser.has("inliers") ? parser.get<int>("inliers") : minInliersKalman;
        pnpMethod = parser.has("method") ? parser.get<int>("method") : pnpMethod;
        featureName = parser.has("feature") ? parser.get<string>("feature") : featureName;
        useFLANN = parser.has("FLANN") ? parser.get<bool>("FLANN") : useFLANN;
        saveDirectory = parser.has("save") ? parser.get<string>("save") : saveDirectory;
        displayFilteredPose = parser.has("displayFiltered") ? parser.get<bool>("displayFiltered") : displayFilteredPose;
    }

    std::cout << "Video: " << video_read_path << std::endl;
    std::cout << "Training data: " << yml_read_path << std::endl;
    std::cout << "CAD model: " << ply_read_path << std::endl;
    std::cout << "Ratio test threshold: " << ratioTest << std::endl;
    std::cout << "Fast match (no symmetry test)?: " << fast_match << std::endl;
    std::cout << "RANSAC number of iterations: " << iterationsCount << std::endl;
    std::cout << "RANSAC reprojection error: " << reprojectionError << std::endl;
    std::cout << "RANSAC confidence threshold: " << confidence << std::endl;
    std::cout << "Kalman number of inliers: " << minInliersKalman << std::endl;
    std::cout << "PnP method: " << pnpMethod << std::endl;
    std::cout << "Feature: " << featureName << std::endl;
    std::cout << "Number of keypoints for ORB: " << numKeyPoints << std::endl;
    std::cout << "Use FLANN-based matching? " << useFLANN << std::endl;
    std::cout << "Save directory: " << saveDirectory << std::endl;
    std::cout << "Display filtered pose from Kalman filter? " << displayFilteredPose << std::endl;

    PnPProblem pnp_detection(params_WEBCAM);
    PnPProblem pnp_detection_est(params_WEBCAM);

    Model model;               // instantiate Model object
    model.load(yml_read_path); // load a 3D textured object model

    Mesh mesh;                 // instantiate Mesh object
    mesh.load(ply_read_path);  // load an object mesh

    RobustMatcher rmatcher;    // instantiate RobustMatcher

    Ptr<FeatureDetector> detector, descriptor;
    createFeatures(featureName, numKeyPoints, detector, descriptor);
    rmatcher.setFeatureDetector(detector);                               // set feature detector
    rmatcher.setDescriptorExtractor(descriptor);                         // set descriptor extractor
    rmatcher.setDescriptorMatcher(createMatcher(featureName, useFLANN)); // set matcher
    rmatcher.setRatio(ratioTest);                                        // set ratio test parameter
    if (!model.get_trainingImagePath().empty())
    {
        Mat trainingImg = imread(model.get_trainingImagePath());
        rmatcher.setTrainingImage(trainingImg);
    }

    KalmanFilter KF;        // instantiate Kalman Filter
    int nStates = 18;       // the number of states
    int nMeasurements = 6;  // the number of measured states
    int nInputs = 0;        // the number of control actions
    double dt = 0.125;      // time between measurements (1/FPS)

    initKalmanFilter(KF, nStates, nMeasurements, nInputs, dt);  // init function
    Mat measurements(nMeasurements, 1, CV_64FC1); measurements.setTo(Scalar(0));
    bool good_measurement = false;

    // Get the MODEL INFO
    vector<Point3f> list_points3d_model = model.get_points3d();  // list with model 3D coordinates
    Mat descriptors_model = model.get_descriptors();             // list with descriptors of each 3D coordinate
    vector<KeyPoint> keypoints_model = model.get_keypoints();

    // Create & Open Window
    namedWindow("REAL TIME DEMO", WINDOW_KEEPRATIO);

    VideoCapture cap;           // instantiate VideoCapture
    cap.open(video_read_path);  // open a recorded video

    if(!cap.isOpened())         // check if we succeeded
    {
        cout << "Could not open the camera device" << endl;
        return -1;
    }

    if (!saveDirectory.empty())
    {
        if (!cv::utils::fs::exists(saveDirectory))
        {
            std::cout << "Create directory: " << saveDirectory << std::endl;
            cv::utils::fs::createDirectories(saveDirectory);
        }
    }

    // Measure elapsed time
    TickMeter tm;

    Mat frame, frame_vis, frame_matching;
    while(cap.read(frame) && (char)waitKey(30) != 27) // capture frame until ESC is pressed
    {
        tm.reset();
        tm.start();
        frame_vis = frame.clone();  // refresh visualisation frame

        // -- Step 1: Robust matching between model descriptors and scene descriptors
        vector<DMatch> good_matches;       // to obtain the 3D points of the model
        vector<KeyPoint> keypoints_scene;  // to obtain the 2D points of the scene

        if(fast_match)
        {
            rmatcher.fastRobustMatch(frame, good_matches, keypoints_scene, descriptors_model, keypoints_model);
        }
        else
        {
            rmatcher.robustMatch(frame, good_matches, keypoints_scene, descriptors_model, keypoints_model);
        }

        frame_matching = rmatcher.getImageMatching();
        if (!frame_matching.empty())
        {
            imshow("Keypoints matching", frame_matching);
        }

        // -- Step 2: Find out the 2D/3D correspondences
        vector<Point3f> list_points3d_model_match;  // container for the model 3D coordinates found in the scene
        vector<Point2f> list_points2d_scene_match;  // container for the model 2D coordinates found in the scene

        for(unsigned int match_index = 0; match_index < good_matches.size(); ++match_index)
        {
            Point3f point3d_model = list_points3d_model[ good_matches[match_index].trainIdx ];  // 3D point from model
            Point2f point2d_scene = keypoints_scene[ good_matches[match_index].queryIdx ].pt;   // 2D point from the scene
            list_points3d_model_match.push_back(point3d_model);  // add 3D point
            list_points2d_scene_match.push_back(point2d_scene);  // add 2D point
        }

        // Draw outliers
        draw2DPoints(frame_vis, list_points2d_scene_match, red);

        Mat inliers_idx;
        vector<Point2f> list_points2d_inliers;

        // Instantiate estimated translation and rotation
        good_measurement = false;

        if(good_matches.size() >= 4) // solvePnPRansac requires at least 4 point correspondences
        {
            // -- Step 3: Estimate the pose using RANSAC approach
            pnp_detection.estimatePoseRANSAC( list_points3d_model_match, list_points2d_scene_match,
                                              pnpMethod, inliers_idx,
                                              iterationsCount, reprojectionError, confidence );
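
            // Note: estimatePoseRANSAC() is assumed to wrap cv::solvePnPRansac(); on
            // return, inliers_idx holds one row per correspondence kept by the RANSAC
            // consensus, and those indices are used below to recover the inlier 2D points.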

            // -- Step 4: Catch the inliers keypoints to draw
            for(int inliers_index = 0; inliers_index < inliers_idx.rows; ++inliers_index)
            {
                int n = inliers_idx.at<int>(inliers_index);      // i-inlier
                Point2f point2d = list_points2d_scene_match[n];  // i-inlier point 2D
                list_points2d_inliers.push_back(point2d);        // add i-inlier to list
            }

            // Draw inliers points 2D
            draw2DPoints(frame_vis, list_points2d_inliers, blue);

            // -- Step 5: Kalman Filter

            // GOOD MEASUREMENT
            if( inliers_idx.rows >= minInliersKalman )
            {
                // Get the measured translation
                Mat translation_measured = pnp_detection.get_t_matrix();

                // Get the measured rotation
                Mat rotation_measured = pnp_detection.get_R_matrix();

                // fill the measurements vector
                fillMeasurements(measurements, translation_measured, rotation_measured);
                good_measurement = true;
            }

            // update the Kalman filter with good measurements, otherwise with previous valid measurements
            Mat translation_estimated(3, 1, CV_64FC1);
            Mat rotation_estimated(3, 3, CV_64FC1);
            updateKalmanFilter( KF, measurements,
                                translation_estimated, rotation_estimated);

            // -- Step 6: Set estimated projection matrix
            pnp_detection_est.set_P_matrix(rotation_estimated, translation_estimated);
        }

        // -- Step X: Draw pose and coordinate frame
        float l = 5;
        vector<Point2f> pose_points2d;
        if (!good_measurement || displayFilteredPose)
        {
            drawObjectMesh(frame_vis, &mesh, &pnp_detection_est, yellow); // draw estimated pose

            pose_points2d.push_back(pnp_detection_est.backproject3DPoint(Point3f(0,0,0))); // axis center
            pose_points2d.push_back(pnp_detection_est.backproject3DPoint(Point3f(l,0,0))); // axis x
            pose_points2d.push_back(pnp_detection_est.backproject3DPoint(Point3f(0,l,0))); // axis y
            pose_points2d.push_back(pnp_detection_est.backproject3DPoint(Point3f(0,0,l))); // axis z
            draw3DCoordinateAxes(frame_vis, pose_points2d);                                // draw axes
        }
        else
        {
            drawObjectMesh(frame_vis, &mesh, &pnp_detection, green); // draw current pose

            pose_points2d.push_back(pnp_detection.backproject3DPoint(Point3f(0,0,0))); // axis center
            pose_points2d.push_back(pnp_detection.backproject3DPoint(Point3f(l,0,0))); // axis x
            pose_points2d.push_back(pnp_detection.backproject3DPoint(Point3f(0,l,0))); // axis y
            pose_points2d.push_back(pnp_detection.backproject3DPoint(Point3f(0,0,l))); // axis z
            draw3DCoordinateAxes(frame_vis, pose_points2d);                            // draw axes
        }

        // FRAME RATE
        // see how much time has elapsed
        tm.stop();

        // calculate current FPS
        double fps = 1.0 / tm.getTimeSec();

        drawFPS(frame_vis, fps, yellow); // frame rate
        double detection_ratio = ((double)inliers_idx.rows/(double)good_matches.size())*100;
        drawConfidence(frame_vis, detection_ratio, yellow);

        // -- Step X: Draw some debugging text
        // Draw some debug text
        int inliers_int = inliers_idx.rows;
        int outliers_int = (int)good_matches.size() - inliers_int;
        string inliers_str = IntToString(inliers_int);
        string outliers_str = IntToString(outliers_int);
        string n = IntToString((int)good_matches.size());
        string text = "Found " + inliers_str + " of " + n + " matches";
        string text2 = "Inliers: " + inliers_str + " - Outliers: " + outliers_str;

        drawText(frame_vis, text, green);
        drawText2(frame_vis, text2, red);

        imshow("REAL TIME DEMO", frame_vis);

        if (!saveDirectory.empty())
        {
            const int widthSave = !frame_matching.empty() ? frame_matching.cols : frame_vis.cols;
            const int heightSave = !frame_matching.empty() ? frame_matching.rows + frame_vis.rows : frame_vis.rows;
            frameSave = Mat::zeros(heightSave, widthSave, CV_8UC3);
            if (!frame_matching.empty())
            {
                int startX = (int)((widthSave - frame_vis.cols) / 2.0);
                Mat roi = frameSave(Rect(startX, 0, frame_vis.cols, frame_vis.rows));
                frame_vis.copyTo(roi);

                roi = frameSave(Rect(0, frame_vis.rows, frame_matching.cols, frame_matching.rows));
                frame_matching.copyTo(roi);
            }
            else
            {
                frame_vis.copyTo(frameSave);
            }

            string saveFilename = cv::format(string(saveDirectory + "/image_%04d.png").c_str(), frameCount);
            imwrite(saveFilename, frameSave);
            frameCount++;
        }
    }

    // Close and Destroy Window
    destroyWindow("REAL TIME DEMO");

    cout << "GOODBYE ..." << endl;
}

/**********************************************************************************************************/
void help()
{
    cout
    << "--------------------------------------------------------------------------" << endl
    << "This program shows how to detect an object given its 3D textured model. You can choose to "
    << "use a recorded video or the webcam." << endl
    << "Usage:" << endl
    << "./cpp-tutorial-pnp_detection -help" << endl
    << "Keys:" << endl
    << "'esc' - to quit." << endl
    << "--------------------------------------------------------------------------" << endl
    << endl;
}

/**********************************************************************************************************/
void initKalmanFilter(KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt)
{
    KF.init(nStates, nMeasurements, nInputs, CV_64F); // init Kalman Filter

    setIdentity(KF.processNoiseCov, Scalar::all(1e-5));     // set process noise
    setIdentity(KF.measurementNoiseCov, Scalar::all(1e-2)); // set measurement noise
    setIdentity(KF.errorCovPost, Scalar::all(1));           // error covariance

    /** DYNAMIC MODEL **/

    // [1 0 0 dt 0 0 dt2 0 0 0 0 0 0 0 0 0 0 0]
    // [0 1 0 0 dt 0 0 dt2 0 0 0 0 0 0 0 0 0 0]
    // [0 0 1 0 0 dt 0 0 dt2 0 0 0 0 0 0 0 0 0]
    // [0 0 0 1 0 0 dt 0 0 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 1 0 0 dt 0 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 1 0 0 dt 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 0 0 1 0 0 dt 0 0 dt2 0 0]
    // [0 0 0 0 0 0 0 0 0 0 1 0 0 dt 0 0 dt2 0]
    // [0 0 0 0 0 0 0 0 0 0 0 1 0 0 dt 0 0 dt2]
    // [0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 dt 0 0]
    // [0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 dt 0]
    // [0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 dt]
    // [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0]
    // [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]
    // [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]

    // position
    KF.transitionMatrix.at<double>(0,3) = dt;
    KF.transitionMatrix.at<double>(1,4) = dt;
    KF.transitionMatrix.at<double>(2,5) = dt;
    KF.transitionMatrix.at<double>(3,6) = dt;
    KF.transitionMatrix.at<double>(4,7) = dt;
    KF.transitionMatrix.at<double>(5,8) = dt;
    KF.transitionMatrix.at<double>(0,6) = 0.5*pow(dt,2);
    KF.transitionMatrix.at<double>(1,7) = 0.5*pow(dt,2);
    KF.transitionMatrix.at<double>(2,8) = 0.5*pow(dt,2);

    // orientation
    KF.transitionMatrix.at<double>(9,12) = dt;
    KF.transitionMatrix.at<double>(10,13) = dt;
    KF.transitionMatrix.at<double>(11,14) = dt;
    KF.transitionMatrix.at<double>(12,15) = dt;
    KF.transitionMatrix.at<double>(13,16) = dt;
    KF.transitionMatrix.at<double>(14,17) = dt;
    KF.transitionMatrix.at<double>(9,15) = 0.5*pow(dt,2);
    KF.transitionMatrix.at<double>(10,16) = 0.5*pow(dt,2);
    KF.transitionMatrix.at<double>(11,17) = 0.5*pow(dt,2);
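
    // Note: the entries above implement a constant-acceleration model, applied
    // independently to the translation (states 0-8) and the Euler angles (states 9-17):
    //   p_k = p_{k-1} + v_{k-1}*dt + 0.5*a_{k-1}*dt^2
    //   v_k = v_{k-1} + a_{k-1}*dt
    //   a_k = a_{k-1}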


    /** MEASUREMENT MODEL **/

    // [1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
    // [0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
    // [0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0]
    // [0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0]

    KF.measurementMatrix.at<double>(0,0) = 1;  // x
    KF.measurementMatrix.at<double>(1,1) = 1;  // y
    KF.measurementMatrix.at<double>(2,2) = 1;  // z
    KF.measurementMatrix.at<double>(3,9) = 1;  // roll
    KF.measurementMatrix.at<double>(4,10) = 1; // pitch
    KF.measurementMatrix.at<double>(5,11) = 1; // yaw
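
    // Note: with this H the 6x1 measurement vector is z = [x y z roll pitch yaw]^T,
    // i.e. the translation and Euler angles taken directly from the PnP pose (see
    // fillMeasurements() below); velocities and accelerations are never measured.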
}

/**********************************************************************************************************/
void updateKalmanFilter( KalmanFilter &KF, Mat &measurement,
                         Mat &translation_estimated, Mat &rotation_estimated )
{
    // First predict, to update the internal statePre variable
    Mat prediction = KF.predict();

    // The "correct" phase that is going to use the predicted value and our measurement
    Mat estimated = KF.correct(measurement);

    // Estimated translation
    translation_estimated.at<double>(0) = estimated.at<double>(0);
    translation_estimated.at<double>(1) = estimated.at<double>(1);
    translation_estimated.at<double>(2) = estimated.at<double>(2);

    // Estimated Euler angles
    Mat eulers_estimated(3, 1, CV_64F);
    eulers_estimated.at<double>(0) = estimated.at<double>(9);
    eulers_estimated.at<double>(1) = estimated.at<double>(10);
    eulers_estimated.at<double>(2) = estimated.at<double>(11);

    // Convert estimated Euler angles to rotation matrix
    rotation_estimated = euler2rot(eulers_estimated);
}

/**********************************************************************************************************/
void fillMeasurements( Mat &measurements,
                       const Mat &translation_measured, const Mat &rotation_measured)
{
    // Convert rotation matrix to Euler angles
    Mat measured_eulers(3, 1, CV_64F);
    measured_eulers = rot2euler(rotation_measured);

    // Set measurement to predict
    measurements.at<double>(0) = translation_measured.at<double>(0); // x
    measurements.at<double>(1) = translation_measured.at<double>(1); // y
    measurements.at<double>(2) = translation_measured.at<double>(2); // z
    measurements.at<double>(3) = measured_eulers.at<double>(0);      // roll
    measurements.at<double>(4) = measured_eulers.at<double>(1);      // pitch
    measurements.at<double>(5) = measured_eulers.at<double>(2);      // yaw
}
@ -0,0 +1,293 @@
// C++
#include <iostream>
// OpenCV
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/features2d.hpp>
// PnP Tutorial
#include "Mesh.h"
#include "Model.h"
#include "PnPProblem.h"
#include "RobustMatcher.h"
#include "ModelRegistration.h"
#include "Utils.h"

using namespace cv;
using namespace std;

/** GLOBAL VARIABLES **/

// Boolean to know if the registration is done
bool end_registration = false;

// Intrinsic camera parameters: CANON
const double f = 45; // focal length in mm
const double sx = 22.3, sy = 14.9;
const double width = 2592, height = 1944;
const double params_CANON[] = { width*f/sx,   // fx
                                height*f/sy,  // fy
                                width/2,      // cx
                                height/2};    // cy

// Setup the points to register in the image
// In the order of the *.ply file and starting at 1
const int n = 8;
const int pts[] = {1, 2, 3, 4, 5, 6, 7, 8}; // 3 -> 4
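
// Note: each mouse click registers the next entry of pts[]: the clicked pixel is
// paired with mesh vertex pts[n_regist] (1-based index into the *.ply file), as done
// in onMouseModelRegistration() below.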

/*
 * CREATE MODEL REGISTRATION OBJECT
 * CREATE OBJECT MESH
 * CREATE OBJECT MODEL
 * CREATE PNP OBJECT
 */
ModelRegistration registration;
Model model;
Mesh mesh;
PnPProblem pnp_registration(params_CANON);

/**********************************************************************************************************/
static void help()
{
    cout
    << "--------------------------------------------------------------------------" << endl
    << "This program shows how to create your 3D textured model. " << endl
    << "Usage:" << endl
    << "./cpp-tutorial-pnp_registration" << endl
    << "--------------------------------------------------------------------------" << endl
    << endl;
}

// Mouse events for model registration
static void onMouseModelRegistration( int event, int x, int y, int, void* )
{
    if ( event == EVENT_LBUTTONUP )
    {
        bool is_registrable = registration.is_registrable();
        if (is_registrable)
        {
            int n_regist = registration.getNumRegist();
            int n_vertex = pts[n_regist];

            Point2f point_2d = Point2f((float)x,(float)y);
            Point3f point_3d = mesh.getVertex(n_vertex-1);

            registration.registerPoint(point_2d, point_3d);
            if( registration.getNumRegist() == registration.getNumMax() ) end_registration = true;
        }
    }
}

/** Main program **/
int main(int argc, char *argv[])
{
    help();

    const String keys =
            "{help h | | print this message }"
            "{image i | | path to input image }"
            "{model | | path to output yml model }"
            "{mesh | | path to ply mesh }"
            "{keypoints k |2000 | number of keypoints to detect (only for ORB) }"
            "{feature |ORB | feature name (ORB, KAZE, AKAZE, BRISK, SIFT, SURF, BINBOOST, VGG) }"
            ;
    CommandLineParser parser(argc, argv, keys);
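
    // An illustrative invocation (paths are examples, not shipped defaults; with no
    // arguments the sample falls back to the bundled Data/ files resolved below):
    //   ./cpp-tutorial-pnp_registration --image=Data/resized_IMG_3875.JPG
    //                                   --model=Data/cookies_ORB.yml --mesh=Data/box.ply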

    string img_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/resized_IMG_3875.JPG"); // image to register
    string ply_read_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/box.ply");         // object mesh
    string write_path = samples::findFile("samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/Data/cookies_ORB.yml");    // output file
    int numKeyPoints = 2000;
    string featureName = "ORB";

    if (parser.has("help"))
    {
        parser.printMessage();
        return 0;
    }
    else
    {
        img_path = parser.get<string>("image").size() > 0 ? parser.get<string>("image") : img_path;
        ply_read_path = parser.get<string>("mesh").size() > 0 ? parser.get<string>("mesh") : ply_read_path;
        write_path = parser.get<string>("model").size() > 0 ? parser.get<string>("model") : write_path;
        numKeyPoints = parser.has("keypoints") ? parser.get<int>("keypoints") : numKeyPoints;
        featureName = parser.has("feature") ? parser.get<string>("feature") : featureName;
    }

    std::cout << "Input image: " << img_path << std::endl;
    std::cout << "CAD model: " << ply_read_path << std::endl;
    std::cout << "Output training file: " << write_path << std::endl;
    std::cout << "Feature: " << featureName << std::endl;
    std::cout << "Number of keypoints for ORB: " << numKeyPoints << std::endl;

    // load a mesh given the *.ply file path
    mesh.load(ply_read_path);

    // Instantiate robust matcher: detector, extractor, matcher
    RobustMatcher rmatcher;
    Ptr<Feature2D> detector, descriptor;
    createFeatures(featureName, numKeyPoints, detector, descriptor);
    rmatcher.setFeatureDetector(detector);
    rmatcher.setDescriptorExtractor(descriptor);


    /** GROUND TRUTH OF THE FIRST IMAGE **/

    // Create & Open Window
    namedWindow("MODEL REGISTRATION", WINDOW_KEEPRATIO);

    // Set up the mouse events
    setMouseCallback("MODEL REGISTRATION", onMouseModelRegistration, 0);

    // Open the image to register
    Mat img_in = imread(img_path, IMREAD_COLOR);
    Mat img_vis;

    if (img_in.empty()) {
        cout << "Could not open or find the image" << endl;
        return -1;
    }

    // Set the number of points to register
    int num_registrations = n;
    registration.setNumMax(num_registrations);

    cout << "Click the box corners ..." << endl;
    cout << "Waiting ..." << endl;

    // Some basic colors
    const Scalar red(0, 0, 255);
    const Scalar green(0,255,0);
    const Scalar blue(255,0,0);
    const Scalar yellow(0,255,255);

    // Loop until all the points are registered
    while ( waitKey(30) < 0 )
    {
        // Refresh debug image
        img_vis = img_in.clone();

        // Current registered points
        vector<Point2f> list_points2d = registration.get_points2d();
        vector<Point3f> list_points3d = registration.get_points3d();

        // Draw current registered points
        drawPoints(img_vis, list_points2d, list_points3d, red);

        // If the registration is not finished, draw which 3D point we have to register.
        // If the registration is finished, break the loop.
        if (!end_registration)
        {
            // Draw debug text
            int n_regist = registration.getNumRegist();
            int n_vertex = pts[n_regist];
            Point3f current_point3d = mesh.getVertex(n_vertex-1);

            drawQuestion(img_vis, current_point3d, green);
            drawCounter(img_vis, registration.getNumRegist(), registration.getNumMax(), red);
        }
        else
        {
            // Draw debug text
            drawText(img_vis, "END REGISTRATION", green);
            drawCounter(img_vis, registration.getNumRegist(), registration.getNumMax(), green);
            break;
        }

        // Show the image
        imshow("MODEL REGISTRATION", img_vis);
    }

    /** COMPUTE CAMERA POSE **/

    cout << "COMPUTING POSE ..." << endl;

    // The list of registered points
    vector<Point2f> list_points2d = registration.get_points2d();
    vector<Point3f> list_points3d = registration.get_points3d();

    // Estimate pose given the registered points
    bool is_correspondence = pnp_registration.estimatePose(list_points3d, list_points2d, SOLVEPNP_ITERATIVE);
    if ( is_correspondence )
    {
        cout << "Correspondence found" << endl;

        // Compute all the 2D points of the mesh to verify the algorithm and draw it
        vector<Point2f> list_points2d_mesh = pnp_registration.verify_points(&mesh);
        draw2DPoints(img_vis, list_points2d_mesh, green);
    } else {
        cout << "Correspondence not found" << endl << endl;
    }

    // Show the image
    imshow("MODEL REGISTRATION", img_vis);

    // Show image until ESC pressed
    waitKey(0);


    /** COMPUTE 3D of the image Keypoints **/

    // Containers for keypoints and descriptors of the model
    vector<KeyPoint> keypoints_model;
    Mat descriptors;

    // Compute keypoints and descriptors
    rmatcher.computeKeyPoints(img_in, keypoints_model);
    rmatcher.computeDescriptors(img_in, keypoints_model, descriptors);

    // Check if keypoints are on the surface of the registration image and add to the model
    for (unsigned int i = 0; i < keypoints_model.size(); ++i) {
        Point2f point2d(keypoints_model[i].pt);
        Point3f point3d;
        bool on_surface = pnp_registration.backproject2DPoint(&mesh, point2d, point3d);
        if (on_surface)
        {
            model.add_correspondence(point2d, point3d);
            model.add_descriptor(descriptors.row(i));
            model.add_keypoint(keypoints_model[i]);
        }
        else
        {
            model.add_outlier(point2d);
        }
    }

    model.set_trainingImagePath(img_path);
    // save the model into a *.yaml file
    model.save(write_path);

    // Output image
    img_vis = img_in.clone();

    // The list of the points2d of the model
    vector<Point2f> list_points_in = model.get_points2d_in();
    vector<Point2f> list_points_out = model.get_points2d_out();

    // Draw some debug text
    string num = IntToString((int)list_points_in.size());
    string text = "There are " + num + " inliers";
    drawText(img_vis, text, green);

    // Draw some debug text
    num = IntToString((int)list_points_out.size());
    text = "There are " + num + " outliers";
    drawText2(img_vis, text, red);

    // Draw the object mesh
    drawObjectMesh(img_vis, &mesh, &pnp_registration, blue);

    // Draw the found keypoints depending on whether they lie on the object surface
    draw2DPoints(img_vis, list_points_in, green);
    draw2DPoints(img_vis, list_points_out, red);

    // Show the image
    imshow("MODEL REGISTRATION", img_vis);

    // Wait until ESC pressed
    waitKey(0);

    // Close and Destroy Window
    destroyWindow("MODEL REGISTRATION");

    cout << "GOODBYE" << endl;
}