feat: switch the backend to PaddleOCR-NCNN and the project to CMake

1. The project backend has been fully migrated to the PaddleOCR-NCNN algorithm and has passed basic compatibility tests (a rough usage sketch follows the commit metadata below).
2. The project is now organized with CMake; to better accommodate third-party libraries, a QMake project will no longer be provided.
3. Reorganized the rights/license declaration files and the code layout to minimize the risk of infringement.

Log: switch the backend to PaddleOCR-NCNN and the project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
Author: wangzhengyang
Date: 2022-05-10 09:54:44 +08:00
Parent: ecdd171c6f
Commit: 718c41634f
10018 changed files with 3593797 additions and 186748 deletions
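
Illustration only (none of the code below appears in this diff): item 1 replaces the previous OCR backend with PaddleOCR models executed through ncnn. A minimal sketch of a text-detection call through ncnn's C++ API is shown here; the model file names, the blob names "input"/"output", and the normalization constants are placeholders that depend on how the PaddleOCR model was exported, and real code would also resize the input to a multiple of 32 and post-process the probability map into text boxes.

// Hedged sketch: drive a converted PaddleOCR detection model through ncnn.
// File names, blob names and normalization constants below are assumptions.
#include <opencv2/imgcodecs.hpp>
#include <net.h>  // ncnn

int main()
{
    cv::Mat bgr = cv::imread("sample.jpg");  // placeholder input image
    if (bgr.empty())
        return 1;

    ncnn::Net det;
    det.load_param("ppocr_det.param");  // hypothetical converted PaddleOCR detection model
    det.load_model("ppocr_det.bin");

    // Wrap the BGR pixels in an ncnn::Mat and apply mean/std normalization.
    ncnn::Mat in = ncnn::Mat::from_pixels(bgr.data, ncnn::Mat::PIXEL_BGR, bgr.cols, bgr.rows);
    const float mean_vals[3] = {123.675f, 116.28f, 103.53f};
    const float norm_vals[3] = {1 / 58.395f, 1 / 57.12f, 1 / 57.375f};
    in.substract_mean_normalize(mean_vals, norm_vals);

    ncnn::Extractor ex = det.create_extractor();
    ex.input("input", in);       // blob name depends on the exported model
    ncnn::Mat prob;
    ex.extract("output", prob);  // probability map; post-processing yields text boxes
    return 0;
}

The CMake switch in item 2 mainly exists so that libraries like ncnn and OpenCV can be consumed as ordinary CMake packages instead of hand-maintained QMake include and library paths.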


@@ -0,0 +1,147 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2014, Itseez, Inc, all rights reserved.
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
namespace opencv_test {
using namespace perf;
namespace ocl {
#define SURF_MATCH_CONFIDENCE 0.65f
#define ORB_MATCH_CONFIDENCE 0.3f
#define WORK_MEGAPIX 0.6
typedef TestBaseWithParam<string> stitch;
#if defined(HAVE_OPENCV_XFEATURES2D) && defined(OPENCV_ENABLE_NONFREE)
#define TEST_DETECTORS testing::Values("surf", "orb", "akaze")
#else
#define TEST_DETECTORS testing::Values("orb", "akaze")
#endif
OCL_PERF_TEST_P(stitch, a123, TEST_DETECTORS)
{
UMat pano;
vector<Mat> _imgs;
_imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
_imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
_imgs.push_back( imread( getDataPath("stitching/a3.png") ) );
vector<UMat> imgs = ToUMat(_imgs);
Ptr<Feature2D> featuresFinder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
: makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
declare.iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setFeaturesFinder(featuresFinder);
stitcher->setFeaturesMatcher(featuresMatcher);
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1182, 50);
EXPECT_NEAR(pano.size().height, 682, 30);
SANITY_CHECK_NOTHING();
}
OCL_PERF_TEST_P(stitch, b12, TEST_DETECTORS)
{
UMat pano;
vector<Mat> imgs;
imgs.push_back( imread( getDataPath("stitching/b1.png") ) );
imgs.push_back( imread( getDataPath("stitching/b2.png") ) );
Ptr<Feature2D> featuresFinder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
: makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
declare.iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setFeaturesFinder(featuresFinder);
stitcher->setFeaturesMatcher(featuresMatcher);
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1124, GetParam() == "surf" ? 100 : 50);
EXPECT_NEAR(pano.size().height, 644, GetParam() == "surf" ? 60 : 30);
SANITY_CHECK_NOTHING();
}
OCL_PERF_TEST_P(stitch, boat, TEST_DETECTORS)
{
Size expected_dst_size(10789, 2663);
checkDeviceMaxMemoryAllocSize(expected_dst_size, CV_16SC3, 4);
#if defined(_WIN32) && !defined(_WIN64)
if (cv::ocl::useOpenCL())
throw ::perf::TestBase::PerfSkipTestException();
#endif
UMat pano;
vector<Mat> _imgs;
_imgs.push_back( imread( getDataPath("stitching/boat1.jpg") ) );
_imgs.push_back( imread( getDataPath("stitching/boat2.jpg") ) );
_imgs.push_back( imread( getDataPath("stitching/boat3.jpg") ) );
_imgs.push_back( imread( getDataPath("stitching/boat4.jpg") ) );
_imgs.push_back( imread( getDataPath("stitching/boat5.jpg") ) );
_imgs.push_back( imread( getDataPath("stitching/boat6.jpg") ) );
vector<UMat> imgs = ToUMat(_imgs);
Ptr<Feature2D> featuresFinder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
: makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
declare.iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setFeaturesFinder(featuresFinder);
stitcher->setFeaturesMatcher(featuresMatcher);
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, expected_dst_size.width, 200);
EXPECT_NEAR(pano.size().height, expected_dst_size.height, 100);
SANITY_CHECK_NOTHING();
}
} } // namespace opencv_test::ocl
#endif // HAVE_OPENCL


@@ -0,0 +1,162 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2010-2013, Advanced Micro Devices, Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the OpenCV Foundation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "../perf_precomp.hpp"
#include "opencv2/stitching/warpers.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
namespace opencv_test {
namespace ocl {
///////////////////////// Stitching Warpers ///////////////////////////
enum
{
SphericalWarperType = 0,
CylindricalWarperType = 1,
PlaneWarperType = 2,
AffineWarperType = 3,
};
class WarperBase
{
public:
explicit WarperBase(int type, Size srcSize)
{
Ptr<WarperCreator> creator;
if (type == SphericalWarperType)
creator = makePtr<SphericalWarper>();
else if (type == CylindricalWarperType)
creator = makePtr<CylindricalWarper>();
else if (type == PlaneWarperType)
creator = makePtr<PlaneWarper>();
else if (type == AffineWarperType)
creator = makePtr<AffineWarper>();
CV_Assert(!creator.empty());
K = Mat::eye(3, 3, CV_32FC1);
K.at<float>(0,0) = (float)srcSize.width;
K.at<float>(0,2) = (float)srcSize.width/2;
K.at<float>(1,1) = (float)srcSize.height;
K.at<float>(1,2) = (float)srcSize.height/2;
K.at<float>(2,2) = 1.0f;
R = Mat::eye(3, 3, CV_32FC1);
float scale = (float)srcSize.width;
warper = creator->create(scale);
}
Rect buildMaps(Size src_size, OutputArray xmap, OutputArray ymap) const
{
return warper->buildMaps(src_size, K, R, xmap, ymap);
}
Point warp(InputArray src, int interp_mode, int border_mode, OutputArray dst) const
{
return warper->warp(src, K, R, interp_mode, border_mode, dst);
}
private:
Ptr<detail::RotationWarper> warper;
Mat K, R;
};
CV_ENUM(WarperType, SphericalWarperType, CylindricalWarperType, PlaneWarperType, AffineWarperType)
typedef tuple<Size, WarperType> StitchingWarpersParams;
typedef TestBaseWithParam<StitchingWarpersParams> StitchingWarpersFixture;
static void prepareWarperSrc(InputOutputArray src, Size srcSize)
{
src.create(srcSize, CV_8UC1);
src.setTo(Scalar::all(64));
ellipse(src, Point(srcSize.width/2, srcSize.height/2), Size(srcSize.width/2, srcSize.height/2),
360, 0, 360, Scalar::all(255), 2);
ellipse(src, Point(srcSize.width/2, srcSize.height/2), Size(srcSize.width/3, srcSize.height/3),
360, 0, 360, Scalar::all(128), 2);
rectangle(src, Point(10, 10), Point(srcSize.width - 10, srcSize.height - 10), Scalar::all(128), 2);
}
OCL_PERF_TEST_P(StitchingWarpersFixture, StitchingWarpers_BuildMaps,
::testing::Combine(OCL_TEST_SIZES, WarperType::all()))
{
const StitchingWarpersParams params = GetParam();
const Size srcSize = get<0>(params);
const WarperBase warper(get<1>(params), srcSize);
UMat xmap, ymap;
OCL_TEST_CYCLE() warper.buildMaps(srcSize, xmap, ymap);
SANITY_CHECK(xmap, 1e-3);
SANITY_CHECK(ymap, 1e-3);
}
OCL_PERF_TEST_P(StitchingWarpersFixture, StitchingWarpers_Warp,
::testing::Combine(OCL_TEST_SIZES, WarperType::all()))
{
const StitchingWarpersParams params = GetParam();
const Size srcSize = get<0>(params);
const WarperBase warper(get<1>(params), srcSize);
UMat src, dst;
prepareWarperSrc(src, srcSize);
declare.in(src, WARMUP_READ);
OCL_TEST_CYCLE() warper.warp(src, INTER_LINEAR, BORDER_REPLICATE, dst);
#if 0
namedWindow("src", WINDOW_NORMAL);
namedWindow("dst", WINDOW_NORMAL);
imshow("src", src);
imshow("dst", dst);
std::cout << dst.size() << " " << dst.size().area() << std::endl;
cv::waitKey();
#endif
SANITY_CHECK(dst, 1e-5);
}
} } // namespace opencv_test::ocl
#endif // HAVE_OPENCL


@@ -0,0 +1,96 @@
#include "perf_precomp.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/opencv_modules.hpp"
namespace opencv_test
{
using namespace perf;
typedef TestBaseWithParam<tuple<string, string> > bundleAdjuster;
#if defined(HAVE_OPENCV_XFEATURES2D) && defined(OPENCV_ENABLE_NONFREE)
#define TEST_DETECTORS testing::Values("surf", "orb")
#else
#define TEST_DETECTORS testing::Values<string>("orb")
#endif
#define WORK_MEGAPIX 0.6
#define AFFINE_FUNCTIONS testing::Values("affinePartial", "affine")
PERF_TEST_P(bundleAdjuster, affine, testing::Combine(TEST_DETECTORS, AFFINE_FUNCTIONS))
{
Mat img1, img1_full = imread(getDataPath("stitching/s1.jpg"));
Mat img2, img2_full = imread(getDataPath("stitching/s2.jpg"));
float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);
string detector = get<0>(GetParam());
string affine_fun = get<1>(GetParam());
Ptr<Feature2D> finder = getFeatureFinder(detector);
Ptr<detail::FeaturesMatcher> matcher;
Ptr<detail::BundleAdjusterBase> bundle_adjuster;
if (affine_fun == "affinePartial")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false);
bundle_adjuster = makePtr<detail::BundleAdjusterAffinePartial>();
}
else if (affine_fun == "affine")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(true);
bundle_adjuster = makePtr<detail::BundleAdjusterAffine>();
}
Ptr<detail::Estimator> estimator = makePtr<detail::AffineBasedEstimator>();
std::vector<Mat> images;
images.push_back(img1), images.push_back(img2);
std::vector<detail::ImageFeatures> features;
std::vector<detail::MatchesInfo> pairwise_matches;
std::vector<detail::CameraParams> cameras;
std::vector<detail::CameraParams> cameras2;
computeImageFeatures(finder, images, features);
(*matcher)(features, pairwise_matches);
if (!(*estimator)(features, pairwise_matches, cameras))
FAIL() << "estimation failed. this should never happen.";
// this is currently required
for (size_t i = 0; i < cameras.size(); ++i)
{
Mat R;
cameras[i].R.convertTo(R, CV_32F);
cameras[i].R = R;
}
cameras2 = cameras;
bool success = true;
while(next())
{
cameras = cameras2; // revert cameras back to original initial guess
startTimer();
success = (*bundle_adjuster)(features, pairwise_matches, cameras);
stopTimer();
}
EXPECT_TRUE(success);
EXPECT_TRUE(cameras.size() == 2);
// first camera should be just identity
Mat &first = cameras[0].R;
SANITY_CHECK(first, 1e-3, ERROR_ABSOLUTE);
// second camera should be the estimated transform between images
// separate rotation and translation in transform matrix
Mat T_second (cameras[1].R, Range(0, 2), Range(2, 3));
Mat R_second (cameras[1].R, Range(0, 2), Range(0, 2));
Mat h (cameras[1].R, Range(2, 3), Range::all());
SANITY_CHECK(T_second, 5, ERROR_ABSOLUTE); // allow 5 pixels diff in translations
SANITY_CHECK(R_second, .01, ERROR_ABSOLUTE); // rotations must be more precise
// last row should be precisely (0, 0, 1) as it is just added for representation in homogeneous
// coordinates
EXPECT_TRUE(h.type() == CV_32F);
EXPECT_FLOAT_EQ(h.at<float>(0), 0.f);
EXPECT_FLOAT_EQ(h.at<float>(1), 0.f);
EXPECT_FLOAT_EQ(h.at<float>(2), 1.f);
}
} // namespace


@@ -0,0 +1,7 @@
#include "perf_precomp.hpp"
#if defined(HAVE_HPX)
#include <hpx/hpx_main.hpp>
#endif
CV_PERF_TEST_MAIN(stitching)


@@ -0,0 +1,293 @@
#include "perf_precomp.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/opencv_modules.hpp"
#include "opencv2/flann.hpp"
namespace opencv_test
{
using namespace perf;
typedef TestBaseWithParam<size_t> FeaturesFinderVec;
typedef TestBaseWithParam<string> match;
typedef tuple<string, int> matchVector_t;
typedef TestBaseWithParam<matchVector_t> matchVector;
#define NUMBER_IMAGES testing::Values(1, 5, 20)
#define SURF_MATCH_CONFIDENCE 0.65f
#define ORB_MATCH_CONFIDENCE 0.3f
#define WORK_MEGAPIX 0.6
#if defined(HAVE_OPENCV_XFEATURES2D) && defined(OPENCV_ENABLE_NONFREE)
#define TEST_DETECTORS testing::Values("surf", "orb")
#else
#define TEST_DETECTORS testing::Values<string>("orb")
#endif
PERF_TEST_P(FeaturesFinderVec, ParallelFeaturesFinder, NUMBER_IMAGES)
{
Mat img = imread( getDataPath("stitching/a1.png") );
vector<Mat> imgs(GetParam(), img);
vector<detail::ImageFeatures> features(imgs.size());
Ptr<Feature2D> finder = ORB::create();
TEST_CYCLE()
{
detail::computeImageFeatures(finder, imgs, features);
}
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(FeaturesFinderVec, SerialFeaturesFinder, NUMBER_IMAGES)
{
Mat img = imread( getDataPath("stitching/a1.png") );
vector<Mat> imgs(GetParam(), img);
vector<detail::ImageFeatures> features(imgs.size());
Ptr<Feature2D> finder = ORB::create();
TEST_CYCLE()
{
for (size_t i = 0; i < imgs.size(); ++i)
detail::computeImageFeatures(finder, imgs[i], features[i]);
}
SANITY_CHECK_NOTHING();
}
PERF_TEST_P( match, bestOf2Nearest, TEST_DETECTORS)
{
Mat img1, img1_full = imread( getDataPath("stitching/boat1.jpg") );
Mat img2, img2_full = imread( getDataPath("stitching/boat2.jpg") );
float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);
Ptr<Feature2D> finder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> matcher;
if (GetParam() == "surf")
{
matcher = makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
}
else if (GetParam() == "orb")
{
matcher = makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE);
}
else
{
FAIL() << "Unknown 2D features type: " << GetParam();
}
detail::ImageFeatures features1, features2;
detail::computeImageFeatures(finder, img1, features1);
detail::computeImageFeatures(finder, img2, features2);
detail::MatchesInfo pairwise_matches;
declare.in(features1.descriptors, features2.descriptors);
while(next())
{
cvflann::seed_random(42); // for a predictable (deterministic) FlannBasedMatcher
startTimer();
(*matcher)(features1, features2, pairwise_matches);
stopTimer();
matcher->collectGarbage();
}
Mat dist (pairwise_matches.H, Range::all(), Range(2, 3));
Mat R (pairwise_matches.H, Range::all(), Range(0, 2));
// separate transform matrix, use lower error on rotations
SANITY_CHECK(dist, 3., ERROR_ABSOLUTE);
SANITY_CHECK(R, .06, ERROR_ABSOLUTE);
}
PERF_TEST_P( matchVector, bestOf2NearestVectorFeatures, testing::Combine(
TEST_DETECTORS,
testing::Values(2, 4, 8))
)
{
Mat img1, img1_full = imread( getDataPath("stitching/boat1.jpg") );
Mat img2, img2_full = imread( getDataPath("stitching/boat2.jpg") );
float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);
string detectorName = get<0>(GetParam());
int featuresVectorSize = get<1>(GetParam());
Ptr<Feature2D> finder = getFeatureFinder(detectorName);
Ptr<detail::FeaturesMatcher> matcher;
if (detectorName == "surf")
{
matcher = makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
}
else if (detectorName == "orb")
{
matcher = makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE);
}
else
{
FAIL() << "Unknown 2D features type: " << get<0>(GetParam());
}
detail::ImageFeatures features1, features2;
detail::computeImageFeatures(finder, img1, features1);
detail::computeImageFeatures(finder, img2, features2);
vector<detail::ImageFeatures> features;
vector<detail::MatchesInfo> pairwise_matches;
for(int i = 0; i < featuresVectorSize/2; i++)
{
features.push_back(features1);
features.push_back(features2);
}
declare.time(200);
while(next())
{
cvflann::seed_random(42); // for a predictable (deterministic) FlannBasedMatcher
startTimer();
(*matcher)(features, pairwise_matches);
stopTimer();
matcher->collectGarbage();
}
size_t matches_count = 0;
for (size_t i = 0; i < pairwise_matches.size(); ++i)
{
if (pairwise_matches[i].src_img_idx < 0)
continue;
EXPECT_GT(pairwise_matches[i].matches.size(), 95u);
EXPECT_FALSE(pairwise_matches[i].H.empty());
++matches_count;
}
EXPECT_GT(matches_count, 0u);
SANITY_CHECK_NOTHING();
}
PERF_TEST_P( match, affineBestOf2Nearest, TEST_DETECTORS)
{
Mat img1, img1_full = imread( getDataPath("stitching/s1.jpg") );
Mat img2, img2_full = imread( getDataPath("stitching/s2.jpg") );
float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);
Ptr<Feature2D> finder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> matcher;
if (GetParam() == "surf")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false, false, SURF_MATCH_CONFIDENCE);
}
else if (GetParam() == "orb")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false, false, ORB_MATCH_CONFIDENCE);
}
else
{
FAIL() << "Unknown 2D features type: " << GetParam();
}
detail::ImageFeatures features1, features2;
detail::computeImageFeatures(finder, img1, features1);
detail::computeImageFeatures(finder, img2, features2);
detail::MatchesInfo pairwise_matches;
declare.in(features1.descriptors, features2.descriptors);
while(next())
{
cvflann::seed_random(42); // for a predictable (deterministic) FlannBasedMatcher
startTimer();
(*matcher)(features1, features2, pairwise_matches);
stopTimer();
matcher->collectGarbage();
}
// separate rotation and translation in transform matrix
Mat T (pairwise_matches.H, Range(0, 2), Range(2, 3));
Mat R (pairwise_matches.H, Range(0, 2), Range(0, 2));
Mat h (pairwise_matches.H, Range(2, 3), Range::all());
SANITY_CHECK(T, 5, ERROR_ABSOLUTE); // allow 5 pixels diff in translations
SANITY_CHECK(R, .01, ERROR_ABSOLUTE); // rotations must be more precise
// last row should be precisely (0, 0, 1) as it is just added for representation in homogeneous
// coordinates
EXPECT_DOUBLE_EQ(h.at<double>(0), 0.);
EXPECT_DOUBLE_EQ(h.at<double>(1), 0.);
EXPECT_DOUBLE_EQ(h.at<double>(2), 1.);
}
PERF_TEST_P( matchVector, affineBestOf2NearestVectorFeatures, testing::Combine(
TEST_DETECTORS,
testing::Values(2, 4, 8))
)
{
Mat img1, img1_full = imread( getDataPath("stitching/s1.jpg") );
Mat img2, img2_full = imread( getDataPath("stitching/s2.jpg") );
float scale1 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img1_full.total()));
float scale2 = (float)std::min(1.0, sqrt(WORK_MEGAPIX * 1e6 / img2_full.total()));
resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT);
resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT);
string detectorName = get<0>(GetParam());
int featuresVectorSize = get<1>(GetParam());
Ptr<Feature2D> finder = getFeatureFinder(detectorName);
Ptr<detail::FeaturesMatcher> matcher;
if (detectorName == "surf")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false, false, SURF_MATCH_CONFIDENCE);
}
else if (detectorName == "orb")
{
matcher = makePtr<detail::AffineBestOf2NearestMatcher>(false, false, ORB_MATCH_CONFIDENCE);
}
else
{
FAIL() << "Unknown 2D features type: " << get<0>(GetParam());
}
detail::ImageFeatures features1, features2;
detail::computeImageFeatures(finder, img1, features1);
detail::computeImageFeatures(finder, img2, features2);
vector<detail::ImageFeatures> features;
vector<detail::MatchesInfo> pairwise_matches;
for(int i = 0; i < featuresVectorSize/2; i++)
{
features.push_back(features1);
features.push_back(features2);
}
declare.time(200);
while(next())
{
cvflann::seed_random(42); // for a predictable (deterministic) FlannBasedMatcher
startTimer();
(*matcher)(features, pairwise_matches);
stopTimer();
matcher->collectGarbage();
}
size_t matches_count = 0;
for (size_t i = 0; i < pairwise_matches.size(); ++i)
{
if (pairwise_matches[i].src_img_idx < 0)
continue;
EXPECT_GT(pairwise_matches[i].matches.size(), 150u);
EXPECT_FALSE(pairwise_matches[i].H.empty());
++matches_count;
}
EXPECT_GT(matches_count, 0u);
SANITY_CHECK_NOTHING();
}
} // namespace


@@ -0,0 +1,30 @@
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__
#include "opencv2/ts.hpp"
#include "opencv2/stitching.hpp"
#ifdef HAVE_OPENCV_XFEATURES2D
#include "opencv2/xfeatures2d/nonfree.hpp"
#endif
namespace cv
{
static inline Ptr<Feature2D> getFeatureFinder(const std::string& name)
{
if (name == "orb")
return ORB::create();
#if defined(HAVE_OPENCV_XFEATURES2D) && defined(OPENCV_ENABLE_NONFREE)
else if (name == "surf")
return xfeatures2d::SURF::create();
#endif
else if (name == "akaze")
return AKAZE::create();
else
return Ptr<Feature2D>();
}
} // namespace cv
#endif


@@ -0,0 +1,253 @@
#include "perf_precomp.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/opencv_modules.hpp"
#include "opencv2/core/ocl.hpp"
namespace opencv_test
{
using namespace perf;
#define SURF_MATCH_CONFIDENCE 0.65f
#define ORB_MATCH_CONFIDENCE 0.3f
#define WORK_MEGAPIX 0.6
typedef TestBaseWithParam<string> stitch;
typedef TestBaseWithParam<int> stitchExposureCompensation;
typedef TestBaseWithParam<tuple<string, string> > stitchDatasets;
typedef TestBaseWithParam<tuple<string, int>> stitchExposureCompMultiFeed;
#if defined(HAVE_OPENCV_XFEATURES2D) && defined(OPENCV_ENABLE_NONFREE)
#define TEST_DETECTORS testing::Values("surf", "orb", "akaze")
#else
#define TEST_DETECTORS testing::Values("orb", "akaze")
#endif
#define TEST_EXP_COMP_BS testing::Values(32, 16, 12, 10, 8)
#define TEST_EXP_COMP_NR_FEED testing::Values(1, 2, 3, 4, 5)
#define TEST_EXP_COMP_MODE testing::Values("gain", "channels", "blocks_gain", "blocks_channels")
#define AFFINE_DATASETS testing::Values("s", "budapest", "newspaper", "prague")
PERF_TEST_P(stitch, a123, TEST_DETECTORS)
{
Mat pano;
vector<Mat> imgs;
imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
imgs.push_back( imread( getDataPath("stitching/a3.png") ) );
Ptr<Feature2D> featuresFinder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
: makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
declare.time(30 * 20).iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setFeaturesFinder(featuresFinder);
stitcher->setFeaturesMatcher(featuresMatcher);
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1182, 50);
EXPECT_NEAR(pano.size().height, 682, 30);
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(stitchExposureCompensation, a123, TEST_EXP_COMP_BS)
{
Mat pano;
vector<Mat> imgs;
imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
imgs.push_back( imread( getDataPath("stitching/a3.png") ) );
int bs = GetParam();
declare.time(30 * 10).iterations(10);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
stitcher->setExposureCompensator(
makePtr<detail::BlocksGainCompensator>(bs, bs));
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1182, 50);
EXPECT_NEAR(pano.size().height, 682, 30);
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(stitchExposureCompMultiFeed, a123, testing::Combine(TEST_EXP_COMP_MODE, TEST_EXP_COMP_NR_FEED))
{
const int block_size = 32;
Mat pano;
vector<Mat> imgs;
imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
imgs.push_back( imread( getDataPath("stitching/a3.png") ) );
string mode = get<0>(GetParam());
int nr_feeds = get<1>(GetParam());
declare.time(30 * 10).iterations(10);
Ptr<detail::ExposureCompensator> exp_comp;
if (mode == "gain")
exp_comp = makePtr<detail::GainCompensator>(nr_feeds);
else if (mode == "channels")
exp_comp = makePtr<detail::ChannelsCompensator>(nr_feeds);
else if (mode == "blocks_gain")
exp_comp = makePtr<detail::BlocksGainCompensator>(block_size, block_size, nr_feeds);
else if (mode == "blocks_channels")
exp_comp = makePtr<detail::BlocksChannelsCompensator>(block_size, block_size, nr_feeds);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
stitcher->setExposureCompensator(exp_comp);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1182, 50);
EXPECT_NEAR(pano.size().height, 682, 30);
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(stitch, b12, TEST_DETECTORS)
{
Mat pano;
vector<Mat> imgs;
imgs.push_back( imread( getDataPath("stitching/b1.png") ) );
imgs.push_back( imread( getDataPath("stitching/b2.png") ) );
Ptr<Feature2D> featuresFinder = getFeatureFinder(GetParam());
Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
: makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);
declare.time(30 * 20).iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create();
stitcher->setFeaturesFinder(featuresFinder);
stitcher->setFeaturesMatcher(featuresMatcher);
stitcher->setWarper(makePtr<SphericalWarper>());
stitcher->setRegistrationResol(WORK_MEGAPIX);
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, 1117, GetParam() == "surf" ? 100 : 50);
EXPECT_NEAR(pano.size().height, 642, GetParam() == "surf" ? 60 : 30);
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETECTORS))
{
string dataset = get<0>(GetParam());
string detector = get<1>(GetParam());
Mat pano;
vector<Mat> imgs;
int width, height, allowed_diff = 20;
Ptr<Feature2D> featuresFinder = getFeatureFinder(detector);
if(dataset == "budapest")
{
imgs.push_back(imread(getDataPath("stitching/budapest1.jpg")));
imgs.push_back(imread(getDataPath("stitching/budapest2.jpg")));
imgs.push_back(imread(getDataPath("stitching/budapest3.jpg")));
imgs.push_back(imread(getDataPath("stitching/budapest4.jpg")));
imgs.push_back(imread(getDataPath("stitching/budapest5.jpg")));
imgs.push_back(imread(getDataPath("stitching/budapest6.jpg")));
width = 2313;
height = 1158;
// this dataset is big, the results between surf and orb differ slightly,
// but both are still good
allowed_diff = 50;
// we need to boost ORB number of features to be able to stitch this dataset
// SURF works just fine with default settings
if(detector == "orb")
featuresFinder = ORB::create(1500);
}
else if (dataset == "newspaper")
{
imgs.push_back(imread(getDataPath("stitching/newspaper1.jpg")));
imgs.push_back(imread(getDataPath("stitching/newspaper2.jpg")));
imgs.push_back(imread(getDataPath("stitching/newspaper3.jpg")));
imgs.push_back(imread(getDataPath("stitching/newspaper4.jpg")));
width = 1791;
height = 1136;
// we need to boost ORB number of features to be able to stitch this dataset
// SURF works just fine with default settings
if(detector == "orb")
featuresFinder = ORB::create(3000);
}
else if (dataset == "prague")
{
imgs.push_back(imread(getDataPath("stitching/prague1.jpg")));
imgs.push_back(imread(getDataPath("stitching/prague2.jpg")));
width = 983;
height = 1759;
}
else // dataset == "s"
{
imgs.push_back(imread(getDataPath("stitching/s1.jpg")));
imgs.push_back(imread(getDataPath("stitching/s2.jpg")));
width = 1815;
height = 700;
}
declare.time(30 * 20).iterations(20);
while(next())
{
Ptr<Stitcher> stitcher = Stitcher::create(Stitcher::SCANS);
stitcher->setFeaturesFinder(featuresFinder);
if (cv::ocl::useOpenCL())
cv::theRNG() = cv::RNG(12345); // prevent fails of Windows OpenCL builds (see #8294)
startTimer();
stitcher->stitch(imgs, pano);
stopTimer();
}
EXPECT_NEAR(pano.size().width, width, allowed_diff);
EXPECT_NEAR(pano.size().height, height, allowed_diff);
SANITY_CHECK_NOTHING();
}
} // namespace