feat: switch backend to PaddleOCR-NCNN, switch the project to CMake

1. Migrated the project backend entirely to the PaddleOCR-NCNN algorithm; it has passed basic compatibility tests (a minimal ncnn inference sketch follows the commit details below).
2. The project is now organized with CMake; to better accommodate third-party libraries, a QMake project will no longer be provided.
3. Reorganized the rights/license statement files and the code tree to minimize the risk of infringement.

Log: switch backend to PaddleOCR-NCNN, switch the project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
Author: wangzhengyang
Date:   2022-05-10 09:54:44 +08:00
Parent: ecdd171c6f
Commit: 718c41634f
10018 changed files with 3593797 additions and 186748 deletions
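
The commit message above does not show the new backend code itself; as a rough, non-authoritative illustration of what running a PaddleOCR text-detection model through ncnn can look like, the C++ sketch below loads a converted model and runs one inference. The model file names, blob names ("x", "sigmoid_0.tmp_0"), input size, and normalization constants are assumptions for illustration only, not taken from this repository.

// Hypothetical sketch only; not part of this commit.
#include <ncnn/net.h>            // header path depends on how ncnn is installed
#include <opencv2/imgcodecs.hpp>
#include <cstdio>

int main()
{
    ncnn::Net det;
    // *.param / *.bin are produced when the Paddle detection model is converted to ncnn format.
    if (det.load_param("ch_PP-OCRv2_det.param") != 0 ||
        det.load_model("ch_PP-OCRv2_det.bin") != 0)
    {
        std::fprintf(stderr, "failed to load the detection model\n");
        return 1;
    }

    cv::Mat bgr = cv::imread("input.jpg");
    if (bgr.empty())
        return 1;

    // Resize to the assumed network input size and convert to ncnn's tensor layout.
    ncnn::Mat in = ncnn::Mat::from_pixels_resize(
        bgr.data, ncnn::Mat::PIXEL_BGR, bgr.cols, bgr.rows, 640, 640);
    const float mean_vals[3] = {123.675f, 116.28f, 103.53f};
    const float norm_vals[3] = {1.f / 58.395f, 1.f / 57.12f, 1.f / 57.375f};
    in.substract_mean_normalize(mean_vals, norm_vals);   // ncnn's API spells it this way

    ncnn::Extractor ex = det.create_extractor();
    ex.input("x", in);                   // input blob name depends on the converted model
    ncnn::Mat out;
    ex.extract("sigmoid_0.tmp_0", out);  // text-region probability map

    std::printf("output: %d x %d x %d\n", out.w, out.h, out.c);
    return 0;
}

In the full pipeline the probability map would still need to be thresholded and turned into text boxes before a recognition model runs; that post-processing is omitted here.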


@@ -0,0 +1,95 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
#include "../perf_bgfg_utils.hpp"
namespace cvtest {
namespace ocl {
//////////////////////////// KNN//////////////////////////
typedef tuple<string, int> VideoKNNParamType;
typedef TestBaseWithParam<VideoKNNParamType> KNN_Apply;
typedef TestBaseWithParam<VideoKNNParamType> KNN_GetBackgroundImage;
using namespace opencv_test;
OCL_PERF_TEST_P(KNN_Apply, KNN, Combine(Values("cv/video/768x576.avi", "cv/video/1920x1080.avi"), Values(1,3)))
{
VideoKNNParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = get<1>(params);
int nFrame = 5;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer);
UMat u_foreground;
OCL_TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorKNN> knn = createBackgroundSubtractorKNN();
knn->setDetectShadows(false);
u_foreground.release();
for (int i = 0; i < nFrame; i++)
{
knn->apply(frame_buffer[i], u_foreground);
}
}
SANITY_CHECK_NOTHING();
}
OCL_PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values(
std::make_pair<string, int>("cv/video/768x576.avi", 5),
std::make_pair<string, int>("cv/video/1920x1080.avi", 5)))
{
VideoKNNParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = 3;
const int skipFrames = get<1>(params);
int nFrame = 10;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer, skipFrames);
UMat u_foreground, u_background;
OCL_TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorKNN> knn = createBackgroundSubtractorKNN();
knn->setDetectShadows(false);
u_foreground.release();
u_background.release();
for (int i = 0; i < nFrame; i++)
{
knn->apply(frame_buffer[i], u_foreground);
}
knn->getBackgroundImage(u_background);
}
#ifdef DEBUG_BGFG
imwrite(format("fg_%d_%d_knn_ocl.png", frame_buffer[0].rows, cn), u_foreground.getMat(ACCESS_READ));
imwrite(format("bg_%d_%d_knn_ocl.png", frame_buffer[0].rows, cn), u_background.getMat(ACCESS_READ));
#endif
SANITY_CHECK_NOTHING();
}
}}// namespace cvtest::ocl
#endif


@@ -0,0 +1,95 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
#include "../perf_bgfg_utils.hpp"
namespace opencv_test {
namespace ocl {
//////////////////////////// Mog2//////////////////////////
typedef tuple<string, int> VideoMOG2ParamType;
typedef TestBaseWithParam<VideoMOG2ParamType> MOG2_Apply;
typedef TestBaseWithParam<VideoMOG2ParamType> MOG2_GetBackgroundImage;
using namespace opencv_test;
OCL_PERF_TEST_P(MOG2_Apply, Mog2, Combine(Values("cv/video/768x576.avi", "cv/video/1920x1080.avi"), Values(1,3)))
{
VideoMOG2ParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = get<1>(params);
int nFrame = 5;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer);
UMat u_foreground;
OCL_TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorMOG2> mog2 = createBackgroundSubtractorMOG2();
mog2->setDetectShadows(false);
u_foreground.release();
for (int i = 0; i < nFrame; i++)
{
mog2->apply(frame_buffer[i], u_foreground);
}
}
SANITY_CHECK_NOTHING();
}
OCL_PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values(
std::make_pair<string, int>("cv/video/768x576.avi", 5),
std::make_pair<string, int>("cv/video/1920x1080.avi", 5)))
{
VideoMOG2ParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = 3;
const int skipFrames = get<1>(params);
int nFrame = 10;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer, skipFrames);
UMat u_foreground, u_background;
OCL_TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorMOG2> mog2 = createBackgroundSubtractorMOG2();
mog2->setDetectShadows(false);
u_foreground.release();
u_background.release();
for (int i = 0; i < nFrame; i++)
{
mog2->apply(frame_buffer[i], u_foreground);
}
mog2->getBackgroundImage(u_background);
}
#ifdef DEBUG_BGFG
imwrite(format("fg_%d_%d_mog2_ocl.png", frame_buffer[0].rows, cn), u_foreground.getMat(ACCESS_READ));
imwrite(format("bg_%d_%d_mog2_ocl.png", frame_buffer[0].rows, cn), u_background.getMat(ACCESS_READ));
#endif
SANITY_CHECK_NOTHING();
}
}}// namespace opencv_test::ocl
#endif


@@ -0,0 +1,73 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
namespace opencv_test { namespace {
#ifdef HAVE_OPENCL
void MakeArtificialExample(UMat &dst_frame1, UMat &dst_frame2);
typedef tuple<String, Size> DISParams;
typedef TestBaseWithParam<DISParams> DenseOpticalFlow_DIS;
OCL_PERF_TEST_P(DenseOpticalFlow_DIS, perf,
Combine(Values("PRESET_ULTRAFAST", "PRESET_FAST", "PRESET_MEDIUM"), Values(szVGA, sz720p, sz1080p)))
{
DISParams params = GetParam();
// use strings to print preset names in the perf test results:
String preset_string = get<0>(params);
int preset = DISOpticalFlow::PRESET_FAST;
if (preset_string == "PRESET_ULTRAFAST")
preset = DISOpticalFlow::PRESET_ULTRAFAST;
else if (preset_string == "PRESET_FAST")
preset = DISOpticalFlow::PRESET_FAST;
else if (preset_string == "PRESET_MEDIUM")
preset = DISOpticalFlow::PRESET_MEDIUM;
Size sz = get<1>(params);
UMat frame1(sz, CV_8U);
UMat frame2(sz, CV_8U);
UMat flow;
MakeArtificialExample(frame1, frame2);
Ptr<DenseOpticalFlow> algo = DISOpticalFlow::create(preset);
PERF_SAMPLE_BEGIN()
{
algo->calc(frame1, frame2, flow);
}
PERF_SAMPLE_END()
SANITY_CHECK_NOTHING();
}
void MakeArtificialExample(UMat &dst_frame1, UMat &dst_frame2)
{
int src_scale = 2;
int OF_scale = 6;
double sigma = dst_frame1.cols / 300;
UMat tmp(Size(dst_frame1.cols / (1 << src_scale), dst_frame1.rows / (1 << src_scale)), CV_8U);
randu(tmp, 0, 255);
resize(tmp, dst_frame1, dst_frame1.size(), 0.0, 0.0, INTER_LINEAR_EXACT);
resize(tmp, dst_frame2, dst_frame2.size(), 0.0, 0.0, INTER_LINEAR_EXACT);
Mat displacement_field(Size(dst_frame1.cols / (1 << OF_scale), dst_frame1.rows / (1 << OF_scale)),
CV_32FC2);
randn(displacement_field, 0.0, sigma);
resize(displacement_field, displacement_field, dst_frame2.size(), 0.0, 0.0, INTER_CUBIC);
for (int i = 0; i < displacement_field.rows; i++)
for (int j = 0; j < displacement_field.cols; j++)
displacement_field.at<Vec2f>(i, j) += Vec2f((float)j, (float)i);
remap(dst_frame2, dst_frame2, displacement_field, Mat(), INTER_LINEAR, BORDER_REPLICATE);
}
#endif // HAVE_OPENCL
}} // namespace


@@ -0,0 +1,36 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
// Copyright (C) 2014, Advanced Micro Devices, Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#if 0 //def HAVE_OPENCL
namespace opencv_test {
namespace ocl {
///////////// UpdateMotionHistory ////////////////////////
typedef TestBaseWithParam<Size> UpdateMotionHistoryFixture;
OCL_PERF_TEST_P(UpdateMotionHistoryFixture, UpdateMotionHistory, OCL_TEST_SIZES)
{
const Size size = GetParam();
checkDeviceMaxMemoryAllocSize(size, CV_32FC1);
UMat silhouette(size, CV_8UC1), mhi(size, CV_32FC1);
randu(silhouette, -5, 5);
declare.in(mhi, WARMUP_RNG);
OCL_TEST_CYCLE() cv::updateMotionHistory(silhouette, mhi, 1, 0.5);
SANITY_CHECK(mhi);
}
} } // namespace opencv_test::ocl
#endif // HAVE_OPENCL


@@ -0,0 +1,112 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2010-2012, Multicoreware, Inc., all rights reserved.
// Copyright (C) 2010-2012, Advanced Micro Devices, Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// @Authors
// Fangfang Bai, fangfang@multicorewareinc.com
// Jin Ma, jin@multicorewareinc.com
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors as is and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
namespace opencv_test {
namespace ocl {
///////////// FarnebackOpticalFlow ////////////////////////
CV_ENUM(farneFlagType, 0, OPTFLOW_FARNEBACK_GAUSSIAN)
typedef tuple< tuple<int, double>, farneFlagType, bool > FarnebackOpticalFlowParams;
typedef TestBaseWithParam<FarnebackOpticalFlowParams> FarnebackOpticalFlowFixture;
OCL_PERF_TEST_P(FarnebackOpticalFlowFixture, FarnebackOpticalFlow,
::testing::Combine(
::testing::Values(
make_tuple<int, double>(5, 1.1),
make_tuple<int, double>(7, 1.5)
),
farneFlagType::all(),
::testing::Bool()
)
)
{
Mat frame0 = imread(getDataPath("gpu/opticalflow/rubberwhale1.png"), cv::IMREAD_GRAYSCALE);
ASSERT_FALSE(frame0.empty()) << "can't load rubberwhale1.png";
Mat frame1 = imread(getDataPath("gpu/opticalflow/rubberwhale2.png"), cv::IMREAD_GRAYSCALE);
ASSERT_FALSE(frame1.empty()) << "can't load rubberwhale2.png";
const Size srcSize = frame0.size();
const int numLevels = 5;
const int winSize = 13;
const int numIters = 10;
const FarnebackOpticalFlowParams params = GetParam();
const tuple<int, double> polyParams = get<0>(params);
const int polyN = get<0>(polyParams);
const double polySigma = get<1>(polyParams);
const double pyrScale = 0.5;
int flags = get<1>(params);
const bool useInitFlow = get<2>(params);
const double eps = 0.1;
UMat uFrame0; frame0.copyTo(uFrame0);
UMat uFrame1; frame1.copyTo(uFrame1);
UMat uFlow(srcSize, CV_32FC2);
declare.in(uFrame0, uFrame1, WARMUP_READ).out(uFlow, WARMUP_READ);
if (useInitFlow)
{
cv::calcOpticalFlowFarneback(uFrame0, uFrame1, uFlow, pyrScale, numLevels, winSize, numIters, polyN, polySigma, flags);
flags |= OPTFLOW_USE_INITIAL_FLOW;
}
OCL_TEST_CYCLE()
cv::calcOpticalFlowFarneback(uFrame0, uFrame1, uFlow, pyrScale, numLevels, winSize, numIters, polyN, polySigma, flags);
SANITY_CHECK(uFlow, eps, ERROR_RELATIVE);
}
} } // namespace opencv_test::ocl
#endif // HAVE_OPENCL


@@ -0,0 +1,104 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2010-2012, Multicoreware, Inc., all rights reserved.
// Copyright (C) 2010-2012, Advanced Micro Devices, Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// @Authors
// Fangfang Bai, fangfang@multicorewareinc.com
// Jin Ma, jin@multicorewareinc.com
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors as is and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "../perf_precomp.hpp"
#include "opencv2/ts/ocl_perf.hpp"
#ifdef HAVE_OPENCL
namespace opencv_test {
namespace ocl {
typedef tuple< int > PyrLKOpticalFlowParams;
typedef TestBaseWithParam<PyrLKOpticalFlowParams> PyrLKOpticalFlowFixture;
OCL_PERF_TEST_P(PyrLKOpticalFlowFixture, PyrLKOpticalFlow,
::testing::Values(1000, 2000, 4000)
)
{
Mat frame0 = imread(getDataPath("gpu/opticalflow/rubberwhale1.png"), cv::IMREAD_GRAYSCALE);
ASSERT_FALSE(frame0.empty()) << "can't load rubberwhale1.png";
Mat frame1 = imread(getDataPath("gpu/opticalflow/rubberwhale2.png"), cv::IMREAD_GRAYSCALE);
ASSERT_FALSE(frame1.empty()) << "can't load rubberwhale2.png";
UMat uFrame0; frame0.copyTo(uFrame0);
UMat uFrame1; frame1.copyTo(uFrame1);
const Size winSize = Size(21, 21);
const int maxLevel = 3;
const TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 0.01);
const int flags = 0;
const float minEigThreshold = 1e-4f;
const double eps = 1.0;
const PyrLKOpticalFlowParams params = GetParam();
const int pointsCount = get<0>(params);
// SKIP unstable tests
#ifdef __linux__
if (cvtest::skipUnstableTests && ocl::useOpenCL())
{
if (ocl::Device::getDefault().isIntel())
throw ::perf::TestBase::PerfSkipTestException();
}
#endif
vector<Point2f> pts;
goodFeaturesToTrack(frame0, pts, pointsCount, 0.01, 0.0);
Mat ptsMat(1, static_cast<int>(pts.size()), CV_32FC2, (void *)&pts[0]);
declare.in(uFrame0, uFrame1, WARMUP_READ);
UMat uNextPts, uStatus, uErr;
OCL_TEST_CYCLE()
cv::calcOpticalFlowPyrLK(uFrame0, uFrame1, pts, uNextPts, uStatus, uErr, winSize, maxLevel, criteria, flags, minEigThreshold);
SANITY_CHECK(uNextPts, eps);
}
} } // namespace opencv_test::ocl
#endif // HAVE_OPENCL


@@ -0,0 +1,88 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "perf_precomp.hpp"
#include "perf_bgfg_utils.hpp"
namespace opencv_test { namespace {
//////////////////////////// KNN//////////////////////////
typedef tuple<std::string, int> VideoKNNParamType;
typedef TestBaseWithParam<VideoKNNParamType> KNN_Apply;
typedef TestBaseWithParam<VideoKNNParamType> KNN_GetBackgroundImage;
PERF_TEST_P(KNN_Apply, KNN, Combine(Values("cv/video/768x576.avi", "cv/video/1920x1080.avi"), Values(1,3)))
{
VideoKNNParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = get<1>(params);
int nFrame = 5;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer);
Mat foreground;
TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorKNN> knn = createBackgroundSubtractorKNN();
knn->setDetectShadows(false);
foreground.release();
for (int i = 0; i < nFrame; i++)
{
knn->apply(frame_buffer[i], foreground);
}
}
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(KNN_GetBackgroundImage, KNN, Values(
std::make_pair<string, int>("cv/video/768x576.avi", 5),
std::make_pair<string, int>("cv/video/1920x1080.avi", 5)))
{
VideoKNNParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = 3;
const int skipFrames = get<1>(params);
int nFrame = 10;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer, skipFrames);
Mat foreground, background;
TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorKNN> knn = createBackgroundSubtractorKNN();
knn->setDetectShadows(false);
foreground.release();
background.release();
for (int i = 0; i < nFrame; i++)
{
knn->apply(frame_buffer[i], foreground);
}
knn->getBackgroundImage(background);
}
#ifdef DEBUG_BGFG
imwrite(format("fg_%d_%d_knn.png", frame_buffer[0].rows, cn), foreground);
imwrite(format("bg_%d_%d_knn.png", frame_buffer[0].rows, cn), background);
#endif
SANITY_CHECK_NOTHING();
}
}}// namespace


@@ -0,0 +1,88 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "perf_precomp.hpp"
#include "perf_bgfg_utils.hpp"
namespace opencv_test { namespace {
//////////////////////////// Mog2//////////////////////////
typedef tuple<std::string, int> VideoMOG2ParamType;
typedef TestBaseWithParam<VideoMOG2ParamType> MOG2_Apply;
typedef TestBaseWithParam<VideoMOG2ParamType> MOG2_GetBackgroundImage;
PERF_TEST_P(MOG2_Apply, Mog2, Combine(Values("cv/video/768x576.avi", "cv/video/1920x1080.avi"), Values(1,3)))
{
VideoMOG2ParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = get<1>(params);
int nFrame = 5;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer);
Mat foreground;
TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorMOG2> mog2 = createBackgroundSubtractorMOG2();
mog2->setDetectShadows(false);
foreground.release();
for (int i = 0; i < nFrame; i++)
{
mog2->apply(frame_buffer[i], foreground);
}
}
SANITY_CHECK_NOTHING();
}
PERF_TEST_P(MOG2_GetBackgroundImage, Mog2, Values(
std::make_pair<string, int>("cv/video/768x576.avi", 5),
std::make_pair<string, int>("cv/video/1920x1080.avi", 5)))
{
VideoMOG2ParamType params = GetParam();
const string inputFile = getDataPath(get<0>(params));
const int cn = 3;
const int skipFrames = get<1>(params);
int nFrame = 10;
vector<Mat> frame_buffer(nFrame);
cv::VideoCapture cap(inputFile);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
prepareData(cap, cn, frame_buffer, skipFrames);
Mat foreground, background;
TEST_CYCLE()
{
Ptr<cv::BackgroundSubtractorMOG2> mog2 = createBackgroundSubtractorMOG2();
mog2->setDetectShadows(false);
foreground.release();
background.release();
for (int i = 0; i < nFrame; i++)
{
mog2->apply(frame_buffer[i], foreground);
}
mog2->getBackgroundImage(background);
}
#ifdef DEBUG_BGFG
imwrite(format("fg_%d_%d_mog2.png", frame_buffer[0].rows, cn), foreground);
imwrite(format("bg_%d_%d_mog2.png", frame_buffer[0].rows, cn), background);
#endif
SANITY_CHECK_NOTHING();
}
}}// namespace


@@ -0,0 +1,48 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
namespace opencv_test {
//#define DEBUG_BGFG
using namespace testing;
using namespace cvtest;
using namespace perf;
namespace {
using namespace cv;
static void cvtFrameFmt(std::vector<Mat>& input, std::vector<Mat>& output)
{
for(int i = 0; i< (int)(input.size()); i++)
{
cvtColor(input[i], output[i], COLOR_RGB2GRAY);
}
}
static void prepareData(VideoCapture& cap, int cn, std::vector<Mat>& frame_buffer, int skipFrames = 0)
{
std::vector<Mat> frame_buffer_init;
int nFrame = (int)frame_buffer.size();
for (int i = 0; i < skipFrames; i++)
{
cv::Mat frame;
cap >> frame;
}
for (int i = 0; i < nFrame; i++)
{
cv::Mat frame;
cap >> frame;
ASSERT_FALSE(frame.empty());
frame_buffer_init.push_back(frame);
}
if (cn == 1)
cvtFrameFmt(frame_buffer_init, frame_buffer);
else
frame_buffer.swap(frame_buffer_init);
}
}}


@@ -0,0 +1,66 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "perf_precomp.hpp"
namespace opencv_test { namespace {
void MakeArtificialExample(Mat &dst_frame1, Mat &dst_frame2);
typedef tuple<String, Size> DISParams;
typedef TestBaseWithParam<DISParams> DenseOpticalFlow_DIS;
PERF_TEST_P(DenseOpticalFlow_DIS, perf,
Combine(Values("PRESET_ULTRAFAST", "PRESET_FAST", "PRESET_MEDIUM"), Values(szVGA, sz720p, sz1080p)))
{
DISParams params = GetParam();
// use strings to print preset names in the perf test results:
String preset_string = get<0>(params);
int preset = DISOpticalFlow::PRESET_FAST;
if (preset_string == "PRESET_ULTRAFAST")
preset = DISOpticalFlow::PRESET_ULTRAFAST;
else if (preset_string == "PRESET_FAST")
preset = DISOpticalFlow::PRESET_FAST;
else if (preset_string == "PRESET_MEDIUM")
preset = DISOpticalFlow::PRESET_MEDIUM;
Size sz = get<1>(params);
Mat frame1(sz, CV_8U);
Mat frame2(sz, CV_8U);
Mat flow;
MakeArtificialExample(frame1, frame2);
TEST_CYCLE_N(10)
{
Ptr<DenseOpticalFlow> algo = DISOpticalFlow::create(preset);
algo->calc(frame1, frame2, flow);
}
SANITY_CHECK_NOTHING();
}
void MakeArtificialExample(Mat &dst_frame1, Mat &dst_frame2)
{
int src_scale = 2;
int OF_scale = 6;
double sigma = dst_frame1.cols / 300;
Mat tmp(Size(dst_frame1.cols / (1 << src_scale), dst_frame1.rows / (1 << src_scale)), CV_8U);
randu(tmp, 0, 255);
resize(tmp, dst_frame1, dst_frame1.size(), 0.0, 0.0, INTER_LINEAR_EXACT);
resize(tmp, dst_frame2, dst_frame2.size(), 0.0, 0.0, INTER_LINEAR_EXACT);
Mat displacement_field(Size(dst_frame1.cols / (1 << OF_scale), dst_frame1.rows / (1 << OF_scale)),
CV_32FC2);
randn(displacement_field, 0.0, sigma);
resize(displacement_field, displacement_field, dst_frame2.size(), 0.0, 0.0, INTER_CUBIC);
for (int i = 0; i < displacement_field.rows; i++)
for (int j = 0; j < displacement_field.cols; j++)
displacement_field.at<Vec2f>(i, j) += Vec2f((float)j, (float)i);
remap(dst_frame2, dst_frame2, displacement_field, Mat(), INTER_LINEAR, BORDER_REPLICATE);
}
}} // namespace


@@ -0,0 +1,71 @@
#include "perf_precomp.hpp"
namespace opencv_test
{
using namespace perf;
CV_ENUM(MotionType, MOTION_TRANSLATION, MOTION_EUCLIDEAN, MOTION_AFFINE, MOTION_HOMOGRAPHY)
typedef tuple<MotionType> MotionType_t;
typedef perf::TestBaseWithParam<MotionType_t> TransformationType;
PERF_TEST_P(TransformationType, findTransformECC, /*testing::ValuesIn(MotionType::all())*/
testing::Values((int) MOTION_TRANSLATION, (int) MOTION_EUCLIDEAN,
(int) MOTION_AFFINE, (int) MOTION_HOMOGRAPHY)
)
{
Mat img = imread(getDataPath("cv/shared/fruits_ecc.png"),0);
Mat templateImage;
int transform_type = get<0>(GetParam());
Mat warpMat;
Mat warpGround;
double angle;
switch (transform_type) {
case MOTION_TRANSLATION:
warpGround = (Mat_<float>(2,3) << 1.f, 0.f, 7.234f,
0.f, 1.f, 11.839f);
warpAffine(img, templateImage, warpGround,
Size(200,200), INTER_LINEAR + WARP_INVERSE_MAP);
break;
case MOTION_EUCLIDEAN:
angle = CV_PI/30;
warpGround = (Mat_<float>(2,3) << (float)cos(angle), (float)-sin(angle), 12.123f,
(float)sin(angle), (float)cos(angle), 14.789f);
warpAffine(img, templateImage, warpGround,
Size(200,200), INTER_LINEAR + WARP_INVERSE_MAP);
break;
case MOTION_AFFINE:
warpGround = (Mat_<float>(2,3) << 0.98f, 0.03f, 15.523f,
-0.02f, 0.95f, 10.456f);
warpAffine(img, templateImage, warpGround,
Size(200,200), INTER_LINEAR + WARP_INVERSE_MAP);
break;
case MOTION_HOMOGRAPHY:
warpGround = (Mat_<float>(3,3) << 0.98f, 0.03f, 15.523f,
-0.02f, 0.95f, 10.456f,
0.0002f, 0.0003f, 1.f);
warpPerspective(img, templateImage, warpGround,
Size(200,200), INTER_LINEAR + WARP_INVERSE_MAP);
break;
}
TEST_CYCLE()
{
if (transform_type<3)
warpMat = Mat::eye(2,3, CV_32F);
else
warpMat = Mat::eye(3,3, CV_32F);
findTransformECC(templateImage, img, warpMat, transform_type,
TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 5, -1));
}
SANITY_CHECK(warpMat, 3e-3);
}
} // namespace


@@ -0,0 +1,22 @@
#include "perf_precomp.hpp"
#if defined(HAVE_HPX)
#include <hpx/hpx_main.hpp>
#endif
static
void initTests()
{
const char* extraTestDataPath =
#ifdef WINRT
NULL;
#else
getenv("OPENCV_DNN_TEST_DATA_PATH");
#endif
if (extraTestDataPath)
cvtest::addDataSearchPath(extraTestDataPath);
cvtest::addDataSearchSubDirectory(""); // override "cv" prefix below to access without "../dnn" hacks
}
CV_PERF_TEST_MAIN(video, initTests())


@@ -0,0 +1,269 @@
#include "perf_precomp.hpp"
namespace opencv_test { namespace {
using namespace perf;
typedef tuple<std::string, int, int, tuple<int,int>, int> Path_Idx_Cn_NPoints_WSize_t;
typedef TestBaseWithParam<Path_Idx_Cn_NPoints_WSize_t> Path_Idx_Cn_NPoints_WSize;
void FormTrackingPointsArray(vector<Point2f>& points, int width, int height, int nPointsX, int nPointsY)
{
int stepX = width / nPointsX;
int stepY = height / nPointsY;
if (stepX < 1 || stepY < 1) FAIL() << "Specified points number is too big";
points.clear();
points.reserve(nPointsX * nPointsY);
for( int x = stepX / 2; x < width; x += stepX )
{
for( int y = stepY / 2; y < height; y += stepY )
{
Point2f pt(static_cast<float>(x), static_cast<float>(y));
points.push_back(pt);
}
}
}
PERF_TEST_P(Path_Idx_Cn_NPoints_WSize, OpticalFlowPyrLK_full, testing::Combine(
testing::Values<std::string>("cv/optflow/frames/VGA_%02d.png", "cv/optflow/frames/720p_%02d.png"),
testing::Range(1, 3),
testing::Values(1, 3, 4),
testing::Values(make_tuple(9, 9), make_tuple(15, 15)),
testing::Values(7, 11)
)
)
{
string filename1 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam())));
string filename2 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam()) + 1));
Mat img1 = imread(filename1);
Mat img2 = imread(filename2);
if (img1.empty()) FAIL() << "Unable to load source image " << filename1;
if (img2.empty()) FAIL() << "Unable to load source image " << filename2;
int cn = get<2>(GetParam());
int nPointsX = std::min(get<0>(get<3>(GetParam())), img1.cols);
int nPointsY = std::min(get<1>(get<3>(GetParam())), img1.rows);
int winSize = get<4>(GetParam());
int maxLevel = 2;
TermCriteria criteria(TermCriteria::COUNT|TermCriteria::EPS, 7, 0.001);
int flags = 0;
double minEigThreshold = 1e-4;
Mat frame1, frame2;
switch(cn)
{
case 1:
cvtColor(img1, frame1, COLOR_BGR2GRAY, cn);
cvtColor(img2, frame2, COLOR_BGR2GRAY, cn);
break;
case 3:
frame1 = img1;
frame2 = img2;
break;
case 4:
cvtColor(img1, frame1, COLOR_BGR2BGRA, cn);
cvtColor(img2, frame2, COLOR_BGR2BGRA, cn);
break;
default:
FAIL() << "Unexpected number of channels: " << cn;
}
vector<Point2f> inPoints;
vector<Point2f> outPoints;
vector<uchar> status;
vector<float> err;
FormTrackingPointsArray(inPoints, frame1.cols, frame1.rows, nPointsX, nPointsY);
outPoints.resize(inPoints.size());
status.resize(inPoints.size());
err.resize(inPoints.size());
declare.in(frame1, frame2, inPoints).out(outPoints);
TEST_CYCLE_N(30)
{
calcOpticalFlowPyrLK(frame1, frame2, inPoints, outPoints, status, err,
Size(winSize, winSize), maxLevel, criteria,
flags, minEigThreshold);
}
SANITY_CHECK_NOTHING();
}
typedef tuple<std::string, int, tuple<int, int>, int> Path_Idx_NPoints_WSize_t;
typedef TestBaseWithParam<Path_Idx_NPoints_WSize_t> Path_Idx_NPoints_WSize;
PERF_TEST_P(Path_Idx_NPoints_WSize, DISABLED_OpticalFlowPyrLK_ovx, testing::Combine(
testing::Values<std::string>("cv/optflow/frames/VGA_%02d.png", "cv/optflow/frames/720p_%02d.png"),
testing::Range(1, 3),
testing::Values(make_tuple(9, 9), make_tuple(15, 15)),
testing::Values(7, 11)
)
)
{
string filename1 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam())));
string filename2 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam()) + 1));
Mat img1 = imread(filename1);
Mat img2 = imread(filename2);
if (img1.empty()) FAIL() << "Unable to load source image " << filename1;
if (img2.empty()) FAIL() << "Unable to load source image " << filename2;
int nPointsX = std::min(get<0>(get<2>(GetParam())), img1.cols);
int nPointsY = std::min(get<1>(get<2>(GetParam())), img1.rows);
int winSize = get<3>(GetParam());
int maxLevel = 2;
TermCriteria criteria(TermCriteria::COUNT|TermCriteria::EPS, 7, 0.001);
int flags = 0;
double minEigThreshold = 1e-4;
Mat frame1, frame2;
cvtColor(img1, frame1, COLOR_BGR2GRAY, 1);
cvtColor(img2, frame2, COLOR_BGR2GRAY, 1);
vector<Point2f> inPoints;
vector<Point2f> outPoints;
vector<uchar> status;
FormTrackingPointsArray(inPoints, frame1.cols, frame1.rows, nPointsX, nPointsY);
outPoints.resize(inPoints.size());
status.resize(inPoints.size());
declare.in(frame1, frame2, inPoints).out(outPoints);
TEST_CYCLE_N(30)
{
calcOpticalFlowPyrLK(frame1, frame2, inPoints, outPoints, status, cv::noArray(),
Size(winSize, winSize), maxLevel, criteria,
flags, minEigThreshold);
}
SANITY_CHECK_NOTHING();
}
typedef tuple<std::string, int, int, tuple<int,int>, int, bool> Path_Idx_Cn_NPoints_WSize_Deriv_t;
typedef TestBaseWithParam<Path_Idx_Cn_NPoints_WSize_Deriv_t> Path_Idx_Cn_NPoints_WSize_Deriv;
PERF_TEST_P(Path_Idx_Cn_NPoints_WSize_Deriv, OpticalFlowPyrLK_self, testing::Combine(
testing::Values<std::string>("cv/optflow/frames/VGA_%02d.png", "cv/optflow/frames/720p_%02d.png"),
testing::Range(1, 3),
testing::Values(1, 3, 4),
testing::Values(make_tuple(9, 9), make_tuple(15, 15)),
testing::Values(7, 11),
testing::Bool()
)
)
{
string filename1 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam())));
string filename2 = getDataPath(cv::format(get<0>(GetParam()).c_str(), get<1>(GetParam()) + 1));
Mat img1 = imread(filename1);
Mat img2 = imread(filename2);
if (img1.empty()) FAIL() << "Unable to load source image " << filename1;
if (img2.empty()) FAIL() << "Unable to load source image " << filename2;
int cn = get<2>(GetParam());
int nPointsX = std::min(get<0>(get<3>(GetParam())), img1.cols);
int nPointsY = std::min(get<1>(get<3>(GetParam())), img1.rows);
int winSize = get<4>(GetParam());
bool withDerivatives = get<5>(GetParam());
int maxLevel = 2;
TermCriteria criteria(TermCriteria::COUNT|TermCriteria::EPS, 7, 0.001);
int flags = 0;
double minEigThreshold = 1e-4;
Mat frame1, frame2;
switch(cn)
{
case 1:
cvtColor(img1, frame1, COLOR_BGR2GRAY, cn);
cvtColor(img2, frame2, COLOR_BGR2GRAY, cn);
break;
case 3:
frame1 = img1;
frame2 = img2;
break;
case 4:
cvtColor(img1, frame1, COLOR_BGR2BGRA, cn);
cvtColor(img2, frame2, COLOR_BGR2BGRA, cn);
break;
default:
FAIL() << "Unexpected number of channels: " << cn;
}
vector<Point2f> inPoints;
vector<Point2f> outPoints;
vector<uchar> status;
vector<float> err;
FormTrackingPointsArray(inPoints, frame1.cols, frame1.rows, nPointsX, nPointsY);
outPoints.resize(inPoints.size());
status.resize(inPoints.size());
err.resize(inPoints.size());
std::vector<Mat> pyramid1, pyramid2;
maxLevel = buildOpticalFlowPyramid(frame1, pyramid1, Size(winSize, winSize), maxLevel, withDerivatives);
maxLevel = buildOpticalFlowPyramid(frame2, pyramid2, Size(winSize, winSize), maxLevel, withDerivatives);
declare.in(pyramid1, pyramid2, inPoints).out(outPoints);
declare.time(400);
int runs = 3;
TEST_CYCLE_MULTIRUN(runs)
{
calcOpticalFlowPyrLK(pyramid1, pyramid2, inPoints, outPoints, status, err,
Size(winSize, winSize), maxLevel, criteria,
flags, minEigThreshold);
}
SANITY_CHECK_NOTHING();
}
CV_ENUM(PyrBorderMode, BORDER_DEFAULT, BORDER_TRANSPARENT)
typedef tuple<std::string, int, bool, PyrBorderMode, bool> Path_Win_Deriv_Border_Reuse_t;
typedef TestBaseWithParam<Path_Win_Deriv_Border_Reuse_t> Path_Win_Deriv_Border_Reuse;
PERF_TEST_P(Path_Win_Deriv_Border_Reuse, OpticalFlowPyrLK_pyr, testing::Combine(
testing::Values<std::string>("cv/optflow/frames/720p_01.png"),
testing::Values(7, 11),
testing::Bool(),
PyrBorderMode::all(),
testing::Bool()
)
)
{
string filename = getDataPath(get<0>(GetParam()));
Mat img = imread(filename);
Size winSize(get<1>(GetParam()), get<1>(GetParam()));
bool withDerivatives = get<2>(GetParam());
int derivBorder = get<3>(GetParam());
int pyrBorder = derivBorder;
if(derivBorder != BORDER_TRANSPARENT)
{
derivBorder = BORDER_CONSTANT;
pyrBorder = BORDER_REFLECT_101;
}
bool tryReuseInputImage = get<4>(GetParam());
std::vector<Mat> pyramid;
img.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
int maxLevel = buildOpticalFlowPyramid(img, pyramid, winSize, 1000, withDerivatives, BORDER_CONSTANT, BORDER_CONSTANT, tryReuseInputImage);
declare.in(img).out(pyramid);
TEST_CYCLE()
{
buildOpticalFlowPyramid(img, pyramid, winSize, maxLevel, withDerivatives, pyrBorder, derivBorder, tryReuseInputImage);
}
size_t expected_layers = ((size_t)maxLevel + 1) * (withDerivatives ? 2 : 1);
ASSERT_EQ(expected_layers, pyramid.size());
SANITY_CHECK_NOTHING();
}
}} // namespace


@@ -0,0 +1,16 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef __OPENCV_VIDEO_PRECOMP_HPP__
#define __OPENCV_VIDEO_PRECOMP_HPP__
#include "opencv2/ts.hpp"
#include <opencv2/video.hpp>
#include "opencv2/ts/ts_perf.hpp"
namespace cvtest
{
using namespace perf;
}
#endif


@@ -0,0 +1,104 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "perf_precomp.hpp"
namespace opencv_test { namespace {
using namespace perf;
typedef tuple<string, int, Rect> TrackingParams_t;
std::vector<TrackingParams_t> getTrackingParams()
{
std::vector<TrackingParams_t> params {
TrackingParams_t("david/data/david.webm", 300, Rect(163,62,47,56)),
TrackingParams_t("dudek/data/dudek.webm", 1, Rect(123,87,132,176)),
TrackingParams_t("faceocc2/data/faceocc2.webm", 1, Rect(118,57,82,98))
};
return params;
}
class Tracking : public perf::TestBaseWithParam<TrackingParams_t>
{
public:
template<typename ROI_t = Rect2d, typename Tracker>
void runTrackingTest(const Ptr<Tracker>& tracker, const TrackingParams_t& params);
};
template<typename ROI_t, typename Tracker>
void Tracking::runTrackingTest(const Ptr<Tracker>& tracker, const TrackingParams_t& params)
{
const int N = 10;
string video = get<0>(params);
int startFrame = get<1>(params);
//int endFrame = startFrame + N;
Rect boundingBox = get<2>(params);
string videoPath = findDataFile(std::string("cv/tracking/") + video);
VideoCapture c;
c.open(videoPath);
if (!c.isOpened())
throw SkipTestException("Can't open video file");
#if 0
// c.set(CAP_PROP_POS_FRAMES, startFrame);
#else
if (startFrame)
std::cout << "startFrame = " << startFrame << std::endl;
for (int i = 0; i < startFrame; i++)
{
Mat dummy_frame;
c >> dummy_frame;
ASSERT_FALSE(dummy_frame.empty()) << i << ": " << videoPath;
}
#endif
// decode frames into memory (don't measure decoding performance)
std::vector<Mat> frames;
for (int i = 0; i < N; ++i)
{
Mat frame;
c >> frame;
ASSERT_FALSE(frame.empty()) << "i=" << i;
frames.push_back(frame);
}
std::cout << "frame size = " << frames[0].size() << std::endl;
PERF_SAMPLE_BEGIN();
{
tracker->init(frames[0], (ROI_t)boundingBox);
for (int i = 1; i < N; ++i)
{
ROI_t rc;
tracker->update(frames[i], rc);
ASSERT_FALSE(rc.empty());
}
}
PERF_SAMPLE_END();
SANITY_CHECK_NOTHING();
}
//==================================================================================================
PERF_TEST_P(Tracking, MIL, testing::ValuesIn(getTrackingParams()))
{
auto tracker = TrackerMIL::create();
runTrackingTest<Rect>(tracker, GetParam());
}
PERF_TEST_P(Tracking, GOTURN, testing::ValuesIn(getTrackingParams()))
{
std::string model = cvtest::findDataFile("dnn/gsoc2016-goturn/goturn.prototxt");
std::string weights = cvtest::findDataFile("dnn/gsoc2016-goturn/goturn.caffemodel", false);
TrackerGOTURN::Params params;
params.modelTxt = model;
params.modelBin = weights;
auto tracker = TrackerGOTURN::create(params);
runTrackingTest<Rect>(tracker, GetParam());
}
}} // namespace


@@ -0,0 +1,40 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "perf_precomp.hpp"
namespace opencv_test { namespace {
typedef tuple<Size, int, int> VarRefParams;
typedef TestBaseWithParam<VarRefParams> DenseOpticalFlow_VariationalRefinement;
PERF_TEST_P(DenseOpticalFlow_VariationalRefinement, perf, Combine(Values(szQVGA, szVGA), Values(5, 10), Values(5, 10)))
{
VarRefParams params = GetParam();
Size sz = get<0>(params);
int sorIter = get<1>(params);
int fixedPointIter = get<2>(params);
Mat frame1(sz, CV_8U);
Mat frame2(sz, CV_8U);
Mat flow(sz, CV_32FC2);
randu(frame1, 0, 255);
randu(frame2, 0, 255);
flow.setTo(0.0f);
TEST_CYCLE_N(10)
{
Ptr<VariationalRefinement> var = VariationalRefinement::create();
var->setAlpha(20.0f);
var->setGamma(10.0f);
var->setDelta(5.0f);
var->setSorIterations(sorIter);
var->setFixedPointIterations(fixedPointIter);
var->calc(frame1, frame2, flow);
}
SANITY_CHECK_NOTHING();
}
}} // namespace