feat: switch backend to PaddleOCR-NCNN, switch project to CMake

1. The project backend has been fully migrated to the PaddleOCR-NCNN algorithm and has passed basic compatibility testing.
2. The project is now organized with CMake; to better accommodate third-party libraries going forward, the QMake project is no longer provided.
3. Reorganized the rights/licensing declaration files and restructured the code tree to minimize the risk of infringement.
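
With the QMake project gone, builds go through CMake's standard out-of-source workflow. A minimal sketch of the intended flow (generic CMake invocations only; no project-specific options from this commit are assumed):

cmake -S . -B build -DCMAKE_BUILD_TYPE=Release
cmake --build build --parallel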

Log: switch backend to PaddleOCR-NCNN, switch project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
Author: wangzhengyang
Date: 2022-05-10 09:54:44 +08:00
Parent: ecdd171c6f
Commit: 718c41634f
10018 changed files with 3593797 additions and 186748 deletions

View File

@@ -0,0 +1,81 @@
#!/bin/bash
set -e
if [ -z "$1" ] ; then
    echo "Usage: $0 <destination directory> [<build config>]"
    exit 1
fi
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
OCV="$( cd "${DIR}/../../.." >/dev/null 2>&1 && pwd )"
mkdir -p "${1}" # Docker creates non-existed mounts with 'root' owner, lets ensure that dir exists under the current user to avoid "Permission denied" problem
DST="$( cd "$1" >/dev/null 2>&1 && pwd )"
CFG=$2
do_build()
{
    # do_build <tag> <context dir> <dockerfile> [extra 'docker build' args]
    TAG=$1
    D=$2
    F=$3
    shift 3
    docker build \
        --build-arg http_proxy \
        --build-arg https_proxy \
        "$@" \
        -t "${TAG}" \
        -f "${D}/${F}" \
        "${D}"
}
do_run()
{
    # do_run <tag> <command...> - run with OpenCV sources mounted read-only and the destination mounted at /dst
    TAG=$1
    shift 1
    docker run \
        -it \
        --rm \
        -v "${OCV}":/opencv:ro \
        -v "${DST}":/dst \
        -e CFG="${CFG}" \
        --user "$(id -u):$(id -g)" \
        "${TAG}" \
        "$@"
}
build_gstreamer()
{
    TAG=opencv_gstreamer_builder
    do_build "${TAG}" "${DIR}/plugin_gstreamer" Dockerfile
    do_run "${TAG}" /opencv/modules/videoio/misc/plugin_gstreamer/build.sh /dst "${CFG}"
}
build_ffmpeg_ubuntu()
{
    VER=$1
    TAG=opencv_ffmpeg_ubuntu_builder:${VER}
    do_build "${TAG}" "${DIR}/plugin_ffmpeg" Dockerfile-ubuntu --build-arg VER="${VER}"
    do_run "${TAG}" /opencv/modules/videoio/misc/plugin_ffmpeg/build-ubuntu.sh /dst "${VER}" "${CFG}"
}
build_ffmpeg()
{
    VER=$1
    TAG=opencv_ffmpeg_builder:${VER}
    ARCHIVE="${DIR}/plugin_ffmpeg/ffmpeg-${VER}.tar.xz"
    if [ ! -f "${ARCHIVE}" ] ; then
        wget "https://www.ffmpeg.org/releases/ffmpeg-${VER}.tar.xz" -O "${ARCHIVE}"
    fi
    do_build "${TAG}" "${DIR}/plugin_ffmpeg" Dockerfile-ffmpeg --build-arg VER="${VER}"
    do_run "${TAG}" /opencv/modules/videoio/misc/plugin_ffmpeg/build-standalone.sh /dst "${VER}" "${CFG}"
}
echo "OpenCV: ${OCV}"
echo "Destination: ${DST}"
build_gstreamer
build_ffmpeg_ubuntu 18.04
build_ffmpeg_ubuntu 16.04
build_ffmpeg 4.1
build_ffmpeg 3.4.5
build_ffmpeg 2.8.15
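
For reference, a hedged usage sketch of the orchestration script above; the file name is an assumption (the diff view does not show paths), but the arguments match the checks at the top of the script:

# assuming the script is saved as build_plugins.sh; builds all plugin variants into ./plugins
./build_plugins.sh ./plugins Release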

View File

@@ -0,0 +1,63 @@
{
"const_ignore_list": [
"CV_CAP_OPENNI",
"CV_CAP_OPENNI2",
"CV_CAP_PROP_OPENNI_",
"CV_CAP_INTELPERC",
"CV_CAP_PROP_INTELPERC_",
"CV_CAP_ANY",
"CV_CAP_MIL",
"CV_CAP_VFW",
"CV_CAP_V4L",
"CV_CAP_V4L2",
"CV_CAP_FIREWARE",
"CV_CAP_FIREWIRE",
"CV_CAP_IEEE1394",
"CV_CAP_DC1394",
"CV_CAP_CMU1394",
"CV_CAP_STEREO",
"CV_CAP_TYZX",
"CV_CAP_QT",
"CV_CAP_UNICAP",
"CV_CAP_DSHOW",
"CV_CAP_PVAPI",
"CV_CAP_ARAVIS",
"CV_CAP_PROP_DC1394_OFF",
"CV_CAP_PROP_DC1394_MODE_MANUAL",
"CV_CAP_PROP_DC1394_MODE_AUTO",
"CV_CAP_PROP_DC1394_MODE_ONE_PUSH_AUTO",
"CV_CAP_PROP_POS_MSEC",
"CV_CAP_PROP_POS_FRAMES",
"CV_CAP_PROP_POS_AVI_RATIO",
"CV_CAP_PROP_FPS",
"CV_CAP_PROP_FOURCC",
"CV_CAP_PROP_FRAME_COUNT",
"CV_CAP_PROP_FORMAT",
"CV_CAP_PROP_MODE",
"CV_CAP_PROP_BRIGHTNESS",
"CV_CAP_PROP_CONTRAST",
"CV_CAP_PROP_SATURATION",
"CV_CAP_PROP_HUE",
"CV_CAP_PROP_GAIN",
"CV_CAP_PROP_EXPOSURE",
"CV_CAP_PROP_CONVERT_RGB",
"CV_CAP_PROP_WHITE_BALANCE_BLUE_U",
"CV_CAP_PROP_RECTIFICATION",
"CV_CAP_PROP_MONOCHROME",
"CV_CAP_PROP_SHARPNESS",
"CV_CAP_PROP_AUTO_EXPOSURE",
"CV_CAP_PROP_GAMMA",
"CV_CAP_PROP_TEMPERATURE",
"CV_CAP_PROP_TRIGGER",
"CV_CAP_PROP_TRIGGER_DELAY",
"CV_CAP_PROP_WHITE_BALANCE_RED_V",
"CV_CAP_PROP_MAX_DC1394",
"CV_CAP_GSTREAMER_QUEUE_LENGTH",
"CV_CAP_PROP_PVAPI_MULTICASTIP",
"CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING",
"CV_TYZX_LEFT",
"CV_TYZX_RIGHT",
"CV_TYZX_COLOR",
"CV_TYZX_Z"
]
}

View File

@@ -0,0 +1,64 @@
package org.opencv.test.videoio;

import java.util.List;

import org.opencv.core.Size;
import org.opencv.videoio.Videoio;
import org.opencv.videoio.VideoCapture;

import org.opencv.test.OpenCVTestCase;

public class VideoCaptureTest extends OpenCVTestCase {

    private VideoCapture capture;
    private boolean isOpened;
    private boolean isSucceed;

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        capture = null;
        isTestCaseEnabled = false;
        isSucceed = false;
        isOpened = false;
    }

    public void testGrab() {
        capture = new VideoCapture();
        isSucceed = capture.grab();
        assertFalse(isSucceed);
    }

    public void testIsOpened() {
        capture = new VideoCapture();
        assertFalse(capture.isOpened());
    }

    public void testDefaultConstructor() {
        capture = new VideoCapture();
        assertNotNull(capture);
        assertFalse(capture.isOpened());
    }

    public void testConstructorWithFilename() {
        capture = new VideoCapture("some_file.avi");
        assertNotNull(capture);
    }

    public void testConstructorWithFilenameAndExplicitlySpecifiedAPI() {
        capture = new VideoCapture("some_file.avi", Videoio.CAP_ANY);
        assertNotNull(capture);
    }

    public void testConstructorWithIndex() {
        capture = new VideoCapture(0);
        assertNotNull(capture);
    }

    public void testConstructorWithIndexAndExplicitlySpecifiedAPI() {
        capture = new VideoCapture(0, Videoio.CAP_ANY);
        assertNotNull(capture);
    }
}

View File

@@ -0,0 +1,20 @@
{
    "AdditionalImports" : {
        "Videoio" :
            [ "\"videoio/registry.hpp\"" ]
    },
    "ManualFuncs" : {
        "VideoCapture" : {
            "release" : { "declaration" : [""], "implementation" : [""] }
        },
        "VideoWriter" : {
            "release" : { "declaration" : [""], "implementation" : [""] }
        }
    },
    "func_arg_fix" : {
        "VideoCapture" : {
            "(BOOL)open:(int)index apiPreference:(int)apiPreference" : { "open" : {"name" : "openWithIndex"} },
            "(BOOL)open:(int)index apiPreference:(int)apiPreference params:(IntVector*)params" : { "open" : {"name" : "openWithIndexAndParameters"} }
        }
    }
}

View File

@@ -0,0 +1,445 @@
//
// CvAbstractCamera2.mm
//
// Created by Giles Payne on 2020/04/01.
//
#import "CvCamera2.h"
#pragma mark - Private Interface
@interface CvAbstractCamera2 ()
@property (nonatomic, strong) AVCaptureVideoPreviewLayer* captureVideoPreviewLayer;
- (void)deviceOrientationDidChange:(NSNotification*)notification;
- (void)startCaptureSession;
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
- (void)updateSize;
@end
#pragma mark - Implementation
@implementation CvAbstractCamera2
#pragma mark - Constructors
- (id)init;
{
self = [super init];
if (self) {
// react to device orientation notifications
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
self.currentDeviceOrientation = [[UIDevice currentDevice] orientation];
// check if camera available
self.cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
NSLog(@"camera available: %@", (self.cameraAvailable ? @"YES" : @"NO") );
_running = NO;
// set camera default configuration
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
self.defaultFPS = 15;
self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
self.parentView = nil;
self.useAVCaptureVideoPreviewLayer = NO;
}
return self;
}
- (id)initWithParentView:(UIView*)parent;
{
self = [super init];
if (self) {
// react to device orientation notifications
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
self.currentDeviceOrientation = [[UIDevice currentDevice] orientation];
// check if camera available
self.cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
NSLog(@"camera available: %@", (self.cameraAvailable ? @"YES" : @"NO") );
_running = NO;
// set camera default configuration
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
self.defaultFPS = 15;
self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
self.parentView = parent;
self.useAVCaptureVideoPreviewLayer = YES;
}
return self;
}
- (void)dealloc;
{
[[NSNotificationCenter defaultCenter] removeObserver:self];
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}
#pragma mark - Public interface
- (void)start;
{
if (![NSThread isMainThread]) {
NSLog(@"[Camera] Warning: Call start only from main thread");
[self performSelectorOnMainThread:@selector(start) withObject:nil waitUntilDone:NO];
return;
}
if (self.running == YES) {
return;
}
_running = YES;
// TODO: update image size data before actually starting (needed for recording)
[self updateSize];
if (self.cameraAvailable) {
[self startCaptureSession];
}
}
- (void)pause;
{
_running = NO;
[self.captureSession stopRunning];
}
- (void)stop;
{
_running = NO;
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
if (self.captureSession) {
for (AVCaptureInput *input in self.captureSession.inputs) {
[self.captureSession removeInput:input];
}
for (AVCaptureOutput *output in self.captureSession.outputs) {
[self.captureSession removeOutput:output];
}
[self.captureSession stopRunning];
}
_captureSessionLoaded = NO;
}
// use front/back camera
- (void)switchCameras;
{
BOOL was_running = self.running;
if (was_running) {
[self stop];
}
if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
} else {
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
}
if (was_running) {
[self start];
}
}
#pragma mark - Device Orientation Changes
- (void)deviceOrientationDidChange:(NSNotification*)notification
{
(void)notification;
UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
switch (orientation)
{
case UIDeviceOrientationPortrait:
case UIDeviceOrientationPortraitUpsideDown:
case UIDeviceOrientationLandscapeLeft:
case UIDeviceOrientationLandscapeRight:
self.currentDeviceOrientation = orientation;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
default:
break;
}
NSLog(@"deviceOrientationDidChange: %d", (int)orientation);
[self updateOrientation];
}
#pragma mark - Private Interface
- (void)createCaptureSession;
{
// set a av capture session preset
self.captureSession = [[AVCaptureSession alloc] init];
if ([self.captureSession canSetSessionPreset:self.defaultAVCaptureSessionPreset]) {
[self.captureSession setSessionPreset:self.defaultAVCaptureSessionPreset];
} else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
[self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
} else {
NSLog(@"[Camera] Error: could not set session preset");
}
}
- (void)createCaptureDevice;
{
// setup the device
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
}
- (void)createVideoPreviewLayer;
{
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
if ([self.captureVideoPreviewLayer.connection isVideoOrientationSupported])
{
[self.captureVideoPreviewLayer.connection setVideoOrientation:self.defaultAVCaptureVideoOrientation];
}
if (self.parentView != nil) {
self.captureVideoPreviewLayer.frame = self.parentView.bounds;
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.parentView.layer addSublayer:self.captureVideoPreviewLayer];
}
NSLog(@"[Camera] created AVCaptureVideoPreviewLayer");
}
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
{
for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([device position] == desiredPosition) {
[self.captureSession beginConfiguration];
NSError* error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
NSLog(@"error creating input %@", [error description]);
}
// support for autofocus
if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
error = nil;
if ([device lockForConfiguration:&error]) {
device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for autofocus configuration %@", [error description]);
}
}
// replace any previously configured inputs with the new device input
for (AVCaptureInput *oldInput in self.captureSession.inputs) {
[self.captureSession removeInput:oldInput];
}
[self.captureSession addInput:input];
[self.captureSession commitConfiguration];
break;
}
}
}
- (void)startCaptureSession
{
if (!self.cameraAvailable) {
return;
}
if (self.captureSessionLoaded == NO) {
[self createCaptureSession];
[self createCaptureDevice];
[self createCaptureOutput];
// setup preview layer
if (self.useAVCaptureVideoPreviewLayer) {
[self createVideoPreviewLayer];
} else {
[self createCustomVideoPreview];
}
_captureSessionLoaded = YES;
}
[self.captureSession startRunning];
}
- (void)createCaptureOutput;
{
[NSException raise:NSInternalInconsistencyException
format:@"You must override %s in a subclass", __FUNCTION__];
}
- (void)createCustomVideoPreview;
{
[NSException raise:NSInternalInconsistencyException
format:@"You must override %s in a subclass", __FUNCTION__];
}
- (void)updateOrientation;
{
// nothing to do here
}
- (void)updateSize;
{
if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetPhoto]) {
//TODO: find the correct resolution
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetHigh]) {
//TODO: find the correct resolution
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetMedium]) {
//TODO: find the correct resolution
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPresetLow]) {
//TODO: find the correct resolution
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPreset352x288]) {
self.imageWidth = 352;
self.imageHeight = 288;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPreset640x480]) {
self.imageWidth = 640;
self.imageHeight = 480;
} else if ([self.defaultAVCaptureSessionPreset isEqualToString:AVCaptureSessionPreset1280x720]) {
self.imageWidth = 1280;
self.imageHeight = 720;
} else {
self.imageWidth = 640;
self.imageHeight = 480;
}
}
- (void)lockFocus;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isFocusModeSupported:AVCaptureFocusModeLocked]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.focusMode = AVCaptureFocusModeLocked;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for locked focus configuration %@", [error description]);
}
}
}
- (void) unlockFocus;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for autofocus configuration %@", [error description]);
}
}
}
- (void)lockExposure;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.exposureMode = AVCaptureExposureModeLocked;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for locked exposure configuration %@", [error description]);
}
}
}
- (void) unlockExposure;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for autoexposure configuration %@", [error description]);
}
}
}
- (void)lockBalance;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for locked white balance configuration %@", [error description]);
}
}
}
- (void) unlockBalance;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
device.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
[device unlockForConfiguration];
} else {
NSLog(@"unable to lock device for auto white balance configuration %@", [error description]);
}
}
}
@end

View File

@@ -0,0 +1,85 @@
//
// CvCamera2.h
//
// Created by Giles Payne on 2020/03/11.
//
#import <UIKit/UIKit.h>
#import <Accelerate/Accelerate.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "CVObjcUtil.h"
@class Mat;
@class CvAbstractCamera2;
CV_EXPORTS @interface CvAbstractCamera2 : NSObject
@property UIDeviceOrientation currentDeviceOrientation;
@property BOOL cameraAvailable;
@property (nonatomic, strong) AVCaptureSession* captureSession;
@property (nonatomic, strong) AVCaptureConnection* videoCaptureConnection;
@property (nonatomic, readonly) BOOL running;
@property (nonatomic, readonly) BOOL captureSessionLoaded;
@property (nonatomic, assign) int defaultFPS;
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic, assign) AVCaptureDevicePosition defaultAVCaptureDevicePosition;
@property (nonatomic, assign) AVCaptureVideoOrientation defaultAVCaptureVideoOrientation;
@property (nonatomic, assign) BOOL useAVCaptureVideoPreviewLayer;
@property (nonatomic, strong) NSString *const defaultAVCaptureSessionPreset;
@property (nonatomic, assign) int imageWidth;
@property (nonatomic, assign) int imageHeight;
@property (nonatomic, strong) UIView* parentView;
- (void)start;
- (void)stop;
- (void)switchCameras;
- (id)initWithParentView:(UIView*)parent;
- (void)createCaptureOutput;
- (void)createVideoPreviewLayer;
- (void)updateOrientation;
- (void)lockFocus;
- (void)unlockFocus;
- (void)lockExposure;
- (void)unlockExposure;
- (void)lockBalance;
- (void)unlockBalance;
@end
///////////////////////////////// CvVideoCamera ///////////////////////////////////////////
@class CvVideoCamera2;
@protocol CvVideoCameraDelegate2 <NSObject>
- (void)processImage:(Mat*)image;
@end
CV_EXPORTS @interface CvVideoCamera2 : CvAbstractCamera2<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, weak) id<CvVideoCameraDelegate2> delegate;
@property (nonatomic, assign) BOOL grayscaleMode;
@property (nonatomic, assign) BOOL recordVideo;
@property (nonatomic, assign) BOOL rotateVideo;
@property (nonatomic, strong) AVAssetWriterInput* recordAssetWriterInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
@property (nonatomic, strong) AVAssetWriter* recordAssetWriter;
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
- (void)layoutPreviewLayer;
- (void)saveVideo;
- (NSURL *)videoFileURL;
- (NSString *)videoFileString;
@end
///////////////////////////////// CvPhotoCamera ///////////////////////////////////////////
@class CvPhotoCamera2;
@protocol CvPhotoCameraDelegate2 <NSObject>
- (void)photoCamera:(CvPhotoCamera2*)photoCamera capturedImage:(UIImage*)image;
- (void)photoCameraCancel:(CvPhotoCamera2*)photoCamera;
@end
CV_EXPORTS @interface CvPhotoCamera2 : CvAbstractCamera2<AVCapturePhotoCaptureDelegate>
@property (nonatomic, weak) id<CvPhotoCameraDelegate2> delegate;
- (void)takePicture;
@end

View File

@@ -0,0 +1,138 @@
//
// CvPhotoCamera2.mm
//
// Created by Giles Payne on 2020/04/01.
//
#import "CvCamera2.h"
#pragma mark - Private Interface
@interface CvPhotoCamera2 ()
{
id<CvPhotoCameraDelegate2> _delegate;
}
@property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;
@end
#pragma mark - Implementation
@implementation CvPhotoCamera2
#pragma mark Public
- (void)setDelegate:(id<CvPhotoCameraDelegate2>)newDelegate {
_delegate = newDelegate;
}
- (id<CvPhotoCameraDelegate2>)delegate {
return _delegate;
}
#pragma mark - Public interface
- (void)takePicture
{
if (self.cameraAvailable == NO) {
return;
}
self.cameraAvailable = NO;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoCaptureConnection
completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
if (error == nil && imageSampleBuffer != NULL)
{
// TODO check
// NSNumber* imageOrientation = [UIImage cgImageOrientationForUIDeviceOrientation:currentDeviceOrientation];
// CMSetAttachment(imageSampleBuffer, kCGImagePropertyOrientation, imageOrientation, 1);
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
dispatch_async(dispatch_get_main_queue(), ^{
[self.captureSession stopRunning];
// Make sure we create objects on the main thread in the main context
UIImage* newImage = [UIImage imageWithData:jpegData];
//UIImageOrientation orientation = [newImage imageOrientation];
// TODO: only apply rotation, don't scale, since we can set this directly in the camera
/*
switch (orientation) {
case UIImageOrientationUp:
case UIImageOrientationDown:
newImage = [newImage imageWithAppliedRotationAndMaxSize:CGSizeMake(640.0, 480.0)];
break;
case UIImageOrientationLeft:
case UIImageOrientationRight:
newImage = [newImage imageWithMaxSize:CGSizeMake(640.0, 480.0)];
default:
break;
}
*/
// We have captured the image, we can allow the user to take another picture
self.cameraAvailable = YES;
NSLog(@"CvPhotoCamera2 captured image");
[self.delegate photoCamera:self capturedImage:newImage];
[self.captureSession startRunning];
});
}
}];
}
- (void)stop;
{
[super stop];
self.stillImageOutput = nil;
}
#pragma mark - Private Interface
- (void)createStillImageOutput;
{
// setup still image output with jpeg codec
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.captureSession addOutput:self.stillImageOutput];
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([port.mediaType isEqual:AVMediaTypeVideo]) {
self.videoCaptureConnection = connection;
break;
}
}
if (self.videoCaptureConnection) {
break;
}
}
NSLog(@"[Camera] still image output created");
}
- (void)createCaptureOutput;
{
[self createStillImageOutput];
}
- (void)createCustomVideoPreview;
{
//do nothing, always use AVCaptureVideoPreviewLayer
}
@end

View File

@@ -0,0 +1,575 @@
//
// CvVideoCamera2.mm
//
// Created by Giles Payne on 2020/03/11.
//
#import "Mat.h"
#import "CvCamera2.h"
#import <UIKit/UIKit.h>
static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}
#pragma mark - Private Interface
@interface CvVideoCamera2 () {
int recordingCountDown;
}
- (void)createVideoDataOutput;
- (void)createVideoFileOutput;
@property (nonatomic, strong) CALayer *customPreviewLayer;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@end
#pragma mark - Implementation
@implementation CvVideoCamera2
{
id<CvVideoCameraDelegate2> _delegate;
dispatch_queue_t videoDataOutputQueue;
CMTime lastSampleTime;
}
- (void)setDelegate:(id<CvVideoCameraDelegate2>)newDelegate {
_delegate = newDelegate;
}
- (id<CvVideoCameraDelegate2>)delegate {
return _delegate;
}
#pragma mark - Constructors
- (id)initWithParentView:(UIView*)parent {
self = [super initWithParentView:parent];
if (self) {
parent.contentMode = UIViewContentModeScaleAspectFill;
self.useAVCaptureVideoPreviewLayer = NO;
self.recordVideo = NO;
self.rotateVideo = NO;
self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
self.defaultAVCaptureSessionPreset = AVCaptureSessionPresetHigh;
self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
self.defaultFPS = 30;
self.grayscaleMode = NO;
}
return self;
}
#pragma mark - Public interface
- (void)start {
if (self.running == YES) {
return;
}
recordingCountDown = 10;
[super start];
if (self.recordVideo == YES) {
NSError* error = nil;
if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
[[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
if (error == nil) {
NSLog(@"[Camera] Deleted file %@", [self videoFileString]);
}
}
}
}
- (void)stop {
if (self.running == YES) {
[super stop];
if (self.recordVideo == YES) {
if (self.recordAssetWriter) {
if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
[self.recordAssetWriter finishWritingWithCompletionHandler:^void() {
NSLog(@"[Camera] recording stopped");
}];
} else {
NSLog(@"[Camera] Recording Error: asset writer status is not writing");
}
}
}
if (self.customPreviewLayer) {
[self.customPreviewLayer removeFromSuperlayer];
self.customPreviewLayer = nil;
}
}
}
// TODO fix
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
NSLog(@"layout preview layer");
if (self.parentView != nil) {
CALayer* layer = self.customPreviewLayer;
CGRect bounds = self.customPreviewLayer.bounds;
int rotation_angle = 0;
bool flip_bounds = false;
switch (interfaceOrientation) {
case UIInterfaceOrientationPortrait:
NSLog(@"to Portrait");
rotation_angle = 270;
break;
case UIInterfaceOrientationPortraitUpsideDown:
rotation_angle = 90;
NSLog(@"to UpsideDown");
break;
case UIInterfaceOrientationLandscapeLeft:
rotation_angle = 0;
NSLog(@"to LandscapeLeft");
break;
case UIInterfaceOrientationLandscapeRight:
rotation_angle = 180;
NSLog(@"to LandscapeRight");
break;
default:
break; // leave the layer in its last known orientation
}
switch (self.defaultAVCaptureVideoOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
rotation_angle += 180;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
rotation_angle += 270;
break;
case AVCaptureVideoOrientationPortrait:
rotation_angle += 90;
case AVCaptureVideoOrientationLandscapeLeft:
break;
default:
break;
}
rotation_angle = rotation_angle % 360;
if (rotation_angle == 90 || rotation_angle == 270) {
flip_bounds = true;
}
if (flip_bounds) {
NSLog(@"flip bounds");
bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
}
layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
layer.bounds = bounds;
}
}
// TODO fix
- (void)layoutPreviewLayer {
NSLog(@"layout preview layer");
if (self.parentView != nil) {
CALayer* layer = self.customPreviewLayer;
CGRect bounds = self.customPreviewLayer.bounds;
int rotation_angle = 0;
bool flip_bounds = false;
switch (self.currentDeviceOrientation) {
case UIDeviceOrientationPortrait:
rotation_angle = 270;
break;
case UIDeviceOrientationPortraitUpsideDown:
rotation_angle = 90;
break;
case UIDeviceOrientationLandscapeLeft:
NSLog(@"left");
rotation_angle = 180;
break;
case UIDeviceOrientationLandscapeRight:
NSLog(@"right");
rotation_angle = 0;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
default:
break; // leave the layer in its last known orientation
}
switch (self.defaultAVCaptureVideoOrientation) {
case AVCaptureVideoOrientationLandscapeRight:
rotation_angle += 180;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
rotation_angle += 270;
break;
case AVCaptureVideoOrientationPortrait:
rotation_angle += 90;
case AVCaptureVideoOrientationLandscapeLeft:
break;
default:
break;
}
rotation_angle = rotation_angle % 360;
if (rotation_angle == 90 || rotation_angle == 270) {
flip_bounds = true;
}
if (flip_bounds) {
NSLog(@"flip bounds");
bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
}
layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
layer.bounds = bounds;
}
}
#pragma mark - Private Interface
- (void)createVideoDataOutput {
// Make a video data output
self.videoDataOutput = [AVCaptureVideoDataOutput new];
// In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component)
// In color mode, the BGRA format is used
OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// discard if the data output queue is blocked (as we process the still image)
[self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
[self.captureSession addOutput:self.videoDataOutput];
}
[[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
// set default FPS
AVCaptureDeviceInput *currentInput = [self.captureSession.inputs objectAtIndex:0];
AVCaptureDevice *device = currentInput.device;
NSError *error = nil;
[device lockForConfiguration:&error];
float maxRate = ((AVFrameRateRange*) [device.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
if (maxRate > self.defaultFPS - 1 && error == nil) {
[device setActiveVideoMinFrameDuration:CMTimeMake(1, self.defaultFPS)];
[device setActiveVideoMaxFrameDuration:CMTimeMake(1, self.defaultFPS)];
NSLog(@"[Camera] FPS set to %d", self.defaultFPS);
} else {
NSLog(@"[Camera] unable to set defaultFPS at %d FPS, max is %f FPS", self.defaultFPS, maxRate);
}
if (error != nil) {
NSLog(@"[Camera] unable to set defaultFPS: %@", error);
}
[device unlockForConfiguration];
// set video mirroring for front camera (more intuitive)
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
} else {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
}
}
// set default video orientation
if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
}
// create a custom preview layer
self.customPreviewLayer = [CALayer layer];
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
self.customPreviewLayer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
[self updateOrientation];
// create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
// a serial dispatch queue must be used to guarantee that video frames will be delivered in order
// see the header doc for setSampleBufferDelegate:queue: for more information
videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
[self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
NSLog(@"[Camera] created AVCaptureVideoDataOutput");
}
- (void)createVideoFileOutput {
/* Video file output in H.264, via AVAssetWriter */
NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);
NSDictionary *outputSettings
= [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
[NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
AVVideoCodecH264, AVVideoCodecKey,
nil
];
self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
self.recordPixelBufferAdaptor =
[[AVAssetWriterInputPixelBufferAdaptor alloc]
initWithAssetWriterInput:self.recordAssetWriterInput
sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];
NSError* error = nil;
NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
fileType:AVFileTypeMPEG4
error:&error];
if (error != nil) {
NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
}
[self.recordAssetWriter addInput:self.recordAssetWriterInput];
self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;
NSLog(@"[Camera] created AVAssetWriter");
}
- (void)createCaptureOutput {
[self createVideoDataOutput];
if (self.recordVideo == YES) {
[self createVideoFileOutput];
}
}
- (void)createCustomVideoPreview {
[self.parentView.layer addSublayer:self.customPreviewLayer];
}
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
frameSize.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) CFBridgingRetain(options),
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
kCGImageAlphaPremultipliedFirst);
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
(void)captureOutput;
(void)connection;
auto strongDelegate = self.delegate;
if (strongDelegate) {
// convert from Core Media to Core Video
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void* bufferAddress;
size_t width;
size_t height;
size_t bytesPerRow;
CGColorSpaceRef colorSpace;
CGContextRef context;
int format_opencv;
OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
format_opencv = CV_8UC1;
bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
} else { // expect kCVPixelFormatType_32BGRA
format_opencv = CV_8UC4;
bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
width = CVPixelBufferGetWidth(imageBuffer);
height = CVPixelBufferGetHeight(imageBuffer);
bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
}
// delegate image processing to the delegate
cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);
CGImage* dstImage;
if ([strongDelegate respondsToSelector:@selector(processImage:)]) {
[strongDelegate processImage:[Mat fromNative:image]];
}
// check if matrix data pointer or dimensions were changed by the delegate
bool iOSimage = false;
if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
iOSimage = true;
}
// (create color space, create graphics context, render buffer)
CGBitmapInfo bitmapInfo;
// basically we decide if it's a grayscale, rgb or rgba image
if (image.channels() == 1) {
colorSpace = CGColorSpaceCreateDeviceGray();
bitmapInfo = kCGImageAlphaNone;
} else if (image.channels() == 3) {
colorSpace = CGColorSpaceCreateDeviceRGB();
bitmapInfo = kCGImageAlphaNone;
if (iOSimage) {
bitmapInfo |= kCGBitmapByteOrder32Little;
} else {
bitmapInfo |= kCGBitmapByteOrder32Big;
}
} else {
colorSpace = CGColorSpaceCreateDeviceRGB();
bitmapInfo = kCGImageAlphaPremultipliedFirst;
if (iOSimage) {
bitmapInfo |= kCGBitmapByteOrder32Little;
} else {
bitmapInfo |= kCGBitmapByteOrder32Big;
}
}
if (iOSimage) {
context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
dstImage = CGBitmapContextCreateImage(context);
CGContextRelease(context);
} else {
NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
// Creating CGImage from cv::Mat
dstImage = CGImageCreate(image.cols, // width
image.rows, // height
8, // bits per component
8 * image.elemSize(), // bits per pixel
image.step, // bytesPerRow
colorSpace, // colorspace
bitmapInfo, // bitmap info
provider, // CGDataProviderRef
NULL, // decode
false, // should interpolate
kCGRenderingIntentDefault // intent
);
CGDataProviderRelease(provider);
}
// render buffer
dispatch_sync(dispatch_get_main_queue(), ^{
self.customPreviewLayer.contents = (__bridge id)dstImage;
});
recordingCountDown--;
if (self.recordVideo == YES && recordingCountDown < 0) {
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// CMTimeShow(lastSampleTime);
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
[self.recordAssetWriter startWriting];
[self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
return;
} else {
NSLog(@"[Camera] Video recording started");
}
}
if (self.recordAssetWriterInput.readyForMoreMediaData) {
CVImageBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
if (! [self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
withPresentationTime:lastSampleTime] ) {
NSLog(@"Video Writing Error");
}
if (pixelBuffer != nullptr)
CVPixelBufferRelease(pixelBuffer);
}
}
// cleanup
CGImageRelease(dstImage);
CGColorSpaceRelease(colorSpace);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
}
- (void)updateOrientation {
if (self.rotateVideo == YES)
{
NSLog(@"rotate..");
self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
[self layoutPreviewLayer];
}
}
- (void)saveVideo {
if (self.recordVideo == NO) {
return;
}
UISaveVideoAtPathToSavedPhotosAlbum([self videoFileString], nil, nil, NULL);
}
- (NSURL *)videoFileURL {
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath]) {
NSLog(@"file exists");
}
return outputURL;
}
- (NSString *)videoFileString {
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
return outputPath;
}
@end

View File

@@ -0,0 +1,18 @@
cmake_minimum_required(VERSION 3.5)
get_filename_component(OpenCV_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../.." ABSOLUTE)
include("${OpenCV_SOURCE_DIR}/cmake/OpenCVPluginStandalone.cmake")
# scan dependencies
set(WITH_FFMPEG ON)
set(OPENCV_FFMPEG_SKIP_BUILD_CHECK ON)
include("${OpenCV_SOURCE_DIR}/modules/videoio/cmake/init.cmake")
set(OPENCV_PLUGIN_DEPS core imgproc imgcodecs)
ocv_create_plugin(videoio "opencv_videoio_ffmpeg" "ocv.3rdparty.ffmpeg" "FFmpeg" "src/cap_ffmpeg.cpp")
message(STATUS "FFMPEG_libavcodec_VERSION=${FFMPEG_libavcodec_VERSION}")
message(STATUS "FFMPEG_libavformat_VERSION=${FFMPEG_libavformat_VERSION}")
message(STATUS "FFMPEG_libavutil_VERSION=${FFMPEG_libavutil_VERSION}")
message(STATUS "FFMPEG_libswscale_VERSION=${FFMPEG_libswscale_VERSION}")
message(STATUS "FFMPEG_libavresample_VERSION=${FFMPEG_libavresample_VERSION}")

View File

@@ -0,0 +1,45 @@
FROM ubuntu:18.04
RUN apt-get update && apt-get --no-install-recommends install -y \
pkg-config \
cmake \
g++ \
ninja-build \
make \
nasm \
&& \
rm -rf /var/lib/apt/lists/*
ARG VER
ADD ffmpeg-${VER}.tar.xz /ffmpeg/
WORKDIR /ffmpeg/ffmpeg-${VER}
RUN ./configure \
--enable-avresample \
--prefix=/ffmpeg-shared \
--enable-shared \
--disable-static \
--disable-programs \
--disable-doc \
--disable-avdevice \
--disable-postproc \
&& make -j8 install \
&& make clean \
&& make distclean
RUN ./configure \
--enable-avresample \
--prefix=/ffmpeg-static \
--disable-shared \
--enable-static \
--enable-pic \
--disable-programs \
--disable-doc \
--disable-avdevice \
--disable-postproc \
&& make -j8 install \
&& make clean \
&& make distclean
WORKDIR /tmp

View File

@@ -0,0 +1,17 @@
ARG VER
FROM ubuntu:$VER
RUN apt-get update && apt-get --no-install-recommends install -y \
libavcodec-dev \
libavfilter-dev \
libavformat-dev \
libavresample-dev \
libavutil-dev \
pkg-config \
cmake \
g++ \
ninja-build \
&& \
rm -rf /var/lib/apt/lists/*
WORKDIR /tmp

View File

@@ -0,0 +1,24 @@
#!/bin/bash
# Usage: build-standalone.sh <destination dir> <plugin name suffix> <build config>
set -e

mkdir -p build_shared && pushd build_shared
PKG_CONFIG_PATH=/ffmpeg-shared/lib/pkgconfig \
cmake -GNinja \
    "-DOPENCV_PLUGIN_NAME=opencv_videoio_ffmpeg_shared_$2" \
    "-DOPENCV_PLUGIN_DESTINATION=$1" \
    "-DCMAKE_BUILD_TYPE=$3" \
    /opencv/modules/videoio/misc/plugin_ffmpeg
ninja
popd

mkdir -p build_static && pushd build_static
PKG_CONFIG_PATH=/ffmpeg-static/lib/pkgconfig \
cmake -GNinja \
    "-DOPENCV_PLUGIN_NAME=opencv_videoio_ffmpeg_static_$2" \
    "-DOPENCV_PLUGIN_DESTINATION=$1" \
    -DCMAKE_MODULE_LINKER_FLAGS=-Wl,-Bsymbolic \
    "-DCMAKE_BUILD_TYPE=$3" \
    /opencv/modules/videoio/misc/plugin_ffmpeg
ninja
popd
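
Inside the builder container, this script is invoked by do_run from the orchestration script earlier in this commit; a manual equivalent with the same mounts would be:

/opencv/modules/videoio/misc/plugin_ffmpeg/build-standalone.sh /dst 4.1 Release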

View File

@@ -0,0 +1,10 @@
#!/bin/bash
# Usage: build-ubuntu.sh <destination dir> <ubuntu version suffix> <build config>
set -e

cmake -GNinja \
    "-DOPENCV_PLUGIN_NAME=opencv_videoio_ffmpeg_ubuntu_$2" \
    "-DOPENCV_PLUGIN_DESTINATION=$1" \
    "-DCMAKE_BUILD_TYPE=$3" \
    /opencv/modules/videoio/misc/plugin_ffmpeg
ninja

View File

@@ -0,0 +1,13 @@
cmake_minimum_required(VERSION 3.5)
get_filename_component(OpenCV_SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../.." ABSOLUTE)
include("${OpenCV_SOURCE_DIR}/cmake/OpenCVPluginStandalone.cmake")
# scan dependencies
set(WITH_GSTREAMER ON)
include("${OpenCV_SOURCE_DIR}/modules/videoio/cmake/init.cmake")
set(OPENCV_PLUGIN_DEPS core imgproc imgcodecs)
ocv_create_plugin(videoio "opencv_videoio_gstreamer" "ocv.3rdparty.gstreamer" "GStreamer" "src/cap_gstreamer.cpp")
message(STATUS "Using GStreamer: ${GSTREAMER_VERSION}")

View File

@@ -0,0 +1,13 @@
FROM ubuntu:18.04
RUN apt-get update && apt-get --no-install-recommends install -y \
libgstreamer-plugins-base1.0-dev \
libgstreamer-plugins-good1.0-dev \
libgstreamer1.0-dev \
cmake \
g++ \
ninja-build \
&& \
rm -rf /var/lib/apt/lists/*
WORKDIR /tmp

View File

@@ -0,0 +1,11 @@
#!/bin/bash
# Usage: build.sh <destination dir> <build config>
set -e

cmake -GNinja \
    -DOPENCV_PLUGIN_NAME=opencv_videoio_gstreamer \
    "-DOPENCV_PLUGIN_DESTINATION=$1" \
    "-DCMAKE_BUILD_TYPE=$2" \
    /opencv/modules/videoio/misc/plugin_gstreamer
ninja

View File

@@ -0,0 +1,23 @@
#ifdef HAVE_OPENCV_VIDEOIO
typedef std::vector<VideoCaptureAPIs> vector_VideoCaptureAPIs;
template<> struct pyopencvVecConverter<cv::VideoCaptureAPIs>
{
static bool to(PyObject* obj, std::vector<cv::VideoCaptureAPIs>& value, const ArgInfo& info)
{
return pyopencv_to_generic_vec(obj, value, info);
}
static PyObject* from(const std::vector<cv::VideoCaptureAPIs>& value)
{
return pyopencv_from_generic_vec(value);
}
};
template<>
bool pyopencv_to(PyObject *o, std::vector<cv::VideoCaptureAPIs>& apis, const ArgInfo& info)
{
return pyopencvVecConverter<cv::VideoCaptureAPIs>::to(o, apis, info);
}
#endif // HAVE_OPENCV_VIDEOIO
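
This converter is what allows std::vector<cv::VideoCaptureAPIs> to cross the Python binding boundary, e.g. as the return type of cv.videoio_registry.getBackends(). A quick smoke test from a shell (assuming the built cv2 module is importable):

python -c "import cv2 as cv; print([cv.videoio_registry.getBackendName(b) for b in cv.videoio_registry.getBackends()])"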

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env python
from __future__ import print_function

import numpy as np
import cv2 as cv

from tests_common import NewOpenCVTests


class Bindings(NewOpenCVTests):

    def check_name(self, name):
        #print(name)
        self.assertIsNotNone(name)
        self.assertNotEqual(name, "")

    def test_registry(self):
        self.check_name(cv.videoio_registry.getBackendName(cv.CAP_ANY))
        self.check_name(cv.videoio_registry.getBackendName(cv.CAP_FFMPEG))
        self.check_name(cv.videoio_registry.getBackendName(cv.CAP_OPENCV_MJPEG))
        backends = cv.videoio_registry.getBackends()
        for backend in backends:
            self.check_name(cv.videoio_registry.getBackendName(backend))


if __name__ == '__main__':
    NewOpenCVTests.bootstrap()
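
The test bootstraps through OpenCV's Python test harness (tests_common.NewOpenCVTests), whose bootstrap handles argument parsing; assuming the file is saved as test_videoio_registry.py next to tests_common.py, it can be run directly:

python test_videoio_registry.py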