feat: 切换后端至PaddleOCR-NCNN,切换工程为CMake

1.项目后端整体迁移至PaddleOCR-NCNN算法,已通过基本的兼容性测试
2.工程改为使用CMake组织,后续为了更好地兼容第三方库,不再提供QMake工程
3.重整权利声明文件,重整代码工程,确保最小化侵权风险

Log: 切换后端至PaddleOCR-NCNN,切换工程为CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
This commit is contained in:
wangzhengyang
2022-05-10 09:54:44 +08:00
parent ecdd171c6f
commit 718c41634f
10018 changed files with 3593797 additions and 186748 deletions

View File

@ -0,0 +1,20 @@
{
"AdditionalImports" : {
"Videoio" :
[ "\"videoio/registry.hpp\"" ]
},
"ManualFuncs" : {
"VideoCapture" : {
"release" : {"declaration" : [""], "implementation" : [""] }
},
"VideoWriter" : {
"release" : {"declaration" : [""], "implementation" : [""] }
}
},
"func_arg_fix" : {
"VideoCapture" : {
"(BOOL)open:(int)index apiPreference:(int)apiPreference" : { "open" : {"name" : "openWithIndex"} },
"(BOOL)open:(int)index apiPreference:(int)apiPreference params:(IntVector*)params" : { "open" : {"name" : "openWithIndexAndParameters"} }
}
}
}

View File

@ -0,0 +1,445 @@
//
// CvAbstractCamera2.mm
//
// Created by Giles Payne on 2020/04/01.
//
#import "CvCamera2.h"
#pragma mark - Private Interface
// Class extension: private state and helpers for CvAbstractCamera2.
@interface CvAbstractCamera2 ()
// Readwrite/strong redeclaration of the preview layer the header exposes readonly.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer* captureVideoPreviewLayer;
- (void)deviceOrientationDidChange:(NSNotification*)notification;
- (void)startCaptureSession;
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
- (void)updateSize;
@end
#pragma mark - Implementation
@implementation CvAbstractCamera2
#pragma mark - Constructors
- (id)init
{
    // Camera with no parent view and no preview layer attached.
    self = [super init];
    if (!self) {
        return nil;
    }
    // Listen for rotation so subclasses can re-orient their preview.
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(deviceOrientationDidChange:)
                   name:UIDeviceOrientationDidChangeNotification
                 object:nil];
    UIDevice *currentDevice = [UIDevice currentDevice];
    [currentDevice beginGeneratingDeviceOrientationNotifications];
    self.currentDeviceOrientation = [currentDevice orientation];
    // Record whether a camera source exists (NO on the simulator).
    self.cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
    NSLog(@"camera available: %@", (self.cameraAvailable ? @"YES" : @"NO") );
    _running = NO;
    // Conservative defaults: front camera, landscape-left, 15 FPS, 352x288.
    self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
    self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
    self.defaultFPS = 15;
    self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
    self.parentView = nil;
    self.useAVCaptureVideoPreviewLayer = NO;
    return self;
}
// Camera whose preview is hosted inside `parent`; enables the
// AVFoundation-managed preview layer and a 640x480 preset.
- (id)initWithParentView:(UIView*)parent;
{
    self = [super init];
    if (self) {
        // react to device orientation notifications
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(deviceOrientationDidChange:)
                                                     name:UIDeviceOrientationDidChangeNotification
                                                   object:nil];
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        self.currentDeviceOrientation = [[UIDevice currentDevice] orientation];
        // check if camera available (NO on the simulator)
        self.cameraAvailable = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
        NSLog(@"camera available: %@", (self.cameraAvailable ? @"YES" : @"NO") );
        _running = NO;
        // set camera default configuration
        self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
        self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        self.defaultFPS = 15;
        self.defaultAVCaptureSessionPreset = AVCaptureSessionPreset640x480;
        self.parentView = parent;
        self.useAVCaptureVideoPreviewLayer = YES;
    }
    return self;
}
- (void)dealloc;
{
    // Notification/orientation teardown only — ARC handles memory.
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
}
#pragma mark - Public interface
// Begin (or resume) capture. Must run on the main thread; re-dispatches
// itself there when called from elsewhere. No-op when already running.
- (void)start;
{
    if (![NSThread isMainThread]) {
        NSLog(@"[Camera] Warning: Call start only from main thread");
        [self performSelectorOnMainThread:@selector(start) withObject:nil waitUntilDone:NO];
        return;
    }
    if (self.running == YES) {
        return;
    }
    _running = YES;
    // TODO: update image size data before actually starting (needed for recording)
    [self updateSize];
    if (self.cameraAvailable) {
        [self startCaptureSession];
    }
}
- (void)pause;
{
    // Halt frame delivery but keep the configured session so -start can
    // resume without rebuilding the pipeline.
    _running = NO;
    [self.captureSession stopRunning];
}
// Full teardown: detach every input/output and stop the session so the next
// -start rebuilds the capture pipeline from scratch.
- (void)stop;
{
    _running = NO;
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
    if (self.captureSession) {
        // NOTE(review): removal happens while fast-enumerating the session's
        // inputs/outputs; presumably those accessors return snapshot arrays —
        // confirm, otherwise iterate over a copy.
        for (AVCaptureInput *input in self.captureSession.inputs) {
            [self.captureSession removeInput:input];
        }
        for (AVCaptureOutput *output in self.captureSession.outputs) {
            [self.captureSession removeOutput:output];
        }
        [self.captureSession stopRunning];
    }
    _captureSessionLoaded = NO;
}
// Toggle between the front and back camera, restarting capture if it was live.
- (void)switchCameras
{
    BOOL restartAfterSwitch = self.running;
    if (restartAfterSwitch) {
        [self stop];
    }
    self.defaultAVCaptureDevicePosition =
        (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront)
            ? AVCaptureDevicePositionBack
            : AVCaptureDevicePositionFront;
    if (restartAfterSwitch) {
        [self start];
    }
}
#pragma mark - Device Orientation Changes
- (void)deviceOrientationDidChange:(NSNotification*)notification
{
    // Track only the four upright/landscape orientations; face-up, face-down
    // and unknown keep whatever orientation we saw last.
    (void)notification;
    UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
    BOOL trackable = (orientation == UIDeviceOrientationPortrait ||
                      orientation == UIDeviceOrientationPortraitUpsideDown ||
                      orientation == UIDeviceOrientationLandscapeLeft ||
                      orientation == UIDeviceOrientationLandscapeRight);
    if (trackable) {
        self.currentDeviceOrientation = orientation;
    }
    NSLog(@"deviceOrientationDidChange: %d", (int)orientation);
    // Let subclasses re-orient their preview layer.
    [self updateOrientation];
}
#pragma mark - Private Interface
- (void)createCaptureSession
{
    // Create the session and apply the preferred preset, falling back to the
    // low-quality preset when the preferred one is unsupported.
    self.captureSession = [[AVCaptureSession alloc] init];
    NSString *preferredPreset = self.defaultAVCaptureSessionPreset;
    if ([self.captureSession canSetSessionPreset:preferredPreset]) {
        [self.captureSession setSessionPreset:preferredPreset];
        return;
    }
    if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
        [self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
        return;
    }
    NSLog(@"[Camera] Error: could not set session preset");
}
- (void)createCaptureDevice;
{
    // Wire the configured camera position into the session. The local
    // `device` is only used for the log lines below; the actual input setup
    // happens in setDesiredCameraPosition:.
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    [self setDesiredCameraPosition:self.defaultAVCaptureDevicePosition];
    NSLog(@"[Camera] device connected? %@", device.connected ? @"YES" : @"NO");
    NSLog(@"[Camera] device position %@", (device.position == AVCaptureDevicePositionBack) ? @"back" : @"front");
}
// Build the AVFoundation-managed preview layer, orient it, and attach it to
// the parent view (if any).
- (void)createVideoPreviewLayer;
{
    self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
    if ([self.captureVideoPreviewLayer.connection isVideoOrientationSupported])
    {
        [self.captureVideoPreviewLayer.connection setVideoOrientation:self.defaultAVCaptureVideoOrientation];
    }
    if (self.parentView != nil) {
        self.captureVideoPreviewLayer.frame = self.parentView.bounds;
        self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [self.parentView.layer addSublayer:self.captureVideoPreviewLayer];
    }
    NSLog(@"[Camera] created AVCaptureVideoPreviewLayer");
}
// Find the first capture device at the requested position (front/back) and
// swap it in as the session's sole input, inside one configuration transaction.
- (void)setDesiredCameraPosition:(AVCaptureDevicePosition)desiredPosition;
{
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == desiredPosition) {
            [self.captureSession beginConfiguration];
            NSError* error = nil;
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
            if (!input) {
                NSLog(@"error creating input %@", [error description]);
            }
            // support for autofocus
            if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
                error = nil;
                if ([device lockForConfiguration:&error]) {
                    device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
                    [device unlockForConfiguration];
                } else {
                    NSLog(@"unable to lock device for autofocus configuration %@", [error description]);
                }
            }
            // Remove any existing inputs first, iterating over a snapshot so
            // we never mutate the collection being enumerated. The new input
            // is then added exactly once: previously it was also added
            // *before* this loop, which raises NSInvalidArgumentException
            // when the session already holds a video input (e.g. when
            // switching cameras).
            for (AVCaptureInput *oldInput in [self.captureSession.inputs copy]) {
                [self.captureSession removeInput:oldInput];
            }
            [self.captureSession addInput:input];
            [self.captureSession commitConfiguration];
            break;
        }
    }
}
// Lazily build the capture pipeline (session, device, output, preview) on
// first use, then start it; later calls just restart the session.
- (void)startCaptureSession
{
    if (!self.cameraAvailable) {
        return;
    }
    if (self.captureSessionLoaded == NO) {
        [self createCaptureSession];
        [self createCaptureDevice];
        [self createCaptureOutput];   // subclass responsibility
        // setup preview layer
        if (self.useAVCaptureVideoPreviewLayer) {
            [self createVideoPreviewLayer];
        } else {
            [self createCustomVideoPreview];   // subclass responsibility
        }
        _captureSessionLoaded = YES;
    }
    [self.captureSession startRunning];
}
- (void)createCaptureOutput;
{
    // Abstract: subclasses must create and attach their AVCaptureOutput here.
    [NSException raise:NSInternalInconsistencyException
                format:@"You must override %s in a subclass", __FUNCTION__];
}
- (void)createCustomVideoPreview;
{
    // Abstract: subclasses that do not use AVCaptureVideoPreviewLayer must
    // build their own preview here.
    [NSException raise:NSInternalInconsistencyException
                format:@"You must override %s in a subclass", __FUNCTION__];
}
- (void)updateOrientation;
{
    // Default implementation does nothing; subclasses with a custom preview
    // layer override this to re-orient it.
}
- (void)updateSize
{
    // Map the configured session preset to the pixel dimensions frames will
    // arrive in. Only the two fixed-size presets have exact values here; the
    // named presets (Photo/High/Medium/Low) and anything unrecognized share a
    // 640x480 placeholder.
    NSString *preset = self.defaultAVCaptureSessionPreset;
    if ([preset isEqualToString:AVCaptureSessionPreset352x288]) {
        self.imageWidth = 352;
        self.imageHeight = 288;
    } else if ([preset isEqualToString:AVCaptureSessionPreset1280x720]) {
        self.imageWidth = 1280;
        self.imageHeight = 720;
    } else {
        // Covers AVCaptureSessionPresetPhoto/High/Medium/Low, 640x480 and the
        // fallback case. TODO: find the correct resolutions for the named presets.
        self.imageWidth = 640;
        self.imageHeight = 480;
    }
}
- (void)lockFocus
{
    // Pin focus at its current position, if the hardware supports a locked mode.
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (![camera isFocusModeSupported:AVCaptureFocusModeLocked]) {
        return;
    }
    NSError *configError = nil;
    if (![camera lockForConfiguration:&configError]) {
        NSLog(@"unable to lock device for locked focus configuration %@", [configError description]);
        return;
    }
    camera.focusMode = AVCaptureFocusModeLocked;
    [camera unlockForConfiguration];
}
- (void) unlockFocus;
{
    // Restore continuous autofocus (counterpart of -lockFocus).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
            [device unlockForConfiguration];
        } else {
            NSLog(@"unable to lock device for autofocus configuration %@", [error description]);
        }
    }
}
- (void)lockExposure
{
    // Freeze exposure at its current value, if the hardware supports it.
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (![camera isExposureModeSupported:AVCaptureExposureModeLocked]) {
        return;
    }
    NSError *configError = nil;
    if (![camera lockForConfiguration:&configError]) {
        NSLog(@"unable to lock device for locked exposure configuration %@", [configError description]);
        return;
    }
    camera.exposureMode = AVCaptureExposureModeLocked;
    [camera unlockForConfiguration];
}
- (void) unlockExposure;
{
    // Restore continuous auto-exposure (counterpart of -lockExposure).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
            [device unlockForConfiguration];
        } else {
            NSLog(@"unable to lock device for autoexposure configuration %@", [error description]);
        }
    }
}
- (void)lockBalance
{
    // Freeze white balance at its current value, if the hardware supports it.
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (![camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeLocked]) {
        return;
    }
    NSError *configError = nil;
    if (![camera lockForConfiguration:&configError]) {
        NSLog(@"unable to lock device for locked white balance configuration %@", [configError description]);
        return;
    }
    camera.whiteBalanceMode = AVCaptureWhiteBalanceModeLocked;
    [camera unlockForConfiguration];
}
- (void) unlockBalance;
{
    // Restore continuous auto white balance (counterpart of -lockBalance).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if ([device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            device.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
            [device unlockForConfiguration];
        } else {
            NSLog(@"unable to lock device for auto white balance configuration %@", [error description]);
        }
    }
}
@end

View File

@ -0,0 +1,85 @@
//
// CvCamera2.h
//
// Created by Giles Payne on 2020/03/11.
//
#import <UIKit/UIKit.h>
#import <Accelerate/Accelerate.h>
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>
#import "CVObjcUtil.h"
@class Mat;
@class CvAbstractCamera2;
// Base class for the video/photo camera helpers. Owns the AVCaptureSession,
// tracks device orientation, and exposes configuration knobs that should be
// set before -start is called.
CV_EXPORTS @interface CvAbstractCamera2 : NSObject
// Last tracked device orientation (face-up/face-down are ignored).
@property UIDeviceOrientation currentDeviceOrientation;
// YES when the device reports a camera source (NO on the simulator).
@property BOOL cameraAvailable;
@property (nonatomic, strong) AVCaptureSession* captureSession;
@property (nonatomic, strong) AVCaptureConnection* videoCaptureConnection;
@property (nonatomic, readonly) BOOL running;
@property (nonatomic, readonly) BOOL captureSessionLoaded;
// Desired frame rate, applied when the capture output is created.
@property (nonatomic, assign) int defaultFPS;
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic, assign) AVCaptureDevicePosition defaultAVCaptureDevicePosition;
@property (nonatomic, assign) AVCaptureVideoOrientation defaultAVCaptureVideoOrientation;
@property (nonatomic, assign) BOOL useAVCaptureVideoPreviewLayer;
// Session preset to request. Declared `copy` (NSString best practice) instead
// of the previous `strong` with a meaningless `const` in the pointer type;
// assigning the immutable AVCaptureSessionPreset* constants behaves identically.
@property (nonatomic, copy) NSString *defaultAVCaptureSessionPreset;
// Frame dimensions derived from the preset (see -updateSize in the .mm).
@property (nonatomic, assign) int imageWidth;
@property (nonatomic, assign) int imageHeight;
@property (nonatomic, strong) UIView* parentView;
- (void)start;
- (void)stop;
- (void)switchCameras;
- (id)initWithParentView:(UIView*)parent;
- (void)createCaptureOutput;
- (void)createVideoPreviewLayer;
- (void)updateOrientation;
- (void)lockFocus;
- (void)unlockFocus;
- (void)lockExposure;
- (void)unlockExposure;
- (void)lockBalance;
- (void)unlockBalance;
@end
///////////////////////////////// CvVideoCamera ///////////////////////////////////////////
@class CvVideoCamera2;
// Adopted to receive (and optionally modify, in place) each captured frame
// before it is rendered to the preview.
@protocol CvVideoCameraDelegate2 <NSObject>
- (void)processImage:(Mat*)image;
@end
// Frame-by-frame video camera: delivers each frame to the delegate as a Mat
// wrapper and can optionally record the processed frames to a temp file.
CV_EXPORTS @interface CvVideoCamera2 : CvAbstractCamera2<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, weak) id<CvVideoCameraDelegate2> delegate;
// When YES frames are delivered as single-channel images (Y plane of YpCbCr).
@property (nonatomic, assign) BOOL grayscaleMode;
@property (nonatomic, assign) BOOL recordVideo;
@property (nonatomic, assign) BOOL rotateVideo;
@property (nonatomic, strong) AVAssetWriterInput* recordAssetWriterInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor* recordPixelBufferAdaptor;
@property (nonatomic, strong) AVAssetWriter* recordAssetWriter;
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation;
- (void)layoutPreviewLayer;
// Copies the recorded temp file into the user's photo album.
- (void)saveVideo;
// Location of the recording scratch file (URL vs. string form of one path).
- (NSURL *)videoFileURL;
- (NSString *)videoFileString;
@end
///////////////////////////////// CvPhotoCamera ///////////////////////////////////////////
@class CvPhotoCamera2;
// Callbacks for still-photo capture results.
@protocol CvPhotoCameraDelegate2 <NSObject>
- (void)photoCamera:(CvPhotoCamera2*)photoCamera capturedImage:(UIImage*)image;
- (void)photoCameraCancel:(CvPhotoCamera2*)photoCamera;
@end
// Still-photo camera; uses the AVFoundation preview layer and delivers the
// captured UIImage via CvPhotoCameraDelegate2.
CV_EXPORTS @interface CvPhotoCamera2 : CvAbstractCamera2<AVCapturePhotoCaptureDelegate>
@property (nonatomic, weak) id<CvPhotoCameraDelegate2> delegate;
- (void)takePicture;
@end

View File

@ -0,0 +1,138 @@
//
// CvPhotoCamera2.mm
//
// Created by Giles Payne on 2020/04/01.
//
#import "CvCamera2.h"
#pragma mark - Private Interface
// Class extension: private state for CvPhotoCamera2.
@interface CvPhotoCamera2 ()
{
    // Backing store for the manually implemented `delegate` accessors.
    // __weak to honor the `weak` contract declared in the header: a plain
    // ivar is implicitly __strong under ARC, which retained the delegate and
    // could create a retain cycle with the owning controller.
    __weak id<CvPhotoCameraDelegate2> _delegate;
}
@property (nonatomic, strong) AVCaptureStillImageOutput* stillImageOutput;
@end
#pragma mark - Implementation
@implementation CvPhotoCamera2
#pragma mark Public
// Manual setter backing the `delegate` property declared in the header.
- (void)setDelegate:(id<CvPhotoCameraDelegate2>)newDelegate {
    _delegate = newDelegate;
}
// Manual getter backing the `delegate` property declared in the header.
- (id<CvPhotoCameraDelegate2>)delegate {
    return _delegate;
}
#pragma mark - Public interface
// Asynchronously capture one still image. `cameraAvailable` doubles as a
// re-entrancy guard: cleared here and restored once the JPEG has been
// delivered to the delegate on the main queue.
- (void)takePicture
{
    if (self.cameraAvailable == NO) {
        return;
    }
    self.cameraAvailable = NO;
    // NOTE(review): if the capture fails (error or NULL buffer) the guard is
    // never reset, so further takePicture calls become no-ops — confirm intended.
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoCaptureConnection
                                                       completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
     {
         if (error == nil && imageSampleBuffer != NULL)
         {
             // TODO check
             // NSNumber* imageOrientation = [UIImage cgImageOrientationForUIDeviceOrientation:currentDeviceOrientation];
             // CMSetAttachment(imageSampleBuffer, kCGImagePropertyOrientation, imageOrientation, 1);
             NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
             dispatch_async(dispatch_get_main_queue(), ^{
                 [self.captureSession stopRunning];
                 // Make sure we create objects on the main thread in the main context
                 UIImage* newImage = [UIImage imageWithData:jpegData];
                 //UIImageOrientation orientation = [newImage imageOrientation];
                 // TODO: only apply rotation, don't scale, since we can set this directly in the camera
                 /*
                  switch (orientation) {
                  case UIImageOrientationUp:
                  case UIImageOrientationDown:
                  newImage = [newImage imageWithAppliedRotationAndMaxSize:CGSizeMake(640.0, 480.0)];
                  break;
                  case UIImageOrientationLeft:
                  case UIImageOrientationRight:
                  newImage = [newImage imageWithMaxSize:CGSizeMake(640.0, 480.0)];
                  default:
                  break;
                  }
                  */
                 // We have captured the image, we can allow the user to take another picture
                 self.cameraAvailable = YES;
                 NSLog(@"CvPhotoCamera2 captured image");
                 [self.delegate photoCamera:self capturedImage:newImage];
                 [self.captureSession startRunning];
             });
         }
     }];
}
- (void)stop;
{
    // Tear down the base pipeline, then drop the still-image output so it is
    // recreated on the next start.
    [super stop];
    self.stillImageOutput = nil;
}
#pragma mark - Private Interface
- (void)createStillImageOutput;
{
    // setup still image output with jpeg codec
    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [self.stillImageOutput setOutputSettings:outputSettings];
    [self.captureSession addOutput:self.stillImageOutput];
    // Cache the video connection feeding the still-image output;
    // -takePicture captures from this connection.
    for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([port.mediaType isEqual:AVMediaTypeVideo]) {
                self.videoCaptureConnection = connection;
                break;
            }
        }
        if (self.videoCaptureConnection) {
            break;
        }
    }
    NSLog(@"[Camera] still image output created");
}
- (void)createCaptureOutput;
{
    // Photo capture attaches a JPEG still-image output (no video data output).
    [self createStillImageOutput];
}
- (void)createCustomVideoPreview;
{
    // Intentionally empty: photo capture always uses AVCaptureVideoPreviewLayer.
}
@end

View File

@ -0,0 +1,575 @@
//
// CvVideoCamera2.mm
//
// Created by Giles Payne on 2020/03/11.
//
#import "Mat.h"
#import "CvCamera2.h"
#import <UIKit/UIKit.h>
// Degrees -> radians helper for building CALayer rotation transforms.
static CGFloat DegreesToRadians(CGFloat degrees) {return degrees * M_PI / 180;}
#pragma mark - Private Interface
// Class extension: recording countdown plus the custom preview machinery.
@interface CvVideoCamera2 () {
    // Number of initial frames to skip before video recording starts.
    int recordingCountDown;
}
- (void)createVideoDataOutput;
- (void)createVideoFileOutput;
// CALayer used when useAVCaptureVideoPreviewLayer == NO; processed frames
// are rendered into it manually from the capture callback.
@property (nonatomic, strong) CALayer *customPreviewLayer;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@end
#pragma mark - Implementation
@implementation CvVideoCamera2
{
    // __weak to honor the header's `weak` delegate declaration: a plain ivar
    // is implicitly __strong under ARC, which retained the delegate and
    // risked a retain cycle with the owning controller.
    __weak id<CvVideoCameraDelegate2> _delegate;
    // Serial queue on which sample buffers are delivered.
    dispatch_queue_t videoDataOutputQueue;
    // Presentation timestamp of the most recent frame (used when recording).
    CMTime lastSampleTime;
}
// Manual setter backing the `delegate` property declared in the header.
- (void)setDelegate:(id<CvVideoCameraDelegate2>)newDelegate {
    _delegate = newDelegate;
}
// Manual getter backing the `delegate` property declared in the header.
- (id<CvVideoCameraDelegate2>)delegate {
    return _delegate;
}
#pragma mark - Constructors
- (id)initWithParentView:(UIView*)parent {
    // Video-camera defaults differ from the base class: back camera,
    // portrait, 30 FPS, high-quality preset, custom (non-AVCapture) preview.
    self = [super initWithParentView:parent];
    if (!self) {
        return nil;
    }
    parent.contentMode = UIViewContentModeScaleAspectFill;
    self.useAVCaptureVideoPreviewLayer = NO;
    self.recordVideo = NO;
    self.rotateVideo = NO;
    self.defaultAVCaptureDevicePosition = AVCaptureDevicePositionBack;
    self.defaultAVCaptureSessionPreset = AVCaptureSessionPresetHigh;
    self.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
    self.defaultFPS = 30;
    self.grayscaleMode = NO;
    return self;
}
#pragma mark - Public interface
// Start capture; when recording, delete any stale output file first.
- (void)start {
    if (self.running == YES) {
        return;
    }
    recordingCountDown = 10;   // skip the first 10 frames before writing video
    [super start];
    if (self.recordVideo == YES) {
        NSError* error = nil;
        if ([[NSFileManager defaultManager] fileExistsAtPath:[self videoFileString]]) {
            [[NSFileManager defaultManager] removeItemAtPath:[self videoFileString] error:&error];
        }
        if (error == nil) {
            // NOTE(review): this logs even when no file existed/was removed,
            // and stays silent when removal failed — confirm intended.
            NSLog(@"[Camera] Delete file %@", [self videoFileString]);
        }
    }
}
// Stop capture: finalize any in-flight recording and drop the custom
// preview layer. No-op when not running.
- (void)stop {
    if (self.running == YES) {
        [super stop];
        if (self.recordVideo == YES) {
            if (self.recordAssetWriter) {
                // finishWriting... is asynchronous; completion fires when the
                // file has been finalized.
                if (self.recordAssetWriter.status == AVAssetWriterStatusWriting) {
                    [self.recordAssetWriter finishWritingWithCompletionHandler:^void() {
                        NSLog(@"[Camera] recording stopped");
                    }];
                } else {
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing");
                }
            }
        }
        if (self.customPreviewLayer) {
            [self.customPreviewLayer removeFromSuperlayer];
            self.customPreviewLayer = nil;
        }
    }
}
// TODO fix
// Rotate/resize the custom preview layer to match a UI interface
// orientation, compensating for the configured capture orientation.
- (void)adjustLayoutToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {
        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;
        switch (interfaceOrientation) {
            case UIInterfaceOrientationPortrait:
                NSLog(@"to Portrait");
                rotation_angle = 270;
                break;
            case UIInterfaceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                NSLog(@"to UpsideDown");
                break;
            case UIInterfaceOrientationLandscapeLeft:
                rotation_angle = 0;
                NSLog(@"to LandscapeLeft");
                break;
            case UIInterfaceOrientationLandscapeRight:
                rotation_angle = 180;
                NSLog(@"to LandscapeRight");
                break;
            default:
                break; // leave the layer in its last known orientation
        }
        // Offset by the capture orientation so frames end up upright.
        switch (self.defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // deliberate fall-through: Portrait exits via the case below
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;
        // A 90/270 degree rotation swaps the layer's width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }
        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }
        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }
}
// TODO fix
// Same as adjustLayoutToInterfaceOrientation: but driven by the tracked
// *device* orientation rather than an interface orientation.
- (void)layoutPreviewLayer {
    NSLog(@"layout preview layer");
    if (self.parentView != nil) {
        CALayer* layer = self.customPreviewLayer;
        CGRect bounds = self.customPreviewLayer.bounds;
        int rotation_angle = 0;
        bool flip_bounds = false;
        switch (self.currentDeviceOrientation) {
            case UIDeviceOrientationPortrait:
                rotation_angle = 270;
                break;
            case UIDeviceOrientationPortraitUpsideDown:
                rotation_angle = 90;
                break;
            case UIDeviceOrientationLandscapeLeft:
                NSLog(@"left");
                rotation_angle = 180;
                break;
            case UIDeviceOrientationLandscapeRight:
                NSLog(@"right");
                rotation_angle = 0;
                break;
            case UIDeviceOrientationFaceUp:
            case UIDeviceOrientationFaceDown:
            default:
                break; // leave the layer in its last known orientation
        }
        // Offset by the capture orientation so frames end up upright.
        switch (self.defaultAVCaptureVideoOrientation) {
            case AVCaptureVideoOrientationLandscapeRight:
                rotation_angle += 180;
                break;
            case AVCaptureVideoOrientationPortraitUpsideDown:
                rotation_angle += 270;
                break;
            case AVCaptureVideoOrientationPortrait:
                rotation_angle += 90;
                // deliberate fall-through: Portrait exits via the case below
            case AVCaptureVideoOrientationLandscapeLeft:
                break;
            default:
                break;
        }
        rotation_angle = rotation_angle % 360;
        // A 90/270 degree rotation swaps the layer's width and height.
        if (rotation_angle == 90 || rotation_angle == 270) {
            flip_bounds = true;
        }
        if (flip_bounds) {
            NSLog(@"flip bounds");
            bounds = CGRectMake(0, 0, bounds.size.height, bounds.size.width);
        }
        layer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
        layer.affineTransform = CGAffineTransformMakeRotation( DegreesToRadians(rotation_angle) );
        layer.bounds = bounds;
    }
}
#pragma mark - Private Interface
// Build the AVCaptureVideoDataOutput (pixel format, FPS, mirroring,
// orientation), the custom preview layer, and the serial delivery queue.
- (void)createVideoDataOutput {
    // Make a video data output
    self.videoDataOutput = [AVCaptureVideoDataOutput new];
    // In grayscale mode we want YUV (YpCbCr 4:2:0) so we can directly access the graylevel intensity values (Y component)
    // In color mode the BGRA format is used
    OSType format = self.grayscaleMode ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
    self.videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:format]
                                                                     forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    // discard if the data output queue is blocked (as we process the still image)
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    if ( [self.captureSession canAddOutput:self.videoDataOutput] ) {
        [self.captureSession addOutput:self.videoDataOutput];
    }
    [[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
    // set default FPS
    // NOTE(review): assumes the session already has an input at index 0, and
    // ignores the BOOL result of lockForConfiguration: (only the NSError is
    // checked) — confirm both assumptions.
    AVCaptureDeviceInput *currentInput = [self.captureSession.inputs objectAtIndex:0];
    AVCaptureDevice *device = currentInput.device;
    NSError *error = nil;
    [device lockForConfiguration:&error];
    float maxRate = ((AVFrameRateRange*) [device.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
    if (maxRate > self.defaultFPS - 1 && error == nil) {
        [device setActiveVideoMinFrameDuration:CMTimeMake(1, self.defaultFPS)];
        [device setActiveVideoMaxFrameDuration:CMTimeMake(1, self.defaultFPS)];
        NSLog(@"[Camera] FPS set to %d", self.defaultFPS);
    } else {
        NSLog(@"[Camera] unable to set defaultFPS at %d FPS, max is %f FPS", self.defaultFPS, maxRate);
    }
    if (error != nil) {
        NSLog(@"[Camera] unable to set defaultFPS: %@", error);
    }
    [device unlockForConfiguration];
    // set video mirroring for front camera (more intuitive)
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoMirroring) {
        if (self.defaultAVCaptureDevicePosition == AVCaptureDevicePositionFront) {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = YES;
        } else {
            [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoMirrored = NO;
        }
    }
    // set default video orientation
    if ([self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].supportsVideoOrientation) {
        [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation = self.defaultAVCaptureVideoOrientation;
    }
    // create a custom preview layer sized to the parent view
    self.customPreviewLayer = [CALayer layer];
    self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
    self.customPreviewLayer.position = CGPointMake(self.parentView.frame.size.width/2., self.parentView.frame.size.height/2.);
    [self updateOrientation];
    // create a serial dispatch queue used for the sample buffer delegate as well as when a still image is captured
    // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
    // see the header doc for setSampleBufferDelegate:queue: for more information
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [self.videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    NSLog(@"[Camera] created AVCaptureVideoDataOutput");
}
// Build the H.264 recording pipeline: asset writer, writer input, and the
// pixel-buffer adaptor frames are appended through in the capture callback.
- (void)createVideoFileOutput {
    /* Video File Output in H.264, via AVAssetWriter */
    NSLog(@"Create Video with dimensions %dx%d", self.imageWidth, self.imageHeight);
    NSDictionary *outputSettings
        = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:self.imageWidth], AVVideoWidthKey,
           [NSNumber numberWithInt:self.imageHeight], AVVideoHeightKey,
           AVVideoCodecH264, AVVideoCodecKey,
           nil
           ];
    self.recordAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    // Adaptor uses the same pixel format the capture callback produces.
    int pixelBufferFormat = (self.grayscaleMode == YES) ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : kCVPixelFormatType_32BGRA;
    self.recordPixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
         initWithAssetWriterInput:self.recordAssetWriterInput
         sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:pixelBufferFormat], kCVPixelBufferPixelFormatTypeKey, nil]];
    NSError* error = nil;
    NSLog(@"Create AVAssetWriter with url: %@", [self videoFileURL]);
    self.recordAssetWriter = [AVAssetWriter assetWriterWithURL:[self videoFileURL]
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    if (error != nil) {
        NSLog(@"[Camera] Unable to create AVAssetWriter: %@", error);
    }
    [self.recordAssetWriter addInput:self.recordAssetWriterInput];
    self.recordAssetWriterInput.expectsMediaDataInRealTime = YES;
    NSLog(@"[Camera] created AVAssetWriter");
}
- (void)createCaptureOutput {
    // Video capture always gets a data output; the file writer is only built
    // when recording is enabled.
    [self createVideoDataOutput];
    if (self.recordVideo == YES) {
        [self createVideoFileOutput];
    }
}
- (void)createCustomVideoPreview {
    // Attach the manually-rendered preview layer to the parent view.
    [self.parentView.layer addSublayer:self.customPreviewLayer];
}
// Renders `image` into a freshly created 32ARGB pixel buffer (used when
// appending recorded frames). Returns a +1 reference; the caller must
// CVPixelBufferRelease it (the capture callback does).
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
    CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    // Fix: use __bridge (no ownership transfer). The previous
    // CFBridgingRetain(options) handed the dictionary to CF with no matching
    // CFRelease, leaking it on every recorded frame — CVPixelBufferCreate
    // does not consume its attributes argument.
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                          frameSize.height, kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                 frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
#pragma mark - Protocol AVCaptureVideoDataOutputSampleBufferDelegate
// Per-frame callback (runs on videoDataOutputQueue): wraps the pixel buffer
// in a cv::Mat, lets the delegate process it in place, renders the result
// into the custom preview layer, and optionally appends it to the recording.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    (void)captureOutput;
    (void)connection;
    auto strongDelegate = self.delegate;
    if (strongDelegate) {
        // convert from Core Media to Core Video
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        void* bufferAddress;
        size_t width;
        size_t height;
        size_t bytesPerRow;
        CGColorSpaceRef colorSpace;
        CGContextRef context;
        int format_opencv;
        OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);
        if (format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) {
            // Grayscale mode: use the Y plane directly as an 8-bit 1-channel image.
            format_opencv = CV_8UC1;
            bufferAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
            width = CVPixelBufferGetWidthOfPlane(imageBuffer, 0);
            height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
            bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        } else { // expect kCVPixelFormatType_32BGRA
            format_opencv = CV_8UC4;
            bufferAddress = CVPixelBufferGetBaseAddress(imageBuffer);
            width = CVPixelBufferGetWidth(imageBuffer);
            height = CVPixelBufferGetHeight(imageBuffer);
            bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        }
        // delegate image processing to the delegate
        // (the Mat aliases the locked pixel buffer — no copy is made here)
        cv::Mat image((int)height, (int)width, format_opencv, bufferAddress, bytesPerRow);
        CGImage* dstImage;
        if ([strongDelegate respondsToSelector:@selector(processImage:)]) {
            [strongDelegate processImage:[Mat fromNative:image]];
        }
        // check if matrix data pointer or dimensions were changed by the delegate
        bool iOSimage = false;
        if (height == (size_t)image.rows && width == (size_t)image.cols && format_opencv == image.type() && bufferAddress == image.data && bytesPerRow == image.step) {
            iOSimage = true;
        }
        // (create color space, create graphics context, render buffer)
        CGBitmapInfo bitmapInfo;
        // basically we decide if it's a grayscale, rgb or rgba image
        if (image.channels() == 1) {
            colorSpace = CGColorSpaceCreateDeviceGray();
            bitmapInfo = kCGImageAlphaNone;
        } else if (image.channels() == 3) {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaNone;
            if (iOSimage) {
                // little-endian byte order so CG reads the BGRA channels correctly
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        } else {
            colorSpace = CGColorSpaceCreateDeviceRGB();
            bitmapInfo = kCGImageAlphaPremultipliedFirst;
            if (iOSimage) {
                bitmapInfo |= kCGBitmapByteOrder32Little;
            } else {
                bitmapInfo |= kCGBitmapByteOrder32Big;
            }
        }
        if (iOSimage) {
            // Fast path: render straight from the (still locked) pixel buffer.
            context = CGBitmapContextCreate(bufferAddress, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
            dstImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
        } else {
            // Slow path: the delegate reallocated the Mat, so copy its bytes out.
            NSData *data = [NSData dataWithBytes:image.data length:image.elemSize()*image.total()];
            CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
            // Creating CGImage from cv::Mat
            dstImage = CGImageCreate(image.cols, // width
                                     image.rows, // height
                                     8, // bits per component
                                     8 * image.elemSize(), // bits per pixel
                                     image.step, // bytesPerRow
                                     colorSpace, // colorspace
                                     bitmapInfo, // bitmap info
                                     provider, // CGDataProviderRef
                                     NULL, // decode
                                     false, // should interpolate
                                     kCGRenderingIntentDefault // intent
                                     );
            CGDataProviderRelease(provider);
        }
        // render buffer
        dispatch_sync(dispatch_get_main_queue(), ^{
            self.customPreviewLayer.contents = (__bridge id)dstImage;
        });
        recordingCountDown--;
        if (self.recordVideo == YES && recordingCountDown < 0) {
            lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            // CMTimeShow(lastSampleTime);
            if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                // Lazily start the writer session at the first recorded frame's timestamp.
                [self.recordAssetWriter startWriting];
                [self.recordAssetWriter startSessionAtSourceTime:lastSampleTime];
                if (self.recordAssetWriter.status != AVAssetWriterStatusWriting) {
                    // NOTE(review): this early return skips the cleanup below,
                    // leaking dstImage/colorSpace and leaving the pixel buffer
                    // locked — confirm intended.
                    NSLog(@"[Camera] Recording Error: asset writer status is not writing: %@", self.recordAssetWriter.error);
                    return;
                } else {
                    NSLog(@"[Camera] Video recording started");
                }
            }
            if (self.recordAssetWriterInput.readyForMoreMediaData) {
                CVImageBufferRef pixelBuffer = [self pixelBufferFromCGImage:dstImage];
                if (! [self.recordPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                  withPresentationTime:lastSampleTime] ) {
                    NSLog(@"Video Writing Error");
                }
                if (pixelBuffer != nullptr)
                    CVPixelBufferRelease(pixelBuffer);
            }
        }
        // cleanup
        CGImageRelease(dstImage);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }
}
- (void)updateOrientation {
    // With a custom preview layer, optionally rotate it to match the current
    // device orientation (overrides the base-class no-op).
    if (self.rotateVideo == YES)
    {
        NSLog(@"rotate..");
        self.customPreviewLayer.bounds = CGRectMake(0, 0, self.parentView.frame.size.width, self.parentView.frame.size.height);
        [self layoutPreviewLayer];
    }
}
- (void)saveVideo {
    // Export the recorded temp file to the user's photo album.
    if (self.recordVideo == NO) {
        return;
    }
    UISaveVideoAtPathToSavedPhotosAlbum([self videoFileString], nil, nil, NULL);
}
- (NSURL *)videoFileURL {
    // URL form of the recording scratch path. Built from -videoFileString so
    // the two accessors can never drift apart (the path construction was
    // previously duplicated here).
    NSString *outputPath = [self videoFileString];
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSLog(@"file exists");
    }
    return outputURL;
}
- (NSString *)videoFileString {
    // Fixed scratch location for video recording: <tmp>/output.mov
    return [NSTemporaryDirectory() stringByAppendingString:@"output.mov"];
}
@end