feat: switch the backend to PaddleOCR-NCNN and the project to CMake

1. The project backend has been migrated wholesale to the PaddleOCR-NCNN algorithm and has passed basic compatibility testing
2. The project is now organized with CMake; to better accommodate third-party libraries going forward, a QMake project will no longer be provided
3. Reorganized the rights/licensing notice files and the code project to minimize infringement risk

Log: switch the backend to PaddleOCR-NCNN and the project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
wangzhengyang
2022-05-10 09:54:44 +08:00
parent ecdd171c6f
commit 718c41634f
10018 changed files with 3593797 additions and 186748 deletions

View File

@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.puzzle15"
android:versionCode="301"
android:versionName="3.01" >
<uses-sdk android:minSdkVersion="8"/>
<application
android:icon="@drawable/icon"
android:label="@string/app_name" >
<activity
android:name=".Puzzle15Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

View File

@@ -0,0 +1,6 @@
set(sample example-15-puzzle)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.puzzle15"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.puzzle15"
>
<application
android:icon="@drawable/icon"
android:label="@string/app_name">
<activity
android:name=".Puzzle15Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

Binary file not shown. (added, 2.0 KiB)

View File

@@ -0,0 +1,6 @@
<resources>
<string name="app_name">OCV 15 Puzzle</string>
<string name="menu_toggle_tile_numbers">Show/hide tile numbers</string>
<string name="menu_start_new_game">Start new game</string>
</resources>

View File

@@ -0,0 +1,153 @@
package org.opencv.samples.puzzle15;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.WindowManager;
import java.util.Collections;
import java.util.List;
public class Puzzle15Activity extends CameraActivity implements CvCameraViewListener, View.OnTouchListener {
private static final String TAG = "Puzzle15::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private Puzzle15Processor mPuzzle15;
private MenuItem mItemHideNumbers;
private MenuItem mItemStartNewGame;
private int mGameWidth;
private int mGameHeight;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
/* Now enable camera view to start receiving frames */
mOpenCvCameraView.setOnTouchListener(Puzzle15Activity.this);
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.d(TAG, "Creating and setting view");
mOpenCvCameraView = (CameraBridgeViewBase) new JavaCameraView(this, -1);
setContentView(mOpenCvCameraView);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mPuzzle15 = new Puzzle15Processor();
mPuzzle15.prepareNewGame();
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemHideNumbers = menu.add("Show/hide tile numbers");
mItemStartNewGame = menu.add("Start new game");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemStartNewGame) {
/* We need to start new game */
mPuzzle15.prepareNewGame();
} else if (item == mItemHideNumbers) {
/* We need to enable or disable drawing of the tile numbers */
mPuzzle15.toggleTileNumbers();
}
return true;
}
public void onCameraViewStarted(int width, int height) {
mGameWidth = width;
mGameHeight = height;
mPuzzle15.prepareGameSize(width, height);
}
public void onCameraViewStopped() {
}
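/* Map the touch position from view coordinates to frame coordinates;
 * the camera frame is centered inside the view, hence the offsets below. */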
public boolean onTouch(View view, MotionEvent event) {
int xpos, ypos;
xpos = (view.getWidth() - mGameWidth) / 2;
xpos = (int)event.getX() - xpos;
ypos = (view.getHeight() - mGameHeight) / 2;
ypos = (int)event.getY() - ypos;
if (xpos >=0 && xpos <= mGameWidth && ypos >=0 && ypos <= mGameHeight) {
/* click is inside the picture. Deliver this event to processor */
mPuzzle15.deliverTouchEvent(xpos, ypos);
}
return false;
}
public Mat onCameraFrame(Mat inputFrame) {
return mPuzzle15.puzzleFrame(inputFrame);
}
}

View File

@@ -0,0 +1,195 @@
package org.opencv.samples.puzzle15;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import android.util.Log;
/**
 * This class is a controller for the puzzle game.
 * It converts the camera image into the shuffled puzzle image.
 */
public class Puzzle15Processor {
private static final int GRID_SIZE = 4;
private static final int GRID_AREA = GRID_SIZE * GRID_SIZE;
private static final int GRID_EMPTY_INDEX = GRID_AREA - 1;
private static final String TAG = "Puzzle15Processor";
private static final Scalar GRID_EMPTY_COLOR = new Scalar(0x33, 0x33, 0x33, 0xFF);
private int[] mIndexes;
private int[] mTextWidths;
private int[] mTextHeights;
private Mat mRgba15;
private Mat[] mCells15;
private boolean mShowTileNumbers = true;
public Puzzle15Processor() {
mTextWidths = new int[GRID_AREA];
mTextHeights = new int[GRID_AREA];
mIndexes = new int [GRID_AREA];
for (int i = 0; i < GRID_AREA; i++)
mIndexes[i] = i;
}
/* Prepares the processor for a new game: reshuffles until the layout is solvable. */
public synchronized void prepareNewGame() {
do {
shuffle(mIndexes);
} while (!isPuzzleSolvable());
}
/* Tells the processor the size of the frames that will be delivered via
 * puzzleFrame. If frames of a different size are delivered, the result is
 * unpredictable.
 */
public synchronized void prepareGameSize(int width, int height) {
mRgba15 = new Mat(height, width, CvType.CV_8UC4);
mCells15 = new Mat[GRID_AREA];
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE);
}
}
for (int i = 0; i < GRID_AREA; i++) {
Size s = Imgproc.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
}
/* Called from the outside: processes the frame and rearranges the tiles
 * as specified by the mIndexes array.
 */
public synchronized Mat puzzleFrame(Mat inputPicture) {
Mat[] cells = new Mat[GRID_AREA];
int rows = inputPicture.rows();
int cols = inputPicture.cols();
rows = rows - rows%4;
cols = cols - cols%4;
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
cells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
}
}
// copy shuffled tiles
for (int i = 0; i < GRID_AREA; i++) {
int idx = mIndexes[i];
if (idx == GRID_EMPTY_INDEX)
mCells15[i].setTo(GRID_EMPTY_COLOR);
else {
cells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
Imgproc.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2,
(rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
}
}
}
for (int i = 0; i < GRID_AREA; i++)
cells[i].release();
drawGrid(cols, rows, mRgba15);
return mRgba15;
}
public void toggleTileNumbers() {
mShowTileNumbers = !mShowTileNumbers;
}
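/* Translates a touch at (x, y) into a grid cell and, if one of its four
 * neighbours is the empty cell, swaps the touched tile with it. */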
public void deliverTouchEvent(int x, int y) {
int rows = mRgba15.rows();
int cols = mRgba15.cols();
int row = (int) Math.floor(y * GRID_SIZE / rows);
int col = (int) Math.floor(x * GRID_SIZE / cols);
if (row < 0 || row >= GRID_SIZE || col < 0 || col >= GRID_SIZE) {
Log.e(TAG, "It is not expected to get touch event outside of picture");
return ;
}
int idx = row * GRID_SIZE + col;
int idxtoswap = -1;
// left
if (idxtoswap < 0 && col > 0)
if (mIndexes[idx - 1] == GRID_EMPTY_INDEX)
idxtoswap = idx - 1;
// right
if (idxtoswap < 0 && col < GRID_SIZE - 1)
if (mIndexes[idx + 1] == GRID_EMPTY_INDEX)
idxtoswap = idx + 1;
// top
if (idxtoswap < 0 && row > 0)
if (mIndexes[idx - GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx - GRID_SIZE;
// bottom
if (idxtoswap < 0 && row < GRID_SIZE - 1)
if (mIndexes[idx + GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx + GRID_SIZE;
// swap
if (idxtoswap >= 0) {
synchronized (this) {
int touched = mIndexes[idx];
mIndexes[idx] = mIndexes[idxtoswap];
mIndexes[idxtoswap] = touched;
}
}
}
private void drawGrid(int cols, int rows, Mat drawMat) {
for (int i = 1; i < GRID_SIZE; i++) {
Imgproc.line(drawMat, new Point(0, i * rows / GRID_SIZE), new Point(cols, i * rows / GRID_SIZE), new Scalar(0, 255, 0, 255), 3);
Imgproc.line(drawMat, new Point(i * cols / GRID_SIZE, 0), new Point(i * cols / GRID_SIZE, rows), new Scalar(0, 255, 0, 255), 3);
}
}
private static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) {
int temp = array[i - 1];
int randIx = (int) (Math.random() * i);
array[i - 1] = array[randIx];
array[randIx] = temp;
}
}
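/* Classic 15-puzzle parity test: every legal move preserves the parity of
 * (inversions among the numbered tiles) + (1-based row of the empty cell,
 * counted from the top). The solved state has 0 inversions with the empty
 * cell in row 4, so a layout is solvable iff this sum is even. */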
private boolean isPuzzleSolvable() {
int sum = 0;
for (int i = 0; i < GRID_AREA; i++) {
if (mIndexes[i] == GRID_EMPTY_INDEX)
sum += (i / GRID_SIZE) + 1;
else {
int smaller = 0;
for (int j = i + 1; j < GRID_AREA; j++) {
if (mIndexes[j] < mIndexes[i])
smaller++;
}
sum += smaller;
}
}
return sum % 2 == 0;
}
}

View File

@@ -0,0 +1,17 @@
# ----------------------------------------------------------------------------
# CMake file for Android samples. See root CMakeLists.txt
#
# ----------------------------------------------------------------------------
add_custom_target(opencv_android_examples)
ocv_warnings_disable(CMAKE_CXX_FLAGS -Wmissing-declarations)
add_subdirectory(15-puzzle)
add_subdirectory(face-detection)
add_subdirectory(image-manipulations)
add_subdirectory(camera-calibration)
add_subdirectory(color-blob-detection)
add_subdirectory(tutorial-1-camerapreview)
add_subdirectory(tutorial-2-mixedprocessing)
add_subdirectory(tutorial-3-cameracontrol)
add_subdirectory(tutorial-4-opencl)

View File

@@ -0,0 +1,94 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:@ANDROID_GRADLE_PLUGIN_VERSION@'
classpath 'org.jetbrains.kotlin:kotlin-gradle-plugin:@KOTLIN_PLUGIN_VERSION@'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
//allprojects {
// gradle.projectsEvaluated {
// tasks.withType(JavaCompile) {
// options.compilerArgs << "-Xlint:unchecked"
// options.compilerArgs << "-Xlint:deprecation"
// }
// }
//}
gradle.afterProject { project ->
if (project.pluginManager.hasPlugin('com.android.application')
|| project.pluginManager.hasPlugin('com.android.library')
|| project.pluginManager.hasPlugin('com.android.test')
|| project.pluginManager.hasPlugin('com.android.feature') ) {
if (true) {
gradle.println("Override build ABIs for the project ${project.name}")
project.android {
splits {
abi {
enable true
universalApk false
@ANDROID_ABI_FILTER@
}
}
}
}
if (true) {
gradle.println("Override lintOptions for the project ${project.name}")
project.android {
lintOptions {
// checkReleaseBuilds false
abortOnError false
}
}
}
// (you still need to re-build OpenCV with debug information to debug it)
if (true) {
gradle.println("Override doNotStrip-debug for the project ${project.name}")
project.android {
buildTypes {
debug {
packagingOptions {
doNotStrip '**/*.so' // controlled by OpenCV CMake scripts
}
}
}
}
}
if (false || project.hasProperty("doNotStrip")) {
gradle.println("Override doNotStrip-release for the project ${project.name}")
project.android {
buildTypes {
release {
packagingOptions {
doNotStrip '**/*.so' // controlled by OpenCV CMake scripts
}
}
}
}
}
}
}

View File

@@ -0,0 +1,6 @@
set(sample example-camera-calibration)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.cameracalibration"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.cameracalibration"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon">
<activity android:name="CameraCalibrationActivity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

Binary file not shown. (added, 2.1 KiB)

View File

@@ -0,0 +1,12 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:opencv="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/camera_calibration_java_surface_view" />
</LinearLayout>

View File

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
<group android:checkableBehavior="single">
<item android:id="@+id/calibrate"
android:title="@string/action_calibrate"
android:showAsAction="ifRoom|withText" />
<item android:id="@+id/preview_mode"
android:title="@string/preview_mode">
<menu>
<group android:checkableBehavior="single">
<item android:id="@+id/calibration"
android:title="@string/calibration"
android:checked="true" />
<item android:id="@+id/undistortion"
android:title="@string/undistortion" />
<item android:id="@+id/comparison"
android:title="@string/comparison" />
</group>
</menu>
</item>
</group>
</menu>

View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV Camera Calibration</string>
<string name="action_calibrate">Calibrate</string>
<string name="calibration">Calibration</string>
<string name="undistortion">Undistortion</string>
<string name="comparison">Comparison</string>
<string name="preview_mode">Preview mode</string>
<string name="calibration_successful">Successfully calibrated!\nAvg. re-projection error:</string>
<string name="calibration_unsuccessful">Unsuccessful calibration.\nTry again</string>
<string name="more_samples">Please, capture more samples</string>
<string name="calibrating">Calibrating...</string>
<string name="please_wait">Please, wait</string>
<string name="original">Original</string>
<string name="undistorted">Undistorted</string>
</resources>

View File

@@ -0,0 +1,69 @@
package org.opencv.samples.cameracalibration;
import org.opencv.core.Mat;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.util.Log;
public abstract class CalibrationResult {
private static final String TAG = "OCV::CalibrationResult";
private static final int CAMERA_MATRIX_ROWS = 3;
private static final int CAMERA_MATRIX_COLS = 3;
private static final int DISTORTION_COEFFICIENTS_SIZE = 5;
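// Values are persisted in SharedPreferences under integer string keys:
// "0".."8" hold the 3x3 camera matrix in row-major order, "9".."13" hold
// the 5 distortion coefficients.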
public static void save(Activity activity, Mat cameraMatrix, Mat distortionCoefficients) {
SharedPreferences sharedPref = activity.getPreferences(Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPref.edit();
double[] cameraMatrixArray = new double[CAMERA_MATRIX_ROWS * CAMERA_MATRIX_COLS];
cameraMatrix.get(0, 0, cameraMatrixArray);
for (int i = 0; i < CAMERA_MATRIX_ROWS; i++) {
for (int j = 0; j < CAMERA_MATRIX_COLS; j++) {
int id = i * CAMERA_MATRIX_ROWS + j;
editor.putFloat(Integer.toString(id), (float)cameraMatrixArray[id]);
}
}
double[] distortionCoefficientsArray = new double[DISTORTION_COEFFICIENTS_SIZE];
distortionCoefficients.get(0, 0, distortionCoefficientsArray);
int shift = CAMERA_MATRIX_ROWS * CAMERA_MATRIX_COLS;
for (int i = shift; i < DISTORTION_COEFFICIENTS_SIZE + shift; i++) {
editor.putFloat(Integer.toString(i), (float)distortionCoefficientsArray[i-shift]);
}
editor.apply();
Log.i(TAG, "Saved camera matrix: " + cameraMatrix.dump());
Log.i(TAG, "Saved distortion coefficients: " + distortionCoefficients.dump());
}
public static boolean tryLoad(Activity activity, Mat cameraMatrix, Mat distortionCoefficients) {
SharedPreferences sharedPref = activity.getPreferences(Context.MODE_PRIVATE);
if (sharedPref.getFloat("0", -1) == -1) {
Log.i(TAG, "No previous calibration results found");
return false;
}
double[] cameraMatrixArray = new double[CAMERA_MATRIX_ROWS * CAMERA_MATRIX_COLS];
for (int i = 0; i < CAMERA_MATRIX_ROWS; i++) {
for (int j = 0; j < CAMERA_MATRIX_COLS; j++) {
int id = i * CAMERA_MATRIX_ROWS + j;
cameraMatrixArray[id] = sharedPref.getFloat(Integer.toString(id), -1);
}
}
cameraMatrix.put(0, 0, cameraMatrixArray);
Log.i(TAG, "Loaded camera matrix: " + cameraMatrix.dump());
double[] distortionCoefficientsArray = new double[DISTORTION_COEFFICIENTS_SIZE];
int shift = CAMERA_MATRIX_ROWS * CAMERA_MATRIX_COLS;
for (int i = shift; i < DISTORTION_COEFFICIENTS_SIZE + shift; i++) {
distortionCoefficientsArray[i - shift] = sharedPref.getFloat(Integer.toString(i), -1);
}
distortionCoefficients.put(0, 0, distortionCoefficientsArray);
Log.i(TAG, "Loaded distortion coefficients: " + distortionCoefficients.dump());
return true;
}
}

View File

@@ -0,0 +1,235 @@
// This sample is based on "Camera calibration With OpenCV" tutorial:
// https://docs.opencv.org/3.4/d4/d94/tutorial_camera_calibration.html
//
// It uses standard OpenCV asymmetric circles grid pattern 11x4:
// https://github.com/opencv/opencv/blob/3.4/doc/acircles_pattern.png
// The results are the camera matrix and 5 distortion coefficients.
//
// Tap on highlighted pattern to capture pattern corners for calibration.
// Move pattern along the whole screen and capture data.
//
// When you've captured the necessary number of pattern views (usually ~20 are enough),
// press "Calibrate" button for performing camera calibration.
package org.opencv.samples.cameracalibration;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import android.app.ProgressDialog;
import android.content.res.Resources;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.widget.Toast;
import java.util.Collections;
import java.util.List;
public class CameraCalibrationActivity extends CameraActivity implements CvCameraViewListener2, OnTouchListener {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private CameraCalibrator mCalibrator;
private OnCameraFrameRender mOnCameraFrameRender;
private Menu mMenu;
private int mWidth;
private int mHeight;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(CameraCalibrationActivity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public CameraCalibrationActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.camera_calibration_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_calibration_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
super.onCreateOptionsMenu(menu);
getMenuInflater().inflate(R.menu.calibration, menu);
mMenu = menu;
return true;
}
@Override
public boolean onPrepareOptionsMenu (Menu menu) {
super.onPrepareOptionsMenu(menu);
menu.findItem(R.id.preview_mode).setEnabled(true);
if (mCalibrator != null && !mCalibrator.isCalibrated()) {
menu.findItem(R.id.preview_mode).setEnabled(false);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.calibration:
mOnCameraFrameRender =
new OnCameraFrameRender(new CalibrationFrameRender(mCalibrator));
item.setChecked(true);
return true;
case R.id.undistortion:
mOnCameraFrameRender =
new OnCameraFrameRender(new UndistortionFrameRender(mCalibrator));
item.setChecked(true);
return true;
case R.id.comparison:
mOnCameraFrameRender =
new OnCameraFrameRender(new ComparisonFrameRender(mCalibrator, mWidth, mHeight, getResources()));
item.setChecked(true);
return true;
case R.id.calibrate:
final Resources res = getResources();
if (mCalibrator.getCornersBufferSize() < 2) {
(Toast.makeText(this, res.getString(R.string.more_samples), Toast.LENGTH_SHORT)).show();
return true;
}
mOnCameraFrameRender = new OnCameraFrameRender(new PreviewFrameRender());
new AsyncTask<Void, Void, Void>() {
private ProgressDialog calibrationProgress;
@Override
protected void onPreExecute() {
calibrationProgress = new ProgressDialog(CameraCalibrationActivity.this);
calibrationProgress.setTitle(res.getString(R.string.calibrating));
calibrationProgress.setMessage(res.getString(R.string.please_wait));
calibrationProgress.setCancelable(false);
calibrationProgress.setIndeterminate(true);
calibrationProgress.show();
}
@Override
protected Void doInBackground(Void... arg0) {
mCalibrator.calibrate();
return null;
}
@Override
protected void onPostExecute(Void result) {
calibrationProgress.dismiss();
mCalibrator.clearCorners();
mOnCameraFrameRender = new OnCameraFrameRender(new CalibrationFrameRender(mCalibrator));
String resultMessage = (mCalibrator.isCalibrated()) ?
res.getString(R.string.calibration_successful) + " " + mCalibrator.getAvgReprojectionError() :
res.getString(R.string.calibration_unsuccessful);
(Toast.makeText(CameraCalibrationActivity.this, resultMessage, Toast.LENGTH_SHORT)).show();
if (mCalibrator.isCalibrated()) {
CalibrationResult.save(CameraCalibrationActivity.this,
mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());
}
}
}.execute();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
public void onCameraViewStarted(int width, int height) {
if (mWidth != width || mHeight != height) {
mWidth = width;
mHeight = height;
mCalibrator = new CameraCalibrator(mWidth, mHeight);
if (CalibrationResult.tryLoad(this, mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients())) {
mCalibrator.setCalibrated();
} else {
if (mMenu != null && !mCalibrator.isCalibrated()) {
mMenu.findItem(R.id.preview_mode).setEnabled(false);
}
}
mOnCameraFrameRender = new OnCameraFrameRender(new CalibrationFrameRender(mCalibrator));
}
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return mOnCameraFrameRender.render(inputFrame);
}
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.d(TAG, "onTouch invoked");
mCalibrator.addCorners();
return false;
}
}

View File

@@ -0,0 +1,170 @@
package org.opencv.samples.cameracalibration;
import java.util.ArrayList;
import java.util.List;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import android.util.Log;
public class CameraCalibrator {
private static final String TAG = "OCV::CameraCalibrator";
private final Size mPatternSize = new Size(4, 11);
private final int mCornersSize = (int)(mPatternSize.width * mPatternSize.height);
private boolean mPatternWasFound = false;
private MatOfPoint2f mCorners = new MatOfPoint2f();
private List<Mat> mCornersBuffer = new ArrayList<>();
private boolean mIsCalibrated = false;
private Mat mCameraMatrix = new Mat();
private Mat mDistortionCoefficients = new Mat();
private int mFlags;
private double mRms;
private double mSquareSize = 0.0181;
private Size mImageSize;
public CameraCalibrator(int width, int height) {
mImageSize = new Size(width, height);
mFlags = Calib3d.CALIB_FIX_PRINCIPAL_POINT +
Calib3d.CALIB_ZERO_TANGENT_DIST +
Calib3d.CALIB_FIX_ASPECT_RATIO +
Calib3d.CALIB_FIX_K4 +
Calib3d.CALIB_FIX_K5;
Mat.eye(3, 3, CvType.CV_64FC1).copyTo(mCameraMatrix);
mCameraMatrix.put(0, 0, 1.0);
Mat.zeros(5, 1, CvType.CV_64FC1).copyTo(mDistortionCoefficients);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public void processFrame(Mat grayFrame, Mat rgbaFrame) {
findPattern(grayFrame);
renderFrame(rgbaFrame);
}
public void calibrate() {
ArrayList<Mat> rvecs = new ArrayList<>();
ArrayList<Mat> tvecs = new ArrayList<>();
Mat reprojectionErrors = new Mat();
ArrayList<Mat> objectPoints = new ArrayList<>();
objectPoints.add(Mat.zeros(mCornersSize, 1, CvType.CV_32FC3));
calcBoardCornerPositions(objectPoints.get(0));
for (int i = 1; i < mCornersBuffer.size(); i++) {
objectPoints.add(objectPoints.get(0));
}
Calib3d.calibrateCamera(objectPoints, mCornersBuffer, mImageSize,
mCameraMatrix, mDistortionCoefficients, rvecs, tvecs, mFlags);
mIsCalibrated = Core.checkRange(mCameraMatrix)
&& Core.checkRange(mDistortionCoefficients);
mRms = computeReprojectionErrors(objectPoints, rvecs, tvecs, reprojectionErrors);
Log.i(TAG, String.format("Average re-projection error: %f", mRms));
Log.i(TAG, "Camera matrix: " + mCameraMatrix.dump());
Log.i(TAG, "Distortion coefficients: " + mDistortionCoefficients.dump());
}
public void clearCorners() {
mCornersBuffer.clear();
}
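// Generates the object points for the 4x11 asymmetric circles grid: every
// other row is shifted by half a period, so x = (2*col + row%2) * squareSize,
// y = row * squareSize, z = 0.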
private void calcBoardCornerPositions(Mat corners) {
final int cn = 3;
float[] positions = new float[mCornersSize * cn];
for (int i = 0; i < mPatternSize.height; i++) {
for (int j = 0; j < mPatternSize.width * cn; j += cn) {
positions[(int) (i * mPatternSize.width * cn + j + 0)] =
(2 * (j / cn) + i % 2) * (float) mSquareSize;
positions[(int) (i * mPatternSize.width * cn + j + 1)] =
i * (float) mSquareSize;
positions[(int) (i * mPatternSize.width * cn + j + 2)] = 0;
}
}
corners.create(mCornersSize, 1, CvType.CV_32FC3);
corners.put(0, 0, positions);
}
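// Re-projects the object points with the estimated intrinsics/extrinsics,
// compares them against the detected corners, fills perViewErrors and
// returns the overall RMS re-projection error.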
private double computeReprojectionErrors(List<Mat> objectPoints,
List<Mat> rvecs, List<Mat> tvecs, Mat perViewErrors) {
MatOfPoint2f cornersProjected = new MatOfPoint2f();
double totalError = 0;
double error;
float[] viewErrors = new float[objectPoints.size()];
MatOfDouble distortionCoefficients = new MatOfDouble(mDistortionCoefficients);
int totalPoints = 0;
for (int i = 0; i < objectPoints.size(); i++) {
MatOfPoint3f points = new MatOfPoint3f(objectPoints.get(i));
Calib3d.projectPoints(points, rvecs.get(i), tvecs.get(i),
mCameraMatrix, distortionCoefficients, cornersProjected);
error = Core.norm(mCornersBuffer.get(i), cornersProjected, Core.NORM_L2);
int n = objectPoints.get(i).rows();
viewErrors[i] = (float) Math.sqrt(error * error / n);
totalError += error * error;
totalPoints += n;
}
perViewErrors.create(objectPoints.size(), 1, CvType.CV_32FC1);
perViewErrors.put(0, 0, viewErrors);
return Math.sqrt(totalError / totalPoints);
}
private void findPattern(Mat grayFrame) {
mPatternWasFound = Calib3d.findCirclesGrid(grayFrame, mPatternSize,
mCorners, Calib3d.CALIB_CB_ASYMMETRIC_GRID);
}
public void addCorners() {
if (mPatternWasFound) {
mCornersBuffer.add(mCorners.clone());
}
}
private void drawPoints(Mat rgbaFrame) {
Calib3d.drawChessboardCorners(rgbaFrame, mPatternSize, mCorners, mPatternWasFound);
}
private void renderFrame(Mat rgbaFrame) {
drawPoints(rgbaFrame);
Imgproc.putText(rgbaFrame, "Captured: " + mCornersBuffer.size(), new Point(rgbaFrame.cols() / 3 * 2, rgbaFrame.rows() * 0.1),
Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
}
public Mat getCameraMatrix() {
return mCameraMatrix;
}
public Mat getDistortionCoefficients() {
return mDistortionCoefficients;
}
public int getCornersBufferSize() {
return mCornersBuffer.size();
}
public double getAvgReprojectionError() {
return mRms;
}
public boolean isCalibrated() {
return mIsCalibrated;
}
public void setCalibrated() {
mIsCalibrated = true;
}
}

View File

@@ -0,0 +1,103 @@
package org.opencv.samples.cameracalibration;
import java.util.ArrayList;
import java.util.List;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Range;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import android.content.res.Resources;
abstract class FrameRender {
protected CameraCalibrator mCalibrator;
public abstract Mat render(CvCameraViewFrame inputFrame);
}
class PreviewFrameRender extends FrameRender {
@Override
public Mat render(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
}
class CalibrationFrameRender extends FrameRender {
public CalibrationFrameRender(CameraCalibrator calibrator) {
mCalibrator = calibrator;
}
@Override
public Mat render(CvCameraViewFrame inputFrame) {
Mat rgbaFrame = inputFrame.rgba();
Mat grayFrame = inputFrame.gray();
mCalibrator.processFrame(grayFrame, rgbaFrame);
return rgbaFrame;
}
}
class UndistortionFrameRender extends FrameRender {
public UndistortionFrameRender(CameraCalibrator calibrator) {
mCalibrator = calibrator;
}
@Override
public Mat render(CvCameraViewFrame inputFrame) {
Mat renderedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
Calib3d.undistort(inputFrame.rgba(), renderedFrame,
mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());
return renderedFrame;
}
}
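// Side-by-side comparison: the left half of the undistorted frame is pasted
// over the right half of the original frame, separated by a thin white divider.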
class ComparisonFrameRender extends FrameRender {
private int mWidth;
private int mHeight;
private Resources mResources;
public ComparisonFrameRender(CameraCalibrator calibrator, int width, int height, Resources resources) {
mCalibrator = calibrator;
mWidth = width;
mHeight = height;
mResources = resources;
}
@Override
public Mat render(CvCameraViewFrame inputFrame) {
Mat undistortedFrame = new Mat(inputFrame.rgba().size(), inputFrame.rgba().type());
Calib3d.undistort(inputFrame.rgba(), undistortedFrame,
mCalibrator.getCameraMatrix(), mCalibrator.getDistortionCoefficients());
Mat comparisonFrame = inputFrame.rgba();
undistortedFrame.colRange(new Range(0, mWidth / 2)).copyTo(comparisonFrame.colRange(new Range(mWidth / 2, mWidth)));
List<MatOfPoint> border = new ArrayList<MatOfPoint>();
final int shift = (int)(mWidth * 0.005);
border.add(new MatOfPoint(new Point(mWidth / 2 - shift, 0), new Point(mWidth / 2 + shift, 0),
new Point(mWidth / 2 + shift, mHeight), new Point(mWidth / 2 - shift, mHeight)));
Imgproc.fillPoly(comparisonFrame, border, new Scalar(255, 255, 255));
Imgproc.putText(comparisonFrame, mResources.getString(R.string.original), new Point(mWidth * 0.1, mHeight * 0.1),
Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
Imgproc.putText(comparisonFrame, mResources.getString(R.string.undistorted), new Point(mWidth * 0.6, mHeight * 0.1),
Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 0));
return comparisonFrame;
}
}
class OnCameraFrameRender {
private FrameRender mFrameRender;
public OnCameraFrameRender(FrameRender frameRender) {
mFrameRender = frameRender;
}
public Mat render(CvCameraViewFrame inputFrame) {
return mFrameRender.render(inputFrame);
}
}

View File

@@ -0,0 +1,6 @@
set(sample example-color-blob-detection)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.colorblobdetect"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.colorblobdetect"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon"
android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
<activity android:name="ColorBlobDetectionActivity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

Binary file not shown. (added, 2.0 KiB)

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/color_blob_detection_activity_surface_view" />
</LinearLayout>

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV Color Blob Detection</string>
</resources>

View File

@@ -0,0 +1,205 @@
package org.opencv.samples.colorblobdetect;
import java.util.Collections;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnTouchListener;
import android.view.SurfaceView;
public class ColorBlobDetectionActivity extends CameraActivity implements OnTouchListener, CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ColorBlobDetectionActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.color_blob_detection_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.color_blob_detection_activity_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 64);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
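// Samples a small region (up to 8x8 pixels, clamped at the borders) around
// the touch point, averages its HSV color and hands it to the detector as
// the reference color.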
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = convertScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE, 0, 0, Imgproc.INTER_LINEAR_EXACT);
mIsColorSelected = true;
touchedRegionRgba.release();
touchedRegionHsv.release();
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.i(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(4, 68, 4, 68);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
private Scalar convertScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}

View File

@@ -0,0 +1,108 @@
package org.opencv.samples.colorblobdetect;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
// Cache
Mat mPyrDownMat = new Mat();
Mat mHsvMat = new Mat();
Mat mMask = new Mat();
Mat mDilatedMask = new Mat();
Mat mHierarchy = new Mat();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
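// Builds an HSV range of +/- mColorRadius around the picked color (hue
// clamped to [0, 255]) and renders a hue spectrum strip for display.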
public void setHsvColor(Scalar hsvColor) {
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255;
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
for (int j = 0; j < maxH-minH; j++) {
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
spectrumHsv.put(0, j, tmp);
}
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
public Mat getSpectrum() {
return mSpectrum;
}
public void setMinContourArea(double area) {
mMinContourArea = area;
}
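// The frame is downsampled twice (to 1/4 linear size) before thresholding;
// the resulting contours are scaled back up by 4 at the end of this method.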
public void process(Mat rgbaImage) {
Imgproc.pyrDown(rgbaImage, mPyrDownMat);
Imgproc.pyrDown(mPyrDownMat, mPyrDownMat);
Imgproc.cvtColor(mPyrDownMat, mHsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Core.inRange(mHsvMat, mLowerBound, mUpperBound, mMask);
Imgproc.dilate(mMask, mDilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(mDilatedMask, contours, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
// Find max contour area
double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext()) {
MatOfPoint wrapper = each.next();
double area = Imgproc.contourArea(wrapper);
if (area > maxArea)
maxArea = area;
}
// Filter contours by area and resize to fit the original image size
mContours.clear();
each = contours.iterator();
while (each.hasNext()) {
MatOfPoint contour = each.next();
if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) {
Core.multiply(contour, new Scalar(4,4), contour);
mContours.add(contour);
}
}
}
public List<MatOfPoint> getContours() {
return mContours;
}
}

View File

@ -0,0 +1,12 @@
set(sample example-face-detection)
if(BUILD_FAT_JAVA_LIB)
set(native_deps opencv_java)
else()
set(native_deps opencv_objdetect)
endif()
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}" NATIVE_DEPS ${native_deps})
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@@ -0,0 +1,43 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.facedetect"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
externalNativeBuild {
cmake {
arguments "-DOpenCV_DIR=" + project(':opencv').projectDir + "/@ANDROID_PROJECT_JNI_PATH@"@OPENCV_ANDROID_CMAKE_EXTRA_ARGS@
targets "detection_based_tracker"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
externalNativeBuild {
cmake {
path '@ANDROID_SAMPLE_JNI_PATH@/CMakeLists.txt'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.facedetect"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon">
<activity android:name="FdActivity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

View File

@@ -0,0 +1,23 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#OPENCV_INSTALL_MODULES:=off
#OPENCV_LIB_TYPE:=SHARED
ifdef OPENCV_ANDROID_SDK
ifneq ("","$(wildcard $(OPENCV_ANDROID_SDK)/OpenCV.mk)")
include ${OPENCV_ANDROID_SDK}/OpenCV.mk
else
include ${OPENCV_ANDROID_SDK}/sdk/native/jni/OpenCV.mk
endif
else
include ../../sdk/native/jni/OpenCV.mk
endif
LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH)
LOCAL_LDLIBS += -llog -ldl
LOCAL_MODULE := detection_based_tracker
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,4 @@
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
APP_PLATFORM := android-8

View File

@@ -0,0 +1,15 @@
cmake_minimum_required(VERSION 3.6)
set(target detection_based_tracker)
project(${target} CXX)
set(ANDROID_OPENCV_COMPONENTS "opencv_java" CACHE STRING "")
message(STATUS "ANDROID_ABI=${ANDROID_ABI}")
find_package(OpenCV REQUIRED COMPONENTS ${ANDROID_OPENCV_COMPONENTS})
file(GLOB srcs *.cpp *.c)
file(GLOB hdrs *.hpp *.h)
include_directories("${CMAKE_CURRENT_LIST_DIR}")
add_library(${target} SHARED ${srcs} ${hdrs})
target_link_libraries(${target} ${ANDROID_OPENCV_COMPONENTS})

View File

@@ -0,0 +1,251 @@
#include <DetectionBasedTracker_jni.h>
#include <opencv2/core.hpp>
#include <opencv2/objdetect.hpp>
#include <string>
#include <vector>
#include <android/log.h>
#define LOG_TAG "FaceDetection/DetectionBasedTracker"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
using namespace std;
using namespace cv;
inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
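// Adapts cv::CascadeClassifier to the DetectionBasedTracker::IDetector
// interface so the tracker can use the same cascade for detection and tracking.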
class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
{
public:
CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
IDetector(),
Detector(detector)
{
LOGD("CascadeDetectorAdapter::Detect::Detect");
CV_Assert(detector);
}
void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
{
LOGD("CascadeDetectorAdapter::Detect: begin");
LOGD("CascadeDetectorAdapter::Detect: scaleFactor=%.2f, minNeighbours=%d, minObjSize=(%dx%d), maxObjSize=(%dx%d)", scaleFactor, minNeighbours, minObjSize.width, minObjSize.height, maxObjSize.width, maxObjSize.height);
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
LOGD("CascadeDetectorAdapter::Detect: end");
}
virtual ~CascadeDetectorAdapter()
{
LOGD("CascadeDetectorAdapter::Detect::~Detect");
}
private:
CascadeDetectorAdapter();
cv::Ptr<cv::CascadeClassifier> Detector;
};
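// Bundles the two cascade adapters (full-frame detection and tracking) with
// the DetectionBasedTracker that drives them; a pointer to this struct is
// handed back to Java as the jlong handle.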
struct DetectorAgregator
{
cv::Ptr<CascadeDetectorAdapter> mainDetector;
cv::Ptr<CascadeDetectorAdapter> trackingDetector;
cv::Ptr<DetectionBasedTracker> tracker;
DetectorAgregator(cv::Ptr<CascadeDetectorAdapter>& _mainDetector, cv::Ptr<CascadeDetectorAdapter>& _trackingDetector):
mainDetector(_mainDetector),
trackingDetector(_trackingDetector)
{
CV_Assert(_mainDetector);
CV_Assert(_trackingDetector);
DetectionBasedTracker::Parameters DetectorParams;
tracker = makePtr<DetectionBasedTracker>(mainDetector, trackingDetector, DetectorParams);
}
};
JNIEXPORT jlong JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject
(JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject enter");
const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
string stdFileName(jnamestr);
jlong result = 0;
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject");
try
{
cv::Ptr<CascadeDetectorAdapter> mainDetector = makePtr<CascadeDetectorAdapter>(
makePtr<CascadeClassifier>(stdFileName));
cv::Ptr<CascadeDetectorAdapter> trackingDetector = makePtr<CascadeDetectorAdapter>(
makePtr<CascadeClassifier>(stdFileName));
result = (jlong)new DetectorAgregator(mainDetector, trackingDetector);
if (faceSize > 0)
{
mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(const cv::Exception& e)
{
LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeCreateObject caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeCreateObject()");
return 0;
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject exit");
return result;
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject");
try
{
if(thiz != 0)
{
((DetectorAgregator*)thiz)->tracker->stop();
delete (DetectorAgregator*)thiz;
}
}
catch(const cv::Exception& e)
{
LOGD("nativeestroyObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDestroyObject caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeDestroyObject()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart");
try
{
((DetectorAgregator*)thiz)->tracker->run();
}
catch(const cv::Exception& e)
{
LOGD("nativeStart caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStart caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeStart()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop
(JNIEnv * jenv, jclass, jlong thiz)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop");
try
{
((DetectorAgregator*)thiz)->tracker->stop();
}
catch(const cv::Exception& e)
{
LOGD("nativeStop caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeStop caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeStop()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop exit");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv * jenv, jclass, jlong thiz, jint faceSize)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize -- BEGIN");
try
{
if (faceSize > 0)
{
((DetectorAgregator*)thiz)->mainDetector->setMinObjectSize(Size(faceSize, faceSize));
//((DetectorAgregator*)thiz)->trackingDetector->setMinObjectSize(Size(faceSize, faceSize));
}
}
catch(const cv::Exception& e)
{
LOGD("nativeStop caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeSetFaceSize caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code of DetectionBasedTracker.nativeSetFaceSize()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize -- END");
}
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
{
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect");
try
{
vector<Rect> RectFaces;
((DetectorAgregator*)thiz)->tracker->process(*((Mat*)imageGray));
((DetectorAgregator*)thiz)->tracker->getObjects(RectFaces);
*((Mat*)faces) = Mat(RectFaces, true);
}
catch(const cv::Exception& e)
{
LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je)
je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what());
}
catch (...)
{
LOGD("nativeDetect caught unknown exception");
jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code DetectionBasedTracker.nativeDetect()");
}
LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect END");
}

View File

@ -0,0 +1,61 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_opencv_samples_fd_DetectionBasedTracker */
#ifndef _Included_org_opencv_samples_fd_DetectionBasedTracker
#define _Included_org_opencv_samples_fd_DetectionBasedTracker
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeCreateObject
* Signature: (Ljava/lang/String;I)J
*/
JNIEXPORT jlong JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeCreateObject
(JNIEnv *, jclass, jstring, jint);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeDestroyObject
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDestroyObject
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeStart
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStart
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeStop
* Signature: (J)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeStop
(JNIEnv *, jclass, jlong);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeSetFaceSize
* Signature: (JI)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv *, jclass, jlong, jint);
/*
* Class: org_opencv_samples_fd_DetectionBasedTracker
* Method: nativeDetect
* Signature: (JJJ)V
*/
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv *, jclass, jlong, jlong, jlong);
#ifdef __cplusplus
}
#endif
#endif

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/fd_activity_surface_view" />
</LinearLayout>

File diff suppressed because it is too large

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV Face Detection</string>
</resources>

View File

@ -0,0 +1,41 @@
package org.opencv.samples.facedetect;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
public class DetectionBasedTracker
{
public DetectionBasedTracker(String cascadeName, int minFaceSize) {
mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
}
public void start() {
nativeStart(mNativeObj);
}
public void stop() {
nativeStop(mNativeObj);
}
public void setMinFaceSize(int size) {
nativeSetFaceSize(mNativeObj, size);
}
public void detect(Mat imageGray, MatOfRect faces) {
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
}
public void release() {
nativeDestroyObject(mNativeObj);
mNativeObj = 0;
}
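// Opaque handle to the native DetectorAgregator; owned by the C++ side and freed in release().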
private long mNativeObj = 0;
private static native long nativeCreateObject(String cascadeName, int minFaceSize);
private static native void nativeDestroyObject(long thiz);
private static native void nativeStart(long thiz);
private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces);
}

View File

@ -0,0 +1,259 @@
package org.opencv.samples.facedetect;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
public class FdActivity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private MenuItem mItemFace50;
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private MenuItem mItemType;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
private String[] mDetectorName;
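// Minimum face size, given as a fraction of the frame height and converted to pixels on the first frame.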
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detection_based_tracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public FdActivity() {
mDetectorName = new String[2];
mDetectorName[JAVA_DETECTOR] = "Java";
mDetectorName[NATIVE_DETECTOR] = "Native (tracking)";
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
return mRgba;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemFace50 = menu.add("Face size 50%");
mItemFace40 = menu.add("Face size 40%");
mItemFace30 = menu.add("Face size 30%");
mItemFace20 = menu.add("Face size 20%");
mItemType = menu.add(mDetectorName[mDetectorType]);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemFace50)
setMinFaceSize(0.5f);
else if (item == mItemFace40)
setMinFaceSize(0.4f);
else if (item == mItemFace30)
setMinFaceSize(0.3f);
else if (item == mItemFace20)
setMinFaceSize(0.2f);
else if (item == mItemType) {
int tmpDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[tmpDetectorType]);
setDetectorType(tmpDetectorType);
}
return true;
}
private void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
mAbsoluteFaceSize = 0;
}
private void setDetectorType(int type) {
if (mDetectorType != type) {
mDetectorType = type;
if (type == NATIVE_DETECTOR) {
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
} else {
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
}
}

View File

@ -0,0 +1,6 @@
set(sample example-image-manipulations)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.imagemanipulations"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.imagemanipulations"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon">
<activity android:name="ImageManipulationsActivity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/image_manipulations_activity_surface_view" />
</LinearLayout>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV Image Manipulations</string>
</resources>

View File

@ -0,0 +1,327 @@
package org.opencv.samples.imagemanipulations;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.imgproc.Imgproc;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
public class ImageManipulationsActivity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_SEPIA = 3;
public static final int VIEW_MODE_SOBEL = 4;
public static final int VIEW_MODE_ZOOM = 5;
public static final int VIEW_MODE_PIXELIZE = 6;
public static final int VIEW_MODE_POSTERIZE = 7;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewHist;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewSepia;
private MenuItem mItemPreviewSobel;
private MenuItem mItemPreviewZoom;
private MenuItem mItemPreviewPixelize;
private MenuItem mItemPreviewPosterize;
private CameraBridgeViewBase mOpenCvCameraView;
private Size mSize0;
private Mat mIntermediateMat;
private Mat mMat0;
private MatOfInt mChannels[];
private MatOfInt mHistSize;
private int mHistSizeNum = 25;
private MatOfFloat mRanges;
private Scalar mColorsRGB[];
private Scalar mColorsHue[];
private Scalar mWhite;
private Point mP1;
private Point mP2;
private float mBuff[];
private Mat mSepiaKernel;
public static int viewMode = VIEW_MODE_RGBA;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public ImageManipulationsActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.image_manipulations_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.image_manipulations_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewHist = menu.add("Histograms");
mItemPreviewCanny = menu.add("Canny");
mItemPreviewSepia = menu.add("Sepia");
mItemPreviewSobel = menu.add("Sobel");
mItemPreviewZoom = menu.add("Zoom");
mItemPreviewPixelize = menu.add("Pixelize");
mItemPreviewPosterize = menu.add("Posterize");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
viewMode = VIEW_MODE_RGBA;
else if (item == mItemPreviewHist)
viewMode = VIEW_MODE_HIST;
else if (item == mItemPreviewCanny)
viewMode = VIEW_MODE_CANNY;
else if (item == mItemPreviewSepia)
viewMode = VIEW_MODE_SEPIA;
else if (item == mItemPreviewSobel)
viewMode = VIEW_MODE_SOBEL;
else if (item == mItemPreviewZoom)
viewMode = VIEW_MODE_ZOOM;
else if (item == mItemPreviewPixelize)
viewMode = VIEW_MODE_PIXELIZE;
else if (item == mItemPreviewPosterize)
viewMode = VIEW_MODE_POSTERIZE;
return true;
}
public void onCameraViewStarted(int width, int height) {
mIntermediateMat = new Mat();
mSize0 = new Size();
mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
mBuff = new float[mHistSizeNum];
mHistSize = new MatOfInt(mHistSizeNum);
mRanges = new MatOfFloat(0f, 256f);
mMat0 = new Mat();
mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
mColorsHue = new Scalar[] {
new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
};
mWhite = Scalar.all(255);
mP1 = new Point();
mP2 = new Point();
// Fill sepia kernel
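// Core.transform multiplies every RGBA pixel by this 4x4 matrix, so each output channel is a weighted sum of the input channels.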
mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
mSepiaKernel.put(0, 0, /* R */0.189f, 0.769f, 0.393f, 0f);
mSepiaKernel.put(1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
mSepiaKernel.put(2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
mSepiaKernel.put(3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);
}
public void onCameraViewStopped() {
// Explicitly deallocate Mats
if (mIntermediateMat != null)
mIntermediateMat.release();
mIntermediateMat = null;
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat rgba = inputFrame.rgba();
Size sizeRgba = rgba.size();
Mat rgbaInnerWindow;
int rows = (int) sizeRgba.height;
int cols = (int) sizeRgba.width;
int left = cols / 8;
int top = rows / 8;
int width = cols * 3 / 4;
int height = rows * 3 / 4;
switch (ImageManipulationsActivity.viewMode) {
case ImageManipulationsActivity.VIEW_MODE_RGBA:
break;
case ImageManipulationsActivity.VIEW_MODE_HIST:
Mat hist = new Mat();
int thickness = (int) (sizeRgba.width / (mHistSizeNum + 10) / 5);
if(thickness > 5) thickness = 5;
int offset = (int) ((sizeRgba.width - (5*mHistSizeNum + 4*10)*thickness)/2);
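// Five histogram groups are drawn side by side: R, G, B, then Value and Hue from the HSV image.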
// RGB
for(int c=0; c<3; c++) {
Imgproc.calcHist(Arrays.asList(rgba), mChannels[c], mMat0, hist, mHistSize, mRanges);
Core.normalize(hist, hist, sizeRgba.height/2, 0, Core.NORM_INF);
hist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thickness;
mP1.y = sizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Imgproc.line(rgba, mP1, mP2, mColorsRGB[c], thickness);
}
}
// Value and Hue
Imgproc.cvtColor(rgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
// Value
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, hist, mHistSize, mRanges);
Core.normalize(hist, hist, sizeRgba.height/2, 0, Core.NORM_INF);
hist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = sizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Imgproc.line(rgba, mP1, mP2, mWhite, thickness);
}
// Hue
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, hist, mHistSize, mRanges);
Core.normalize(hist, hist, sizeRgba.height/2, 0, Core.NORM_INF);
hist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = sizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Imgproc.line(rgba, mP1, mP2, mColorsHue[h], thickness);
}
break;
case ImageManipulationsActivity.VIEW_MODE_CANNY:
rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
rgbaInnerWindow.release();
break;
case ImageManipulationsActivity.VIEW_MODE_SOBEL:
Mat gray = inputFrame.gray();
Mat grayInnerWindow = gray.submat(top, top + height, left, left + width);
rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
Imgproc.Sobel(grayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
grayInnerWindow.release();
rgbaInnerWindow.release();
break;
case ImageManipulationsActivity.VIEW_MODE_SEPIA:
rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
Core.transform(rgbaInnerWindow, rgbaInnerWindow, mSepiaKernel);
rgbaInnerWindow.release();
break;
case ImageManipulationsActivity.VIEW_MODE_ZOOM:
Mat zoomCorner = rgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
Mat mZoomWindow = rgba.submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
Imgproc.resize(mZoomWindow, zoomCorner, zoomCorner.size(), 0, 0, Imgproc.INTER_LINEAR_EXACT);
Size wsize = mZoomWindow.size();
Imgproc.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
zoomCorner.release();
mZoomWindow.release();
break;
case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
Imgproc.resize(rgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
Imgproc.resize(mIntermediateMat, rgbaInnerWindow, rgbaInnerWindow.size(), 0., 0., Imgproc.INTER_NEAREST);
rgbaInnerWindow.release();
break;
case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
/*
Imgproc.cvtColor(rgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
Imgproc.cvtColor(mIntermediateMat, rgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
*/
rgbaInnerWindow = rgba.submat(top, top + height, left, left + width);
Imgproc.Canny(rgbaInnerWindow, mIntermediateMat, 80, 90);
rgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
Core.convertScaleAbs(rgbaInnerWindow, mIntermediateMat, 1./16, 0);
Core.convertScaleAbs(mIntermediateMat, rgbaInnerWindow, 16, 0);
rgbaInnerWindow.release();
break;
}
return rgba;
}
}

View File

@ -0,0 +1,6 @@
set(sample example-mobilenet-objdetect)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.opencv_mobilenet"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 341
versionName "3.41"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.opencv_mobilenet"
>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AppCompat.NoActionBar"> <!--Full screen mode-->
<activity android:name=".MainActivity"
android:screenOrientation="landscape"> <!--Screen orientation-->
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<!--Allow to use a camera-->
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="org.opencv.samples.opencv_mobilenet.MainActivity">
<org.opencv.android.JavaCameraView
android:id="@+id/CameraView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:visibility="visible" />
</FrameLayout>

View File

@ -0,0 +1,3 @@
<resources>
<string name="app_name">opencv_mobilenet</string>
</resources>

View File

@ -0,0 +1,164 @@
package org.opencv.samples.opencv_mobilenet;
import android.content.Context;
import android.content.res.AssetManager;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.dnn.Net;
import org.opencv.dnn.Dnn;
import org.opencv.imgproc.Imgproc;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class MainActivity extends AppCompatActivity implements CvCameraViewListener2 {
// Initialize OpenCV manager.
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
break;
}
default: {
super.onManagerConnected(status);
break;
}
}
}
};
@Override
public void onResume() {
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, mLoaderCallback);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Set up camera listener.
mOpenCvCameraView = (CameraBridgeViewBase)findViewById(R.id.CameraView);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
// Load a network.
public void onCameraViewStarted(int width, int height) {
String proto = getPath("MobileNetSSD_deploy.prototxt", this);
String weights = getPath("MobileNetSSD_deploy.caffemodel", this);
net = Dnn.readNetFromCaffe(proto, weights);
Log.i(TAG, "Network loaded successfully");
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
final int IN_WIDTH = 300;
final int IN_HEIGHT = 300;
final float WH_RATIO = (float)IN_WIDTH / IN_HEIGHT;
final double IN_SCALE_FACTOR = 0.007843;
final double MEAN_VAL = 127.5;
final double THRESHOLD = 0.2;
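// MobileNet-SSD preprocessing: blobFromImage subtracts the mean (127.5) and scales by 1/127.5, mapping inputs into [-1, 1].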
// Get a new frame
Mat frame = inputFrame.rgba();
Imgproc.cvtColor(frame, frame, Imgproc.COLOR_RGBA2RGB);
// Forward image through network.
Mat blob = Dnn.blobFromImage(frame, IN_SCALE_FACTOR,
new Size(IN_WIDTH, IN_HEIGHT),
new Scalar(MEAN_VAL, MEAN_VAL, MEAN_VAL), /*swapRB*/false, /*crop*/false);
net.setInput(blob);
Mat detections = net.forward();
int cols = frame.cols();
int rows = frame.rows();
detections = detections.reshape(1, (int)detections.total() / 7);
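// The output blob is [1, 1, N, 7]; after the reshape each row holds [batchId, classId, confidence, left, top, right, bottom].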
for (int i = 0; i < detections.rows(); ++i) {
double confidence = detections.get(i, 2)[0];
if (confidence > THRESHOLD) {
int classId = (int)detections.get(i, 1)[0];
int left = (int)(detections.get(i, 3)[0] * cols);
int top = (int)(detections.get(i, 4)[0] * rows);
int right = (int)(detections.get(i, 5)[0] * cols);
int bottom = (int)(detections.get(i, 6)[0] * rows);
// Draw rectangle around detected object.
Imgproc.rectangle(frame, new Point(left, top), new Point(right, bottom),
new Scalar(0, 255, 0));
String label = classNames[classId] + ": " + confidence;
int[] baseLine = new int[1];
Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
// Draw background for label.
Imgproc.rectangle(frame, new Point(left, top - labelSize.height),
new Point(left + labelSize.width, top + baseLine[0]),
new Scalar(255, 255, 255), Imgproc.FILLED);
// Write class name and confidence.
Imgproc.putText(frame, label, new Point(left, top),
Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
}
}
return frame;
}
public void onCameraViewStopped() {}
// Copy an asset file to internal storage and return its absolute path.
private static String getPath(String file, Context context) {
AssetManager assetManager = context.getAssets();
BufferedInputStream inputStream = null;
try {
// Read data from assets.
inputStream = new BufferedInputStream(assetManager.open(file));
byte[] data = new byte[inputStream.available()];
inputStream.read(data);
inputStream.close();
// Create copy file in storage.
File outFile = new File(context.getFilesDir(), file);
FileOutputStream os = new FileOutputStream(outFile);
os.write(data);
os.close();
// Return a path to the copied file, which can be opened like any regular file.
return outFile.getAbsolutePath();
} catch (IOException ex) {
Log.i(TAG, "Failed to upload a file");
}
return "";
}
private static final String TAG = "OpenCV/Sample/MobileNet";
private static final String[] classNames = {"background",
"aeroplane", "bicycle", "bird", "boat",
"bottle", "bus", "car", "cat", "chair",
"cow", "diningtable", "dog", "horse",
"motorbike", "person", "pottedplant",
"sheep", "sofa", "train", "tvmonitor"};
private Net net;
private CameraBridgeViewBase mOpenCvCameraView;
}

View File

@ -0,0 +1,6 @@
set(sample example-tutorial-1-camerapreview)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.tutorial1"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.tutorial1"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon"
android:theme="@android:style/Theme.NoTitleBar.Fullscreen" >
<activity android:name="Tutorial1Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -0,0 +1,15 @@
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:opencv="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:visibility="gone"
android:id="@+id/tutorial1_activity_java_surface_view"
opencv:show_fps="true"
opencv:camera_id="any" />
</FrameLayout>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV T1 Preview</string>
</resources>

View File

@ -0,0 +1,106 @@
package org.opencv.samples.tutorial1;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import android.os.Bundle;
import android.util.Log;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.WindowManager;
import java.util.Collections;
import java.util.List;
public class Tutorial1Activity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private CameraBridgeViewBase mOpenCvCameraView;
private boolean mIsJavaCamera = true;
private MenuItem mItemSwitchCamera = null;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial1Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
}

View File

@ -0,0 +1,12 @@
set(sample example-tutorial-2-mixedprocessing)
if(BUILD_FAT_JAVA_LIB)
set(native_deps opencv_java)
else()
set(native_deps opencv_features2d)
endif()
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}" NATIVE_DEPS ${native_deps})
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,43 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.tutorial2"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
externalNativeBuild {
cmake {
arguments "-DOpenCV_DIR=" + project(':opencv').projectDir + "/@ANDROID_PROJECT_JNI_PATH@"@OPENCV_ANDROID_CMAKE_EXTRA_ARGS@
targets "mixed_sample"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
externalNativeBuild {
cmake {
path '@ANDROID_SAMPLE_JNI_PATH@/CMakeLists.txt'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,34 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.tutorial2"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon">
<activity android:name="Tutorial2Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
</manifest>

View File

@ -0,0 +1,19 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
ifdef OPENCV_ANDROID_SDK
ifneq ("","$(wildcard $(OPENCV_ANDROID_SDK)/OpenCV.mk)")
include ${OPENCV_ANDROID_SDK}/OpenCV.mk
else
include ${OPENCV_ANDROID_SDK}/sdk/native/jni/OpenCV.mk
endif
else
include ../../sdk/native/jni/OpenCV.mk
endif
LOCAL_MODULE := mixed_sample
LOCAL_SRC_FILES := jni_part.cpp
LOCAL_LDLIBS += -llog -ldl
include $(BUILD_SHARED_LIBRARY)

View File

@ -0,0 +1,4 @@
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
APP_PLATFORM := android-8

View File

@ -0,0 +1,15 @@
cmake_minimum_required(VERSION 3.6)
set(target mixed_sample)
project(${target} CXX)
set(ANDROID_OPENCV_COMPONENTS "opencv_java" CACHE STRING "")
message(STATUS "ANDROID_ABI=${ANDROID_ABI}")
find_package(OpenCV REQUIRED COMPONENTS ${ANDROID_OPENCV_COMPONENTS})
file(GLOB srcs *.cpp *.c)
file(GLOB hdrs *.hpp *.h)
include_directories("${CMAKE_CURRENT_LIST_DIR}")
add_library(${target} SHARED ${srcs} ${hdrs})
target_link_libraries(${target} ${ANDROID_OPENCV_COMPONENTS})

View File

@ -0,0 +1,27 @@
#include <jni.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <vector>
using namespace std;
using namespace cv;
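// FindFeatures runs a FAST detector on the gray frame and draws each keypoint onto the RGBA preview.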
extern "C" {
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba);
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
{
Mat& mGr = *(Mat*)addrGray;
Mat& mRgb = *(Mat*)addrRgba;
vector<KeyPoint> v;
Ptr<FeatureDetector> detector = FastFeatureDetector::create(50);
detector->detect(mGr, v);
for( unsigned int i = 0; i < v.size(); i++ )
{
const KeyPoint& kp = v[i];
circle(mRgb, Point(kp.pt.x, kp.pt.y), 10, Scalar(255,0,0,255));
}
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial2_activity_surface_view" />
</LinearLayout>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV T2 Mixed Processing</string>
</resources>

View File

@ -0,0 +1,181 @@
package org.opencv.samples.tutorial2;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.imgproc.Imgproc;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.WindowManager;
import java.util.Collections;
import java.util.List;
public class Tutorial2Activity extends CameraActivity implements CvCameraViewListener2 {
private static final String TAG = "OCVSample::Activity";
private static final int VIEW_MODE_RGBA = 0;
private static final int VIEW_MODE_GRAY = 1;
private static final int VIEW_MODE_CANNY = 2;
private static final int VIEW_MODE_FEATURES = 5;
private int mViewMode;
private Mat mRgba;
private Mat mIntermediateMat;
private Mat mGray;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private CameraBridgeViewBase mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("mixed_sample");
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial2Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial2_surface_view);
mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial2_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny");
mItemPreviewFeatures = menu.add("Find features");
return true;
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
mGray = new Mat(height, width, CvType.CV_8UC1);
}
public void onCameraViewStopped() {
mRgba.release();
mGray.release();
mIntermediateMat.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
final int viewMode = mViewMode;
switch (viewMode) {
case VIEW_MODE_GRAY:
// input frame has gray scale format
Imgproc.cvtColor(inputFrame.gray(), mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case VIEW_MODE_RGBA:
// input frame has RBGA format
mRgba = inputFrame.rgba();
break;
case VIEW_MODE_CANNY:
// input frame has gray scale format
mRgba = inputFrame.rgba();
Imgproc.Canny(inputFrame.gray(), mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case VIEW_MODE_FEATURES:
// input frame has RGBA format
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
FindFeatures(mGray.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}
return mRgba;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA) {
mViewMode = VIEW_MODE_RGBA;
} else if (item == mItemPreviewGray) {
mViewMode = VIEW_MODE_GRAY;
} else if (item == mItemPreviewCanny) {
mViewMode = VIEW_MODE_CANNY;
} else if (item == mItemPreviewFeatures) {
mViewMode = VIEW_MODE_FEATURES;
}
return true;
}
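// Implemented in jni/jni_part.cpp: detects FAST keypoints on the gray Mat and draws them into the RGBA Mat.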
public native void FindFeatures(long matAddrGr, long matAddrRgba);
}

View File

@ -0,0 +1,6 @@
set(sample example-tutorial-3-cameracontrol)
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}" SDK_TARGET 11 "${ANDROID_SDK_TARGET}")
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,31 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.tutorial3"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,35 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.tutorial3"
>
<application
android:label="@string/app_name"
android:icon="@drawable/icon">
<activity android:name="Tutorial3Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>
<uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front" android:required="false"/>
<uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
</manifest>

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -0,0 +1,12 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.samples.tutorial3.Tutorial3View
android:layout_width="match_parent"
android:layout_height="match_parent"
android:visibility="gone"
android:id="@+id/tutorial3_activity_java_surface_view" />
</LinearLayout>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV T3 Camera Control</string>
</resources>

View File

@ -0,0 +1,202 @@
package org.opencv.samples.tutorial3;
import java.text.SimpleDateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import android.annotation.SuppressLint;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.widget.Toast;
public class Tutorial3Activity extends CameraActivity implements CvCameraViewListener2, OnTouchListener {
private static final String TAG = "OCVSample::Activity";
private Tutorial3View mOpenCvCameraView;
private List<Size> mResolutionList;
private Menu mMenu;
private boolean mCameraStarted = false;
private boolean mMenuItemsCreated = false;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(Tutorial3Activity.this);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Tutorial3Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial3_surface_view);
mOpenCvCameraView = (Tutorial3View) findViewById(R.id.tutorial3_activity_java_surface_view);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onResume()
{
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
} else {
Log.d(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return Collections.singletonList(mOpenCvCameraView);
}
@Override
public void onDestroy() {
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
public void onCameraViewStarted(int width, int height) {
mCameraStarted = true;
setupMenuItems();
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
return inputFrame.rgba();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
mMenu = menu;
setupMenuItems();
return true;
}
private void setupMenuItems() {
if (mMenu == null || !mCameraStarted || mMenuItemsCreated) {
return;
}
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return;
}
mColorEffectsMenu = mMenu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
for (String effect: effects) {
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, effect);
idx++;
}
mResolutionMenu = mMenu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
idx = 0;
for (Size resolution: mResolutionList) {
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
        resolution.width + "x" + resolution.height);
idx++;
}
mMenuItemsCreated = true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
}
else if (item.getGroupId() == 2)
{
int id = item.getItemId();
Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = resolution.width + "x" + resolution.height;
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
@SuppressLint("SimpleDateFormat")
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
return false;
}
}

View File

@ -0,0 +1,87 @@
package org.opencv.samples.tutorial3;
import java.io.FileOutputStream;
import java.util.List;
import org.opencv.android.JavaCameraView;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
public class Tutorial3View extends JavaCameraView implements PictureCallback {
private static final String TAG = "Sample::Tutorial3View";
private String mPictureFileName;
public Tutorial3View(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear the buffers so that mCamera.takePicture() does not get stuck because of a memory issue.
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try (FileOutputStream fos = new FileOutputStream(mPictureFileName)) {
    fos.write(data);
} catch (java.io.IOException e) {
    Log.e(TAG, "Failed to save picture to " + mPictureFileName, e);
}
}
}
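onPictureTaken() above streams the JPEG straight to a path under Environment.getExternalStorageDirectory(), which scoped storage blocks on Android 10 (API 29) and later. A sketch of a MediaStore-based variant; the helper class is hypothetical and not part of this sample:

import android.content.ContentResolver;
import android.content.ContentValues;
import android.net.Uri;
import android.provider.MediaStore;
import java.io.IOException;
import java.io.OutputStream;

// Hypothetical helper: insert the picture into MediaStore instead of writing a raw path.
class MediaStoreSaver {
    static void saveJpeg(ContentResolver resolver, String displayName, byte[] data) throws IOException {
        ContentValues values = new ContentValues();
        values.put(MediaStore.Images.Media.DISPLAY_NAME, displayName);
        values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
        Uri uri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
        if (uri == null) throw new IOException("MediaStore insert failed");
        try (OutputStream os = resolver.openOutputStream(uri)) {
            os.write(data); // stream is non-null for a freshly inserted MediaStore row
        }
    }
}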

View File

@ -0,0 +1,27 @@
set(sample example-tutorial-4-opencl)
if(NOT DEFINED ANDROID_OPENCL_SDK)
message(STATUS "Sample ${sample} is disabled, because ANDROID_OPENCL_SDK is not specified")
return()
endif()
if(ANDROID_NATIVE_API_LEVEL LESS 14)
message(STATUS "Sample ${sample} is disabled, because ANDROID_NATIVE_API_LEVEL < 14")
return()
endif()
if(BUILD_FAT_JAVA_LIB)
set(native_deps opencv_java)
else()
set(native_deps opencv_imgproc)
endif()
include_directories(${ANDROID_OPENCL_SDK}/include)
link_directories(${ANDROID_OPENCL_SDK}/lib/${ANDROID_NDK_ABI_NAME})
add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}"
LIBRARY_DEPS "${OPENCV_ANDROID_LIB_DIR}"
SDK_TARGET 21 "${ANDROID_SDK_TARGET}"
NATIVE_DEPS ${native_deps} -lGLESv2 -lEGL -lOpenCL
COPY_LIBS YES
)
if(TARGET ${sample})
add_dependencies(opencv_android_examples ${sample})
endif()

View File

@ -0,0 +1,43 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion @ANDROID_COMPILE_SDK_VERSION@
defaultConfig {
applicationId "org.opencv.samples.tutorial4"
minSdkVersion @ANDROID_MIN_SDK_VERSION@
targetSdkVersion @ANDROID_TARGET_SDK_VERSION@
versionCode 301
versionName "3.01"
externalNativeBuild {
cmake {
arguments "-DOpenCV_DIR=" + project(':opencv').projectDir + "/@ANDROID_PROJECT_JNI_PATH@"@OPENCV_ANDROID_CMAKE_EXTRA_ARGS@
targets "JNIpart"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
aidl.srcDirs = @ANDROID_SAMPLE_JAVA_PATH@
res.srcDirs = @ANDROID_SAMPLE_RES_PATH@
manifest.srcFile '@ANDROID_SAMPLE_MANIFEST_PATH@'
}
}
externalNativeBuild {
cmake {
path '@ANDROID_SAMPLE_JNI_PATH@/CMakeLists.txt'
}
}
}
dependencies {
//implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':opencv')
}

View File

@ -0,0 +1,31 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.tutorial4"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="14"
android:targetSdkVersion="21" />
<uses-feature android:glEsVersion="0x00020000" android:required="true"/>
<uses-feature android:name="android.hardware.camera"/>
<uses-feature android:name="android.hardware.camera2" android:required="false"/>
<uses-permission android:name="android.permission.CAMERA"/>
<application
android:allowBackup="true"
android:icon="@drawable/icon">
<activity
android:name=".Tutorial4Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@ -0,0 +1,27 @@
LOCAL_PATH := $(call my-dir)
# add OpenCV
include $(CLEAR_VARS)
OPENCV_INSTALL_MODULES:=on
ifdef OPENCV_ANDROID_SDK
ifneq ("","$(wildcard $(OPENCV_ANDROID_SDK)/OpenCV.mk)")
include ${OPENCV_ANDROID_SDK}/OpenCV.mk
else
include ${OPENCV_ANDROID_SDK}/sdk/native/jni/OpenCV.mk
endif
else
include ../../sdk/native/jni/OpenCV.mk
endif
ifndef OPENCL_SDK
$(error Set OPENCL_SDK to the Android OpenCL SDK location)
endif
# add OpenCL
LOCAL_C_INCLUDES += $(OPENCL_SDK)/include
LOCAL_LDLIBS += -L$(OPENCL_SDK)/lib/$(TARGET_ARCH_ABI) -lOpenCL
LOCAL_MODULE := JNIpart
LOCAL_SRC_FILES := jni.c CLprocessor.cpp
LOCAL_LDLIBS += -llog -lGLESv2 -lEGL
include $(BUILD_SHARED_LIBRARY)

View File

@ -0,0 +1,4 @@
APP_STL := gnustl_static
APP_GNUSTL_FORCE_CPP_FEATURES := exceptions rtti
APP_ABI := armeabi-v7a
APP_PLATFORM := android-14

View File

@ -0,0 +1,283 @@
#define __CL_ENABLE_EXCEPTIONS
#define CL_USE_DEPRECATED_OPENCL_1_1_APIS /*let's give a chance for OpenCL 1.1 devices*/
#include <CL/cl.hpp>
#include <GLES2/gl2.h>
#include <EGL/egl.h>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/core/ocl.hpp>
#include "common.hpp"
const char oclProgB2B[] = "// clBuffer to clBuffer";
const char oclProgI2B[] = "// clImage to clBuffer";
const char oclProgI2I[] = \
"__constant sampler_t sampler = CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE | CLK_FILTER_NEAREST; \n" \
"\n" \
"__kernel void Laplacian( \n" \
" __read_only image2d_t imgIn, \n" \
" __write_only image2d_t imgOut \n" \
" ) { \n" \
" \n" \
" const int2 pos = {get_global_id(0), get_global_id(1)}; \n" \
" \n" \
" float4 sum = (float4) 0.0f; \n" \
" sum += read_imagef(imgIn, sampler, pos + (int2)(-1,0)); \n" \
" sum += read_imagef(imgIn, sampler, pos + (int2)(+1,0)); \n" \
" sum += read_imagef(imgIn, sampler, pos + (int2)(0,-1)); \n" \
" sum += read_imagef(imgIn, sampler, pos + (int2)(0,+1)); \n" \
" sum -= read_imagef(imgIn, sampler, pos) * 4; \n" \
" \n" \
" write_imagef(imgOut, pos, sum*10); \n" \
"} \n";
void dumpCLinfo()
{
LOGD("*** OpenCL info ***");
try
{
std::vector<cl::Platform> platforms;
cl::Platform::get(&platforms);
LOGD("OpenCL info: Found %d OpenCL platforms", platforms.size());
for (int i = 0; i < platforms.size(); ++i)
{
std::string name = platforms[i].getInfo<CL_PLATFORM_NAME>();
std::string version = platforms[i].getInfo<CL_PLATFORM_VERSION>();
std::string profile = platforms[i].getInfo<CL_PLATFORM_PROFILE>();
std::string extensions = platforms[i].getInfo<CL_PLATFORM_EXTENSIONS>();
LOGD( "OpenCL info: Platform[%d] = %s, ver = %s, prof = %s, ext = %s",
i, name.c_str(), version.c_str(), profile.c_str(), extensions.c_str() );
}
std::vector<cl::Device> devices;
platforms[0].getDevices(CL_DEVICE_TYPE_ALL, &devices);
for (int i = 0; i < devices.size(); ++i)
{
std::string name = devices[i].getInfo<CL_DEVICE_NAME>();
std::string extensions = devices[i].getInfo<CL_DEVICE_EXTENSIONS>();
cl_ulong type = devices[i].getInfo<CL_DEVICE_TYPE>();
LOGD( "OpenCL info: Device[%d] = %s (%s), ext = %s",
i, name.c_str(), (type==CL_DEVICE_TYPE_GPU ? "GPU" : "CPU"), extensions.c_str() );
}
}
catch(const cl::Error& e)
{
LOGE( "OpenCL info: error while gathering OpenCL info: %s (%d)", e.what(), e.err() );
}
catch(const std::exception& e)
{
LOGE( "OpenCL info: error while gathering OpenCL info: %s", e.what() );
}
catch(...)
{
LOGE( "OpenCL info: unknown error while gathering OpenCL info" );
}
LOGD("*******************");
}
cl::Context theContext;
cl::CommandQueue theQueue;
cl::Program theProgB2B, theProgI2B, theProgI2I;
bool haveOpenCL = false;
extern "C" void initCL()
{
dumpCLinfo();
EGLDisplay mEglDisplay = eglGetCurrentDisplay();
if (mEglDisplay == EGL_NO_DISPLAY)
LOGE("initCL: eglGetCurrentDisplay() returned 'EGL_NO_DISPLAY', error = %x", eglGetError());
EGLContext mEglContext = eglGetCurrentContext();
if (mEglContext == EGL_NO_CONTEXT)
LOGE("initCL: eglGetCurrentContext() returned 'EGL_NO_CONTEXT', error = %x", eglGetError());
cl_context_properties props[] =
{ CL_GL_CONTEXT_KHR, (cl_context_properties) mEglContext,
CL_EGL_DISPLAY_KHR, (cl_context_properties) mEglDisplay,
CL_CONTEXT_PLATFORM, 0, // platform handle is filled in as props[5] below
0 };
try
{
haveOpenCL = false;
cl::Platform p = cl::Platform::getDefault();
std::string ext = p.getInfo<CL_PLATFORM_EXTENSIONS>();
if(ext.find("cl_khr_gl_sharing") == std::string::npos)
LOGE("Warning: CL-GL sharing isn't supported by PLATFORM");
props[5] = (cl_context_properties) p();
theContext = cl::Context(CL_DEVICE_TYPE_GPU, props);
std::vector<cl::Device> devs = theContext.getInfo<CL_CONTEXT_DEVICES>();
LOGD("Context returned %d devices, taking the 1st one", devs.size());
ext = devs[0].getInfo<CL_DEVICE_EXTENSIONS>();
if(ext.find("cl_khr_gl_sharing") == std::string::npos)
LOGE("Warning: CL-GL sharing isn't supported by DEVICE");
theQueue = cl::CommandQueue(theContext, devs[0]);
cl::Program::Sources src(1, std::make_pair(oclProgI2I, sizeof(oclProgI2I)));
theProgI2I = cl::Program(theContext, src);
theProgI2I.build(devs);
cv::ocl::attachContext(p.getInfo<CL_PLATFORM_NAME>(), p(), theContext(), devs[0]());
if( cv::ocl::useOpenCL() )
LOGD("OpenCV+OpenCL works OK!");
else
LOGE("Can't init OpenCV with OpenCL TAPI");
haveOpenCL = true;
}
catch(const cl::Error& e)
{
LOGE("cl::Error: %s (%d)", e.what(), e.err());
}
catch(const std::exception& e)
{
LOGE("std::exception: %s", e.what());
}
catch(...)
{
LOGE( "OpenCL info: unknown error while initializing OpenCL stuff" );
}
LOGD("initCL completed");
}
extern "C" void closeCL()
{
}
#define GL_TEXTURE_2D 0x0DE1
void procOCL_I2I(int texIn, int texOut, int w, int h)
{
LOGD("Processing OpenCL Direct (image2d)");
if(!haveOpenCL)
{
LOGE("OpenCL isn't initialized");
return;
}
LOGD("procOCL_I2I(%d, %d, %d, %d)", texIn, texOut, w, h);
cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn);
cl::ImageGL imgOut(theContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, texOut);
std::vector < cl::Memory > images;
images.push_back(imgIn);
images.push_back(imgOut);
int64_t t = getTimeMs();
theQueue.enqueueAcquireGLObjects(&images);
theQueue.finish();
LOGD("enqueueAcquireGLObjects() costs %d ms", getTimeInterval(t));
t = getTimeMs();
cl::Kernel Laplacian(theProgI2I, "Laplacian"); //TODO: may be done once
Laplacian.setArg(0, imgIn);
Laplacian.setArg(1, imgOut);
theQueue.finish();
LOGD("Kernel() costs %d ms", getTimeInterval(t));
t = getTimeMs();
theQueue.enqueueNDRangeKernel(Laplacian, cl::NullRange, cl::NDRange(w, h), cl::NullRange);
theQueue.finish();
LOGD("enqueueNDRangeKernel() costs %d ms", getTimeInterval(t));
t = getTimeMs();
theQueue.enqueueReleaseGLObjects(&images);
theQueue.finish();
LOGD("enqueueReleaseGLObjects() costs %d ms", getTimeInterval(t));
}
void procOCL_OCV(int texIn, int texOut, int w, int h)
{
LOGD("Processing OpenCL via OpenCV");
if(!haveOpenCL)
{
LOGE("OpenCL isn't initialized");
return;
}
int64_t t = getTimeMs();
cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn);
std::vector < cl::Memory > images(1, imgIn);
theQueue.enqueueAcquireGLObjects(&images);
theQueue.finish();
cv::UMat uIn, uOut, uTmp;
cv::ocl::convertFromImage(imgIn(), uIn);
LOGD("loading texture data to OpenCV UMat costs %d ms", getTimeInterval(t));
theQueue.enqueueReleaseGLObjects(&images);
t = getTimeMs();
//cv::blur(uIn, uOut, cv::Size(5, 5));
cv::Laplacian(uIn, uTmp, CV_8U);
cv::multiply(uTmp, 10, uOut);
cv::ocl::finish();
LOGD("OpenCV processing costs %d ms", getTimeInterval(t));
t = getTimeMs();
cl::ImageGL imgOut(theContext, CL_MEM_WRITE_ONLY, GL_TEXTURE_2D, 0, texOut);
images.clear();
images.push_back(imgOut);
theQueue.enqueueAcquireGLObjects(&images);
cl_mem clBuffer = (cl_mem)uOut.handle(cv::ACCESS_READ);
cl_command_queue q = (cl_command_queue)cv::ocl::Queue::getDefault().ptr();
size_t offset = 0;
size_t origin[3] = { 0, 0, 0 };
size_t region[3] = { (size_t)w, (size_t)h, 1 };
CV_Assert(clEnqueueCopyBufferToImage (q, clBuffer, imgOut(), offset, origin, region, 0, NULL, NULL) == CL_SUCCESS);
theQueue.enqueueReleaseGLObjects(&images);
cv::ocl::finish();
LOGD("uploading results to texture costs %d ms", getTimeInterval(t));
}
void drawFrameProcCPU(int w, int h, int texOut)
{
LOGD("Processing on CPU");
int64_t t;
// let's modify pixels in FBO texture in C++ code (on CPU)
static cv::Mat m;
m.create(h, w, CV_8UC4);
// read
t = getTimeMs();
// expecting FBO to be bound
glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, m.data);
LOGD("glReadPixels() costs %d ms", getTimeInterval(t));
// modify
t = getTimeMs();
cv::Laplacian(m, m, CV_8U);
m *= 10;
LOGD("Laplacian() costs %d ms", getTimeInterval(t));
// write back
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texOut);
t = getTimeMs();
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, m.data);
LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t));
}
enum ProcMode {PROC_MODE_NO_PROC=0, PROC_MODE_CPU=1, PROC_MODE_OCL_DIRECT=2, PROC_MODE_OCL_OCV=3};
extern "C" void processFrame(int tex1, int tex2, int w, int h, int mode)
{
switch(mode)
{
//case PROC_MODE_NO_PROC:
case PROC_MODE_CPU:
drawFrameProcCPU(w, h, tex2);
break;
case PROC_MODE_OCL_DIRECT:
procOCL_I2I(tex1, tex2, w, h);
break;
case PROC_MODE_OCL_OCV:
procOCL_OCV(tex1, tex2, w, h);
break;
default:
LOGE("Unexpected processing mode: %d", mode);
}
}

View File

@ -0,0 +1,15 @@
cmake_minimum_required(VERSION 3.6)
set(target mixed_sample)
project(${target} CXX)
set(ANDROID_OPENCV_COMPONENTS "opencv_java" CACHE STRING "")
message(STATUS "ANDROID_ABI=${ANDROID_ABI}")
find_package(OpenCV REQUIRED COMPONENTS ${ANDROID_OPENCV_COMPONENTS})
file(GLOB srcs *.cpp *.c)
file(GLOB hdrs *.hpp *.h)
include_directories("${CMAKE_CURRENT_LIST_DIR}")
add_library(${target} SHARED ${srcs} ${hdrs})
target_link_libraries(${target} ${ANDROID_OPENCV_COMPONENTS} -lGLESv2 -lEGL -lOpenCL)

View File

@ -0,0 +1,19 @@
#include <android/log.h>
#define LOG_TAG "JNIpart"
//#define LOGD(...)
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#include <time.h> // clock_gettime
static inline int64_t getTimeMs()
{
struct timespec now;
clock_gettime(CLOCK_MONOTONIC, &now);
return (int64_t) now.tv_sec*1000 + now.tv_nsec/1000000;
}
static inline int getTimeInterval(int64_t startTime)
{
return int(getTimeMs() - startTime);
}

View File

@ -0,0 +1,20 @@
#include <jni.h>
// Implemented in CLprocessor.cpp with C linkage; initCL() there returns nothing,
// so the JNI wrapper reports success with a constant.
void initCL();
void closeCL();
void processFrame(int tex1, int tex2, int w, int h, int mode);
JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativePart_initCL(JNIEnv * env, jclass cls)
{
    initCL();
    return 0;
}
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_closeCL(JNIEnv * env, jclass cls)
{
closeCL();
}
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_processFrame(JNIEnv * env, jclass cls, jint tex1, jint tex2, jint w, jint h, jint mode)
{
processFrame(tex1, tex2, w, h, mode);
}

View File

@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<lint>
</lint>

Binary file not shown (new image, 2.0 KiB)

View File

@ -0,0 +1,26 @@
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.samples.tutorial4.MyGLSurfaceView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:id="@+id/my_gl_surface_view" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation = "vertical" >
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/fps_text_view"
android:text="FPS:" />
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/proc_mode_text_view"
android:text="Processing mode:" />
</LinearLayout>
</FrameLayout>

View File

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
<group android:checkableBehavior="single">
<item android:id="@+id/no_proc" android:title="No processing" />
<item android:id="@+id/cpu" android:title="Use CPU code" />
<item android:id="@+id/ocl_direct" android:title="Use OpenCL direct" />
<item android:id="@+id/ocl_ocv" android:title="Use OpenCL via OpenCV" />
</group>
</menu>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OpenCV Tutorial 4 - Use OpenCL</string>
</resources>

View File

@ -0,0 +1,112 @@
package org.opencv.samples.tutorial4;
import org.opencv.android.CameraGLSurfaceView;
import android.app.Activity;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.widget.TextView;
import android.widget.Toast;
public class MyGLSurfaceView extends CameraGLSurfaceView implements CameraGLSurfaceView.CameraTextureListener {
static final String LOGTAG = "MyGLSurfaceView";
protected int procMode = NativePart.PROCESSING_MODE_NO_PROCESSING;
static final String[] procModeName = new String[] {"No Processing", "CPU", "OpenCL Direct", "OpenCL via OpenCV"};
protected int frameCounter;
protected long lastNanoTime;
TextView mFpsText = null;
public MyGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
public boolean onTouchEvent(MotionEvent e) {
if(e.getAction() == MotionEvent.ACTION_DOWN)
((Activity)getContext()).openOptionsMenu();
return true;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
//NativePart.initCL();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
//NativePart.closeCL();
super.surfaceDestroyed(holder);
}
public void setProcessingMode(int newMode) {
if(newMode>=0 && newMode<procModeName.length)
procMode = newMode;
else
Log.e(LOGTAG, "Ignoring invalid processing mode: " + newMode);
((Activity) getContext()).runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(getContext(), "Selected mode: " + procModeName[procMode], Toast.LENGTH_LONG).show();
}
});
}
@Override
public void onCameraViewStarted(int width, int height) {
((Activity) getContext()).runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(getContext(), "onCameraViewStarted", Toast.LENGTH_SHORT).show();
}
});
NativePart.initCL();
frameCounter = 0;
lastNanoTime = System.nanoTime();
}
@Override
public void onCameraViewStopped() {
((Activity) getContext()).runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(getContext(), "onCameraViewStopped", Toast.LENGTH_SHORT).show();
}
});
}
@Override
public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
// FPS
frameCounter++;
if(frameCounter >= 30)
{
final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime));
Log.i(LOGTAG, "drawFrame() FPS: "+fps);
if(mFpsText != null) {
Runnable fpsUpdater = new Runnable() {
public void run() {
mFpsText.setText("FPS: " + fps);
}
};
new Handler(Looper.getMainLooper()).post(fpsUpdater);
} else {
Log.d(LOGTAG, "mFpsText == null");
mFpsText = (TextView)((Activity) getContext()).findViewById(R.id.fps_text_view);
}
frameCounter = 0;
lastNanoTime = System.nanoTime();
}
if(procMode == NativePart.PROCESSING_MODE_NO_PROCESSING)
return false;
NativePart.processFrame(texIn, texOut, width, height, procMode);
return true;
}
}
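The FPS readout above recomputes the rate from a 30-frame window, so the text updates only about once per second and can jump between windows. An exponentially smoothed estimate is a common alternative; a sketch with an illustrative class name and smoothing factor:

// Illustrative FPS meter: smoother readout than a fixed 30-frame window.
class FpsMeter {
    private static final double ALPHA = 0.1; // smoothing factor, assumed value
    private double emaFps = 0.0;
    private long lastNanoTime = System.nanoTime();

    // Call once per rendered frame; returns the smoothed frames-per-second estimate.
    double tick() {
        long now = System.nanoTime();
        double instantFps = 1e9 / Math.max(1L, now - lastNanoTime);
        lastNanoTime = now;
        emaFps = (emaFps == 0.0) ? instantFps : ALPHA * instantFps + (1 - ALPHA) * emaFps;
        return emaFps;
    }
}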

View File

@ -0,0 +1,18 @@
package org.opencv.samples.tutorial4;
public class NativePart {
static
{
System.loadLibrary("opencv_java4");
System.loadLibrary("JNIpart");
}
public static final int PROCESSING_MODE_NO_PROCESSING = 0;
public static final int PROCESSING_MODE_CPU = 1;
public static final int PROCESSING_MODE_OCL_DIRECT = 2;
public static final int PROCESSING_MODE_OCL_OCV = 3;
public static native int initCL();
public static native void closeCL();
public static native void processFrame(int tex1, int tex2, int w, int h, int mode);
}
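The static initializer above throws UnsatisfiedLinkError and aborts the process if either library is missing for the current ABI. A defensive variant, sketched with an illustrative class and field name, lets callers check availability before touching the native methods:

// Hypothetical guarded loader, not part of this sample.
public class SafeNativeLoader {
    public static final boolean LIBS_LOADED;
    static {
        boolean ok = true;
        try {
            System.loadLibrary("opencv_java4");
            System.loadLibrary("JNIpart");
        } catch (UnsatisfiedLinkError e) {
            android.util.Log.e("SafeNativeLoader", "Native libraries not available", e);
            ok = false;
        }
        LIBS_LOADED = ok;
    }
}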

View File

@ -0,0 +1,102 @@
package org.opencv.samples.tutorial4;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.widget.TextView;
public class Tutorial4Activity extends Activity {
private MyGLSurfaceView mView;
private TextView mProcMode;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
//mView = new MyGLSurfaceView(this, null);
//setContentView(mView);
setContentView(R.layout.activity);
mView = (MyGLSurfaceView) findViewById(R.id.my_gl_surface_view);
mView.setCameraTextureListener(mView);
TextView tv = (TextView)findViewById(R.id.fps_text_view);
mProcMode = (TextView)findViewById(R.id.proc_mode_text_view);
runOnUiThread(new Runnable() {
public void run() {
mProcMode.setText("Processing mode: No processing");
}
});
mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING);
}
@Override
protected void onPause() {
mView.onPause();
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
mView.onResume();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.no_proc:
runOnUiThread(new Runnable() {
public void run() {
mProcMode.setText("Processing mode: No Processing");
}
});
mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING);
return true;
case R.id.cpu:
runOnUiThread(new Runnable() {
public void run() {
mProcMode.setText("Processing mode: CPU");
}
});
mView.setProcessingMode(NativePart.PROCESSING_MODE_CPU);
return true;
case R.id.ocl_direct:
runOnUiThread(new Runnable() {
public void run() {
mProcMode.setText("Processing mode: OpenCL direct");
}
});
mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_DIRECT);
return true;
case R.id.ocl_ocv:
runOnUiThread(new Runnable() {
public void run() {
mProcMode.setText("Processing mode: OpenCL via OpenCV (TAPI)");
}
});
mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_OCV);
return true;
default:
return false;
}
}
}
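The four menu branches above repeat the same runOnUiThread/setProcessingMode pattern with different labels. A hypothetical helper method for this same class would collapse them; the labels are the ones already used above:

// Sketch of a drop-in helper for Tutorial4Activity (assumes the mProcMode and mView fields above).
private void applyProcessingMode(final String label, int mode) {
    runOnUiThread(new Runnable() {
        public void run() {
            mProcMode.setText("Processing mode: " + label);
        }
    });
    mView.setProcessingMode(mode);
}

// e.g. case R.id.cpu: applyProcessingMode("CPU", NativePart.PROCESSING_MODE_CPU); return true;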