feat: switch backend to PaddleOCR-NCNN, switch project to CMake

1. Migrate the project backend entirely to the PaddleOCR-NCNN algorithm; it has passed basic compatibility tests
2. Reorganize the project with CMake; to better accommodate third-party libraries going forward, the QMake project is no longer provided
3. Reorganize the rights declaration files and the code tree to minimize infringement risk

Log: switch backend to PaddleOCR-NCNN, switch project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
wangzhengyang
2022-05-10 09:54:44 +08:00
parent ecdd171c6f
commit 718c41634f
10018 changed files with 3593797 additions and 186748 deletions


@@ -0,0 +1,104 @@
set(MODULE_NAME "java_bindings_generator")
set(OPENCV_MODULE_IS_PART_OF_WORLD FALSE)
ocv_add_module(${MODULE_NAME} INTERNAL)
set(OPENCV_JAVA_SIGNATURES_FILE "${CMAKE_CURRENT_BINARY_DIR}/opencv_java_signatures.json" CACHE INTERNAL "")
set(OPENCV_JAVA_BINDINGS_DIR "${CMAKE_CURRENT_BINARY_DIR}" CACHE INTERNAL "")
file(REMOVE_RECURSE "${OPENCV_JAVA_BINDINGS_DIR}/gen")
file(REMOVE "${OPENCV_DEPHELPER}/gen_opencv_java_source") # force re-run after CMake
# This file is included from a subdirectory
set(JAVA_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/..")
include(${JAVA_SOURCE_DIR}/common.cmake)
set(__remap_config "") # list of remapped ".in" files (configure_file)
set(__remap_targets "")
macro(ocv_remap_files files_list_var)
set(target_dir "${OpenCV_BINARY_DIR}/configured")
foreach(f ${${files_list_var}})
if(NOT "${f}" MATCHES "^(.*)\\.in$")
#continue() # since CMake 3.2+
else()
set(f_ "${CMAKE_MATCH_1}")
file(RELATIVE_PATH rel_path0 "${OpenCV_SOURCE_DIR}" "${f}")
file(RELATIVE_PATH rel_path1 "${OpenCV_SOURCE_DIR}" "${f_}")
set(__target_file "${target_dir}/${rel_path1}")
configure_file("${f}" "${__target_file}" @ONLY)
if(__remap_config)
set(__remap_config "${__remap_config},\n")
endif()
set(__remap_config "${__remap_config} { \"src\": \"${rel_path0}\", \"target\": \"${__target_file}\" }")
list(APPEND __remap_targets "${__target_file}")
endif()
endforeach()
endmacro()
# common files
file(GLOB_RECURSE deps "${CMAKE_CURRENT_SOURCE_DIR}/src/*" "${CMAKE_CURRENT_SOURCE_DIR}/android*/*" "${CMAKE_CURRENT_SOURCE_DIR}/templates/*")
ocv_remap_files(deps)
set(__modules_config "") # list of OpenCV modules
foreach(m ${OPENCV_JAVA_MODULES})
set(module_java_dir "${OPENCV_MODULE_${m}_LOCATION}/misc/java")
list(APPEND deps ${OPENCV_MODULE_${m}_HEADERS})
file(GLOB_RECURSE misc_files "${module_java_dir}/*")
list(APPEND deps ${misc_files})
string(REGEX REPLACE "^opencv_" "" m_ "${m}")
if(__modules_config)
set(__modules_config "${__modules_config},\n")
endif()
file(RELATIVE_PATH rel_path "${OpenCV_SOURCE_DIR}" "${OPENCV_MODULE_${m}_LOCATION}")
set(__modules_config "${__modules_config} { \"name\": \"${m_}\", \"location\": \"${rel_path}\" }")
ocv_remap_files(misc_files)
endforeach(m)
set(CONFIG_FILE "${CMAKE_CURRENT_BINARY_DIR}/gen_java.json")
set(__config_str
"{
\"rootdir\": \"${OpenCV_SOURCE_DIR}\",
\"modules\": [
${__modules_config}
],
\"files_remap\": [
${__remap_config}
]
}
")
if(EXISTS "${CONFIG_FILE}")
file(READ "${CONFIG_FILE}" __content)
else()
set(__content "")
endif()
if(NOT "${__content}" STREQUAL "${__config_str}")
file(WRITE "${CONFIG_FILE}" "${__config_str}")
file(REMOVE "${OPENCV_DEPHELPER}/gen_opencv_java_source")
endif()
unset(__config_str)
set(java_generated_files
# "${OPENCV_JAVA_SIGNATURES_FILE}"
"${OPENCV_DEPHELPER}/gen_opencv_java_source"
)
add_custom_command(
OUTPUT ${java_generated_files}
COMMAND ${PYTHON_DEFAULT_EXECUTABLE} "${JAVA_SOURCE_DIR}/generator/gen_java.py" -p "${JAVA_SOURCE_DIR}/../python/src2/gen2.py" -c "${CONFIG_FILE}"
COMMAND ${CMAKE_COMMAND} -E touch "${OPENCV_DEPHELPER}/gen_opencv_java_source"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}"
DEPENDS "${JAVA_SOURCE_DIR}/generator/gen_java.py"
"${JAVA_SOURCE_DIR}/../python/src2/gen2.py"
"${JAVA_SOURCE_DIR}/../python/src2/hdr_parser.py"
# don't, result of file(WRITE): "${CMAKE_CURRENT_BINARY_DIR}/gen_java.json"
${deps} ${__remap_targets}
# not allowed (file(WRITE) result): "${CONFIG_FILE}"
COMMENT "Generate files for Java bindings"
)
add_custom_target(gen_opencv_java_source DEPENDS ${java_generated_files}
SOURCES "${JAVA_SOURCE_DIR}/generator/gen_java.py"
"${CMAKE_CURRENT_BINARY_DIR}/gen_java.json"
)


@@ -0,0 +1,302 @@
package org.opencv.android;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
@TargetApi(21)
public class Camera2Renderer extends CameraGLRendererBase {
protected final String LOGTAG = "Camera2Renderer";
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private Size mPreviewSize = new Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
Camera2Renderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected void doStart() {
Log.d(LOGTAG, "doStart");
startBackgroundThread();
super.doStart();
}
@Override
protected void doStop() {
Log.d(LOGTAG, "doStop");
super.doStop();
stopBackgroundThread();
}
boolean cacPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height);
if(mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) mView.getContext()
.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager
.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float)width / height;
for (Size psize : map.getOutputSizes(SurfaceTexture.class)) {
int w = psize.getWidth(), h = psize.getHeight();
Log.d(LOGTAG, "trying size: "+w+"x"+h);
if ( width >= w && height >= h &&
bestWidth <= w && bestHeight <= h &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight);
if( bestWidth == 0 || bestHeight == 0 ||
mPreviewSize.getWidth() == bestWidth &&
mPreviewSize.getHeight() == bestHeight )
return false;
else {
mPreviewSize = new Size(bestWidth, bestHeight);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "cacPreviewSize - Security Exception");
}
return false;
}
@Override
protected void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if(camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return;
}
if(id == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if( id == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK ||
id == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
mCameraID = cameraID;
break;
}
}
}
if(mCameraID != null) {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException(
"Time out waiting to lock camera opening.");
}
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception");
} catch (InterruptedException e) {
Log.e(LOGTAG, "OpenCamera - Interrupted Exception");
}
}
@Override
protected void closeCamera() {
Log.i(LOGTAG, "closeCamera");
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
mCameraOpenCloseLock.release();
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
};
private void createCameraPreviewSession() {
int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")");
if(w<0 || h<0)
return;
try {
mCameraOpenCloseLock.acquire();
if (null == mCameraDevice) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
if(null == mSTexture) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null");
return;
}
mSTexture.setDefaultBufferSize(w, h);
Surface surface = new Surface(mSTexture);
mPreviewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession cameraCaptureSession) {
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCaptureSession failed");
}
mCameraOpenCloseLock.release();
}
@Override
public void onConfigureFailed(
CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
mCameraOpenCloseLock.release();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession");
} catch (InterruptedException e) {
throw new RuntimeException(
"Interrupted while createCameraPreviewSession", e);
}
finally {
//mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if(mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread");
}
}
@Override
protected void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")");
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
try {
mCameraOpenCloseLock.acquire();
boolean needReconfig = cacPreviewSize(width, height);
mCameraWidth = mPreviewSize.getWidth();
mCameraHeight = mPreviewSize.getHeight();
if( !needReconfig ) {
mCameraOpenCloseLock.release();
return;
}
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
mCameraOpenCloseLock.release();
createCameraPreviewSession();
} catch (InterruptedException e) {
mCameraOpenCloseLock.release();
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
}
}


@@ -0,0 +1,440 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.View;
@TargetApi(15)
public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
protected final String LOGTAG = "CameraGLRendererBase";
// shaders
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
+ "}";
private final String fssOES = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fss2D = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
// coord-s
private final float vertices[] = {
-1, -1,
-1, 1,
1, -1,
1, 1 };
private final float texCoordOES[] = {
0, 1,
0, 0,
1, 1,
1, 0 };
private final float texCoord2D[] = {
0, 0,
0, 1,
1, 0,
1, 1 };
private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
private int[] FBO = {0};
private int progOES = -1, prog2D = -1;
private int vPosOES, vTCOES, vPos2D, vTC2D;
private FloatBuffer vert, texOES, tex2D;
protected int mCameraWidth = -1, mCameraHeight = -1;
protected int mFBOWidth = -1, mFBOHeight = -1;
protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1;
protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY;
protected SurfaceTexture mSTexture;
protected boolean mHaveSurface = false;
protected boolean mHaveFBO = false;
protected boolean mUpdateST = false;
protected boolean mEnabled = true;
protected boolean mIsStarted = false;
protected CameraGLSurfaceView mView;
protected abstract void openCamera(int id);
protected abstract void closeCamera();
protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight
public CameraGLRendererBase(CameraGLSurfaceView view) {
mView = view;
int bytes = vertices.length * Float.SIZE / Byte.SIZE;
vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vert.put(vertices).position(0);
texOES.put(texCoordOES).position(0);
tex2D.put(texCoord2D).position(0);
}
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
//Log.i(LOGTAG, "onFrameAvailable");
mUpdateST = true;
mView.requestRender();
}
@Override
public void onDrawFrame(GL10 gl) {
//Log.i(LOGTAG, "onDrawFrame start");
if (!mHaveFBO)
return;
synchronized(this) {
if (mUpdateST) {
mSTexture.updateTexImage();
mUpdateST = false;
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
CameraTextureListener texListener = mView.getCameraTextureListener();
if(texListener != null) {
//Log.d(LOGTAG, "haveUserCallback");
// texCamera(OES) -> texFBO
drawTex(texCamera[0], true, FBO[0]);
// call user code (texFBO -> texDraw)
boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);
if(modified) {
// texDraw -> screen
drawTex(texDraw[0], false, 0);
} else {
// texFBO -> screen
drawTex(texFBO[0], false, 0);
}
} else {
Log.d(LOGTAG, "texCamera(OES) -> screen");
// texCamera(OES) -> screen
drawTex(texCamera[0], true, 0);
}
//Log.i(LOGTAG, "onDrawFrame end");
}
}
@Override
public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
mHaveSurface = true;
updateState();
setPreviewSize(surfaceWidth, surfaceHeight);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.i(LOGTAG, "onSurfaceCreated");
initShaders();
}
private void initShaders() {
String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
if (strGLVersion != null)
Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
progOES = loadShader(vss, fssOES);
vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition");
vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPosOES);
GLES20.glEnableVertexAttribArray(vTCOES);
prog2D = loadShader(vss, fss2D);
vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition");
vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPos2D);
GLES20.glEnableVertexAttribArray(vTC2D);
}
private void initSurfaceTexture() {
Log.d(LOGTAG, "initSurfaceTexture");
deleteSurfaceTexture();
initTexOES(texCamera);
mSTexture = new SurfaceTexture(texCamera[0]);
mSTexture.setOnFrameAvailableListener(this);
}
private void deleteSurfaceTexture() {
Log.d(LOGTAG, "deleteSurfaceTexture");
if(mSTexture != null) {
mSTexture.release();
mSTexture = null;
deleteTex(texCamera);
}
}
private void initTexOES(int[] tex) {
if(tex.length == 1) {
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
}
private static void deleteTex(int[] tex) {
if(tex.length == 1) {
GLES20.glDeleteTextures(1, tex, 0);
}
}
private static int loadShader(String vss, String fss) {
Log.d("CameraGLRendererBase", "loadShader");
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] status = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
return 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
fshader = 0;
return 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not link shader program: "+GLES20.glGetProgramInfoLog(program));
program = 0;
return 0;
}
GLES20.glValidateProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0);
if (status[0] == 0)
{
Log.e("CameraGLRendererBase", "Shader program validation error: "+GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
return 0;
}
Log.d("CameraGLRendererBase", "Shader program is built OK");
return program;
}
private void deleteFBO()
{
Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteFramebuffers(1, FBO, 0);
deleteTex(texFBO);
deleteTex(texDraw);
mFBOWidth = mFBOHeight = 0;
}
private void initFBO(int width, int height)
{
Log.d(LOGTAG, "initFBO("+width+"x"+height+")");
deleteFBO();
GLES20.glGenTextures(1, texDraw, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glGenTextures(1, texFBO, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
//int hFBO;
GLES20.glGenFramebuffers(1, FBO, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError());
int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE)
Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus);
mFBOWidth = width;
mFBOHeight = height;
}
// draw texture to FBO or to screen if fbo == 0
private void drawTex(int tex, boolean isOES, int fbo)
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
if(fbo == 0)
GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
else
GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if(isOES) {
GLES20.glUseProgram(progOES);
GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
} else {
GLES20.glUseProgram(prog2D);
GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
if(isOES) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
public synchronized void enableView() {
Log.d(LOGTAG, "enableView");
mEnabled = true;
updateState();
}
public synchronized void disableView() {
Log.d(LOGTAG, "disableView");
mEnabled = false;
updateState();
}
protected void updateState() {
Log.d(LOGTAG, "updateState");
Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface);
boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE;
if (willStart != mIsStarted) {
if(willStart) doStart();
else doStop();
} else {
Log.d(LOGTAG, "keeping State unchanged");
}
Log.d(LOGTAG, "updateState end");
}
protected synchronized void doStart() {
Log.d(LOGTAG, "doStart");
initSurfaceTexture();
openCamera(mCameraIndex);
mIsStarted = true;
if(mCameraWidth>0 && mCameraHeight>0)
setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted()
}
protected void doStop() {
Log.d(LOGTAG, "doStop");
synchronized(this) {
mUpdateST = false;
mIsStarted = false;
mHaveFBO = false;
closeCamera();
deleteSurfaceTexture();
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStopped();
}
protected void setPreviewSize(int width, int height) {
synchronized(this) {
mHaveFBO = false;
mCameraWidth = width;
mCameraHeight = height;
setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight
initFBO(mCameraWidth, mCameraHeight);
mHaveFBO = true;
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight);
}
public void setCameraIndex(int cameraIndex) {
disableView();
mCameraIndex = cameraIndex;
enableView();
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
disableView();
mMaxCameraWidth = maxWidth;
mMaxCameraHeight = maxHeight;
enableView();
}
public void onResume() {
Log.i(LOGTAG, "onResume");
}
public void onPause() {
Log.i(LOGTAG, "onPause");
mHaveSurface = false;
updateState();
mCameraWidth = mCameraHeight = -1;
}
}


@@ -0,0 +1,119 @@
package org.opencv.android;
import org.opencv.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
public class CameraGLSurfaceView extends GLSurfaceView {
private static final String LOGTAG = "CameraGLSurfaceView";
public interface CameraTextureListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to the client via the onCameraTexture() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via the onCameraTexture() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new preview frame from Camera is ready.
* @param texIn - the OpenGL texture ID that contains frame in RGBA format
* @param texOut - the OpenGL texture ID that can be used to store the modified frame image to display
* @param width - the width of the frame
* @param height - the height of the frame
* @return `true` if `texOut` should be displayed, `false` - to show `texIn`
*/
public boolean onCameraTexture(int texIn, int texOut, int width, int height);
};
private CameraTextureListener mTexListener;
private CameraGLRendererBase mRenderer;
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
styledAttrs.recycle();
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this);
else
mRenderer = new CameraRenderer(this);
setCameraIndex(cameraIndex);
setEGLContextClientVersion(2);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void setCameraTextureListener(CameraTextureListener texListener)
{
mTexListener = texListener;
}
public CameraTextureListener getCameraTextureListener()
{
return mTexListener;
}
public void setCameraIndex(int cameraIndex) {
mRenderer.setCameraIndex(cameraIndex);
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mRenderer.mHaveSurface = false;
super.surfaceDestroyed(holder);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
super.surfaceChanged(holder, format, w, h);
}
@Override
public void onResume() {
Log.i(LOGTAG, "onResume");
super.onResume();
mRenderer.onResume();
}
@Override
public void onPause() {
Log.i(LOGTAG, "onPause");
mRenderer.onPause();
super.onPause();
}
public void enableView() {
mRenderer.enableView();
}
public void disableView() {
mRenderer.disableView();
}
}
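
For reference, a minimal sketch of how the CameraTextureListener contract documented above is typically consumed from an Activity. The package name, layout resource and view id below are illustrative assumptions, as is the prior loading of the OpenCV native library and an already granted CAMERA permission.

package org.example.app; // hypothetical sample package, not part of this commit

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

import org.opencv.android.CameraGLSurfaceView;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;

public class GLPreviewActivity extends Activity implements CameraTextureListener {
    private static final String TAG = "GLPreviewActivity";
    private CameraGLSurfaceView mView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // R.layout.gl_preview is assumed to contain a CameraGLSurfaceView with id camera_view
        setContentView(R.layout.gl_preview);
        mView = (CameraGLSurfaceView) findViewById(R.id.camera_view);
        mView.setCameraTextureListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mView.onResume();  // resumes the GL thread; the camera is reopened once the surface is (re)created
    }

    @Override
    protected void onPause() {
        mView.onPause();   // the renderer closes the camera and releases the SurfaceTexture
        super.onPause();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        Log.i(TAG, "preview started: " + width + "x" + height);
    }

    @Override
    public void onCameraViewStopped() {
        Log.i(TAG, "preview stopped");
    }

    @Override
    public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
        // Runs on the GL thread. Return false to display texIn (the camera frame) unchanged;
        // render into texOut and return true to display a modified frame instead.
        return false;
    }
}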


@@ -0,0 +1,166 @@
package org.opencv.android;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
@TargetApi(15)
@SuppressWarnings("deprecation")
public class CameraRenderer extends CameraGLRendererBase {
public static final String LOGTAG = "CameraRenderer";
private Camera mCamera;
private boolean mPreviewStarted = false;
CameraRenderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected synchronized void closeCamera() {
Log.i(LOGTAG, "closeCamera");
if(mCamera != null) {
mCamera.stopPreview();
mPreviewStarted = false;
mCamera.release();
mCamera = null;
}
}
@Override
protected synchronized void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
closeCamera();
if (id == CameraBridgeViewBase.CAMERA_ID_ANY) {
Log.d(LOGTAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.i(LOGTAG, "Trying to open BACK camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.i(LOGTAG, "Trying to open FRONT camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.e(LOGTAG, "Back camera not found!");
} else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.e(LOGTAG, "Front camera not found!");
} else {
Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if(mCamera == null) {
Log.e(LOGTAG, "Error: can't open camera");
return;
}
Camera.Parameters params = mCamera.getParameters();
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
try {
mCamera.setPreviewTexture(mSTexture);
} catch (IOException ioe) {
Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
}
}
@Override
public synchronized void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
if(mCamera == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return;
}
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
Camera.Parameters param = mCamera.getParameters();
List<Size> psize = param.getSupportedPreviewSizes();
int bestWidth = 0, bestHeight = 0;
if (psize.size() > 0) {
float aspect = (float)width / height;
for (Size size : psize) {
int w = size.width, h = size.height;
Log.d(LOGTAG, "checking camera preview size: "+w+"x"+h);
if ( w <= width && h <= height &&
w >= bestWidth && h >= bestHeight &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
if(bestWidth <= 0 || bestHeight <= 0) {
bestWidth = psize.get(0).width;
bestHeight = psize.get(0).height;
Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight);
} else {
Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight);
}
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
}
mCameraWidth = bestWidth;
mCameraHeight = bestHeight;
param.setPreviewSize(bestWidth, bestHeight);
}
param.set("orientation", "landscape");
mCamera.setParameters(param);
mCamera.startPreview();
mPreviewStarted = true;
}
}


@@ -0,0 +1,447 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.ViewGroup.LayoutParams;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and Java Camera.
* This class relies on the functionality available in the base class and only implements
* required functions:
* connectCamera - opens Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops preview.
* When a frame is delivered via callback from Camera, it is processed via OpenCV to be
* converted to RGBA32 and then passed to the external callback for modifications if required.
*/
@TargetApi(21)
public class JavaCamera2View extends CameraBridgeViewBase {
private static final String LOGTAG = "JavaCamera2View";
protected ImageReader mImageReader;
protected int mPreviewFormat = ImageFormat.YUV_420_888;
protected CameraDevice mCameraDevice;
protected CameraCaptureSession mCaptureSession;
protected CaptureRequest.Builder mPreviewRequestBuilder;
protected String mCameraID;
protected android.util.Size mPreviewSize = new android.util.Size(-1, -1);
private HandlerThread mBackgroundThread;
protected Handler mBackgroundHandler;
public JavaCamera2View(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCamera2View(Context context, AttributeSet attrs) {
super(context, attrs);
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("OpenCVCameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if (mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread", e);
}
}
protected boolean initializeCamera() {
Log.i(LOGTAG, "initializeCamera");
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if (camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return false;
}
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if ((mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) ||
(mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
) {
mCameraID = cameraID;
break;
}
}
}
if (mCameraID != null) {
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
} else { // make JavaCamera2View behave in the same way as JavaCameraView
Log.i(LOGTAG, "Trying to open camera with the value (" + mCameraIndex + ")");
if (mCameraIndex < camList.length) {
mCameraID = camList[mCameraIndex];
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
} else {
// CAMERA_DISCONNECTED is used when the camera id is no longer valid
throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED);
}
}
return true;
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception", e);
}
return false;
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
}
};
private void createCameraPreviewSession() {
final int w = mPreviewSize.getWidth(), h = mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession(" + w + "x" + h + ")");
if (w < 0 || h < 0)
return;
try {
if (null == mCameraDevice) {
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
mImageReader = ImageReader.newInstance(w, h, mPreviewFormat, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null)
return;
// sanity checks - 3 planes
Image.Plane[] planes = image.getPlanes();
assert (planes.length == 3);
assert (image.getFormat() == mPreviewFormat);
JavaCamera2Frame tempFrame = new JavaCamera2Frame(image);
deliverAndDrawFrame(tempFrame);
tempFrame.release();
image.close();
}
}, mBackgroundHandler);
Surface surface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
Log.i(LOGTAG, "createCaptureSession::onConfigured");
if (null == mCameraDevice) {
return; // camera is already closed
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (Exception e) {
Log.e(LOGTAG, "createCaptureSession failed", e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
}
},
null
);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession", e);
}
}
@Override
protected void disconnectCamera() {
Log.i(LOGTAG, "close camera");
try {
CameraDevice c = mCameraDevice;
mCameraDevice = null;
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != c) {
c.close();
}
} finally {
stopBackgroundThread();
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
}
Log.i(LOGTAG, "camera closed!");
}
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
android.util.Size size = (android.util.Size)obj;
return size.getWidth();
}
@Override
public int getHeight(Object obj) {
android.util.Size size = (android.util.Size)obj;
return size.getHeight();
}
}
boolean calcPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
if (mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = map.getOutputSizes(ImageReader.class);
List<android.util.Size> sizes_list = Arrays.asList(sizes);
Size frameSize = calculateCameraFrameSize(sizes_list, new JavaCameraSizeAccessor(), width, height);
Log.i(LOGTAG, "Selected preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
assert(!(frameSize.width == 0 || frameSize.height == 0));
if (mPreviewSize.getWidth() == frameSize.width && mPreviewSize.getHeight() == frameSize.height)
return false;
else {
mPreviewSize = new android.util.Size((int)frameSize.width, (int)frameSize.height);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "calcPreviewSize - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "calcPreviewSize - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "calcPreviewSize - Security Exception", e);
}
return false;
}
@Override
protected boolean connectCamera(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize(" + width + "x" + height + ")");
startBackgroundThread();
initializeCamera();
try {
boolean needReconfig = calcPreviewSize(width, height);
mFrameWidth = mPreviewSize.getWidth();
mFrameHeight = mPreviewSize.getHeight();
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
AllocateCache();
if (needReconfig) {
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
createCameraPreviewSession();
}
} catch (RuntimeException e) {
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
return true;
}
private class JavaCamera2Frame implements CvCameraViewFrame {
@Override
public Mat gray() {
Image.Plane[] planes = mImage.getPlanes();
int w = mImage.getWidth();
int h = mImage.getHeight();
assert(planes[0].getPixelStride() == 1);
ByteBuffer y_plane = planes[0].getBuffer();
int y_plane_step = planes[0].getRowStride();
mGray = new Mat(h, w, CvType.CV_8UC1, y_plane, y_plane_step);
return mGray;
}
@Override
public Mat rgba() {
Image.Plane[] planes = mImage.getPlanes();
int w = mImage.getWidth();
int h = mImage.getHeight();
int chromaPixelStride = planes[1].getPixelStride();
if (chromaPixelStride == 2) { // Chroma channels are interleaved
assert(planes[0].getPixelStride() == 1);
assert(planes[2].getPixelStride() == 2);
ByteBuffer y_plane = planes[0].getBuffer();
int y_plane_step = planes[0].getRowStride();
ByteBuffer uv_plane1 = planes[1].getBuffer();
int uv_plane1_step = planes[1].getRowStride();
ByteBuffer uv_plane2 = planes[2].getBuffer();
int uv_plane2_step = planes[2].getRowStride();
Mat y_mat = new Mat(h, w, CvType.CV_8UC1, y_plane, y_plane_step);
Mat uv_mat1 = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane1, uv_plane1_step);
Mat uv_mat2 = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane2, uv_plane2_step);
long addr_diff = uv_mat2.dataAddr() - uv_mat1.dataAddr();
if (addr_diff > 0) {
assert(addr_diff == 1);
Imgproc.cvtColorTwoPlane(y_mat, uv_mat1, mRgba, Imgproc.COLOR_YUV2RGBA_NV12);
} else {
assert(addr_diff == -1);
Imgproc.cvtColorTwoPlane(y_mat, uv_mat2, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
}
return mRgba;
} else { // Chroma channels are not interleaved
byte[] yuv_bytes = new byte[w*(h+h/2)];
ByteBuffer y_plane = planes[0].getBuffer();
ByteBuffer u_plane = planes[1].getBuffer();
ByteBuffer v_plane = planes[2].getBuffer();
int yuv_bytes_offset = 0;
int y_plane_step = planes[0].getRowStride();
if (y_plane_step == w) {
y_plane.get(yuv_bytes, 0, w*h);
yuv_bytes_offset = w*h;
} else {
int padding = y_plane_step - w;
for (int i = 0; i < h; i++){
y_plane.get(yuv_bytes, yuv_bytes_offset, w);
yuv_bytes_offset += w;
if (i < h - 1) {
y_plane.position(y_plane.position() + padding);
}
}
assert(yuv_bytes_offset == w * h);
}
int chromaRowStride = planes[1].getRowStride();
int chromaRowPadding = chromaRowStride - w/2;
if (chromaRowPadding == 0){
// When the row stride of the chroma channels equals their width, we can copy
// the entire channels in one go
u_plane.get(yuv_bytes, yuv_bytes_offset, w*h/4);
yuv_bytes_offset += w*h/4;
v_plane.get(yuv_bytes, yuv_bytes_offset, w*h/4);
} else {
// When not equal, we need to copy the channels row by row
for (int i = 0; i < h/2; i++){
u_plane.get(yuv_bytes, yuv_bytes_offset, w/2);
yuv_bytes_offset += w/2;
if (i < h/2-1){
u_plane.position(u_plane.position() + chromaRowPadding);
}
}
for (int i = 0; i < h/2; i++){
v_plane.get(yuv_bytes, yuv_bytes_offset, w/2);
yuv_bytes_offset += w/2;
if (i < h/2-1){
v_plane.position(v_plane.position() + chromaRowPadding);
}
}
}
Mat yuv_mat = new Mat(h+h/2, w, CvType.CV_8UC1);
yuv_mat.put(0, 0, yuv_bytes);
Imgproc.cvtColor(yuv_mat, mRgba, Imgproc.COLOR_YUV2RGBA_I420, 4);
return mRgba;
}
}
public JavaCamera2Frame(Image image) {
super();
mImage = image;
mRgba = new Mat();
mGray = new Mat();
}
public void release() {
mRgba.release();
mGray.release();
}
private Image mImage;
private Mat mRgba;
private Mat mGray;
};
}
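
For reference, a minimal sketch of the usual CameraBridgeViewBase workflow with JavaCamera2View as described in the class comment above. The package and Activity names are illustrative assumptions; the sketch also assumes the OpenCV native library is already loaded, the CAMERA permission has been granted, and runtime permission handling is done elsewhere.

package org.example.app; // hypothetical sample package, not part of this commit

import android.app.Activity;
import android.os.Bundle;

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.JavaCamera2View;
import org.opencv.core.Mat;

public class Camera2PreviewActivity extends Activity implements CvCameraViewListener2 {
    private CameraBridgeViewBase mCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCameraView = new JavaCamera2View(this, CameraBridgeViewBase.CAMERA_ID_ANY);
        mCameraView.setCvCameraViewListener(this);
        setContentView(mCameraView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mCameraView.enableView();   // the base class calls connectCamera() once the surface is ready
    }

    @Override
    protected void onPause() {
        mCameraView.disableView();  // disconnectCamera() closes the camera and stops the background thread
        super.onPause();
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        // rgba() delivers the frame already converted from YUV_420_888 to RGBA, as described above;
        // return it (or a modified Mat of the same size) for display.
        return inputFrame.rgba();
    }
}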


@@ -0,0 +1,391 @@
package org.opencv.android;
import java.io.File;
import java.util.StringTokenizer;
import org.opencv.core.Core;
import org.opencv.engine.OpenCVEngineInterface;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;
class AsyncServiceHelper
{
public static boolean initOpenCV(String Version, final Context AppContext,
final LoaderCallbackInterface Callback)
{
AsyncServiceHelper helper = new AsyncServiceHelper(Version, AppContext, Callback);
Intent intent = new Intent("org.opencv.engine.BIND");
intent.setPackage("org.opencv.engine");
if (AppContext.bindService(intent, helper.mServiceConnection, Context.BIND_AUTO_CREATE))
{
return true;
}
else
{
AppContext.unbindService(helper.mServiceConnection);
InstallService(AppContext, Callback);
return false;
}
}
protected AsyncServiceHelper(String Version, Context AppContext, LoaderCallbackInterface Callback)
{
mOpenCVersion = Version;
mUserAppCallback = Callback;
mAppContext = AppContext;
}
protected static final String TAG = "OpenCVManager/Helper";
protected static final int MINIMUM_ENGINE_VERSION = 2;
protected OpenCVEngineInterface mEngineService;
protected LoaderCallbackInterface mUserAppCallback;
protected String mOpenCVersion;
protected Context mAppContext;
protected static boolean mServiceInstallationProgress = false;
protected static boolean mLibraryInstallationProgress = false;
protected static boolean InstallServiceQuiet(Context context)
{
boolean result = true;
try
{
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(OPEN_CV_SERVICE_URL));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
catch(Exception e)
{
result = false;
}
return result;
}
protected static void InstallService(final Context AppContext, final LoaderCallbackInterface Callback)
{
if (!mServiceInstallationProgress)
{
Log.d(TAG, "Request new service installation");
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV Manager via Google Play");
boolean result = InstallServiceQuiet(AppContext);
if (result)
{
mServiceInstallationProgress = true;
Log.d(TAG, "Package installation started");
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
int Status = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Unbind from service");
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
Callback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install()
{
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "Waiting for OpenCV canceled by user");
mServiceInstallationProgress = false;
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
InstallServiceQuiet(AppContext);
}
};
Callback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
}
/**
* URL of OpenCV Manager page on Google Play Market.
*/
protected static final String OPEN_CV_SERVICE_URL = "market://details?id=org.opencv.engine";
protected ServiceConnection mServiceConnection = new ServiceConnection()
{
public void onServiceConnected(ComponentName className, IBinder service)
{
Log.d(TAG, "Service connection created");
mEngineService = OpenCVEngineInterface.Stub.asInterface(service);
if (null == mEngineService)
{
Log.d(TAG, "OpenCV Manager Service connection fails. May be service was not installed?");
InstallService(mAppContext, mUserAppCallback);
}
else
{
mServiceInstallationProgress = false;
try
{
if (mEngineService.getEngineVersion() < MINIMUM_ENGINE_VERSION)
{
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
return;
}
Log.d(TAG, "Trying to get library path");
String path = mEngineService.getLibPathByVersion(mOpenCVersion);
if ((null == path) || (path.length() == 0))
{
if (!mLibraryInstallationProgress)
{
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV lib via Google Play");
try
{
if (mEngineService.installVersion(mOpenCVersion))
{
mLibraryInstallationProgress = true;
Log.d(TAG, "Package installation started");
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
public void cancel() {
Log.d(TAG, "OpenCV library installation was canceled");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
mLibraryInstallationProgress = false;
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.d(TAG, "Waiting for current installation");
try
{
if (!mEngineService.installVersion(mOpenCVersion))
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
else
{
Log.d(TAG, "Waiting for package installation");
}
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
return;
}
else
{
Log.d(TAG, "Trying to get library list");
mLibraryInstallationProgress = false;
String libs = mEngineService.getLibraryList(mOpenCVersion);
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
int status;
if (initOpenCVLibs(path, libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
status = LoaderCallbackInterface.SUCCESS;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
status = LoaderCallbackInterface.INIT_FAILED;
}
Log.d(TAG, "Init finished with status " + status);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(status);
}
}
catch (RemoteException e)
{
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
}
public void onServiceDisconnected(ComponentName className)
{
mEngineService = null;
}
};
private boolean loadLibrary(String AbsPath)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + AbsPath);
try
{
System.load(AbsPath);
Log.d(TAG, "OpenCV libs init was ok!");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + AbsPath + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private boolean initOpenCVLibs(String Path, String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
if ((null != Path) && (Path.length() != 0))
{
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
String AbsLibraryPath = Path + File.separator + splitter.nextToken();
result &= loadLibrary(AbsLibraryPath);
}
}
else
{
// If the dependencies list is not defined or empty.
String AbsLibraryPath = Path + File.separator + "libopencv_java4.so";
result = loadLibrary(AbsLibraryPath);
}
return result;
}
else
{
Log.d(TAG, "Library path \"" + Path + "\" is empty");
return false;
}
}
}

View File

@ -0,0 +1,141 @@
package org.opencv.android;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.util.Log;
/**
* Basic implementation of LoaderCallbackInterface.
*/
public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
public BaseLoaderCallback(Context AppContext) {
mAppContext = AppContext;
}
public void onManagerConnected(int status)
{
switch (status)
{
/** OpenCV initialization was successful. **/
case LoaderCallbackInterface.SUCCESS:
{
/** Application must override this method to handle successful library initialization. **/
} break;
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
} break;
/** Package installation has been canceled. **/
case LoaderCallbackInterface.INSTALL_CANCELED:
{
Log.d(TAG, "OpenCV library installation was canceled by user");
finish();
} break;
/** Application is incompatible with this version of OpenCV Manager. Possibly, a service update is required. **/
case LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION:
{
Log.d(TAG, "OpenCV Manager Service is uncompatible with this app!");
AlertDialog IncomatibilityMessage = new AlertDialog.Builder(mAppContext).create();
IncomatibilityMessage.setTitle("OpenCV Manager");
IncomatibilityMessage.setMessage("OpenCV Manager service is incompatible with this app. Try to update it via Google Play.");
IncomatibilityMessage.setCancelable(false); // This blocks the 'BACK' button
IncomatibilityMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
IncomatibilityMessage.show();
} break;
/** Other status, i.e. INIT_FAILED. **/
default:
{
Log.e(TAG, "OpenCV loading failed!");
AlertDialog InitFailedDialog = new AlertDialog.Builder(mAppContext).create();
InitFailedDialog.setTitle("OpenCV error");
InitFailedDialog.setMessage("OpenCV was not initialised correctly. Application will be shut down");
InitFailedDialog.setCancelable(false); // This blocks the 'BACK' button
InitFailedDialog.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
InitFailedDialog.show();
} break;
}
}
public void onPackageInstall(final int operation, final InstallCallbackInterface callback)
{
switch (operation)
{
case InstallCallbackInterface.NEW_INSTALLATION:
{
AlertDialog InstallMessage = new AlertDialog.Builder(mAppContext).create();
InstallMessage.setTitle("Package not found");
InstallMessage.setMessage(callback.getPackageName() + " package was not found! Try to install it?");
InstallMessage.setCancelable(false); // This blocks the 'BACK' button
InstallMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Yes", new OnClickListener()
{
public void onClick(DialogInterface dialog, int which)
{
callback.install();
}
});
InstallMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "No", new OnClickListener() {
public void onClick(DialogInterface dialog, int which)
{
callback.cancel();
}
});
InstallMessage.show();
} break;
case InstallCallbackInterface.INSTALLATION_PROGRESS:
{
AlertDialog WaitMessage = new AlertDialog.Builder(mAppContext).create();
WaitMessage.setTitle("OpenCV is not ready");
WaitMessage.setMessage("Installation is in progress. Wait or exit?");
WaitMessage.setCancelable(false); // This blocks the 'BACK' button
WaitMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Wait", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.wait_install();
}
});
WaitMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "Exit", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.cancel();
}
});
WaitMessage.show();
} break;
}
}
void finish()
{
((Activity) mAppContext).finish();
}
protected Context mAppContext;
private final static String TAG = "OCV/BaseLoaderCallback";
}
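For context, a minimal usage sketch (the Activity name is illustrative, not part of this commit): applications typically subclass BaseLoaderCallback anonymously and drive it from onResume(), falling back to initAsync() when static initialization fails.

import android.app.Activity;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

public class MainActivity extends Activity {
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // OpenCV is ready; enable native-dependent functionality here.
            } else {
                super.onManagerConnected(status); // shows the dialogs implemented above
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        if (OpenCVLoader.initDebug()) {
            // Static initialization succeeded (library bundled with the app).
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        } else {
            // Fall back to asynchronous initialization via OpenCV Manager.
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
        }
    }
}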

View File

@ -0,0 +1,60 @@
package org.opencv.android;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import java.util.ArrayList;
import java.util.List;
import static android.Manifest.permission.CAMERA;
public class CameraActivity extends Activity {
private static final int CAMERA_PERMISSION_REQUEST_CODE = 200;
protected List<? extends CameraBridgeViewBase> getCameraViewList() {
return new ArrayList<CameraBridgeViewBase>();
}
protected void onCameraPermissionGranted() {
List<? extends CameraBridgeViewBase> cameraViews = getCameraViewList();
if (cameraViews == null) {
return;
}
for (CameraBridgeViewBase cameraBridgeViewBase: cameraViews) {
if (cameraBridgeViewBase != null) {
cameraBridgeViewBase.setCameraPermissionGranted();
}
}
}
@Override
protected void onStart() {
super.onStart();
boolean havePermission = true;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (checkSelfPermission(CAMERA) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{CAMERA}, CAMERA_PERMISSION_REQUEST_CODE);
havePermission = false;
}
}
if (havePermission) {
onCameraPermissionGranted();
}
}
@Override
@TargetApi(Build.VERSION_CODES.M)
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
if (requestCode == CAMERA_PERMISSION_REQUEST_CODE && grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
onCameraPermissionGranted();
}
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
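A minimal subclass sketch, assuming the camera view is created programmatically rather than from a layout; the class name is illustrative.

import java.util.Collections;
import java.util.List;
import android.os.Bundle;
import org.opencv.android.CameraActivity;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;

public class MyCameraActivity extends CameraActivity {
    private CameraBridgeViewBase mCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCameraView = new JavaCameraView(this, CameraBridgeViewBase.CAMERA_ID_ANY);
        setContentView(mCameraView);
    }

    @Override
    protected List<? extends CameraBridgeViewBase> getCameraViewList() {
        // CameraActivity forwards the CAMERA permission grant to every view returned here.
        return Collections.singletonList(mCameraView);
    }
}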

View File

@ -0,0 +1,519 @@
package org.opencv.android;
import java.util.List;
import org.opencv.BuildConfig;
import org.opencv.R;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* This is a basic class implementing the interaction with the Camera and the OpenCV library.
* Its main responsibility is to control when the camera can be enabled, to process the frame,
* to call the external listener to make any adjustments to the frame, and then to draw the
* resulting frame to the screen.
* Clients shall implement CvCameraViewListener.
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
protected static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private final Object mSyncObject = new Object();
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected boolean mCameraPermissionGranted = false;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new frame is ready to be delivered.
* The returned value is the modified frame that will be displayed on the screen.
* TODO: pass parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new frame is ready to be delivered.
* The returned value is the modified frame that will be displayed on the screen.
* TODO: pass parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStyleListener) {
mOldStyleListener = oldStyleListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This interface is an abstract representation of a single camera frame passed to the onCameraFrame callback.
* Attention: do not use objects implementing this interface outside of the onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* Returns the frame as an RGBA Mat.
*/
public Mat rgba();
/**
* Returns the frame as a single-channel grayscale Mat.
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can signal camera permission has been granted.
* The actual onCameraViewStarted callback will be delivered only after setCameraPermissionGranted
* and enableView have been called and surface is available
*/
public void setCameraPermissionGranted() {
synchronized(mSyncObject) {
mCameraPermissionGranted = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after setCameraPermissionGranted
* and enableView have been called and surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the screen
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables the on-screen label with the FPS value.
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
* Sets the listener that receives camera view lifecycle events and frames.
* @param listener the new camera view listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that the camera frame is allowed to be. When selecting
* a size, the largest supported size that is less than or equal to the limit will be selected.
* As an example, with setMaxFrameSize(200,200) and supported sizes 176x152 and 320x240, the
* preview frame will be selected with the 176x152 size.
* This method is useful when the preview frame size needs to be restricted for some reason (for example, for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mCameraPermissionGranted && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by subclasses when they have a valid frame
* object and want it to be delivered to the external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
if (BuildConfig.DEBUG)
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
* This method is invoked when the view needs to perform the concrete operations to initialize the camera.
* CONTRACT: as a result of this method the variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the camera frames that will be delivered to the external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
* It goes over the list of supported preview sizes and selects the largest one that
* fits both the limits set via setMaxFrameSize() and the surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
Log.d(TAG, "trying size: " + width + "x" + height);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
if ((calcWidth == 0 || calcHeight == 0) && supportedSizes.size() > 0)
{
Log.i(TAG, "fallback to the first frame size");
Object size = supportedSizes.get(0);
calcWidth = accessor.getWidth(size);
calcHeight = accessor.getHeight(size);
}
return new Size(calcWidth, calcHeight);
}
}
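A minimal wiring sketch for the listener contract described above; it is assumed to run inside an Activity method that already holds a concrete camera view (e.g. a JavaCameraView) in mCameraView.

import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.core.Mat;

CvCameraViewListener2 listener = new CvCameraViewListener2() {
    public void onCameraViewStarted(int width, int height) { /* allocate per-frame Mats here */ }
    public void onCameraViewStopped() { /* release per-frame Mats here */ }
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        return inputFrame.gray(); // the returned Mat is what deliverAndDrawFrame() renders
    }
};
mCameraView.setCvCameraViewListener(listener);
mCameraView.setCameraPermissionGranted(); // after the CAMERA runtime permission is granted
mCameraView.enableView();                 // frames start once the surface becomes available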

View File

@ -0,0 +1,66 @@
package org.opencv.android;
import java.text.DecimalFormat;
import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "FpsMeter";
private static final int STEP = 20;
private static final DecimalFormat FPS_FORMAT = new DecimalFormat("0.00");
private int mFramesCounter;
private double mFrequency;
private long mprevFrameTime;
private String mStrfps;
Paint mPaint;
boolean mIsInitialized = false;
int mWidth = 0;
int mHeight = 0;
public void init() {
mFramesCounter = 0;
mFrequency = Core.getTickFrequency();
mprevFrameTime = Core.getTickCount();
mStrfps = "";
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setTextSize(20);
}
public void measure() {
if (!mIsInitialized) {
init();
mIsInitialized = true;
} else {
mFramesCounter++;
if (mFramesCounter % STEP == 0) {
long time = Core.getTickCount();
double fps = STEP * mFrequency / (time - mprevFrameTime);
mprevFrameTime = time;
if (mWidth != 0 && mHeight != 0)
mStrfps = FPS_FORMAT.format(fps) + " FPS@" + Integer.valueOf(mWidth) + "x" + Integer.valueOf(mHeight);
else
mStrfps = FPS_FORMAT.format(fps) + " FPS";
Log.i(TAG, mStrfps);
}
}
}
public void setResolution(int width, int height) {
mWidth = width;
mHeight = height;
}
public void draw(Canvas canvas, float offsetx, float offsety) {
Log.d(TAG, mStrfps);
canvas.drawText(mStrfps, offsetx, offsety, mPaint);
}
}
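A brief usage sketch: the meter is normally created by enableFpsMeter() on the camera view, but it can also be driven manually; the Canvas below is assumed to come from the surrounding drawing code.

FpsMeter meter = new FpsMeter();
meter.setResolution(640, 480);   // resolution shown in the on-screen label
// per frame:
meter.measure();                 // recomputes the FPS string every 20 frames
meter.draw(canvas, 20, 30);      // renders e.g. "29.97 FPS@640x480" at the given offset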

View File

@ -0,0 +1,34 @@
package org.opencv.android;
/**
* Installation callback interface.
*/
public interface InstallCallbackInterface
{
/**
* New package installation is required.
*/
static final int NEW_INSTALLATION = 0;
/**
* Current package installation is in progress.
*/
static final int INSTALLATION_PROGRESS = 1;
/**
* Target package name.
* @return Return target package name.
*/
public String getPackageName();
/**
* Installation is approved.
*/
public void install();
/**
* Installation is canceled.
*/
public void cancel();
/**
* Wait for package installation.
*/
public void wait_install();
};

View File

@ -0,0 +1,379 @@
package org.opencv.android;
import java.util.List;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ViewGroup.LayoutParams;
import org.opencv.BuildConfig;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and Java Camera.
* This class relies on the functionality available in the base class and only implements the
* required functions:
* connectCamera - opens the Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops the preview.
* When a frame is delivered via the callback from Camera, it is processed via OpenCV to be
* converted to RGBA32 and then passed to the external callback for modifications if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mPreviewFormat = ImageFormat.NV21;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
@Override
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public JavaCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
if (mCameraIndex == CAMERA_ID_ANY) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CAMERA_ID_BACK) {
Log.i(TAG, "Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CAMERA_ID_FRONT) {
Log.i(TAG, "Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CAMERA_ID_BACK) {
Log.e(TAG, "Back camera not found!");
} else if (localCameraIndex == CAMERA_ID_FRONT) {
Log.e(TAG, "Front camera not found!");
} else {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
/* Image format NV21 causes issues in the Android emulators */
if (Build.FINGERPRINT.startsWith("generic")
|| Build.FINGERPRINT.startsWith("unknown")
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.MANUFACTURER.contains("Genymotion")
|| (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
|| "google_sdk".equals(Build.PRODUCT))
params.setPreviewFormat(ImageFormat.YV12); // "generic" or "android" = android emulator
else
params.setPreviewFormat(ImageFormat.NV21);
mPreviewFormat = params.getPreviewFormat();
Log.d(TAG, "Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
private boolean mCameraFrameReady = false;
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
mCameraFrameReady = false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Waiting for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
mCameraFrameReady = false;
}
@Override
public void onPreviewFrame(byte[] frame, Camera arg1) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
synchronized (this) {
mFrameChain[mChainIdx].put(0, 0, frame);
mCameraFrameReady = true;
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
private class CameraWorker implements Runnable {
@Override
public void run() {
do {
boolean hasFrame = false;
synchronized (JavaCameraView.this) {
try {
while (!mCameraFrameReady && !mStopThread) {
JavaCameraView.this.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mCameraFrameReady)
{
mChainIdx = 1 - mChainIdx;
mCameraFrameReady = false;
hasFrame = true;
}
}
if (!mStopThread && hasFrame) {
if (!mFrameChain[1 - mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
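A brief programmatic setup sketch, assumed to run inside Activity.onCreate(); "listener" is a CvCameraViewListener2 implementation such as the one sketched after CameraBridgeViewBase.

JavaCameraView cameraView = new JavaCameraView(this, CameraBridgeViewBase.CAMERA_ID_BACK);
cameraView.setMaxFrameSize(1280, 720);        // upper bound; the closest smaller preview size is chosen
cameraView.setCvCameraViewListener(listener);
setContentView(cameraView);
cameraView.setCameraPermissionGranted();      // once the CAMERA permission has been granted
cameraView.enableView();                      // connectCamera() runs when the surface is ready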

View File

@ -0,0 +1,40 @@
package org.opencv.android;
/**
* Interface for callback object in case of asynchronous initialization of OpenCV.
*/
public interface LoaderCallbackInterface
{
/**
* OpenCV initialization finished successfully.
*/
static final int SUCCESS = 0;
/**
* Google Play Market cannot be invoked.
*/
static final int MARKET_ERROR = 2;
/**
* OpenCV library installation has been canceled by the user.
*/
static final int INSTALL_CANCELED = 3;
/**
* This version of OpenCV Manager Service is incompatible with the app. Possibly, a service update is required.
*/
static final int INCOMPATIBLE_MANAGER_VERSION = 4;
/**
* OpenCV library initialization has failed.
*/
static final int INIT_FAILED = 0xff;
/**
* Callback method, called after OpenCV library initialization.
* @param status status of initialization (see initialization status constants).
*/
public void onManagerConnected(int status);
/**
* Callback method, called in case the package installation is needed.
* @param callback answer object with approve and cancel methods and the package description.
*/
public void onPackageInstall(final int operation, InstallCallbackInterface callback);
};

View File

@ -0,0 +1,132 @@
package org.opencv.android;
import android.content.Context;
/**
* Helper class provides common initialization methods for OpenCV library.
*/
public class OpenCVLoader
{
/**
* OpenCV Library version 2.4.2.
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* OpenCV Library version 2.4.4.
*/
public static final String OPENCV_VERSION_2_4_4 = "2.4.4";
/**
* OpenCV Library version 2.4.5.
*/
public static final String OPENCV_VERSION_2_4_5 = "2.4.5";
/**
* OpenCV Library version 2.4.6.
*/
public static final String OPENCV_VERSION_2_4_6 = "2.4.6";
/**
* OpenCV Library version 2.4.7.
*/
public static final String OPENCV_VERSION_2_4_7 = "2.4.7";
/**
* OpenCV Library version 2.4.8.
*/
public static final String OPENCV_VERSION_2_4_8 = "2.4.8";
/**
* OpenCV Library version 2.4.9.
*/
public static final String OPENCV_VERSION_2_4_9 = "2.4.9";
/**
* OpenCV Library version 2.4.10.
*/
public static final String OPENCV_VERSION_2_4_10 = "2.4.10";
/**
* OpenCV Library version 2.4.11.
*/
public static final String OPENCV_VERSION_2_4_11 = "2.4.11";
/**
* OpenCV Library version 2.4.12.
*/
public static final String OPENCV_VERSION_2_4_12 = "2.4.12";
/**
* OpenCV Library version 2.4.13.
*/
public static final String OPENCV_VERSION_2_4_13 = "2.4.13";
/**
* OpenCV Library version 3.0.0.
*/
public static final String OPENCV_VERSION_3_0_0 = "3.0.0";
/**
* OpenCV Library version 3.1.0.
*/
public static final String OPENCV_VERSION_3_1_0 = "3.1.0";
/**
* OpenCV Library version 3.2.0.
*/
public static final String OPENCV_VERSION_3_2_0 = "3.2.0";
/**
* OpenCV Library version 3.3.0.
*/
public static final String OPENCV_VERSION_3_3_0 = "3.3.0";
/**
* OpenCV Library version 3.4.0.
*/
public static final String OPENCV_VERSION_3_4_0 = "3.4.0";
/**
* Current OpenCV Library version
*/
public static final String OPENCV_VERSION = "@OPENCV_VERSION_MAJOR@.@OPENCV_VERSION_MINOR@.@OPENCV_VERSION_PATCH@";
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug()
{
return StaticHelper.initOpenCV(false);
}
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @param InitCuda load and initialize CUDA runtime libraries.
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug(boolean InitCuda)
{
return StaticHelper.initOpenCV(InitCuda);
}
/**
* Loads and initializes OpenCV library using OpenCV Engine service.
* @param Version OpenCV library version.
* @param AppContext application context for connecting to the service.
* @param Callback object, that implements LoaderCallbackInterface for handling the connection status.
* @return Returns true if initialization of OpenCV is successful.
*/
public static boolean initAsync(String Version, Context AppContext,
LoaderCallbackInterface Callback)
{
return AsyncServiceHelper.initOpenCV(Version, AppContext, Callback);
}
}
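A minimal static-initialization sketch for applications that bundle the native library themselves; the log tag "MyApp" is illustrative.

import android.util.Log;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

// e.g. in Application.onCreate():
if (OpenCVLoader.initDebug()) {
    Mat eye = Mat.eye(3, 3, CvType.CV_8UC1);  // native calls are safe after initDebug() returns true
    Log.i("MyApp", "OpenCV loaded, test Mat:\n" + eye.dump());
} else {
    Log.e("MyApp", "OpenCV static initialization failed");
}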

View File

@ -0,0 +1,104 @@
package org.opencv.android;
import org.opencv.core.Core;
import java.util.StringTokenizer;
import android.util.Log;
class StaticHelper {
public static boolean initOpenCV(boolean InitCuda)
{
boolean result;
String libs = "";
if(InitCuda)
{
loadLibrary("cudart");
loadLibrary("nppc");
loadLibrary("nppi");
loadLibrary("npps");
loadLibrary("cufft");
loadLibrary("cublas");
}
Log.d(TAG, "Trying to get library list");
try
{
System.loadLibrary("opencv_info");
libs = getLibraryList();
}
catch(UnsatisfiedLinkError e)
{
Log.e(TAG, "OpenCV error: Cannot load info library for OpenCV");
}
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
if (initOpenCVLibs(libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
result = true;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
result = false;
}
return result;
}
private static boolean loadLibrary(String Name)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + Name);
try
{
System.loadLibrary(Name);
Log.d(TAG, "Library " + Name + " loaded");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + Name + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private static boolean initOpenCVLibs(String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
result &= loadLibrary(splitter.nextToken());
}
}
else
{
// If dependencies list is not defined or empty.
result = loadLibrary("opencv_java4");
}
return result;
}
private static final String TAG = "OpenCV/StaticHelper";
private static native String getLibraryList();
}

View File

@ -0,0 +1,139 @@
package org.opencv.android;
import android.content.Context;
import android.graphics.Bitmap;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class Utils {
public static String exportResource(Context context, int resourceId) {
return exportResource(context, resourceId, "OpenCV_data");
}
public static String exportResource(Context context, int resourceId, String dirname) {
String fullname = context.getResources().getString(resourceId);
String resName = fullname.substring(fullname.lastIndexOf("/") + 1);
try {
InputStream is = context.getResources().openRawResource(resourceId);
File resDir = context.getDir(dirname, Context.MODE_PRIVATE);
File resFile = new File(resDir, resName);
FileOutputStream os = new FileOutputStream(resFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
return resFile.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
throw new CvException("Failed to export resource " + resName
+ ". Exception thrown: " + e);
}
}
public static Mat loadResource(Context context, int resourceId) throws IOException
{
return loadResource(context, resourceId, -1);
}
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
InputStream is = context.getResources().openRawResource(resourceId);
ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
encoded.put(0, 0, os.toByteArray());
os.close();
Mat decoded = Imgcodecs.imdecode(encoded, flags);
encoded.release();
return decoded;
}
/**
* Converts Android Bitmap to OpenCV Mat.
* <p>
* This function converts an Android Bitmap image to the OpenCV Mat.
* <br>'ARGB_8888' and 'RGB_565' input Bitmap formats are supported.
* <br>The output Mat is always created of the same size as the input Bitmap and of the 'CV_8UC4' type,
* it keeps the image in RGBA format.
* <br>This function throws an exception if the conversion fails.
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so it may be empty.
* @param unPremultiplyAlpha is a flag that determines whether the bitmap needs to be converted from the alpha-premultiplied format (in which Android keeps 'ARGB_8888' bitmaps) to the regular one; this flag is ignored for 'RGB_565' bitmaps.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat, boolean unPremultiplyAlpha) {
if (bmp == null)
throw new IllegalArgumentException("bmp == null");
if (mat == null)
throw new IllegalArgumentException("mat == null");
nBitmapToMat2(bmp, mat.nativeObj, unPremultiplyAlpha);
}
/**
* Short form of the bitmapToMat(bmp, mat, unPremultiplyAlpha=false).
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so Mat may be empty.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat) {
bitmapToMat(bmp, mat, false);
}
/**
* Converts OpenCV Mat to Android Bitmap.
* <p>
* <br>This function converts an image in the OpenCV Mat representation to the Android Bitmap.
* <br>The input Mat object has to be of the types 'CV_8UC1' (gray-scale), 'CV_8UC3' (RGB) or 'CV_8UC4' (RGBA).
* <br>The output Bitmap object has to be of the same size as the input Mat and of the types 'ARGB_8888' or 'RGB_565'.
* <br>This function throws an exception if the conversion fails.
*
* @param mat is a valid input Mat object of types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
* @param premultiplyAlpha is a flag that determines whether the Mat needs to be converted to the alpha-premultiplied format (in which Android keeps 'ARGB_8888' bitmaps); the flag is ignored for 'RGB_565' bitmaps.
*/
public static void matToBitmap(Mat mat, Bitmap bmp, boolean premultiplyAlpha) {
if (mat == null)
throw new IllegalArgumentException("mat == null");
if (bmp == null)
throw new IllegalArgumentException("bmp == null");
nMatToBitmap2(mat.nativeObj, bmp, premultiplyAlpha);
}
/**
* Short form of the <b>matToBitmap(mat, bmp, premultiplyAlpha=false)</b>
* @param mat is a valid input Mat object of the types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
*/
public static void matToBitmap(Mat mat, Bitmap bmp) {
matToBitmap(mat, bmp, false);
}
private static native void nBitmapToMat2(Bitmap b, long m_addr, boolean unPremultiplyAlpha);
private static native void nMatToBitmap2(long m_addr, Bitmap b, boolean premultiplyAlpha);
}
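A short round-trip sketch between a Bitmap and a Mat; the input Bitmap "bmp" is assumed to be an ARGB_8888 image obtained elsewhere (camera, resources, ...).

import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

Mat rgba = new Mat();
Utils.bitmapToMat(bmp, rgba);                          // rgba becomes CV_8UC4, same size as bmp
Imgproc.cvtColor(rgba, rgba, Imgproc.COLOR_RGBA2GRAY); // process in grayscale
Imgproc.cvtColor(rgba, rgba, Imgproc.COLOR_GRAY2RGBA); // back to RGBA for display
Bitmap out = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(rgba, out);                          // throws CvException on size/type mismatch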

View File

@ -0,0 +1,33 @@
package org.opencv.engine;
/**
* Class provides a Java interface for OpenCV Engine Service. It's synchronous with native OpenCVEngine class.
*/
interface OpenCVEngineInterface
{
/**
* @return Returns service version.
*/
int getEngineVersion();
/**
* Finds an installed OpenCV library.
* @param version OpenCV version.
* @return Returns the path to OpenCV native libs or an empty string if OpenCV cannot be found.
*/
String getLibPathByVersion(String version);
/**
* Tries to install defined version of OpenCV from Google Play Market.
* @param version OpenCV version.
* @return Returns true if installation was successful or OpenCV package has been already installed.
*/
boolean installVersion(String version);
/**
* Returns list of libraries in loading order, separated by semicolon.
* @param version OpenCV version.
* @return Returns names of OpenCV libraries, separated by semicolon.
*/
String getLibraryList(String version);
}
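For illustration, a client obtains this interface from the bound OpenCV Manager service through the AIDL-generated Stub; the snippet below is a sketch, not part of this file, and runs inside ServiceConnection.onServiceConnected(ComponentName name, IBinder service).

OpenCVEngineInterface engine = OpenCVEngineInterface.Stub.asInterface(service);
try {
    int engineVersion = engine.getEngineVersion();
    String path = engine.getLibPathByVersion("3.4");   // "" when this version is not installed
    String libs = engine.getLibraryList("3.4");        // semicolon-separated load order
} catch (android.os.RemoteException e) {
    e.printStackTrace();                               // every AIDL call can fail remotely
}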

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,42 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#ifndef __OPENCV_JAVA_COMMON_H__
#define __OPENCV_JAVA_COMMON_H__
#include <stdexcept>
#include <string>
extern "C" {
#if !defined(__ppc__)
// to suppress warning from jni.h on OS X
# define TARGET_RT_MAC_CFM 0
#endif
#include <jni.h>
// make -fvisibility=hidden work with java 1.7
#if defined(__linux__) && !defined(__ANDROID__) && !defined (JNI_VERSION_1_8)
// adapted from jdk1.8/jni.h
#if (defined(__GNUC__) && ((__GNUC__ > 4) || (__GNUC__ == 4) && (__GNUC_MINOR__ > 2))) || __has_attribute(visibility)
#undef JNIEXPORT
#define JNIEXPORT __attribute__((visibility("default")))
#undef JNIIMPORT
#define JNIIMPORT __attribute__((visibility("default")))
#endif
#endif
} // extern "C"
#include "opencv_java.hpp"
#include "opencv2/core/utility.hpp"
#include "converters.h"
#include "listconverters.hpp"
#ifdef _MSC_VER
# pragma warning(disable:4800 4244)
#endif
#endif //__OPENCV_JAVA_COMMON_H__

View File

@ -0,0 +1,352 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#define LOG_TAG "org.opencv.utils.Converters"
#include "common.h"
using namespace cv;
// vector_int
void Mat_to_vector_int(Mat& mat, std::vector<int>& v_int)
{
v_int.clear();
CHECK_MAT(mat.type()==CV_32SC1 && mat.cols==1);
v_int = (std::vector<int>) mat;
}
void vector_int_to_Mat(std::vector<int>& v_int, Mat& mat)
{
mat = Mat(v_int, true);
}
//vector_double
void Mat_to_vector_double(Mat& mat, std::vector<double>& v_double)
{
v_double.clear();
CHECK_MAT(mat.type()==CV_64FC1 && mat.cols==1);
v_double = (std::vector<double>) mat;
}
void vector_double_to_Mat(std::vector<double>& v_double, Mat& mat)
{
mat = Mat(v_double, true);
}
// vector_float
void Mat_to_vector_float(Mat& mat, std::vector<float>& v_float)
{
v_float.clear();
CHECK_MAT(mat.type()==CV_32FC1 && mat.cols==1);
v_float = (std::vector<float>) mat;
}
void vector_float_to_Mat(std::vector<float>& v_float, Mat& mat)
{
mat = Mat(v_float, true);
}
//vector_uchar
void Mat_to_vector_uchar(Mat& mat, std::vector<uchar>& v_uchar)
{
v_uchar.clear();
CHECK_MAT(mat.type()==CV_8UC1 && mat.cols==1);
v_uchar = (std::vector<uchar>) mat;
}
void vector_uchar_to_Mat(std::vector<uchar>& v_uchar, Mat& mat)
{
mat = Mat(v_uchar, true);
}
void Mat_to_vector_char(Mat& mat, std::vector<char>& v_char)
{
v_char.clear();
CHECK_MAT(mat.type()==CV_8SC1 && mat.cols==1);
v_char = (std::vector<char>) mat;
}
void vector_char_to_Mat(std::vector<char>& v_char, Mat& mat)
{
mat = Mat(v_char, true);
}
//vector_Rect
void Mat_to_vector_Rect(Mat& mat, std::vector<Rect>& v_rect)
{
v_rect.clear();
CHECK_MAT(mat.type()==CV_32SC4 && mat.cols==1);
v_rect = (std::vector<Rect>) mat;
}
void vector_Rect_to_Mat(std::vector<Rect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
//vector_Rect2d
void Mat_to_vector_Rect2d(Mat& mat, std::vector<Rect2d>& v_rect)
{
v_rect.clear();
CHECK_MAT(mat.type()==CV_64FC4 && mat.cols==1);
v_rect = (std::vector<Rect2d>) mat;
}
void vector_Rect2d_to_Mat(std::vector<Rect2d>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
//vector_RotatedRect
void Mat_to_vector_RotatedRect(Mat& mat, std::vector<RotatedRect>& v_rect)
{
v_rect.clear();
CHECK_MAT(mat.type()==CV_32FC(5) && mat.cols==1);
v_rect = (std::vector<RotatedRect>) mat;
}
void vector_RotatedRect_to_Mat(std::vector<RotatedRect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
//vector_Point
void Mat_to_vector_Point(Mat& mat, std::vector<Point>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_32SC2 && mat.cols==1);
v_point = (std::vector<Point>) mat;
}
//vector_Point2f
void Mat_to_vector_Point2f(Mat& mat, std::vector<Point2f>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_32FC2 && mat.cols==1);
v_point = (std::vector<Point2f>) mat;
}
//vector_Point2d
void Mat_to_vector_Point2d(Mat& mat, std::vector<Point2d>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_64FC2 && mat.cols==1);
v_point = (std::vector<Point2d>) mat;
}
//vector_Point3i
void Mat_to_vector_Point3i(Mat& mat, std::vector<Point3i>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_32SC3 && mat.cols==1);
v_point = (std::vector<Point3i>) mat;
}
//vector_Point3f
void Mat_to_vector_Point3f(Mat& mat, std::vector<Point3f>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_32FC3 && mat.cols==1);
v_point = (std::vector<Point3f>) mat;
}
//vector_Point3d
void Mat_to_vector_Point3d(Mat& mat, std::vector<Point3d>& v_point)
{
v_point.clear();
CHECK_MAT(mat.type()==CV_64FC3 && mat.cols==1);
v_point = (std::vector<Point3d>) mat;
}
void vector_Point_to_Mat(std::vector<Point>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
void vector_Point2f_to_Mat(std::vector<Point2f>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
void vector_Point2d_to_Mat(std::vector<Point2d>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
void vector_Point3i_to_Mat(std::vector<Point3i>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
void vector_Point3f_to_Mat(std::vector<Point3f>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
void vector_Point3d_to_Mat(std::vector<Point3d>& v_point, Mat& mat)
{
mat = Mat(v_point, true);
}
//vector_Mat
void Mat_to_vector_Mat(cv::Mat& mat, std::vector<cv::Mat>& v_mat)
{
v_mat.clear();
if(mat.type() == CV_32SC2 && mat.cols == 1)
{
v_mat.reserve(mat.rows);
for(int i=0; i<mat.rows; i++)
{
Vec<int, 2> a = mat.at< Vec<int, 2> >(i, 0);
long long addr = (((long long)a[0])<<32) | (a[1]&0xffffffff);
Mat& m = *( (Mat*) addr );
v_mat.push_back(m);
}
} else {
LOGD("Mat_to_vector_Mat() FAILED: mat.type() == CV_32SC2 && mat.cols == 1");
}
}
void vector_Mat_to_Mat(std::vector<cv::Mat>& v_mat, cv::Mat& mat)
{
int count = (int)v_mat.size();
mat.create(count, 1, CV_32SC2);
for(int i=0; i<count; i++)
{
long long addr = (long long) new Mat(v_mat[i]);
mat.at< Vec<int, 2> >(i, 0) = Vec<int, 2>(addr>>32, addr&0xffffffff);
}
}
void Mat_to_vector_vector_Point(Mat& mat, std::vector< std::vector< Point > >& vv_pt)
{
std::vector<Mat> vm;
vm.reserve( mat.rows );
Mat_to_vector_Mat(mat, vm);
for(size_t i=0; i<vm.size(); i++)
{
std::vector<Point> vpt;
Mat_to_vector_Point(vm[i], vpt);
vv_pt.push_back(vpt);
}
}
void Mat_to_vector_vector_Point2f(Mat& mat, std::vector< std::vector< Point2f > >& vv_pt)
{
std::vector<Mat> vm;
vm.reserve( mat.rows );
Mat_to_vector_Mat(mat, vm);
for(size_t i=0; i<vm.size(); i++)
{
std::vector<Point2f> vpt;
Mat_to_vector_Point2f(vm[i], vpt);
vv_pt.push_back(vpt);
}
}
void Mat_to_vector_vector_Point3f(Mat& mat, std::vector< std::vector< Point3f > >& vv_pt)
{
std::vector<Mat> vm;
vm.reserve( mat.rows );
Mat_to_vector_Mat(mat, vm);
for(size_t i=0; i<vm.size(); i++)
{
std::vector<Point3f> vpt;
Mat_to_vector_Point3f(vm[i], vpt);
vv_pt.push_back(vpt);
}
}
void Mat_to_vector_vector_char(Mat& mat, std::vector< std::vector< char > >& vv_ch)
{
std::vector<Mat> vm;
vm.reserve( mat.rows );
Mat_to_vector_Mat(mat, vm);
for(size_t i=0; i<vm.size(); i++)
{
std::vector<char> vch;
Mat_to_vector_char(vm[i], vch);
vv_ch.push_back(vch);
}
}
void vector_vector_char_to_Mat(std::vector< std::vector< char > >& vv_ch, Mat& mat)
{
std::vector<Mat> vm;
vm.reserve( vv_ch.size() );
for(size_t i=0; i<vv_ch.size(); i++)
{
Mat m;
vector_char_to_Mat(vv_ch[i], m);
vm.push_back(m);
}
vector_Mat_to_Mat(vm, mat);
}
void vector_vector_Point_to_Mat(std::vector< std::vector< Point > >& vv_pt, Mat& mat)
{
std::vector<Mat> vm;
vm.reserve( vv_pt.size() );
for(size_t i=0; i<vv_pt.size(); i++)
{
Mat m;
vector_Point_to_Mat(vv_pt[i], m);
vm.push_back(m);
}
vector_Mat_to_Mat(vm, mat);
}
void vector_vector_Point2f_to_Mat(std::vector< std::vector< Point2f > >& vv_pt, Mat& mat)
{
std::vector<Mat> vm;
vm.reserve( vv_pt.size() );
for(size_t i=0; i<vv_pt.size(); i++)
{
Mat m;
vector_Point2f_to_Mat(vv_pt[i], m);
vm.push_back(m);
}
vector_Mat_to_Mat(vm, mat);
}
void vector_vector_Point3f_to_Mat(std::vector< std::vector< Point3f > >& vv_pt, Mat& mat)
{
std::vector<Mat> vm;
vm.reserve( vv_pt.size() );
for(size_t i=0; i<vv_pt.size(); i++)
{
Mat m;
vector_Point3f_to_Mat(vv_pt[i], m);
vm.push_back(m);
}
vector_Mat_to_Mat(vm, mat);
}
void vector_Vec4i_to_Mat(std::vector<Vec4i>& v_vec, Mat& mat)
{
mat = Mat(v_vec, true);
}
void vector_Vec4f_to_Mat(std::vector<Vec4f>& v_vec, Mat& mat)
{
mat = Mat(v_vec, true);
}
void vector_Vec6f_to_Mat(std::vector<Vec6f>& v_vec, Mat& mat)
{
mat = Mat(v_vec, true);
}

View File

@ -0,0 +1,63 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "opencv2/opencv_modules.hpp"
#include "opencv2/core.hpp"
void Mat_to_vector_int(cv::Mat& mat, std::vector<int>& v_int);
void vector_int_to_Mat(std::vector<int>& v_int, cv::Mat& mat);
void Mat_to_vector_double(cv::Mat& mat, std::vector<double>& v_double);
void vector_double_to_Mat(std::vector<double>& v_double, cv::Mat& mat);
void Mat_to_vector_float(cv::Mat& mat, std::vector<float>& v_float);
void vector_float_to_Mat(std::vector<float>& v_float, cv::Mat& mat);
void Mat_to_vector_uchar(cv::Mat& mat, std::vector<uchar>& v_uchar);
void vector_uchar_to_Mat(std::vector<uchar>& v_uchar, cv::Mat& mat);
void Mat_to_vector_char(cv::Mat& mat, std::vector<char>& v_char);
void vector_char_to_Mat(std::vector<char>& v_char, cv::Mat& mat);
void Mat_to_vector_Rect(cv::Mat& mat, std::vector<cv::Rect>& v_rect);
void vector_Rect_to_Mat(std::vector<cv::Rect>& v_rect, cv::Mat& mat);
void Mat_to_vector_Rect2d(cv::Mat& mat, std::vector<cv::Rect2d>& v_rect);
void vector_Rect2d_to_Mat(std::vector<cv::Rect2d>& v_rect, cv::Mat& mat);
void Mat_to_vector_RotatedRect(cv::Mat& mat, std::vector<cv::RotatedRect>& v_rect);
void vector_RotatedRect_to_Mat(std::vector<cv::RotatedRect>& v_rect, cv::Mat& mat);
void Mat_to_vector_Point(cv::Mat& mat, std::vector<cv::Point>& v_point);
void Mat_to_vector_Point2f(cv::Mat& mat, std::vector<cv::Point2f>& v_point);
void Mat_to_vector_Point2d(cv::Mat& mat, std::vector<cv::Point2d>& v_point);
void Mat_to_vector_Point3i(cv::Mat& mat, std::vector<cv::Point3i>& v_point);
void Mat_to_vector_Point3f(cv::Mat& mat, std::vector<cv::Point3f>& v_point);
void Mat_to_vector_Point3d(cv::Mat& mat, std::vector<cv::Point3d>& v_point);
void vector_Point_to_Mat(std::vector<cv::Point>& v_point, cv::Mat& mat);
void vector_Point2f_to_Mat(std::vector<cv::Point2f>& v_point, cv::Mat& mat);
void vector_Point2d_to_Mat(std::vector<cv::Point2d>& v_point, cv::Mat& mat);
void vector_Point3i_to_Mat(std::vector<cv::Point3i>& v_point, cv::Mat& mat);
void vector_Point3f_to_Mat(std::vector<cv::Point3f>& v_point, cv::Mat& mat);
void vector_Point3d_to_Mat(std::vector<cv::Point3d>& v_point, cv::Mat& mat);
void vector_Vec4i_to_Mat(std::vector<cv::Vec4i>& v_vec, cv::Mat& mat);
void vector_Vec4f_to_Mat(std::vector<cv::Vec4f>& v_vec, cv::Mat& mat);
void vector_Vec6f_to_Mat(std::vector<cv::Vec6f>& v_vec, cv::Mat& mat);
void Mat_to_vector_Mat(cv::Mat& mat, std::vector<cv::Mat>& v_mat);
void vector_Mat_to_Mat(std::vector<cv::Mat>& v_mat, cv::Mat& mat);
void Mat_to_vector_vector_char(cv::Mat& mat, std::vector< std::vector< char > >& vv_ch);
void vector_vector_char_to_Mat(std::vector< std::vector< char > >& vv_ch, cv::Mat& mat);
void Mat_to_vector_vector_Point(cv::Mat& mat, std::vector< std::vector< cv::Point > >& vv_pt);
void vector_vector_Point_to_Mat(std::vector< std::vector< cv::Point > >& vv_pt, cv::Mat& mat);
void Mat_to_vector_vector_Point2f(cv::Mat& mat, std::vector< std::vector< cv::Point2f > >& vv_pt);
void vector_vector_Point2f_to_Mat(std::vector< std::vector< cv::Point2f > >& vv_pt, cv::Mat& mat);
void Mat_to_vector_vector_Point3f(cv::Mat& mat, std::vector< std::vector< cv::Point3f > >& vv_pt);
void vector_vector_Point3f_to_Mat(std::vector< std::vector< cv::Point3f > >& vv_pt, cv::Mat& mat);

View File

@ -0,0 +1,42 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "common.h"
#include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_FEATURES2D
# include "opencv2/features2d.hpp"
#endif
#ifdef HAVE_OPENCV_VIDEO
# include "opencv2/video.hpp"
#endif
#ifdef HAVE_OPENCV_CONTRIB
# include "opencv2/contrib.hpp"
#endif
extern "C" {
JNIEXPORT jint JNICALL
JNI_OnLoad(JavaVM* vm, void* )
{
JNIEnv* env;
if (vm->GetEnv((void**) &env, JNI_VERSION_1_6) != JNI_OK)
return -1;
/* get class with (*env)->FindClass */
/* register methods with (*env)->RegisterNatives */
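/* nothing is registered here: the generated bindings export their entry points
   under the standard Java_org_opencv_* names, so this hook only reports the
   required JNI version */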
return JNI_VERSION_1_6;
}
JNIEXPORT void JNICALL
JNI_OnUnload(JavaVM*, void*)
{
//do nothing
}
} // extern "C"

View File

@ -0,0 +1,110 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
// Author: abratchik
#define LOG_TAG "org.opencv.utils.Converters"
#include "common.h"
jobject vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs) {
static jclass juArrayList = ARRAYLIST(env);
static jmethodID m_create = CONSTRUCTOR(env, juArrayList);
jmethodID m_add = LIST_ADD(env, juArrayList);
jobject result = env->NewObject(juArrayList, m_create, vs.size());
for (std::vector<cv::String>::iterator it = vs.begin(); it != vs.end(); ++it) {
jstring element = env->NewStringUTF((*it).c_str());
env->CallBooleanMethod(result, m_add, element);
env->DeleteLocalRef(element);
}
return result;
}
std::vector<cv::String> List_to_vector_String(JNIEnv* env, jobject list)
{
static jclass juArrayList = ARRAYLIST(env);
jmethodID m_size = LIST_SIZE(env,juArrayList);
jmethodID m_get = LIST_GET(env, juArrayList);
jint len = env->CallIntMethod(list, m_size);
std::vector<cv::String> result;
result.reserve(len);
for (jint i=0; i<len; i++)
{
jstring element = static_cast<jstring>(env->CallObjectMethod(list, m_get, i));
const char* pchars = env->GetStringUTFChars(element, NULL);
result.push_back(pchars);
env->ReleaseStringUTFChars(element, pchars);
env->DeleteLocalRef(element);
}
return result;
}
void Copy_vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs, jobject list)
{
static jclass juArrayList = ARRAYLIST(env);
jmethodID m_clear = LIST_CLEAR(env, juArrayList);
jmethodID m_add = LIST_ADD(env, juArrayList);
env->CallVoidMethod(list, m_clear);
for (std::vector<cv::String>::iterator it = vs.begin(); it != vs.end(); ++it)
{
jstring element = env->NewStringUTF((*it).c_str());
env->CallBooleanMethod(list, m_add, element);
env->DeleteLocalRef(element);
}
}
jobject vector_string_to_List(JNIEnv* env, std::vector<std::string>& vs) {
static jclass juArrayList = ARRAYLIST(env);
static jmethodID m_create = CONSTRUCTOR(env, juArrayList);
jmethodID m_add = LIST_ADD(env, juArrayList);
jobject result = env->NewObject(juArrayList, m_create, vs.size());
for (std::vector<std::string>::iterator it = vs.begin(); it != vs.end(); ++it) {
jstring element = env->NewStringUTF((*it).c_str());
env->CallBooleanMethod(result, m_add, element);
env->DeleteLocalRef(element);
}
return result;
}
std::vector<std::string> List_to_vector_string(JNIEnv* env, jobject list)
{
static jclass juArrayList = ARRAYLIST(env);
jmethodID m_size = LIST_SIZE(env,juArrayList);
jmethodID m_get = LIST_GET(env, juArrayList);
jint len = env->CallIntMethod(list, m_size);
std::vector<std::string> result;
result.reserve(len);
for (jint i=0; i<len; i++)
{
jstring element = static_cast<jstring>(env->CallObjectMethod(list, m_get, i));
const char* pchars = env->GetStringUTFChars(element, NULL);
result.push_back(pchars);
env->ReleaseStringUTFChars(element, pchars);
env->DeleteLocalRef(element);
}
return result;
}
void Copy_vector_string_to_List(JNIEnv* env, std::vector<std::string>& vs, jobject list)
{
static jclass juArrayList = ARRAYLIST(env);
jmethodID m_clear = LIST_CLEAR(env, juArrayList);
jmethodID m_add = LIST_ADD(env, juArrayList);
env->CallVoidMethod(list, m_clear);
for (std::vector<std::string>::iterator it = vs.begin(); it != vs.end(); ++it)
{
jstring element = env->NewStringUTF((*it).c_str());
env->CallBooleanMethod(list, m_add, element);
env->DeleteLocalRef(element);
}
}

View File

@ -0,0 +1,26 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
// Author: abratchik
#ifndef LISTCONVERTERS_HPP
#define LISTCONVERTERS_HPP
#include "opencv2/opencv_modules.hpp"
#include "opencv2/core.hpp"
jobject vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs);
std::vector<cv::String> List_to_vector_String(JNIEnv* env, jobject list);
void Copy_vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs, jobject list);
jobject vector_string_to_List(JNIEnv* env, std::vector<std::string>& vs);
std::vector<std::string> List_to_vector_string(JNIEnv* env, jobject list);
void Copy_vector_string_to_List(JNIEnv* env, std::vector<std::string>& vs, jobject list);
#endif /* LISTCONVERTERS_HPP */

View File

@ -0,0 +1,8 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "common.h"
// Include all generated JNI code
#include "opencv_jni.hpp"

View File

@ -0,0 +1,38 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
// Author: abratchik
#undef LOGE
#undef LOGD
#ifdef __ANDROID__
# include <android/log.h>
# define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
# ifdef DEBUG
# define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
# else
# define LOGD(...)
# endif
#else
# define LOGE(...)
# define LOGD(...)
#endif
#ifndef OPENCV_JAVA_HPP
#define OPENCV_JAVA_HPP
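// JNI helper macros used by the converters and the generated wrappers:
// - MATOFINT / ARRAYLIST cache global references to org.opencv.core.MatOfInt and java.util.ArrayList
// - CONSTRUCTOR / LIST_ADD / LIST_GET / LIST_SIZE / LIST_CLEAR resolve the corresponding method IDs
// - GETNATIVEOBJ reads the long "nativeObj" field holding the native object address
// - CHECK_MAT logs a failed precondition via LOGD and returns from the calling function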
#define MATOFINT(ENV) static_cast<jclass>(ENV->NewGlobalRef(ENV->FindClass("org/opencv/core/MatOfInt")))
#define GETNATIVEOBJ(ENV, CLS, MAT) ENV->GetLongField(MAT, ENV->GetFieldID(CLS, "nativeObj", "J"))
#define CONSTRUCTOR(ENV, CLS) ENV->GetMethodID(CLS, "<init>", "(I)V")
#define ARRAYLIST(ENV) static_cast<jclass>(ENV->NewGlobalRef(ENV->FindClass("java/util/ArrayList")))
#define LIST_ADD(ENV, LIST) ENV->GetMethodID(LIST, "add", "(Ljava/lang/Object;)Z")
#define LIST_GET(ENV, LIST) ENV->GetMethodID(LIST, "get", "(I)Ljava/lang/Object;")
#define LIST_SIZE(ENV, LIST) ENV->GetMethodID(LIST, "size", "()I")
#define LIST_CLEAR(ENV, LIST) ENV->GetMethodID(LIST, "clear", "()V")
#define CHECK_MAT(cond) if(!(cond)){ LOGD("FAILED: " #cond); return; }
#endif // OPENCV_JAVA_HPP

View File

@ -0,0 +1,165 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#ifdef __ANDROID__
#include <android/bitmap.h>
#define LOG_TAG "org.opencv.android.Utils"
#include "common.h"
using namespace cv;
extern "C" {
/*
* Class: org_opencv_android_Utils
* Method: void nBitmapToMat2(Bitmap b, long m_addr, boolean unPremultiplyAlpha)
*/
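// Locks the Bitmap's pixels, wraps them in a temporary Mat and converts RGBA_8888
// or RGB_565 data into the destination CV_8UC4 Mat, optionally un-premultiplying
// the alpha channel (COLOR_mRGBA2RGBA).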
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nBitmapToMat2
(JNIEnv * env, jclass, jobject bitmap, jlong m_addr, jboolean needUnPremultiplyAlpha);
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nBitmapToMat2
(JNIEnv * env, jclass, jobject bitmap, jlong m_addr, jboolean needUnPremultiplyAlpha)
{
AndroidBitmapInfo info;
void* pixels = 0;
Mat& dst = *((Mat*)m_addr);
try {
LOGD("nBitmapToMat");
CV_Assert( AndroidBitmap_getInfo(env, bitmap, &info) >= 0 );
CV_Assert( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
info.format == ANDROID_BITMAP_FORMAT_RGB_565 );
CV_Assert( AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0 );
CV_Assert( pixels );
dst.create(info.height, info.width, CV_8UC4);
if( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 )
{
LOGD("nBitmapToMat: RGBA_8888 -> CV_8UC4");
Mat tmp(info.height, info.width, CV_8UC4, pixels);
if(needUnPremultiplyAlpha) cvtColor(tmp, dst, COLOR_mRGBA2RGBA);
else tmp.copyTo(dst);
} else {
// info.format == ANDROID_BITMAP_FORMAT_RGB_565
LOGD("nBitmapToMat: RGB_565 -> CV_8UC4");
Mat tmp(info.height, info.width, CV_8UC2, pixels);
cvtColor(tmp, dst, COLOR_BGR5652RGBA);
}
AndroidBitmap_unlockPixels(env, bitmap);
return;
} catch(const cv::Exception& e) {
AndroidBitmap_unlockPixels(env, bitmap);
LOGE("nBitmapToMat caught cv::Exception: %s", e.what());
jclass je = env->FindClass("org/opencv/core/CvException");
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
return;
} catch (...) {
AndroidBitmap_unlockPixels(env, bitmap);
LOGE("nBitmapToMat caught unknown exception (...)");
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, "Unknown exception in JNI code {nBitmapToMat}");
return;
}
}
// old signature is left for binary compatibility with 2.4.0 & 2.4.1, to be removed in 2.5
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nBitmapToMat
(JNIEnv * env, jclass, jobject bitmap, jlong m_addr);
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nBitmapToMat
(JNIEnv * env, jclass, jobject bitmap, jlong m_addr)
{
Java_org_opencv_android_Utils_nBitmapToMat2(env, 0, bitmap, m_addr, false);
}
/*
* Class: org_opencv_android_Utils
* Method: void nMatToBitmap2(long m_addr, Bitmap b, boolean premultiplyAlpha)
*/
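// Converts a CV_8UC1/CV_8UC3/CV_8UC4 Mat into the target Bitmap's format
// (RGBA_8888 or RGB_565); alpha premultiplication is applied only for the
// CV_8UC4 -> RGBA_8888 path.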
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap2
(JNIEnv * env, jclass, jlong m_addr, jobject bitmap, jboolean needPremultiplyAlpha);
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap2
(JNIEnv * env, jclass, jlong m_addr, jobject bitmap, jboolean needPremultiplyAlpha)
{
AndroidBitmapInfo info;
void* pixels = 0;
Mat& src = *((Mat*)m_addr);
try {
LOGD("nMatToBitmap");
CV_Assert( AndroidBitmap_getInfo(env, bitmap, &info) >= 0 );
CV_Assert( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
info.format == ANDROID_BITMAP_FORMAT_RGB_565 );
CV_Assert( src.dims == 2 && info.height == (uint32_t)src.rows && info.width == (uint32_t)src.cols );
CV_Assert( src.type() == CV_8UC1 || src.type() == CV_8UC3 || src.type() == CV_8UC4 );
CV_Assert( AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0 );
CV_Assert( pixels );
if( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 )
{
Mat tmp(info.height, info.width, CV_8UC4, pixels);
if(src.type() == CV_8UC1)
{
LOGD("nMatToBitmap: CV_8UC1 -> RGBA_8888");
cvtColor(src, tmp, COLOR_GRAY2RGBA);
} else if(src.type() == CV_8UC3){
LOGD("nMatToBitmap: CV_8UC3 -> RGBA_8888");
cvtColor(src, tmp, COLOR_RGB2RGBA);
} else if(src.type() == CV_8UC4){
LOGD("nMatToBitmap: CV_8UC4 -> RGBA_8888");
if(needPremultiplyAlpha) cvtColor(src, tmp, COLOR_RGBA2mRGBA);
else src.copyTo(tmp);
}
} else {
// info.format == ANDROID_BITMAP_FORMAT_RGB_565
Mat tmp(info.height, info.width, CV_8UC2, pixels);
if(src.type() == CV_8UC1)
{
LOGD("nMatToBitmap: CV_8UC1 -> RGB_565");
cvtColor(src, tmp, COLOR_GRAY2BGR565);
} else if(src.type() == CV_8UC3){
LOGD("nMatToBitmap: CV_8UC3 -> RGB_565");
cvtColor(src, tmp, COLOR_RGB2BGR565);
} else if(src.type() == CV_8UC4){
LOGD("nMatToBitmap: CV_8UC4 -> RGB_565");
cvtColor(src, tmp, COLOR_RGBA2BGR565);
}
}
AndroidBitmap_unlockPixels(env, bitmap);
return;
} catch(const cv::Exception& e) {
AndroidBitmap_unlockPixels(env, bitmap);
LOGE("nMatToBitmap caught cv::Exception: %s", e.what());
jclass je = env->FindClass("org/opencv/core/CvException");
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, e.what());
return;
} catch (...) {
AndroidBitmap_unlockPixels(env, bitmap);
LOGE("nMatToBitmap caught unknown exception (...)");
jclass je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, "Unknown exception in JNI code {nMatToBitmap}");
return;
}
}
// old signature is left for binary compatibility with 2.4.0 & 2.4.1, to be removed in 2.5
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap
(JNIEnv * env, jclass, jlong m_addr, jobject bitmap);
JNIEXPORT void JNICALL Java_org_opencv_android_Utils_nMatToBitmap
(JNIEnv * env, jclass, jlong m_addr, jobject bitmap)
{
Java_org_opencv_android_Utils_nMatToBitmap2(env, 0, m_addr, bitmap, false);
}
} // extern "C"
#endif //__ANDROID__

View File

@ -0,0 +1,8 @@
package org.opencv.osgi;
/**
* Dummy interface to allow some integration testing within an OSGi implementation.
*/
public interface OpenCVInterface
{
}

View File

@ -0,0 +1,18 @@
package org.opencv.osgi;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This class is intended to provide a convenient way to load OpenCV's native
* library from the Java bundle. If Blueprint is enabled in the OSGi container
* this class will be instantiated automatically and its init() method will be
* called, loading the native library.
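* <p>
* Example: {@code new OpenCVNativeLoader().init();}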
*/
public class OpenCVNativeLoader implements OpenCVInterface {
public void init() {
System.loadLibrary("opencv_java@OPENCV_JAVA_LIB_NAME_SUFFIX@");
Logger.getLogger("org.opencv.osgi").log(Level.INFO, "Successfully loaded OpenCV native library.");
}
}

View File

@ -0,0 +1,806 @@
package org.opencv.utils;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Point3;
import org.opencv.core.Size;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.core.Rect2d;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
public class Converters {
public static Mat vector_Point_to_Mat(List<Point> pts) {
return vector_Point_to_Mat(pts, CvType.CV_32S);
}
public static Mat vector_Point2f_to_Mat(List<Point> pts) {
return vector_Point_to_Mat(pts, CvType.CV_32F);
}
public static Mat vector_Point2d_to_Mat(List<Point> pts) {
return vector_Point_to_Mat(pts, CvType.CV_64F);
}
public static Mat vector_Point_to_Mat(List<Point> pts, int typeDepth) {
Mat res;
int count = (pts != null) ? pts.size() : 0;
if (count > 0) {
switch (typeDepth) {
case CvType.CV_32S: {
res = new Mat(count, 1, CvType.CV_32SC2);
int[] buff = new int[count * 2];
for (int i = 0; i < count; i++) {
Point p = pts.get(i);
buff[i * 2] = (int) p.x;
buff[i * 2 + 1] = (int) p.y;
}
res.put(0, 0, buff);
}
break;
case CvType.CV_32F: {
res = new Mat(count, 1, CvType.CV_32FC2);
float[] buff = new float[count * 2];
for (int i = 0; i < count; i++) {
Point p = pts.get(i);
buff[i * 2] = (float) p.x;
buff[i * 2 + 1] = (float) p.y;
}
res.put(0, 0, buff);
}
break;
case CvType.CV_64F: {
res = new Mat(count, 1, CvType.CV_64FC2);
double[] buff = new double[count * 2];
for (int i = 0; i < count; i++) {
Point p = pts.get(i);
buff[i * 2] = p.x;
buff[i * 2 + 1] = p.y;
}
res.put(0, 0, buff);
}
break;
default:
throw new IllegalArgumentException("'typeDepth' can be CV_32S, CV_32F or CV_64F");
}
} else {
res = new Mat();
}
return res;
}
public static Mat vector_Point3i_to_Mat(List<Point3> pts) {
return vector_Point3_to_Mat(pts, CvType.CV_32S);
}
public static Mat vector_Point3f_to_Mat(List<Point3> pts) {
return vector_Point3_to_Mat(pts, CvType.CV_32F);
}
public static Mat vector_Point3d_to_Mat(List<Point3> pts) {
return vector_Point3_to_Mat(pts, CvType.CV_64F);
}
public static Mat vector_Point3_to_Mat(List<Point3> pts, int typeDepth) {
Mat res;
int count = (pts != null) ? pts.size() : 0;
if (count > 0) {
switch (typeDepth) {
case CvType.CV_32S: {
res = new Mat(count, 1, CvType.CV_32SC3);
int[] buff = new int[count * 3];
for (int i = 0; i < count; i++) {
Point3 p = pts.get(i);
buff[i * 3] = (int) p.x;
buff[i * 3 + 1] = (int) p.y;
buff[i * 3 + 2] = (int) p.z;
}
res.put(0, 0, buff);
}
break;
case CvType.CV_32F: {
res = new Mat(count, 1, CvType.CV_32FC3);
float[] buff = new float[count * 3];
for (int i = 0; i < count; i++) {
Point3 p = pts.get(i);
buff[i * 3] = (float) p.x;
buff[i * 3 + 1] = (float) p.y;
buff[i * 3 + 2] = (float) p.z;
}
res.put(0, 0, buff);
}
break;
case CvType.CV_64F: {
res = new Mat(count, 1, CvType.CV_64FC3);
double[] buff = new double[count * 3];
for (int i = 0; i < count; i++) {
Point3 p = pts.get(i);
buff[i * 3] = p.x;
buff[i * 3 + 1] = p.y;
buff[i * 3 + 2] = p.z;
}
res.put(0, 0, buff);
}
break;
default:
throw new IllegalArgumentException("'typeDepth' can be CV_32S, CV_32F or CV_64F");
}
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_Point2f(Mat m, List<Point> pts) {
Mat_to_vector_Point(m, pts);
}
public static void Mat_to_vector_Point2d(Mat m, List<Point> pts) {
Mat_to_vector_Point(m, pts);
}
public static void Mat_to_vector_Point(Mat m, List<Point> pts) {
if (pts == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
int type = m.type();
if (m.cols() != 1)
throw new IllegalArgumentException("Input Mat should have one column\n" + m);
pts.clear();
if (type == CvType.CV_32SC2) {
int[] buff = new int[2 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point(buff[i * 2], buff[i * 2 + 1]));
}
} else if (type == CvType.CV_32FC2) {
float[] buff = new float[2 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point(buff[i * 2], buff[i * 2 + 1]));
}
} else if (type == CvType.CV_64FC2) {
double[] buff = new double[2 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point(buff[i * 2], buff[i * 2 + 1]));
}
} else {
throw new IllegalArgumentException(
"Input Mat should be of CV_32SC2, CV_32FC2 or CV_64FC2 type\n" + m);
}
}
public static void Mat_to_vector_Point3i(Mat m, List<Point3> pts) {
Mat_to_vector_Point3(m, pts);
}
public static void Mat_to_vector_Point3f(Mat m, List<Point3> pts) {
Mat_to_vector_Point3(m, pts);
}
public static void Mat_to_vector_Point3d(Mat m, List<Point3> pts) {
Mat_to_vector_Point3(m, pts);
}
public static void Mat_to_vector_Point3(Mat m, List<Point3> pts) {
if (pts == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
int type = m.type();
if (m.cols() != 1)
throw new IllegalArgumentException("Input Mat should have one column\n" + m);
pts.clear();
if (type == CvType.CV_32SC3) {
int[] buff = new int[3 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point3(buff[i * 3], buff[i * 3 + 1], buff[i * 3 + 2]));
}
} else if (type == CvType.CV_32FC3) {
float[] buff = new float[3 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point3(buff[i * 3], buff[i * 3 + 1], buff[i * 3 + 2]));
}
} else if (type == CvType.CV_64FC3) {
double[] buff = new double[3 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
pts.add(new Point3(buff[i * 3], buff[i * 3 + 1], buff[i * 3 + 2]));
}
} else {
throw new IllegalArgumentException(
"Input Mat should be of CV_32SC3, CV_32FC3 or CV_64FC3 type\n" + m);
}
}
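// A List<Mat> is marshalled as an Nx1 CV_32SC2 Mat: each row stores the native
// Mat address (nativeObj) split into its high and low 32-bit halves; the JNI
// converters reassemble the pointer on the C++ side.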
public static Mat vector_Mat_to_Mat(List<Mat> mats) {
Mat res;
int count = (mats != null) ? mats.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32SC2);
int[] buff = new int[count * 2];
for (int i = 0; i < count; i++) {
long addr = mats.get(i).nativeObj;
buff[i * 2] = (int) (addr >> 32);
buff[i * 2 + 1] = (int) (addr & 0xffffffff);
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_Mat(Mat m, List<Mat> mats) {
if (mats == null)
throw new IllegalArgumentException("mats == null");
int count = m.rows();
if (CvType.CV_32SC2 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_32SC2 != m.type() || m.cols()!=1\n" + m);
mats.clear();
int[] buff = new int[count * 2];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
long addr = (((long) buff[i * 2]) << 32) | (((long) buff[i * 2 + 1]) & 0xffffffffL);
mats.add(new Mat(addr));
}
}
public static Mat vector_float_to_Mat(List<Float> fs) {
Mat res;
int count = (fs != null) ? fs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32FC1);
float[] buff = new float[count];
for (int i = 0; i < count; i++) {
float f = fs.get(i);
buff[i] = f;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_float(Mat m, List<Float> fs) {
if (fs == null)
throw new IllegalArgumentException("fs == null");
int count = m.rows();
if (CvType.CV_32FC1 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_32FC1 != m.type() || m.cols()!=1\n" + m);
fs.clear();
float[] buff = new float[count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
fs.add(buff[i]);
}
}
public static Mat vector_uchar_to_Mat(List<Byte> bs) {
Mat res;
int count = (bs != null) ? bs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_8UC1);
byte[] buff = new byte[count];
for (int i = 0; i < count; i++) {
byte b = bs.get(i);
buff[i] = b;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_uchar(Mat m, List<Byte> us) {
if (us == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
if (CvType.CV_8UC1 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_8UC1 != m.type() || m.cols()!=1\n" + m);
us.clear();
byte[] buff = new byte[count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
us.add(buff[i]);
}
}
public static Mat vector_char_to_Mat(List<Byte> bs) {
Mat res;
int count = (bs != null) ? bs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_8SC1);
byte[] buff = new byte[count];
for (int i = 0; i < count; i++) {
byte b = bs.get(i);
buff[i] = b;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static Mat vector_int_to_Mat(List<Integer> is) {
Mat res;
int count = (is != null) ? is.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32SC1);
int[] buff = new int[count];
for (int i = 0; i < count; i++) {
int v = is.get(i);
buff[i] = v;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_int(Mat m, List<Integer> is) {
if (is == null)
throw new IllegalArgumentException("is == null");
int count = m.rows();
if (CvType.CV_32SC1 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_32SC1 != m.type() || m.cols()!=1\n" + m);
is.clear();
int[] buff = new int[count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
is.add(buff[i]);
}
}
public static void Mat_to_vector_char(Mat m, List<Byte> bs) {
if (bs == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
if (CvType.CV_8SC1 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_8SC1 != m.type() || m.cols()!=1\n" + m);
bs.clear();
byte[] buff = new byte[count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
bs.add(buff[i]);
}
}
public static Mat vector_Rect_to_Mat(List<Rect> rs) {
Mat res;
int count = (rs != null) ? rs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32SC4);
int[] buff = new int[4 * count];
for (int i = 0; i < count; i++) {
Rect r = rs.get(i);
buff[4 * i] = r.x;
buff[4 * i + 1] = r.y;
buff[4 * i + 2] = r.width;
buff[4 * i + 3] = r.height;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_Rect(Mat m, List<Rect> rs) {
if (rs == null)
throw new IllegalArgumentException("rs == null");
int count = m.rows();
if (CvType.CV_32SC4 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_32SC4 != m.type() || m.rows()!=1\n" + m);
rs.clear();
int[] buff = new int[4 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
rs.add(new Rect(buff[4 * i], buff[4 * i + 1], buff[4 * i + 2], buff[4 * i + 3]));
}
}
public static Mat vector_Rect2d_to_Mat(List<Rect2d> rs) {
Mat res;
int count = (rs != null) ? rs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_64FC4);
double[] buff = new double[4 * count];
for (int i = 0; i < count; i++) {
Rect2d r = rs.get(i);
buff[4 * i] = r.x;
buff[4 * i + 1] = r.y;
buff[4 * i + 2] = r.width;
buff[4 * i + 3] = r.height;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_Rect2d(Mat m, List<Rect2d> rs) {
if (rs == null)
throw new IllegalArgumentException("rs == null");
int count = m.rows();
if (CvType.CV_64FC4 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_64FC4 != m.type() || m.rows()!=1\n" + m);
rs.clear();
double[] buff = new double[4 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
rs.add(new Rect2d(buff[4 * i], buff[4 * i + 1], buff[4 * i + 2], buff[4 * i + 3]));
}
}
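// KeyPoints are packed as an Nx1 CV_64FC(7) Mat: (pt.x, pt.y, size, angle, response, octave, class_id) per row.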
public static Mat vector_KeyPoint_to_Mat(List<KeyPoint> kps) {
Mat res;
int count = (kps != null) ? kps.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_64FC(7));
double[] buff = new double[count * 7];
for (int i = 0; i < count; i++) {
KeyPoint kp = kps.get(i);
buff[7 * i] = kp.pt.x;
buff[7 * i + 1] = kp.pt.y;
buff[7 * i + 2] = kp.size;
buff[7 * i + 3] = kp.angle;
buff[7 * i + 4] = kp.response;
buff[7 * i + 5] = kp.octave;
buff[7 * i + 6] = kp.class_id;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_KeyPoint(Mat m, List<KeyPoint> kps) {
if (kps == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
if (CvType.CV_64FC(7) != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_64FC(7) != m.type() || m.cols()!=1\n" + m);
kps.clear();
double[] buff = new double[7 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
kps.add(new KeyPoint((float) buff[7 * i], (float) buff[7 * i + 1], (float) buff[7 * i + 2], (float) buff[7 * i + 3],
(float) buff[7 * i + 4], (int) buff[7 * i + 5], (int) buff[7 * i + 6]));
}
}
// vector_vector_Point
public static Mat vector_vector_Point_to_Mat(List<MatOfPoint> pts, List<Mat> mats) {
Mat res;
int lCount = (pts != null) ? pts.size() : 0;
if (lCount > 0) {
mats.addAll(pts);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_vector_Point(Mat m, List<MatOfPoint> pts) {
if (pts == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
for (Mat mi : mats) {
MatOfPoint pt = new MatOfPoint(mi);
pts.add(pt);
mi.release();
}
mats.clear();
}
// vector_vector_Point2f
public static void Mat_to_vector_vector_Point2f(Mat m, List<MatOfPoint2f> pts) {
if (pts == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
for (Mat mi : mats) {
MatOfPoint2f pt = new MatOfPoint2f(mi);
pts.add(pt);
mi.release();
}
mats.clear();
}
// vector_vector_Point2f
public static Mat vector_vector_Point2f_to_Mat(List<MatOfPoint2f> pts, List<Mat> mats) {
Mat res;
int lCount = (pts != null) ? pts.size() : 0;
if (lCount > 0) {
mats.addAll(pts);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
// vector_vector_Point3f
public static void Mat_to_vector_vector_Point3f(Mat m, List<MatOfPoint3f> pts) {
if (pts == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
for (Mat mi : mats) {
MatOfPoint3f pt = new MatOfPoint3f(mi);
pts.add(pt);
mi.release();
}
mats.clear();
}
// vector_vector_Point3f
public static Mat vector_vector_Point3f_to_Mat(List<MatOfPoint3f> pts, List<Mat> mats) {
Mat res;
int lCount = (pts != null) ? pts.size() : 0;
if (lCount > 0) {
mats.addAll(pts);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
// vector_vector_KeyPoint
public static Mat vector_vector_KeyPoint_to_Mat(List<MatOfKeyPoint> kps, List<Mat> mats) {
Mat res;
int lCount = (kps != null) ? kps.size() : 0;
if (lCount > 0) {
mats.addAll(kps);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_vector_KeyPoint(Mat m, List<MatOfKeyPoint> kps) {
if (kps == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
for (Mat mi : mats) {
MatOfKeyPoint vkp = new MatOfKeyPoint(mi);
kps.add(vkp);
mi.release();
}
mats.clear();
}
public static Mat vector_double_to_Mat(List<Double> ds) {
Mat res;
int count = (ds != null) ? ds.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_64FC1);
double[] buff = new double[count];
for (int i = 0; i < count; i++) {
double v = ds.get(i);
buff[i] = v;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_double(Mat m, List<Double> ds) {
if (ds == null)
throw new IllegalArgumentException("ds == null");
int count = m.rows();
if (CvType.CV_64FC1 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_64FC1 != m.type() || m.cols()!=1\n" + m);
ds.clear();
double[] buff = new double[count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
ds.add(buff[i]);
}
}
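// DMatch entries are packed as an Nx1 CV_64FC4 Mat: (queryIdx, trainIdx, imgIdx, distance) per row.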
public static Mat vector_DMatch_to_Mat(List<DMatch> matches) {
Mat res;
int count = (matches != null) ? matches.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_64FC4);
double[] buff = new double[count * 4];
for (int i = 0; i < count; i++) {
DMatch m = matches.get(i);
buff[4 * i] = m.queryIdx;
buff[4 * i + 1] = m.trainIdx;
buff[4 * i + 2] = m.imgIdx;
buff[4 * i + 3] = m.distance;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_DMatch(Mat m, List<DMatch> matches) {
if (matches == null)
throw new IllegalArgumentException("Output List can't be null");
int count = m.rows();
if (CvType.CV_64FC4 != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_64FC4 != m.type() || m.cols()!=1\n" + m);
matches.clear();
double[] buff = new double[4 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
matches.add(new DMatch((int) buff[4 * i], (int) buff[4 * i + 1], (int) buff[4 * i + 2], (float) buff[4 * i + 3]));
}
}
// vector_vector_DMatch
public static Mat vector_vector_DMatch_to_Mat(List<MatOfDMatch> lvdm, List<Mat> mats) {
Mat res;
int lCount = (lvdm != null) ? lvdm.size() : 0;
if (lCount > 0) {
mats.addAll(lvdm);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_vector_DMatch(Mat m, List<MatOfDMatch> lvdm) {
if (lvdm == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
lvdm.clear();
for (Mat mi : mats) {
MatOfDMatch vdm = new MatOfDMatch(mi);
lvdm.add(vdm);
mi.release();
}
mats.clear();
}
// vector_vector_char
public static Mat vector_vector_char_to_Mat(List<MatOfByte> lvb, List<Mat> mats) {
Mat res;
int lCount = (lvb != null) ? lvb.size() : 0;
if (lCount > 0) {
mats.addAll(lvb);
res = vector_Mat_to_Mat(mats);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_vector_char(Mat m, List<List<Byte>> llb) {
if (llb == null)
throw new IllegalArgumentException("Output List can't be null");
if (m == null)
throw new IllegalArgumentException("Input Mat can't be null");
List<Mat> mats = new ArrayList<Mat>(m.rows());
Mat_to_vector_Mat(m, mats);
for (Mat mi : mats) {
List<Byte> lb = new ArrayList<Byte>();
Mat_to_vector_char(mi, lb);
llb.add(lb);
mi.release();
}
mats.clear();
}
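// RotatedRects are packed as an Nx1 CV_32FC(5) Mat: (center.x, center.y, size.width, size.height, angle) per row.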
public static Mat vector_RotatedRect_to_Mat(List<RotatedRect> rs) {
Mat res;
int count = (rs != null) ? rs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32FC(5));
float[] buff = new float[5 * count];
for (int i = 0; i < count; i++) {
RotatedRect r = rs.get(i);
buff[5 * i] = (float)r.center.x;
buff[5 * i + 1] = (float)r.center.y;
buff[5 * i + 2] = (float)r.size.width;
buff[5 * i + 3] = (float)r.size.height;
buff[5 * i + 4] = (float)r.angle;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_RotatedRect(Mat m, List<RotatedRect> rs) {
if (rs == null)
throw new IllegalArgumentException("rs == null");
int count = m.rows();
if (CvType.CV_32FC(5) != m.type() || m.cols() != 1)
throw new IllegalArgumentException(
"CvType.CV_32FC5 != m.type() || m.rows()!=1\n" + m);
rs.clear();
float[] buff = new float[5 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
rs.add(new RotatedRect(new Point(buff[5 * i], buff[5 * i + 1]), new Size(buff[5 * i + 2], buff[5 * i + 3]), buff[5 * i + 4]));
}
}
}

View File

@ -0,0 +1,52 @@
//
// This file is auto-generated. Please don't modify it!
//
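// Template for the per-module JNI source: the bindings generator substitutes $M/$m
// with the module name (upper/lower case), $includes with the module headers and
// $code with the generated wrapper functions.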
#undef LOG_TAG
#include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_$M
#include <string>
#include "opencv2/$m.hpp"
$includes
#define LOG_TAG "org.opencv.$m"
#include "common.h"
using namespace cv;
/// throw java exception
#undef throwJavaException
#define throwJavaException throwJavaException_$m
static void throwJavaException(JNIEnv *env, const std::exception *e, const char *method) {
std::string what = "unknown exception";
jclass je = 0;
if(e) {
std::string exception_type = "std::exception";
if(dynamic_cast<const cv::Exception*>(e)) {
exception_type = "cv::Exception";
je = env->FindClass("org/opencv/core/CvException");
}
what = exception_type + ": " + e->what();
}
if(!je) je = env->FindClass("java/lang/Exception");
env->ThrowNew(je, what.c_str());
LOGE("%s caught %s", method, what.c_str());
(void)method; // avoid "unused" warning
}
extern "C" {
$code
} // extern "C"
#endif // HAVE_OPENCV_$M

View File

@ -0,0 +1,17 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
$docs$annotation
public class $jname {
protected final long nativeObj;
protected $jname(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
// internal usage only
public static $jname __fromPtr__(long addr) { return new $jname(addr); }

View File

@ -0,0 +1,14 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
$docs$annotation
public class $jname extends $base {
protected $jname(long addr) { super(addr); }
// internal usage only
public static $jname __fromPtr__(long addr) { return new $jname(addr); }

View File

@ -0,0 +1,9 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.$module;
$imports
$docs$annotation
public class $jname {