feat: switch backend to PaddleOCR-NCNN, switch the project to CMake
1. The project backend has been fully migrated to the PaddleOCR-NCNN algorithm and has passed basic compatibility testing.
2. The project is now organized with CMake; to better accommodate third-party libraries, a QMake project is no longer provided.
3. The rights/notice files and the code tree have been reorganized to minimize the risk of license infringement.
Log: switch backend to PaddleOCR-NCNN, switch the project to CMake
Change-Id: I4d5d2c5d37505a4a24b389b1a4c5d12f17bfa38c
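For illustration only: the commit message says the build was reorganized with CMake to make third-party libraries easier to consume, but does not show the actual build files. A minimal sketch of what such a top-level CMakeLists.txt might look like follows; the directory layout, target names, and source file are assumptions, not the project's real configuration.

# Hypothetical sketch only; names and paths are assumptions.
cmake_minimum_required(VERSION 3.16)
project(ocr_demo LANGUAGES CXX)

# Vendored third-party code is assumed to live under 3rdparty/, as in this commit.
add_subdirectory(3rdparty/ncnn)   # provides the `ncnn` library target used by the PaddleOCR-NCNN backend
find_package(OpenCV REQUIRED)     # or point OpenCV_DIR at a vendored OpenCV build

add_executable(ocr_demo src/main.cpp)
target_link_libraries(ocr_demo PRIVATE ncnn ${OpenCV_LIBS})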
3rdparty/opencv-4.5.4/samples/java/ant/build.xml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
<project name="SimpleSample" basedir="." default="rebuild-run">

    <property name="src.dir" value="src"/>

    <property name="lib.dir" value="${ocvJarDir}"/>
    <path id="classpath">
        <fileset dir="${lib.dir}" includes="**/*.jar"/>
    </path>

    <property name="build.dir" value="build"/>
    <property name="classes.dir" value="${build.dir}/classes"/>
    <property name="jar.dir" value="${build.dir}/jar"/>

    <property name="main-class" value="${ant.project.name}"/>


    <target name="clean">
        <delete dir="${build.dir}"/>
    </target>

    <target name="compile">
        <mkdir dir="${classes.dir}"/>
        <javac includeantruntime="false" srcdir="${src.dir}" destdir="${classes.dir}" classpathref="classpath"/>
    </target>

    <target name="jar" depends="compile">
        <mkdir dir="${jar.dir}"/>
        <jar destfile="${jar.dir}/${ant.project.name}.jar" basedir="${classes.dir}">
            <manifest>
                <attribute name="Main-Class" value="${main-class}"/>
            </manifest>
        </jar>
    </target>

    <target name="run" depends="jar">
        <java fork="true" classname="${main-class}">
            <sysproperty key="java.library.path" path="${ocvLibDir}"/>
            <classpath>
                <path refid="classpath"/>
                <path location="${jar.dir}/${ant.project.name}.jar"/>
            </classpath>
        </java>
    </target>

    <target name="rebuild" depends="clean,jar"/>

    <target name="rebuild-run" depends="clean,run"/>

</project>
3rdparty/opencv-4.5.4/samples/java/ant/src/SimpleSample.java (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.CvType;
import org.opencv.core.Scalar;

class SimpleSample {

    static{ System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {
        System.out.println("Welcome to OpenCV " + Core.VERSION);
        Mat m = new Mat(5, 10, CvType.CV_8UC1, new Scalar(0));
        System.out.println("OpenCV Mat: " + m);
        Mat mr1 = m.row(1);
        mr1.setTo(new Scalar(1));
        Mat mc5 = m.col(5);
        mc5.setTo(new Scalar(5));
        System.out.println("OpenCV Mat data:\n" + m.dump());
    }

}
3rdparty/opencv-4.5.4/samples/java/clojure/simple-sample/project.clj (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
(defproject simple-sample "0.1.0-SNAPSHOT"
  :pom-addition [:developers [:developer {:id "magomimmo"}
                              [:name "Mimmo Cosenza"]
                              [:url "https://github.com/magomimmoo"]]]

  :description "A simple project to start REPLing with OpenCV"
  :url "http://example.com/FIXME"
  :license {:name "Apache 2.0 License"
            :url "https://www.apache.org/licenses/LICENSE-2.0"}
  :dependencies [[org.clojure/clojure "1.5.1"]
                 [opencv/opencv "2.4.7"]
                 [opencv/opencv-native "2.4.7"]]
  :main simple-sample.core
  :injections [(clojure.lang.RT/loadLibrary org.opencv.core.Core/NATIVE_LIBRARY_NAME)])
3rdparty/opencv-4.5.4/samples/java/clojure/simple-sample/src/simple_sample/core.clj (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
;;; to run this code from the terminal: "$ lein run". It will save a
;;; blurred image version of resources/images/lena.png as
;;; resources/images/blurred.png

(ns simple-sample.core
  (:import [org.opencv.core Point Rect Mat CvType Size Scalar]
           org.opencv.imgcodecs.Imgcodecs
           org.opencv.imgproc.Imgproc))

(defn -main [& args]
  (let [lena (Imgcodecs/imread "resources/images/lena.png")
        blurred (Mat. 512 512 CvType/CV_8UC3)]
    (print "Blurring...")
    (Imgproc/GaussianBlur lena blurred (Size. 5 5) 3 3)
    (Imgcodecs/imwrite "resources/images/blurred.png" blurred)
    (println "done!")))
3rdparty/opencv-4.5.4/samples/java/clojure/simple-sample/test/simple_sample/core_test.clj (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
(ns simple-sample.core-test
  (:require [clojure.test :refer :all]
            [simple-sample.core :refer :all]))

(deftest a-test
  (testing "FIXME, I fail."
    (is (= 0 1))))
3rdparty/opencv-4.5.4/samples/java/eclipse/HelloCV/.classpath (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="src" path="src"/>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
    <classpathentry kind="con" path="org.eclipse.jdt.USER_LIBRARY/opencv-2.4.4"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
3rdparty/opencv-4.5.4/samples/java/eclipse/HelloCV/.project (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>HelloCV</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
3rdparty/opencv-4.5.4/samples/java/eclipse/HelloCV/.settings/org.eclipse.jdt.core.prefs (vendored, new file, 11 lines)
@@ -0,0 +1,11 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.source=1.7
3rdparty/opencv-4.5.4/samples/java/eclipse/HelloCV/src/Main.java (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class Main {

    public static void main(String[] args) {
        System.out.println("Welcome to OpenCV " + Core.VERSION);
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat m = Mat.eye(3, 3, CvType.CV_8UC1);
        System.out.println("m = " + m.dump());
    }

}
3rdparty/opencv-4.5.4/samples/java/opencv_version.java (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
import org.opencv.core.Core;

class opencv_version {

    static { System.loadLibrary(Core.NATIVE_LIBRARY_NAME); }

    public static void main(String[] args) {

        if ((1==args.length) && (0==args[0].compareTo("--build"))) {

            System.out.println(Core.getBuildInformation());
        } else
        if ((1==args.length) && (0==args[0].compareTo("--help"))) {

            System.out.println("\t--build\n\t\tprint complete build info");
            System.out.println("\t--help\n\t\tprint this help");
        } else {

            System.out.println("Welcome to OpenCV " + Core.VERSION);
        }
    }

}
3rdparty/opencv-4.5.4/samples/java/sbt/README (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
A demo of the Java wrapper for OpenCV with two examples:
1) feature detection and matching and
2) face detection.
The examples are coded in Scala and Java.
Anyone familiar with Java should be able to read the Scala examples.
Please feel free to contribute code examples in Scala or Java, or any JVM language.

To run the examples:
1) Install OpenCV and copy the OpenCV jar to lib/.
   This jar must match the native libraries installed in your system.
   If this isn't the case, you may get a java.lang.UnsatisfiedLinkError at runtime.
2) Go to the root directory and type "sbt/sbt run".
   This should generate images in your current directory.
3rdparty/opencv-4.5.4/samples/java/sbt/lib/copy_opencv_jar_here (vendored, new empty file, 0 lines)
3rdparty/opencv-4.5.4/samples/java/sbt/project/build.scala (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
import sbt._
import Keys._

object OpenCVJavaDemoBuild extends Build {
  def scalaSettings = Seq(
    scalaVersion := "2.10.0",
    scalacOptions ++= Seq(
      "-optimize",
      "-unchecked",
      "-deprecation"
    )
  )

  def buildSettings =
    Project.defaultSettings ++
      scalaSettings

  lazy val root = {
    val settings = buildSettings ++ Seq(name := "OpenCVJavaDemo")
    Project(id = "OpenCVJavaDemo", base = file("."), settings = settings)
  }
}
3rdparty/opencv-4.5.4/samples/java/sbt/project/plugins.sbt (vendored, new file, 1 line)
@@ -0,0 +1 @@
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
3rdparty/opencv-4.5.4/samples/java/sbt/sbt/sbt (vendored, new file, 1 line)
@@ -0,0 +1 @@
java -Xms512M -Xmx1536M -Xss1M -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=384M -jar `dirname $0`/sbt-launch.jar "$@"
3rdparty/opencv-4.5.4/samples/java/sbt/sbt/sbt-launch.jar (vendored, new binary file, not shown)
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/java/DetectFaceDemo.java (vendored, new file, 45 lines)
@@ -0,0 +1,45 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

/*
 * Detects faces in an image, draws boxes around them, and writes the results
 * to "faceDetection.png".
 */
public class DetectFaceDemo {
    public void run() {
        System.out.println("\nRunning DetectFaceDemo");

        // Create a face detector from the cascade file in the resources
        // directory.
        CascadeClassifier faceDetector = new CascadeClassifier(getClass()
                .getResource("/lbpcascade_frontalface.xml").getPath());
        Mat image = Imgcodecs.imread(getClass().getResource(
                "/AverageMaleFace.jpg").getPath());

        // Detect faces in the image.
        // MatOfRect is a special container class for Rect.
        MatOfRect faceDetections = new MatOfRect();
        faceDetector.detectMultiScale(image, faceDetections);

        System.out.println(String.format("Detected %s faces",
                faceDetections.toArray().length));

        // Draw a bounding box around each face.
        for (Rect rect : faceDetections.toArray()) {
            Imgproc.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x
                    + rect.width, rect.y + rect.height), new Scalar(0, 255, 0));
        }

        // Save the visualized detection.
        String filename = "faceDetection.png";
        System.out.println(String.format("Writing %s", filename));
        Imgcodecs.imwrite(filename, image);
    }
}
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/resources/AverageMaleFace.jpg (vendored, new binary file, 15 KiB, not shown)
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/resources/img1.png (vendored, new binary file, 272 KiB, not shown)
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/resources/img2.png (vendored, new binary file, 278 KiB, not shown)
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/scala/Main.scala (vendored, new file, 23 lines)
@@ -0,0 +1,23 @@
/*
 * The main runner for the Java demos.
 * Demos whose name begins with "Scala" are written in the Scala language,
 * demonstrating the generic nature of the interface.
 * The other demos are in Java.
 * Currently, all demos are run, sequentially.
 *
 * You're invited to submit your own examples, in any JVM language of
 * your choosing so long as you can get them to build.
 */

import org.opencv.core.Core

object Main extends App {
  // We must load the native library before using any OpenCV functions.
  // You must load this library _exactly once_ per Java invocation.
  // If you load it more than once, you will get a java.lang.UnsatisfiedLinkError.
  System.loadLibrary(Core.NATIVE_LIBRARY_NAME)

  ScalaCorrespondenceMatchingDemo.run()
  ScalaDetectFaceDemo.run()
  new DetectFaceDemo().run()
}
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/scala/ScalaCorrespondenceMatchingDemo.scala (vendored, new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
import org.opencv.imgcodecs.Imgcodecs
|
||||
import org.opencv.features2d.DescriptorExtractor
|
||||
import org.opencv.features2d.Features2d
|
||||
import org.opencv.core.MatOfKeyPoint
|
||||
import org.opencv.core.Mat
|
||||
import org.opencv.features2d.FeatureDetector
|
||||
import org.opencv.features2d.DescriptorMatcher
|
||||
import org.opencv.core.MatOfDMatch
|
||||
import reflect._
|
||||
|
||||
/*
|
||||
* Finds corresponding points between a pair of images using local descriptors.
|
||||
* The correspondences are visualized in the image "scalaCorrespondences.png",
|
||||
* which is written to disk.
|
||||
*/
|
||||
object ScalaCorrespondenceMatchingDemo {
|
||||
def run() {
|
||||
println(s"\nRunning ${classTag[this.type].toString.replace("$", "")}")
|
||||
|
||||
// Detects keypoints and extracts descriptors in a given image of type Mat.
|
||||
def detectAndExtract(mat: Mat) = {
|
||||
// A special container class for KeyPoint.
|
||||
val keyPoints = new MatOfKeyPoint
|
||||
// We're using the ORB detector.
|
||||
val detector = FeatureDetector.create(FeatureDetector.ORB)
|
||||
detector.detect(mat, keyPoints)
|
||||
|
||||
println(s"There were ${keyPoints.toArray.size} KeyPoints detected")
|
||||
|
||||
// Let's just use the best KeyPoints.
|
||||
val sorted = keyPoints.toArray.sortBy(_.response).reverse.take(50)
|
||||
// There isn't a constructor that takes Array[KeyPoint], so we unpack
|
||||
// the array and use the constructor that can take any number of
|
||||
// arguments.
|
||||
val bestKeyPoints: MatOfKeyPoint = new MatOfKeyPoint(sorted: _*)
|
||||
|
||||
// We're using the ORB descriptor.
|
||||
val extractor = DescriptorExtractor.create(DescriptorExtractor.ORB)
|
||||
val descriptors = new Mat
|
||||
extractor.compute(mat, bestKeyPoints, descriptors)
|
||||
|
||||
println(s"${descriptors.rows} descriptors were extracted, each with dimension ${descriptors.cols}")
|
||||
|
||||
(bestKeyPoints, descriptors)
|
||||
}
|
||||
|
||||
// Load the images from the |resources| directory.
|
||||
val leftImage = Imgcodecs.imread(getClass.getResource("/img1.png").getPath)
|
||||
val rightImage = Imgcodecs.imread(getClass.getResource("/img2.png").getPath)
|
||||
|
||||
// Detect KeyPoints and extract descriptors.
|
||||
val (leftKeyPoints, leftDescriptors) = detectAndExtract(leftImage)
|
||||
val (rightKeyPoints, rightDescriptors) = detectAndExtract(rightImage)
|
||||
|
||||
// Match the descriptors.
|
||||
val matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE)
|
||||
// A special container class for DMatch.
|
||||
val dmatches = new MatOfDMatch
|
||||
// The backticks are because "match" is a keyword in Scala.
|
||||
matcher.`match`(leftDescriptors, rightDescriptors, dmatches)
|
||||
|
||||
// Visualize the matches and save the visualization.
|
||||
val correspondenceImage = new Mat
|
||||
Features2d.drawMatches(leftImage, leftKeyPoints, rightImage, rightKeyPoints, dmatches, correspondenceImage)
|
||||
val filename = "scalaCorrespondences.png"
|
||||
println(s"Writing ${filename}")
|
||||
assert(Imgcodecs.imwrite(filename, correspondenceImage))
|
||||
}
|
||||
}
|
3rdparty/opencv-4.5.4/samples/java/sbt/src/main/scala/ScalaDetectFaceDemo.scala (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
import org.opencv.core.Core
|
||||
import org.opencv.core.MatOfRect
|
||||
import org.opencv.core.Point
|
||||
import org.opencv.core.Scalar
|
||||
import org.opencv.imgcodecs.Imgcodecs
|
||||
import org.opencv.imgproc.Imgproc
|
||||
import org.opencv.objdetect.CascadeClassifier
|
||||
import reflect._
|
||||
|
||||
/*
|
||||
* Detects faces in an image, draws boxes around them, and writes the results
|
||||
* to "scalaFaceDetection.png".
|
||||
*/
|
||||
object ScalaDetectFaceDemo {
|
||||
def run() {
|
||||
println(s"\nRunning ${classTag[this.type].toString.replace("$", "")}")
|
||||
|
||||
// Create a face detector from the cascade file in the resources directory.
|
||||
val faceDetector = new CascadeClassifier(getClass.getResource("/lbpcascade_frontalface.xml").getPath)
|
||||
val image = Imgcodecs.imread(getClass.getResource("/AverageMaleFace.jpg").getPath)
|
||||
|
||||
// Detect faces in the image.
|
||||
// MatOfRect is a special container class for Rect.
|
||||
val faceDetections = new MatOfRect
|
||||
faceDetector.detectMultiScale(image, faceDetections)
|
||||
|
||||
println(s"Detected ${faceDetections.toArray.size} faces")
|
||||
|
||||
// Draw a bounding box around each face.
|
||||
for (rect <- faceDetections.toArray) {
|
||||
Imgproc.rectangle(
|
||||
image,
|
||||
new Point(rect.x, rect.y),
|
||||
new Point(rect.x + rect.width,
|
||||
rect.y + rect.height),
|
||||
new Scalar(0, 255, 0))
|
||||
}
|
||||
|
||||
// Save the visualized detection.
|
||||
val filename = "scalaFaceDetection.png"
|
||||
println(s"Writing ${filename}")
|
||||
assert(Imgcodecs.imwrite(filename, image))
|
||||
}
|
||||
}
|
3rdparty/opencv-4.5.4/samples/java/tutorial_code/CMakeLists.txt (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
# ----------------------------------------------------------------------------
# CMake file for Java tutorials compilation.
#
# ----------------------------------------------------------------------------
if(NOT ANT_EXECUTABLE OR NOT TARGET opencv_java)
  return()
endif()

project(compile_java_tutorials)

set(curdir "${CMAKE_CURRENT_SOURCE_DIR}")
set(opencv_tutorial_java_bin_dir "${CMAKE_CURRENT_BINARY_DIR}/.compiled")
set(TUTORIALS_DIRS "")

file(GLOB children RELATIVE ${curdir} ${curdir}/*/*)
foreach(child ${children})
  if(IS_DIRECTORY ${curdir}/${child})
    file(GLOB contains_java_files "${child}/*.java")
    if(contains_java_files)
      list(APPEND TUTORIALS_DIRS ${child})
    endif()
  endif()
endforeach()

add_custom_target("${PROJECT_NAME}"
  DEPENDS opencv_java
)

foreach(TUTORIAL_DIR ${TUTORIALS_DIRS})
  get_filename_component(TUTORIAL_NAME ${TUTORIAL_DIR} NAME_WE)
  add_custom_command(TARGET "${PROJECT_NAME}"
    COMMAND ${ANT_EXECUTABLE} -q
      -DocvJarDir="${OpenCV_BINARY_DIR}/bin"
      -DsrcDir="${TUTORIAL_DIR}"
      -DdstDir="${opencv_tutorial_java_bin_dir}/${TUTORIAL_NAME}"
    WORKING_DIRECTORY "${curdir}"
    COMMENT "Compile the tutorial: ${TUTORIAL_NAME}"
  )
endforeach()
@@ -0,0 +1,173 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfFloat;
|
||||
import org.opencv.core.MatOfInt;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CalcBackProject1 {
|
||||
private Mat hue;
|
||||
private Mat histImg = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgLabel;
|
||||
private JLabel backprojLabel;
|
||||
private JLabel histImgLabel;
|
||||
private static final int MAX_SLIDER = 180;
|
||||
private int bins = 25;
|
||||
|
||||
public CalcBackProject1(String[] args) {
|
||||
//! [Read the image]
|
||||
if (args.length != 1) {
|
||||
System.err.println("You must supply one argument that corresponds to the path to the image.");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Mat src = Imgcodecs.imread(args[0]);
|
||||
if (src.empty()) {
|
||||
System.err.println("Empty image: " + args[0]);
|
||||
System.exit(0);
|
||||
}
|
||||
//! [Read the image]
|
||||
|
||||
//! [Transform it to HSV]
|
||||
Mat hsv = new Mat();
|
||||
Imgproc.cvtColor(src, hsv, Imgproc.COLOR_BGR2HSV);
|
||||
//! [Transform it to HSV]
|
||||
|
||||
//! [Use only the Hue value]
|
||||
hue = new Mat(hsv.size(), hsv.depth());
|
||||
Core.mixChannels(Arrays.asList(hsv), Arrays.asList(hue), new MatOfInt(0, 0));
|
||||
//! [Use only the Hue value]
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Back Projection 1 demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
//! [Show the image]
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
//! [Show the image]
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
//! [Create Trackbar to enter the number of bins]
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("* Hue bins: "));
|
||||
JSlider slider = new JSlider(0, MAX_SLIDER, bins);
|
||||
slider.setMajorTickSpacing(25);
|
||||
slider.setMinorTickSpacing(5);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
bins = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
//! [Create Trackbar to enter the number of bins]
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgLabel);
|
||||
|
||||
backprojLabel = new JLabel();
|
||||
imgPanel.add(backprojLabel);
|
||||
|
||||
histImgLabel = new JLabel();
|
||||
imgPanel.add(histImgLabel);
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
//! [initialize]
|
||||
int histSize = Math.max(bins, 2);
|
||||
float[] hueRange = {0, 180};
|
||||
//! [initialize]
|
||||
|
||||
//! [Get the Histogram and normalize it]
|
||||
Mat hist = new Mat();
|
||||
List<Mat> hueList = Arrays.asList(hue);
|
||||
Imgproc.calcHist(hueList, new MatOfInt(0), new Mat(), hist, new MatOfInt(histSize), new MatOfFloat(hueRange), false);
|
||||
Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX);
|
||||
//! [Get the Histogram and normalize it]
|
||||
|
||||
//! [Get Backprojection]
|
||||
Mat backproj = new Mat();
|
||||
Imgproc.calcBackProject(hueList, new MatOfInt(0), hist, backproj, new MatOfFloat(hueRange), 1);
|
||||
//! [Get Backprojection]
|
||||
|
||||
//! [Draw the backproj]
|
||||
Image backprojImg = HighGui.toBufferedImage(backproj);
|
||||
backprojLabel.setIcon(new ImageIcon(backprojImg));
|
||||
//! [Draw the backproj]
|
||||
|
||||
//! [Draw the histogram]
|
||||
int w = 400, h = 400;
|
||||
int binW = (int) Math.round((double) w / histSize);
|
||||
histImg = Mat.zeros(h, w, CvType.CV_8UC3);
|
||||
|
||||
float[] histData = new float[(int) (hist.total() * hist.channels())];
|
||||
hist.get(0, 0, histData);
|
||||
for (int i = 0; i < bins; i++) {
|
||||
Imgproc.rectangle(histImg, new Point(i * binW, h),
|
||||
new Point((i + 1) * binW, h - Math.round(histData[i] * h / 255.0)), new Scalar(0, 0, 255), Imgproc.FILLED);
|
||||
}
|
||||
Image histImage = HighGui.toBufferedImage(histImg);
|
||||
histImgLabel.setIcon(new ImageIcon(histImage));
|
||||
//! [Draw the histogram]
|
||||
|
||||
frame.repaint();
|
||||
frame.pack();
|
||||
}
|
||||
}
|
||||
|
||||
public class CalcBackProjectDemo1 {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new CalcBackProject1(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,189 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.awt.event.MouseAdapter;
|
||||
import java.awt.event.MouseEvent;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfFloat;
|
||||
import org.opencv.core.MatOfInt;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Range;
|
||||
import org.opencv.core.Rect;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CalcBackProject2 {
|
||||
private Mat src;
|
||||
private Mat hsv = new Mat();
|
||||
private Mat mask = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgLabel;
|
||||
private JLabel backprojLabel;
|
||||
private JLabel maskImgLabel;
|
||||
private static final int MAX_SLIDER = 255;
|
||||
private int low = 20;
|
||||
private int up = 20;
|
||||
|
||||
public CalcBackProject2(String[] args) {
|
||||
/// Read the image
|
||||
if (args.length != 1) {
|
||||
System.err.println("You must supply one argument that corresponds to the path to the image.");
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
src = Imgcodecs.imread(args[0]);
|
||||
if (src.empty()) {
|
||||
System.err.println("Empty image: " + args[0]);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Transform it to HSV
|
||||
Imgproc.cvtColor(src, hsv, Imgproc.COLOR_BGR2HSV);
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Back Projection 2 demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
/// Set Trackbars for floodfill thresholds
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Low thresh"));
|
||||
JSlider slider = new JSlider(0, MAX_SLIDER, low);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
low = source.getValue();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
sliderPanel.add(new JLabel("High thresh"));
|
||||
slider = new JSlider(0, MAX_SLIDER, up);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
up = source.getValue();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgLabel = new JLabel(new ImageIcon(img));
|
||||
/// Set a Mouse Callback
|
||||
imgLabel.addMouseListener(new MouseAdapter() {
|
||||
@Override
|
||||
public void mousePressed(MouseEvent e) {
|
||||
update(e.getX(), e.getY());
|
||||
}
|
||||
});
|
||||
imgPanel.add(imgLabel);
|
||||
|
||||
maskImgLabel = new JLabel();
|
||||
imgPanel.add(maskImgLabel);
|
||||
|
||||
backprojLabel = new JLabel();
|
||||
imgPanel.add(backprojLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update(int x, int y) {
|
||||
// Fill and get the mask
|
||||
Point seed = new Point(x, y);
|
||||
|
||||
int newMaskVal = 255;
|
||||
Scalar newVal = new Scalar(120, 120, 120);
|
||||
|
||||
int connectivity = 8;
|
||||
int flags = connectivity + (newMaskVal << 8) + Imgproc.FLOODFILL_FIXED_RANGE + Imgproc.FLOODFILL_MASK_ONLY;
|
||||
|
||||
Mat mask2 = Mat.zeros(src.rows() + 2, src.cols() + 2, CvType.CV_8U);
|
||||
Imgproc.floodFill(src, mask2, seed, newVal, new Rect(), new Scalar(low, low, low), new Scalar(up, up, up), flags);
|
||||
mask = mask2.submat(new Range(1, mask2.rows() - 1), new Range(1, mask2.cols() - 1));
|
||||
|
||||
Image maskImg = HighGui.toBufferedImage(mask);
|
||||
maskImgLabel.setIcon(new ImageIcon(maskImg));
|
||||
|
||||
int hBins = 30, sBins = 32;
|
||||
int[] histSize = { hBins, sBins };
|
||||
float[] ranges = { 0, 180, 0, 256 };
|
||||
int[] channels = { 0, 1 };
|
||||
|
||||
/// Get the Histogram and normalize it
|
||||
Mat hist = new Mat();
|
||||
List<Mat> hsvList = Arrays.asList(hsv);
|
||||
Imgproc.calcHist(hsvList, new MatOfInt(channels), mask, hist, new MatOfInt(histSize), new MatOfFloat(ranges), false );
|
||||
|
||||
Core.normalize(hist, hist, 0, 255, Core.NORM_MINMAX);
|
||||
|
||||
/// Get Backprojection
|
||||
Mat backproj = new Mat();
|
||||
Imgproc.calcBackProject(hsvList, new MatOfInt(channels), hist, backproj, new MatOfFloat(ranges), 1);
|
||||
|
||||
Image backprojImg = HighGui.toBufferedImage(backproj);
|
||||
backprojLabel.setIcon(new ImageIcon(backprojImg));
|
||||
|
||||
frame.repaint();
|
||||
frame.pack();
|
||||
}
|
||||
}
|
||||
|
||||
public class CalcBackProjectDemo2 {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new CalcBackProject2(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@@ -0,0 +1,99 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfFloat;
|
||||
import org.opencv.core.MatOfInt;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CalcHist {
|
||||
public void run(String[] args) {
|
||||
//! [Load image]
|
||||
String filename = args.length > 0 ? args[0] : "../data/lena.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
//! [Load image]
|
||||
|
||||
//! [Separate the image in 3 places ( B, G and R )]
|
||||
List<Mat> bgrPlanes = new ArrayList<>();
|
||||
Core.split(src, bgrPlanes);
|
||||
//! [Separate the image in 3 places ( B, G and R )]
|
||||
|
||||
//! [Establish the number of bins]
|
||||
int histSize = 256;
|
||||
//! [Establish the number of bins]
|
||||
|
||||
//! [Set the ranges ( for B,G,R) )]
|
||||
float[] range = {0, 256}; //the upper boundary is exclusive
|
||||
MatOfFloat histRange = new MatOfFloat(range);
|
||||
//! [Set the ranges ( for B,G,R) )]
|
||||
|
||||
//! [Set histogram param]
|
||||
boolean accumulate = false;
|
||||
//! [Set histogram param]
|
||||
|
||||
//! [Compute the histograms]
|
||||
Mat bHist = new Mat(), gHist = new Mat(), rHist = new Mat();
|
||||
Imgproc.calcHist(bgrPlanes, new MatOfInt(0), new Mat(), bHist, new MatOfInt(histSize), histRange, accumulate);
|
||||
Imgproc.calcHist(bgrPlanes, new MatOfInt(1), new Mat(), gHist, new MatOfInt(histSize), histRange, accumulate);
|
||||
Imgproc.calcHist(bgrPlanes, new MatOfInt(2), new Mat(), rHist, new MatOfInt(histSize), histRange, accumulate);
|
||||
//! [Compute the histograms]
|
||||
|
||||
//! [Draw the histograms for B, G and R]
|
||||
int histW = 512, histH = 400;
|
||||
int binW = (int) Math.round((double) histW / histSize);
|
||||
|
||||
Mat histImage = new Mat( histH, histW, CvType.CV_8UC3, new Scalar( 0,0,0) );
|
||||
//! [Draw the histograms for B, G and R]
|
||||
|
||||
//! [Normalize the result to ( 0, histImage.rows )]
|
||||
Core.normalize(bHist, bHist, 0, histImage.rows(), Core.NORM_MINMAX);
|
||||
Core.normalize(gHist, gHist, 0, histImage.rows(), Core.NORM_MINMAX);
|
||||
Core.normalize(rHist, rHist, 0, histImage.rows(), Core.NORM_MINMAX);
|
||||
//! [Normalize the result to ( 0, histImage.rows )]
|
||||
|
||||
//! [Draw for each channel]
|
||||
float[] bHistData = new float[(int) (bHist.total() * bHist.channels())];
|
||||
bHist.get(0, 0, bHistData);
|
||||
float[] gHistData = new float[(int) (gHist.total() * gHist.channels())];
|
||||
gHist.get(0, 0, gHistData);
|
||||
float[] rHistData = new float[(int) (rHist.total() * rHist.channels())];
|
||||
rHist.get(0, 0, rHistData);
|
||||
|
||||
for( int i = 1; i < histSize; i++ ) {
|
||||
Imgproc.line(histImage, new Point(binW * (i - 1), histH - Math.round(bHistData[i - 1])),
|
||||
new Point(binW * (i), histH - Math.round(bHistData[i])), new Scalar(255, 0, 0), 2);
|
||||
Imgproc.line(histImage, new Point(binW * (i - 1), histH - Math.round(gHistData[i - 1])),
|
||||
new Point(binW * (i), histH - Math.round(gHistData[i])), new Scalar(0, 255, 0), 2);
|
||||
Imgproc.line(histImage, new Point(binW * (i - 1), histH - Math.round(rHistData[i - 1])),
|
||||
new Point(binW * (i), histH - Math.round(rHistData[i])), new Scalar(0, 0, 255), 2);
|
||||
}
|
||||
//! [Draw for each channel]
|
||||
|
||||
//! [Display]
|
||||
HighGui.imshow( "Source image", src );
|
||||
HighGui.imshow( "calcHist Demo", histImage );
|
||||
HighGui.waitKey(0);
|
||||
//! [Display]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class CalcHistDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new CalcHist().run(args);
|
||||
}
|
||||
}
|
@@ -0,0 +1,91 @@
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfFloat;
|
||||
import org.opencv.core.MatOfInt;
|
||||
import org.opencv.core.Range;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CompareHist {
|
||||
public void run(String[] args) {
|
||||
//! [Load three images with different environment settings]
|
||||
if (args.length != 3) {
|
||||
System.err.println("You must supply 3 arguments that correspond to the paths to 3 images.");
|
||||
System.exit(0);
|
||||
}
|
||||
Mat srcBase = Imgcodecs.imread(args[0]);
|
||||
Mat srcTest1 = Imgcodecs.imread(args[1]);
|
||||
Mat srcTest2 = Imgcodecs.imread(args[2]);
|
||||
if (srcBase.empty() || srcTest1.empty() || srcTest2.empty()) {
|
||||
System.err.println("Cannot read the images");
|
||||
System.exit(0);
|
||||
}
|
||||
//! [Load three images with different environment settings]
|
||||
|
||||
//! [Convert to HSV]
|
||||
Mat hsvBase = new Mat(), hsvTest1 = new Mat(), hsvTest2 = new Mat();
|
||||
Imgproc.cvtColor( srcBase, hsvBase, Imgproc.COLOR_BGR2HSV );
|
||||
Imgproc.cvtColor( srcTest1, hsvTest1, Imgproc.COLOR_BGR2HSV );
|
||||
Imgproc.cvtColor( srcTest2, hsvTest2, Imgproc.COLOR_BGR2HSV );
|
||||
//! [Convert to HSV]
|
||||
|
||||
//! [Convert to HSV half]
|
||||
Mat hsvHalfDown = hsvBase.submat( new Range( hsvBase.rows()/2, hsvBase.rows() - 1 ), new Range( 0, hsvBase.cols() - 1 ) );
|
||||
//! [Convert to HSV half]
|
||||
|
||||
//! [Using 50 bins for hue and 60 for saturation]
|
||||
int hBins = 50, sBins = 60;
|
||||
int[] histSize = { hBins, sBins };
|
||||
|
||||
// hue varies from 0 to 179, saturation from 0 to 255
|
||||
float[] ranges = { 0, 180, 0, 256 };
|
||||
|
||||
// Use the 0-th and 1-st channels
|
||||
int[] channels = { 0, 1 };
|
||||
//! [Using 50 bins for hue and 60 for saturation]
|
||||
|
||||
//! [Calculate the histograms for the HSV images]
|
||||
Mat histBase = new Mat(), histHalfDown = new Mat(), histTest1 = new Mat(), histTest2 = new Mat();
|
||||
|
||||
List<Mat> hsvBaseList = Arrays.asList(hsvBase);
|
||||
Imgproc.calcHist(hsvBaseList, new MatOfInt(channels), new Mat(), histBase, new MatOfInt(histSize), new MatOfFloat(ranges), false);
|
||||
Core.normalize(histBase, histBase, 0, 1, Core.NORM_MINMAX);
|
||||
|
||||
List<Mat> hsvHalfDownList = Arrays.asList(hsvHalfDown);
|
||||
Imgproc.calcHist(hsvHalfDownList, new MatOfInt(channels), new Mat(), histHalfDown, new MatOfInt(histSize), new MatOfFloat(ranges), false);
|
||||
Core.normalize(histHalfDown, histHalfDown, 0, 1, Core.NORM_MINMAX);
|
||||
|
||||
List<Mat> hsvTest1List = Arrays.asList(hsvTest1);
|
||||
Imgproc.calcHist(hsvTest1List, new MatOfInt(channels), new Mat(), histTest1, new MatOfInt(histSize), new MatOfFloat(ranges), false);
|
||||
Core.normalize(histTest1, histTest1, 0, 1, Core.NORM_MINMAX);
|
||||
|
||||
List<Mat> hsvTest2List = Arrays.asList(hsvTest2);
|
||||
Imgproc.calcHist(hsvTest2List, new MatOfInt(channels), new Mat(), histTest2, new MatOfInt(histSize), new MatOfFloat(ranges), false);
|
||||
Core.normalize(histTest2, histTest2, 0, 1, Core.NORM_MINMAX);
|
||||
//! [Calculate the histograms for the HSV images]
|
||||
|
||||
//! [Apply the histogram comparison methods]
|
||||
for( int compareMethod = 0; compareMethod < 4; compareMethod++ ) {
|
||||
double baseBase = Imgproc.compareHist( histBase, histBase, compareMethod );
|
||||
double baseHalf = Imgproc.compareHist( histBase, histHalfDown, compareMethod );
|
||||
double baseTest1 = Imgproc.compareHist( histBase, histTest1, compareMethod );
|
||||
double baseTest2 = Imgproc.compareHist( histBase, histTest2, compareMethod );
|
||||
|
||||
System.out.println("Method " + compareMethod + " Perfect, Base-Half, Base-Test(1), Base-Test(2) : " + baseBase + " / " + baseHalf
|
||||
+ " / " + baseTest1 + " / " + baseTest2);
|
||||
}
|
||||
//! [Apply the histogram comparison methods]
|
||||
}
|
||||
}
|
||||
|
||||
public class CompareHistDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new CompareHist().run(args);
|
||||
}
|
||||
}
|
@@ -0,0 +1,49 @@
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class EqualizeHist {
|
||||
public void run(String[] args) {
|
||||
//! [Load image]
|
||||
String filename = args.length > 0 ? args[0] : "../data/lena.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
//! [Load image]
|
||||
|
||||
//! [Convert to grayscale]
|
||||
Imgproc.cvtColor(src, src, Imgproc.COLOR_BGR2GRAY);
|
||||
//! [Convert to grayscale]
|
||||
|
||||
//! [Apply Histogram Equalization]
|
||||
Mat dst = new Mat();
|
||||
Imgproc.equalizeHist( src, dst );
|
||||
//! [Apply Histogram Equalization]
|
||||
|
||||
//! [Display results]
|
||||
HighGui.imshow( "Source image", src );
|
||||
HighGui.imshow( "Equalized Image", dst );
|
||||
//! [Display results]
|
||||
|
||||
//! [Wait until user exits the program]
|
||||
HighGui.waitKey(0);
|
||||
//! [Wait until user exits the program]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class EqualizeHistDemo {
|
||||
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new EqualizeHist().run(args);
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,186 @@
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.List;
|
||||
|
||||
class GeometricDrawingRun{
|
||||
|
||||
private static final int W = 400;
|
||||
|
||||
public void run(){
|
||||
//! [create_images]
|
||||
/// Windows names
|
||||
String atom_window = "Drawing 1: Atom";
|
||||
String rook_window = "Drawing 2: Rook";
|
||||
|
||||
/// Create black empty images
|
||||
Mat atom_image = Mat.zeros( W, W, CvType.CV_8UC3 );
|
||||
Mat rook_image = Mat.zeros( W, W, CvType.CV_8UC3 );
|
||||
//! [create_images]
|
||||
|
||||
//! [draw_atom]
|
||||
/// 1. Draw a simple atom:
|
||||
/// -----------------------
|
||||
MyEllipse( atom_image, 90.0 );
|
||||
MyEllipse( atom_image, 0.0 );
|
||||
MyEllipse( atom_image, 45.0 );
|
||||
MyEllipse( atom_image, -45.0 );
|
||||
|
||||
/// 1.b. Creating circles
|
||||
MyFilledCircle( atom_image, new Point( W/2, W/2) );
|
||||
//! [draw_atom]
|
||||
|
||||
//! [draw_rook]
|
||||
/// 2. Draw a rook
|
||||
/// ------------------
|
||||
/// 2.a. Create a convex polygon
|
||||
MyPolygon( rook_image );
|
||||
|
||||
//! [rectangle]
|
||||
/// 2.b. Creating rectangles
|
||||
Imgproc.rectangle( rook_image,
|
||||
new Point( 0, 7*W/8 ),
|
||||
new Point( W, W),
|
||||
new Scalar( 0, 255, 255 ),
|
||||
-1,
|
||||
8,
|
||||
0 );
|
||||
//! [rectangle]
|
||||
|
||||
/// 2.c. Create a few lines
|
||||
MyLine( rook_image, new Point( 0, 15*W/16 ), new Point( W, 15*W/16 ) );
|
||||
MyLine( rook_image, new Point( W/4, 7*W/8 ), new Point( W/4, W ) );
|
||||
MyLine( rook_image, new Point( W/2, 7*W/8 ), new Point( W/2, W ) );
|
||||
MyLine( rook_image, new Point( 3*W/4, 7*W/8 ), new Point( 3*W/4, W ) );
|
||||
//! [draw_rook]
|
||||
|
||||
/// 3. Display your stuff!
|
||||
HighGui.imshow( atom_window, atom_image );
|
||||
HighGui.moveWindow( atom_window, 0, 200 );
|
||||
HighGui.imshow( rook_window, rook_image );
|
||||
HighGui.moveWindow( rook_window, W, 200 );
|
||||
|
||||
HighGui.waitKey( 0 );
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Function Declaration
|
||||
|
||||
/**
|
||||
* @function MyEllipse
|
||||
* @brief Draw a fixed-size ellipse with different angles
|
||||
*/
|
||||
//! [my_ellipse]
|
||||
private void MyEllipse( Mat img, double angle ) {
|
||||
int thickness = 2;
|
||||
int lineType = 8;
|
||||
int shift = 0;
|
||||
|
||||
Imgproc.ellipse( img,
|
||||
new Point( W/2, W/2 ),
|
||||
new Size( W/4, W/16 ),
|
||||
angle,
|
||||
0.0,
|
||||
360.0,
|
||||
new Scalar( 255, 0, 0 ),
|
||||
thickness,
|
||||
lineType,
|
||||
shift );
|
||||
}
|
||||
//! [my_ellipse]
|
||||
/**
|
||||
* @function MyFilledCircle
|
||||
* @brief Draw a fixed-size filled circle
|
||||
*/
|
||||
//! [my_filled_circle]
|
||||
private void MyFilledCircle( Mat img, Point center ) {
|
||||
int thickness = -1;
|
||||
int lineType = 8;
|
||||
int shift = 0;
|
||||
|
||||
Imgproc.circle( img,
|
||||
center,
|
||||
W/32,
|
||||
new Scalar( 0, 0, 255 ),
|
||||
thickness,
|
||||
lineType,
|
||||
shift );
|
||||
}
|
||||
//! [my_filled_circle]
|
||||
/**
|
||||
* @function MyPolygon
|
||||
* @function Draw a simple concave polygon (rook)
|
||||
*/
|
||||
//! [my_polygon]
|
||||
private void MyPolygon( Mat img ) {
|
||||
int lineType = 8;
|
||||
int shift = 0;
|
||||
|
||||
/** Create some points */
|
||||
Point[] rook_points = new Point[20];
|
||||
rook_points[0] = new Point( W/4, 7*W/8 );
|
||||
rook_points[1] = new Point( 3*W/4, 7*W/8 );
|
||||
rook_points[2] = new Point( 3*W/4, 13*W/16 );
|
||||
rook_points[3] = new Point( 11*W/16, 13*W/16 );
|
||||
rook_points[4] = new Point( 19*W/32, 3*W/8 );
|
||||
rook_points[5] = new Point( 3*W/4, 3*W/8 );
|
||||
rook_points[6] = new Point( 3*W/4, W/8 );
|
||||
rook_points[7] = new Point( 26*W/40, W/8 );
|
||||
rook_points[8] = new Point( 26*W/40, W/4 );
|
||||
rook_points[9] = new Point( 22*W/40, W/4 );
|
||||
rook_points[10] = new Point( 22*W/40, W/8 );
|
||||
rook_points[11] = new Point( 18*W/40, W/8 );
|
||||
rook_points[12] = new Point( 18*W/40, W/4 );
|
||||
rook_points[13] = new Point( 14*W/40, W/4 );
|
||||
rook_points[14] = new Point( 14*W/40, W/8 );
|
||||
rook_points[15] = new Point( W/4, W/8 );
|
||||
rook_points[16] = new Point( W/4, 3*W/8 );
|
||||
rook_points[17] = new Point( 13*W/32, 3*W/8 );
|
||||
rook_points[18] = new Point( 5*W/16, 13*W/16 );
|
||||
rook_points[19] = new Point( W/4, 13*W/16 );
|
||||
|
||||
MatOfPoint matPt = new MatOfPoint();
|
||||
matPt.fromArray(rook_points);
|
||||
|
||||
List<MatOfPoint> ppt = new ArrayList<MatOfPoint>();
|
||||
ppt.add(matPt);
|
||||
|
||||
Imgproc.fillPoly(img,
|
||||
ppt,
|
||||
new Scalar( 255, 255, 255 ),
|
||||
lineType,
|
||||
shift,
|
||||
new Point(0,0) );
|
||||
}
|
||||
//! [my_polygon]
|
||||
/**
|
||||
* @function MyLine
|
||||
* @brief Draw a simple line
|
||||
*/
|
||||
//! [my_line]
|
||||
private void MyLine( Mat img, Point start, Point end ) {
|
||||
int thickness = 2;
|
||||
int lineType = 8;
|
||||
int shift = 0;
|
||||
|
||||
Imgproc.line( img,
|
||||
start,
|
||||
end,
|
||||
new Scalar( 0, 0, 0 ),
|
||||
thickness,
|
||||
lineType,
|
||||
shift );
|
||||
}
|
||||
//! [my_line]
|
||||
}
|
||||
|
||||
public class BasicGeometricDrawing {
|
||||
public static void main(String[] args) {
|
||||
// Load the native library.
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new GeometricDrawingRun().run();
|
||||
}
|
||||
}
|
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/HitMiss/HitMiss.java (vendored, new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class HitMissRun{
|
||||
|
||||
public void run() {
|
||||
Mat input_image = new Mat( 8, 8, CvType.CV_8UC1 );
|
||||
int row = 0, col = 0;
|
||||
input_image.put(row ,col,
|
||||
0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 255, 255, 255, 0, 0, 0, 255,
|
||||
0, 255, 255, 255, 0, 0, 0, 0,
|
||||
0, 255, 255, 255, 0, 255, 0, 0,
|
||||
0, 0, 255, 0, 0, 0, 0, 0,
|
||||
0, 0, 255, 0, 0, 255, 255, 0,
|
||||
0, 255, 0, 255, 0, 0, 255, 0,
|
||||
0, 255, 255, 255, 0, 0, 0, 0);
|
||||
|
||||
Mat kernel = new Mat( 3, 3, CvType.CV_16S );
|
||||
kernel.put(row ,col,
|
||||
0, 1, 0,
|
||||
1, -1, 1,
|
||||
0, 1, 0 );
|
||||
|
||||
Mat output_image = new Mat();
|
||||
Imgproc.morphologyEx(input_image, output_image, Imgproc.MORPH_HITMISS, kernel);
|
||||
|
||||
int rate = 50;
|
||||
Core.add(kernel, new Scalar(1), kernel);
|
||||
Core.multiply(kernel, new Scalar(127), kernel);
|
||||
kernel.convertTo(kernel, CvType.CV_8U);
|
||||
|
||||
Imgproc.resize(kernel, kernel, new Size(), rate, rate, Imgproc.INTER_NEAREST);
|
||||
HighGui.imshow("kernel", kernel);
|
||||
HighGui.moveWindow("kernel", 0, 0);
|
||||
|
||||
Imgproc.resize(input_image, input_image, new Size(), rate, rate, Imgproc.INTER_NEAREST);
|
||||
HighGui.imshow("Original", input_image);
|
||||
HighGui.moveWindow("Original", 0, 200);
|
||||
|
||||
Imgproc.resize(output_image, output_image, new Size(), rate, rate, Imgproc.INTER_NEAREST);
|
||||
HighGui.imshow("Hit or Miss", output_image);
|
||||
HighGui.moveWindow("Hit or Miss", 500, 200);
|
||||
|
||||
HighGui.waitKey(0);
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class HitMiss
|
||||
{
|
||||
public static void main(String[] args) {
|
||||
// load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new HitMissRun().run();
|
||||
}
|
||||
}
|
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/Pyramids/Pyramids.java (vendored, new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class PyramidsRun {
|
||||
|
||||
String window_name = "Pyramids Demo";
|
||||
|
||||
public void run(String[] args) {
|
||||
/// General instructions
|
||||
System.out.println("\n" +
|
||||
" Zoom In-Out demo \n" +
|
||||
"------------------ \n" +
|
||||
" * [i] -> Zoom [i]n \n" +
|
||||
" * [o] -> Zoom [o]ut \n" +
|
||||
" * [ESC] -> Close program \n");
|
||||
|
||||
//! [load]
|
||||
String filename = ((args.length > 0) ? args[0] : "../data/chicky_512.png");
|
||||
|
||||
// Load the image
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
|
||||
// Check if image is loaded fine
|
||||
if( src.empty() ) {
|
||||
System.out.println("Error opening image!");
|
||||
System.out.println("Program Arguments: [image_name -- default ../data/chicky_512.png] \n");
|
||||
System.exit(-1);
|
||||
}
|
||||
//! [load]
|
||||
|
||||
//! [loop]
|
||||
while (true){
|
||||
//! [show_image]
|
||||
HighGui.imshow( window_name, src );
|
||||
//! [show_image]
|
||||
char c = (char) HighGui.waitKey(0);
|
||||
c = Character.toLowerCase(c);
|
||||
|
||||
if( c == 27 ){
|
||||
break;
|
||||
//![pyrup]
|
||||
}else if( c == 'i'){
|
||||
Imgproc.pyrUp( src, src, new Size( src.cols()*2, src.rows()*2 ) );
|
||||
System.out.println( "** Zoom In: Image x 2" );
|
||||
//![pyrup]
|
||||
//![pyrdown]
|
||||
}else if( c == 'o'){
|
||||
Imgproc.pyrDown( src, src, new Size( src.cols()/2, src.rows()/2 ) );
|
||||
System.out.println( "** Zoom Out: Image / 2" );
|
||||
//![pyrdown]
|
||||
}
|
||||
}
|
||||
//! [loop]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class Pyramids {
|
||||
public static void main(String[] args) {
|
||||
// Load the native library.
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new PyramidsRun().run(args);
|
||||
}
|
||||
}
|
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/Smoothing/Smoothing.java (vendored, new file, 101 lines)
@@ -0,0 +1,101 @@
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class SmoothingRun {

    /// Global Variables
    int DELAY_CAPTION = 1500;
    int DELAY_BLUR = 100;
    int MAX_KERNEL_LENGTH = 31;

    Mat src = new Mat(), dst = new Mat();
    String windowName = "Filter Demo 1";

    public void run(String[] args) {

        String filename = ((args.length > 0) ? args[0] : "../data/lena.jpg");

        src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_COLOR);
        if( src.empty() ) {
            System.out.println("Error opening image");
            System.out.println("Usage: ./Smoothing [image_name -- default ../data/lena.jpg] \n");
            System.exit(-1);
        }

        if( displayCaption( "Original Image" ) != 0 ) { System.exit(0); }

        dst = src.clone();
        if( displayDst( DELAY_CAPTION ) != 0 ) { System.exit(0); }

        /// Applying Homogeneous blur
        if( displayCaption( "Homogeneous Blur" ) != 0 ) { System.exit(0); }

        //! [blur]
        for (int i = 1; i < MAX_KERNEL_LENGTH; i = i + 2) {
            Imgproc.blur(src, dst, new Size(i, i), new Point(-1, -1));
            displayDst(DELAY_BLUR);
        }
        //! [blur]

        /// Applying Gaussian blur
        if( displayCaption( "Gaussian Blur" ) != 0 ) { System.exit(0); }

        //! [gaussianblur]
        for (int i = 1; i < MAX_KERNEL_LENGTH; i = i + 2) {
            Imgproc.GaussianBlur(src, dst, new Size(i, i), 0, 0);
            displayDst(DELAY_BLUR);
        }
        //! [gaussianblur]

        /// Applying Median blur
        if( displayCaption( "Median Blur" ) != 0 ) { System.exit(0); }

        //! [medianblur]
        for (int i = 1; i < MAX_KERNEL_LENGTH; i = i + 2) {
            Imgproc.medianBlur(src, dst, i);
            displayDst(DELAY_BLUR);
        }
        //! [medianblur]

        /// Applying Bilateral Filter
        if( displayCaption( "Bilateral Blur" ) != 0 ) { System.exit(0); }

        //![bilateralfilter]
        for (int i = 1; i < MAX_KERNEL_LENGTH; i = i + 2) {
            Imgproc.bilateralFilter(src, dst, i, i * 2, i / 2);
            displayDst(DELAY_BLUR);
        }
        //![bilateralfilter]

        /// Done
        displayCaption( "Done!" );

        System.exit(0);
    }

    int displayCaption(String caption) {
        dst = Mat.zeros(src.size(), src.type());
        Imgproc.putText(dst, caption,
                new Point(src.cols() / 4, src.rows() / 2),
                Imgproc.FONT_HERSHEY_COMPLEX, 1, new Scalar(255, 255, 255));

        return displayDst(DELAY_CAPTION);
    }

    int displayDst(int delay) {
        HighGui.imshow( windowName, dst );
        int c = HighGui.waitKey( delay );
        if (c >= 0) { return -1; }
        return 0;
    }
}

public class Smoothing {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new SmoothingRun().run(args);
    }
}
@ -0,0 +1,86 @@
import java.util.Scanner;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;

class BasicLinearTransforms {
    private byte saturate(double val) {
        int iVal = (int) Math.round(val);
        iVal = iVal > 255 ? 255 : (iVal < 0 ? 0 : iVal);
        return (byte) iVal;
    }

    public void run(String[] args) {
        /// Read image given by user
        //! [basic-linear-transform-load]
        String imagePath = args.length > 0 ? args[0] : "../data/lena.jpg";
        Mat image = Imgcodecs.imread(imagePath);
        if (image.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }
        //! [basic-linear-transform-load]

        //! [basic-linear-transform-output]
        Mat newImage = Mat.zeros(image.size(), image.type());
        //! [basic-linear-transform-output]

        //! [basic-linear-transform-parameters]
        double alpha = 1.0; /*< Simple contrast control */
        int beta = 0;       /*< Simple brightness control */

        /// Initialize values
        System.out.println(" Basic Linear Transforms ");
        System.out.println("-------------------------");
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.print("* Enter the alpha value [1.0-3.0]: ");
            alpha = scanner.nextDouble();
            System.out.print("* Enter the beta value [0-100]: ");
            beta = scanner.nextInt();
        }
        //! [basic-linear-transform-parameters]

        /// Do the operation newImage(i,j) = alpha*image(i,j) + beta
        /// Instead of these 'for' loops we could have used simply:
        /// image.convertTo(newImage, -1, alpha, beta);
        /// but we wanted to show you how to access the pixels :)
        //! [basic-linear-transform-operation]
        byte[] imageData = new byte[(int) (image.total()*image.channels())];
        image.get(0, 0, imageData);
        byte[] newImageData = new byte[(int) (newImage.total()*newImage.channels())];
        for (int y = 0; y < image.rows(); y++) {
            for (int x = 0; x < image.cols(); x++) {
                for (int c = 0; c < image.channels(); c++) {
                    double pixelValue = imageData[(y * image.cols() + x) * image.channels() + c];
                    /// Java byte range is [-128, 127]
                    pixelValue = pixelValue < 0 ? pixelValue + 256 : pixelValue;
                    newImageData[(y * image.cols() + x) * image.channels() + c]
                            = saturate(alpha * pixelValue + beta);
                }
            }
        }
        newImage.put(0, 0, newImageData);
        //! [basic-linear-transform-operation]

        //! [basic-linear-transform-display]
        /// Show stuff
        HighGui.imshow("Original Image", image);
        HighGui.imshow("New Image", newImage);

        /// Wait until user press some key
        HighGui.waitKey();
        //! [basic-linear-transform-display]
        System.exit(0);
    }
}

public class BasicLinearTransformsDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new BasicLinearTransforms().run(args);
    }
}
@ -0,0 +1,202 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;

class ChangingContrastBrightnessImage {
    private static int MAX_VALUE_ALPHA = 500;
    private static int MAX_VALUE_BETA_GAMMA = 200;
    private static final String WINDOW_NAME = "Changing the contrast and brightness of an image demo";
    private static final String ALPHA_NAME = "Alpha gain (contrast)";
    private static final String BETA_NAME = "Beta bias (brightness)";
    private static final String GAMMA_NAME = "Gamma correction";
    private JFrame frame;
    private Mat matImgSrc = new Mat();
    private JLabel imgSrcLabel;
    private JLabel imgModifLabel;
    private JPanel controlPanel;
    private JPanel alphaBetaPanel;
    private JPanel gammaPanel;
    private double alphaValue = 1.0;
    private double betaValue = 0.0;
    private double gammaValue = 1.0;
    private JCheckBox methodCheckBox;
    private JSlider sliderAlpha;
    private JSlider sliderBeta;
    private JSlider sliderGamma;

    public ChangingContrastBrightnessImage(String[] args) {
        String imagePath = args.length > 0 ? args[0] : "../data/lena.jpg";
        matImgSrc = Imgcodecs.imread(imagePath);
        if (matImgSrc.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }

        // Create and set up the window.
        frame = new JFrame(WINDOW_NAME);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matImgSrc);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        controlPanel = new JPanel();
        controlPanel.setLayout(new BoxLayout(controlPanel, BoxLayout.PAGE_AXIS));

        methodCheckBox = new JCheckBox("Do gamma correction");
        methodCheckBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                JCheckBox cb = (JCheckBox) e.getSource();
                if (cb.isSelected()) {
                    controlPanel.remove(alphaBetaPanel);
                    controlPanel.add(gammaPanel);
                    performGammaCorrection();
                    frame.revalidate();
                    frame.repaint();
                    frame.pack();
                } else {
                    controlPanel.remove(gammaPanel);
                    controlPanel.add(alphaBetaPanel);
                    performLinearTransformation();
                    frame.revalidate();
                    frame.repaint();
                    frame.pack();
                }
            }
        });
        controlPanel.add(methodCheckBox);

        alphaBetaPanel = new JPanel();
        alphaBetaPanel.setLayout(new BoxLayout(alphaBetaPanel, BoxLayout.PAGE_AXIS));
        alphaBetaPanel.add(new JLabel(ALPHA_NAME));
        sliderAlpha = new JSlider(0, MAX_VALUE_ALPHA, 100);
        sliderAlpha.setMajorTickSpacing(50);
        sliderAlpha.setMinorTickSpacing(10);
        sliderAlpha.setPaintTicks(true);
        sliderAlpha.setPaintLabels(true);
        sliderAlpha.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                alphaValue = sliderAlpha.getValue() / 100.0;
                performLinearTransformation();
            }
        });
        alphaBetaPanel.add(sliderAlpha);

        alphaBetaPanel.add(new JLabel(BETA_NAME));
        sliderBeta = new JSlider(0, MAX_VALUE_BETA_GAMMA, 100);
        sliderBeta.setMajorTickSpacing(20);
        sliderBeta.setMinorTickSpacing(5);
        sliderBeta.setPaintTicks(true);
        sliderBeta.setPaintLabels(true);
        sliderBeta.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                betaValue = sliderBeta.getValue() - 100;
                performLinearTransformation();
            }
        });
        alphaBetaPanel.add(sliderBeta);
        controlPanel.add(alphaBetaPanel);

        gammaPanel = new JPanel();
        gammaPanel.setLayout(new BoxLayout(gammaPanel, BoxLayout.PAGE_AXIS));
        gammaPanel.add(new JLabel(GAMMA_NAME));
        sliderGamma = new JSlider(0, MAX_VALUE_BETA_GAMMA, 100);
        sliderGamma.setMajorTickSpacing(20);
        sliderGamma.setMinorTickSpacing(5);
        sliderGamma.setPaintTicks(true);
        sliderGamma.setPaintLabels(true);
        sliderGamma.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                gammaValue = sliderGamma.getValue() / 100.0;
                performGammaCorrection();
            }
        });
        gammaPanel.add(sliderGamma);

        pane.add(controlPanel, BorderLayout.PAGE_START);
        JPanel framePanel = new JPanel();
        imgSrcLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgSrcLabel);
        imgModifLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgModifLabel);
        pane.add(framePanel, BorderLayout.CENTER);
    }

    private void performLinearTransformation() {
        Mat img = new Mat();
        matImgSrc.convertTo(img, -1, alphaValue, betaValue);
        imgModifLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(img)));
        frame.repaint();
    }

    private byte saturate(double val) {
        int iVal = (int) Math.round(val);
        iVal = iVal > 255 ? 255 : (iVal < 0 ? 0 : iVal);
        return (byte) iVal;
    }

    private void performGammaCorrection() {
        //! [changing-contrast-brightness-gamma-correction]
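        // Gamma correction via a 256-entry lookup table: each input intensity I in [0, 255]
        // is mapped to O = (I / 255)^gamma * 255, as computed in the loop below.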
        Mat lookUpTable = new Mat(1, 256, CvType.CV_8U);
        byte[] lookUpTableData = new byte[(int) (lookUpTable.total()*lookUpTable.channels())];
        for (int i = 0; i < lookUpTable.cols(); i++) {
            lookUpTableData[i] = saturate(Math.pow(i / 255.0, gammaValue) * 255.0);
        }
        lookUpTable.put(0, 0, lookUpTableData);
        Mat img = new Mat();
        Core.LUT(matImgSrc, lookUpTable, img);
        //! [changing-contrast-brightness-gamma-correction]

        imgModifLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(img)));
        frame.repaint();
    }
}

public class ChangingContrastBrightnessImageDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new ChangingContrastBrightnessImage(args);
            }
        });
    }
}
159
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/erosion_dilatation/MorphologyDemo1.java
vendored
Normal file
@ -0,0 +1,159 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class MorphologyDemo1 {
    private static final String[] ELEMENT_TYPE = { "Rectangle", "Cross", "Ellipse" };
    private static final String[] MORPH_OP = { "Erosion", "Dilatation" };
    private static final int MAX_KERNEL_SIZE = 21;
    private Mat matImgSrc;
    private Mat matImgDst = new Mat();
    private int elementType = Imgproc.CV_SHAPE_RECT;
    private int kernelSize = 0;
    private boolean doErosion = true;
    private JFrame frame;
    private JLabel imgLabel;

    //! [constructor]
    public MorphologyDemo1(String[] args) {
        String imagePath = args.length > 0 ? args[0] : "../data/LinuxLogo.jpg";
        matImgSrc = Imgcodecs.imread(imagePath);
        if (matImgSrc.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }

        // Create and set up the window.
        frame = new JFrame("Erosion and dilatation demo");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matImgSrc);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
    }
    //! [constructor]

    //! [components]
    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        JComboBox<String> elementTypeBox = new JComboBox<>(ELEMENT_TYPE);
        elementTypeBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                @SuppressWarnings("unchecked")
                JComboBox<String> cb = (JComboBox<String>)e.getSource();
                if (cb.getSelectedIndex() == 0) {
                    elementType = Imgproc.CV_SHAPE_RECT;
                } else if (cb.getSelectedIndex() == 1) {
                    elementType = Imgproc.CV_SHAPE_CROSS;
                } else if (cb.getSelectedIndex() == 2) {
                    elementType = Imgproc.CV_SHAPE_ELLIPSE;
                }
                update();
            }
        });
        sliderPanel.add(elementTypeBox);

        sliderPanel.add(new JLabel("Kernel size: 2n + 1"));
        JSlider slider = new JSlider(0, MAX_KERNEL_SIZE, 0);
        slider.setMajorTickSpacing(5);
        slider.setMinorTickSpacing(5);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                kernelSize = source.getValue();
                update();
            }
        });
        sliderPanel.add(slider);

        JComboBox<String> morphOpBox = new JComboBox<>(MORPH_OP);
        morphOpBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                @SuppressWarnings("unchecked")
                JComboBox<String> cb = (JComboBox<String>)e.getSource();
                doErosion = cb.getSelectedIndex() == 0;
                update();
            }
        });
        sliderPanel.add(morphOpBox);

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }
    //! [components]

    //! [update]
    private void update() {
        //! [kernel]
        Mat element = Imgproc.getStructuringElement(elementType, new Size(2 * kernelSize + 1, 2 * kernelSize + 1),
                new Point(kernelSize, kernelSize));
        //! [kernel]

        if (doErosion) {
            //! [erosion]
            Imgproc.erode(matImgSrc, matImgDst, element);
            //! [erosion]
        } else {
            //! [dilation]
            Imgproc.dilate(matImgSrc, matImgDst, element);
            //! [dilation]
        }
        Image img = HighGui.toBufferedImage(matImgDst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }
    //! [update]

    //! [main]
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new MorphologyDemo1(args);
            }
        });
    }
    //! [main]
}
152
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/morph_lines_detection/Morphology_3.java
vendored
Normal file
@ -0,0 +1,152 @@
/**
 * @file Morphology_3.java
 * @brief Use morphology transformations for extracting horizontal and vertical lines sample code
 */

import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class Morphology_3Run {

    public void run(String[] args) {

        //! [load_image]
        // Check number of arguments
        if (args.length == 0){
            System.out.println("Not enough parameters!");
            System.out.println("Program Arguments: [image_path]");
            System.exit(-1);
        }

        // Load the image
        Mat src = Imgcodecs.imread(args[0]);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image: " + args[0]);
            System.exit(-1);
        }

        // Show source image
        HighGui.imshow("src", src);
        //! [load_image]

        //! [gray]
        // Transform source image to gray if it is not already
        Mat gray = new Mat();

        if (src.channels() == 3)
        {
            Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        }
        else
        {
            gray = src;
        }

        // Show gray image
        showWaitDestroy("gray" , gray);
        //! [gray]

        //! [bin]
        // Apply adaptiveThreshold at the bitwise_not of gray
        Mat bw = new Mat();
        Core.bitwise_not(gray, gray);
        Imgproc.adaptiveThreshold(gray, bw, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 15, -2);

        // Show binary image
        showWaitDestroy("binary" , bw);
        //! [bin]

        //! [init]
        // Create the images that will use to extract the horizontal and vertical lines
        Mat horizontal = bw.clone();
        Mat vertical = bw.clone();
        //! [init]

        //! [horiz]
        // Specify size on horizontal axis
        int horizontal_size = horizontal.cols() / 30;

        // Create structure element for extracting horizontal lines through morphology operations
        Mat horizontalStructure = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(horizontal_size,1));

        // Apply morphology operations
        Imgproc.erode(horizontal, horizontal, horizontalStructure);
        Imgproc.dilate(horizontal, horizontal, horizontalStructure);

        // Show extracted horizontal lines
        showWaitDestroy("horizontal" , horizontal);
        //! [horiz]

        //! [vert]
        // Specify size on vertical axis
        int vertical_size = vertical.rows() / 30;

        // Create structure element for extracting vertical lines through morphology operations
        Mat verticalStructure = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size( 1,vertical_size));

        // Apply morphology operations
        Imgproc.erode(vertical, vertical, verticalStructure);
        Imgproc.dilate(vertical, vertical, verticalStructure);

        // Show extracted vertical lines
        showWaitDestroy("vertical", vertical);
        //! [vert]

        //! [smooth]
        // Inverse vertical image
        Core.bitwise_not(vertical, vertical);
        showWaitDestroy("vertical_bit" , vertical);

        // Extract edges and smooth image according to the logic
        // 1. extract edges
        // 2. dilate(edges)
        // 3. src.copyTo(smooth)
        // 4. blur smooth img
        // 5. smooth.copyTo(src, edges)

        // Step 1
        Mat edges = new Mat();
        Imgproc.adaptiveThreshold(vertical, edges, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 3, -2);
        showWaitDestroy("edges", edges);

        // Step 2
        Mat kernel = Mat.ones(2, 2, CvType.CV_8UC1);
        Imgproc.dilate(edges, edges, kernel);
        showWaitDestroy("dilate", edges);

        // Step 3
        Mat smooth = new Mat();
        vertical.copyTo(smooth);

        // Step 4
        Imgproc.blur(smooth, smooth, new Size(2, 2));

        // Step 5
        smooth.copyTo(vertical, edges);

        // Show final result
        showWaitDestroy("smooth - final", vertical);
        //! [smooth]

        System.exit(0);
    }

    private void showWaitDestroy(String winname, Mat img) {
        HighGui.imshow(winname, img);
        HighGui.moveWindow(winname, 500, 0);
        HighGui.waitKey(0);
        HighGui.destroyWindow(winname);
    }
}

public class Morphology_3 {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Morphology_3Run().run(args);
    }
}
143
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/opening_closing_hats/MorphologyDemo2.java
vendored
Normal file
@ -0,0 +1,143 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class MorphologyDemo2 {
    private static final String[] MORPH_OP = { "Opening", "Closing", "Gradient", "Top Hat", "Black Hat" };
    private static final int[] MORPH_OP_TYPE = { Imgproc.MORPH_OPEN, Imgproc.MORPH_CLOSE,
            Imgproc.MORPH_GRADIENT, Imgproc.MORPH_TOPHAT, Imgproc.MORPH_BLACKHAT };
    private static final String[] ELEMENT_TYPE = { "Rectangle", "Cross", "Ellipse" };
    private static final int MAX_KERNEL_SIZE = 21;
    private Mat matImgSrc;
    private Mat matImgDst = new Mat();
    private int morphOpType = Imgproc.MORPH_OPEN;
    private int elementType = Imgproc.CV_SHAPE_RECT;
    private int kernelSize = 0;
    private JFrame frame;
    private JLabel imgLabel;

    public MorphologyDemo2(String[] args) {
        String imagePath = args.length > 0 ? args[0] : "../data/LinuxLogo.jpg";
        matImgSrc = Imgcodecs.imread(imagePath);
        if (matImgSrc.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }

        // Create and set up the window.
        frame = new JFrame("Morphology Transformations demo");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matImgSrc);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        JComboBox<String> morphOpBox = new JComboBox<>(MORPH_OP);
        morphOpBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                @SuppressWarnings("unchecked")
                JComboBox<String> cb = (JComboBox<String>)e.getSource();
                morphOpType = MORPH_OP_TYPE[cb.getSelectedIndex()];
                update();
            }
        });
        sliderPanel.add(morphOpBox);

        JComboBox<String> elementTypeBox = new JComboBox<>(ELEMENT_TYPE);
        elementTypeBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                @SuppressWarnings("unchecked")
                JComboBox<String> cb = (JComboBox<String>)e.getSource();
                if (cb.getSelectedIndex() == 0) {
                    elementType = Imgproc.CV_SHAPE_RECT;
                } else if (cb.getSelectedIndex() == 1) {
                    elementType = Imgproc.CV_SHAPE_CROSS;
                } else if (cb.getSelectedIndex() == 2) {
                    elementType = Imgproc.CV_SHAPE_ELLIPSE;
                }
                update();
            }
        });
        sliderPanel.add(elementTypeBox);

        sliderPanel.add(new JLabel("Kernel size: 2n + 1"));
        JSlider slider = new JSlider(0, MAX_KERNEL_SIZE, 0);
        slider.setMajorTickSpacing(5);
        slider.setMinorTickSpacing(5);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                kernelSize = source.getValue();
                update();
            }
        });
        sliderPanel.add(slider);

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    private void update() {
        Mat element = Imgproc.getStructuringElement(elementType, new Size(2 * kernelSize + 1, 2 * kernelSize + 1),
                new Point(kernelSize, kernelSize));

        Imgproc.morphologyEx(matImgSrc, matImgDst, morphOpType, element);
        Image img = HighGui.toBufferedImage(matImgDst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new MorphologyDemo2(args);
            }
        });
    }
}
144
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/threshold/Threshold.java
vendored
Normal file
@ -0,0 +1,144 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class Threshold {
    private static int MAX_VALUE = 255;
    private static int MAX_TYPE = 4;
    private static int MAX_BINARY_VALUE = 255;
    private static final String WINDOW_NAME = "Threshold Demo";
    private static final String TRACKBAR_TYPE = "<html><body>Type: <br> 0: Binary <br> "
            + "1: Binary Inverted <br> 2: Truncate <br> "
            + "3: To Zero <br> 4: To Zero Inverted</body></html>";
    private static final String TRACKBAR_VALUE = "Value";
    private int thresholdValue = 0;
    private int thresholdType = 3;
    private Mat src;
    private Mat srcGray = new Mat();
    private Mat dst = new Mat();
    private JFrame frame;
    private JLabel imgLabel;

    public Threshold(String[] args) {
        //! [load]
        String imagePath = "../data/stuff.jpg";
        if (args.length > 0) {
            imagePath = args[0];
        }
        // Load an image
        src = Imgcodecs.imread(imagePath);
        if (src.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }
        // Convert the image to Gray
        Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
        //! [load]

        //! [window]
        // Create and set up the window.
        frame = new JFrame(WINDOW_NAME);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(srcGray);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        //! [window]
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        //! [trackbar]
        sliderPanel.add(new JLabel(TRACKBAR_TYPE));
        // Create Trackbar to choose type of Threshold
        JSlider sliderThreshType = new JSlider(0, MAX_TYPE, thresholdType);
        sliderThreshType.setMajorTickSpacing(1);
        sliderThreshType.setMinorTickSpacing(1);
        sliderThreshType.setPaintTicks(true);
        sliderThreshType.setPaintLabels(true);
        sliderPanel.add(sliderThreshType);

        sliderPanel.add(new JLabel(TRACKBAR_VALUE));
        // Create Trackbar to choose Threshold value
        JSlider sliderThreshValue = new JSlider(0, MAX_VALUE, 0);
        sliderThreshValue.setMajorTickSpacing(50);
        sliderThreshValue.setMinorTickSpacing(10);
        sliderThreshValue.setPaintTicks(true);
        sliderThreshValue.setPaintLabels(true);
        sliderPanel.add(sliderThreshValue);
        //! [trackbar]

        //! [on_trackbar]
        sliderThreshType.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                thresholdType = source.getValue();
                update();
            }
        });

        sliderThreshValue.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                thresholdValue = source.getValue();
                update();
            }
        });
        //! [on_trackbar]

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    //! [Threshold_Demo]
    private void update() {
        Imgproc.threshold(srcGray, dst, thresholdValue, MAX_BINARY_VALUE, thresholdType);
        Image img = HighGui.toBufferedImage(dst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }
    //! [Threshold_Demo]

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new Threshold(args);
            }
        });
    }
}
259
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java
vendored
Normal file
@ -0,0 +1,259 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.List;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.SwingWorker;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;

public class ThresholdInRange {
    private static int MAX_VALUE = 255;
    private static int MAX_VALUE_H = 360/2;
    private static final String WINDOW_NAME = "Thresholding Operations using inRange demo";
    private static final String LOW_H_NAME = "Low H";
    private static final String LOW_S_NAME = "Low S";
    private static final String LOW_V_NAME = "Low V";
    private static final String HIGH_H_NAME = "High H";
    private static final String HIGH_S_NAME = "High S";
    private static final String HIGH_V_NAME = "High V";
    private JSlider sliderLowH;
    private JSlider sliderHighH;
    private JSlider sliderLowS;
    private JSlider sliderHighS;
    private JSlider sliderLowV;
    private JSlider sliderHighV;
    private VideoCapture cap;
    private Mat matFrame = new Mat();
    private JFrame frame;
    private JLabel imgCaptureLabel;
    private JLabel imgDetectionLabel;
    private CaptureTask captureTask;

    public ThresholdInRange(String[] args) {
        int cameraDevice = 0;
        if (args.length > 0) {
            cameraDevice = Integer.parseInt(args[0]);
        }
        //! [cap]
        cap = new VideoCapture(cameraDevice);
        //! [cap]
        if (!cap.isOpened()) {
            System.err.println("Cannot open camera: " + cameraDevice);
            System.exit(0);
        }
        if (!cap.read(matFrame)) {
            System.err.println("Cannot read camera stream.");
            System.exit(0);
        }

        //! [window]
        // Create and set up the window.
        frame = new JFrame(WINDOW_NAME);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent windowEvent) {
                captureTask.cancel(true);
            }
        });
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matFrame);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        //! [window]

        captureTask = new CaptureTask();
        captureTask.execute();
    }

    //! [while]
    private class CaptureTask extends SwingWorker<Void, Mat> {
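        // Frames are grabbed on a background thread in doInBackground() and handed to the
        // Swing event dispatch thread through publish()/process() for thresholding and display.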
        @Override
        protected Void doInBackground() {
            Mat matFrame = new Mat();

            while (!isCancelled()) {
                if (!cap.read(matFrame)) {
                    break;
                }
                publish(matFrame.clone());
            }
            return null;
        }

        @Override
        protected void process(List<Mat> frames) {
            Mat frame = frames.get(frames.size() - 1);
            Mat frameHSV = new Mat();
            Imgproc.cvtColor(frame, frameHSV, Imgproc.COLOR_BGR2HSV);
            Mat thresh = new Mat();
            Core.inRange(frameHSV, new Scalar(sliderLowH.getValue(), sliderLowS.getValue(), sliderLowV.getValue()),
                    new Scalar(sliderHighH.getValue(), sliderHighS.getValue(), sliderHighV.getValue()), thresh);
            update(frame, thresh);
        }
    }
    //! [while]

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        //! [trackbar]
        sliderPanel.add(new JLabel(LOW_H_NAME));
        sliderLowH = new JSlider(0, MAX_VALUE_H, 0);
        sliderLowH.setMajorTickSpacing(50);
        sliderLowH.setMinorTickSpacing(10);
        sliderLowH.setPaintTicks(true);
        sliderLowH.setPaintLabels(true);
        sliderPanel.add(sliderLowH);

        sliderPanel.add(new JLabel(HIGH_H_NAME));
        sliderHighH = new JSlider(0, MAX_VALUE_H, MAX_VALUE_H);
        sliderHighH.setMajorTickSpacing(50);
        sliderHighH.setMinorTickSpacing(10);
        sliderHighH.setPaintTicks(true);
        sliderHighH.setPaintLabels(true);
        sliderPanel.add(sliderHighH);

        sliderPanel.add(new JLabel(LOW_S_NAME));
        sliderLowS = new JSlider(0, MAX_VALUE, 0);
        sliderLowS.setMajorTickSpacing(50);
        sliderLowS.setMinorTickSpacing(10);
        sliderLowS.setPaintTicks(true);
        sliderLowS.setPaintLabels(true);
        sliderPanel.add(sliderLowS);

        sliderPanel.add(new JLabel(HIGH_S_NAME));
        sliderHighS = new JSlider(0, MAX_VALUE, MAX_VALUE);
        sliderHighS.setMajorTickSpacing(50);
        sliderHighS.setMinorTickSpacing(10);
        sliderHighS.setPaintTicks(true);
        sliderHighS.setPaintLabels(true);
        sliderPanel.add(sliderHighS);

        sliderPanel.add(new JLabel(LOW_V_NAME));
        sliderLowV = new JSlider(0, MAX_VALUE, 0);
        sliderLowV.setMajorTickSpacing(50);
        sliderLowV.setMinorTickSpacing(10);
        sliderLowV.setPaintTicks(true);
        sliderLowV.setPaintLabels(true);
        sliderPanel.add(sliderLowV);

        sliderPanel.add(new JLabel(HIGH_V_NAME));
        sliderHighV = new JSlider(0, MAX_VALUE, MAX_VALUE);
        sliderHighV.setMajorTickSpacing(50);
        sliderHighV.setMinorTickSpacing(10);
        sliderHighV.setPaintTicks(true);
        sliderHighV.setPaintLabels(true);
        sliderPanel.add(sliderHighV);
        //! [trackbar]

        //! [low]
        sliderLowH.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valH = Math.min(sliderHighH.getValue()-1, source.getValue());
                sliderLowH.setValue(valH);
            }
        });
        //! [low]
        //! [high]
        sliderHighH.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valH = Math.max(source.getValue(), sliderLowH.getValue()+1);
                sliderHighH.setValue(valH);
            }
        });
        //! [high]
        sliderLowS.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valS = Math.min(sliderHighS.getValue()-1, source.getValue());
                sliderLowS.setValue(valS);
            }
        });
        sliderHighS.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valS = Math.max(source.getValue(), sliderLowS.getValue()+1);
                sliderHighS.setValue(valS);
            }
        });
        sliderLowV.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valV = Math.min(sliderHighV.getValue()-1, source.getValue());
                sliderLowV.setValue(valV);
            }
        });
        sliderHighV.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valV = Math.max(source.getValue(), sliderLowV.getValue()+1);
                sliderHighV.setValue(valV);
            }
        });

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        JPanel framePanel = new JPanel();
        imgCaptureLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgCaptureLabel);
        imgDetectionLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgDetectionLabel);
        pane.add(framePanel, BorderLayout.CENTER);
    }

    private void update(Mat imgCapture, Mat imgThresh) {
        //! [show]
        imgCaptureLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(imgCapture)));
        imgDetectionLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(imgThresh)));
        frame.repaint();
        //! [show]
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new ThresholdInRange(args);
            }
        });
    }
}
@ -0,0 +1,182 @@
import java.awt.GridLayout;
import java.awt.Image;
import java.util.Hashtable;

import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class MatchTemplateDemoRun implements ChangeListener {

    //! [declare]
    /// Global Variables
    Boolean use_mask = false;
    Mat img = new Mat(), templ = new Mat();
    Mat mask = new Mat();

    int match_method;

    JLabel imgDisplay = new JLabel(), resultDisplay = new JLabel();
    //! [declare]

    public void run(String[] args) {
        if (args.length < 2) {
            System.out.println("Not enough parameters");
            System.out.println("Program arguments:\n<image_name> <template_name> [<mask_name>]");
            System.exit(-1);
        }

        //! [load_image]
        /// Load image and template
        img = Imgcodecs.imread(args[0], Imgcodecs.IMREAD_COLOR);
        templ = Imgcodecs.imread(args[1], Imgcodecs.IMREAD_COLOR);
        //! [load_image]

        if (args.length > 2) {
            use_mask = true;
            mask = Imgcodecs.imread(args[2], Imgcodecs.IMREAD_COLOR);
        }

        if (img.empty() || templ.empty() || (use_mask && mask.empty())) {
            System.out.println("Can't read one of the images");
            System.exit(-1);
        }

        matchingMethod();
        createJFrame();
    }

    private void matchingMethod() {
        Mat result = new Mat();

        //! [copy_source]
        /// Source image to display
        Mat img_display = new Mat();
        img.copyTo(img_display);
        //! [copy_source]

        //! [create_result_matrix]
        /// Create the result matrix
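        /// (one score for every placement of the template's top-left corner inside the image)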
        int result_cols = img.cols() - templ.cols() + 1;
        int result_rows = img.rows() - templ.rows() + 1;

        result.create(result_rows, result_cols, CvType.CV_32FC1);
        //! [create_result_matrix]

        //! [match_template]
        /// Do the Matching and Normalize
        Boolean method_accepts_mask = (Imgproc.TM_SQDIFF == match_method || match_method == Imgproc.TM_CCORR_NORMED);
        if (use_mask && method_accepts_mask) {
            Imgproc.matchTemplate(img, templ, result, match_method, mask);
        } else {
            Imgproc.matchTemplate(img, templ, result, match_method);
        }
        //! [match_template]

        //! [normalize]
        Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());
        //! [normalize]

        //! [best_match]
        /// Localizing the best match with minMaxLoc
        Point matchLoc;

        Core.MinMaxLocResult mmr = Core.minMaxLoc(result);
        //! [best_match]

        //! [match_loc]
        /// For SQDIFF and SQDIFF_NORMED, the best matches are lower values.
        /// For all the other methods, the higher the better
        if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
            matchLoc = mmr.minLoc;
        } else {
            matchLoc = mmr.maxLoc;
        }
        //! [match_loc]

        //! [imshow]
        /// Show me what you got
        Imgproc.rectangle(img_display, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
                new Scalar(0, 0, 0), 2, 8, 0);
        Imgproc.rectangle(result, matchLoc, new Point(matchLoc.x + templ.cols(), matchLoc.y + templ.rows()),
                new Scalar(0, 0, 0), 2, 8, 0);

        Image tmpImg = HighGui.toBufferedImage(img_display);
        ImageIcon icon = new ImageIcon(tmpImg);
        imgDisplay.setIcon(icon);

        result.convertTo(result, CvType.CV_8UC1, 255.0);
        tmpImg = HighGui.toBufferedImage(result);
        icon = new ImageIcon(tmpImg);
        resultDisplay.setIcon(icon);
        //! [imshow]
    }

    @Override
    public void stateChanged(ChangeEvent e) {
        JSlider source = (JSlider) e.getSource();
        if (!source.getValueIsAdjusting()) {
            match_method = source.getValue();
            matchingMethod();
        }
    }

    private void createJFrame() {
        String title = "Source image; Control; Result image";
        JFrame frame = new JFrame(title);
        frame.setLayout(new GridLayout(2, 2));
        frame.add(imgDisplay);

        //! [create_trackbar]
        int min = 0, max = 5;
        JSlider slider = new JSlider(JSlider.VERTICAL, min, max, match_method);
        //! [create_trackbar]

        slider.setPaintTicks(true);
        slider.setPaintLabels(true);

        // Set the spacing for the minor tick mark
        slider.setMinorTickSpacing(1);

        // Customizing the labels
        Hashtable<Integer, JLabel> labelTable = new Hashtable<>();
        labelTable.put(new Integer(0), new JLabel("0 - SQDIFF"));
        labelTable.put(new Integer(1), new JLabel("1 - SQDIFF NORMED"));
        labelTable.put(new Integer(2), new JLabel("2 - TM CCORR"));
        labelTable.put(new Integer(3), new JLabel("3 - TM CCORR NORMED"));
        labelTable.put(new Integer(4), new JLabel("4 - TM COEFF"));
        labelTable.put(new Integer(5), new JLabel("5 - TM COEFF NORMED : (Method)"));
        slider.setLabelTable(labelTable);

        slider.addChangeListener(this);

        frame.add(slider);

        frame.add(resultDisplay);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.pack();
        frame.setVisible(true);
    }
}

public class MatchTemplateDemo {
    public static void main(String[] args) {
        // load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // run code
        new MatchTemplateDemoRun().run(args);
    }
}
81
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/Filter2D/Filter2D_Demo.java
vendored
Normal file
@ -0,0 +1,81 @@
/**
 * @file Filter2D_demo.java
 * @brief Sample code that shows how to implement your own linear filters by using filter2D function
 */

import org.opencv.core.*;
import org.opencv.core.Point;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class Filter2D_DemoRun {

    public void run(String[] args) {
        // Declare variables
        Mat src, dst = new Mat();

        Mat kernel = new Mat();
        Point anchor;
        double delta;
        int ddepth;
        int kernel_size;
        String window_name = "filter2D Demo";

        //! [load]
        String imageName = ((args.length > 0) ? args[0] : "../data/lena.jpg");

        // Load an image
        src = Imgcodecs.imread(imageName, Imgcodecs.IMREAD_COLOR);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image!");
            System.out.println("Program Arguments: [image_name -- default ../data/lena.jpg] \n");
            System.exit(-1);
        }
        //! [load]

        //! [init_arguments]
        // Initialize arguments for the filter
        anchor = new Point( -1, -1);
        delta = 0.0;
        ddepth = -1;
        //! [init_arguments]

        // Loop - Will filter the image with different kernel sizes each 0.5 seconds
        int ind = 0;
        while( true )
        {
            //! [update_kernel]
            // Update kernel size for a normalized box filter
            kernel_size = 3 + 2*( ind%5 );
            Mat ones = Mat.ones( kernel_size, kernel_size, CvType.CV_32F );
            Core.multiply(ones, new Scalar(1/(double)(kernel_size*kernel_size)), kernel);
            //! [update_kernel]

            //! [apply_filter]
            // Apply filter
            Imgproc.filter2D(src, dst, ddepth , kernel, anchor, delta, Core.BORDER_DEFAULT );
            //! [apply_filter]
            HighGui.imshow( window_name, dst );

            int c = HighGui.waitKey(500);
            // Press 'ESC' to exit the program
            if( c == 27 )
            { break; }

            ind++;
        }

        System.exit(0);
    }
}

public class Filter2D_Demo {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Filter2D_DemoRun().run(args);
    }
}
77
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/HoughCircle/HoughCircles.java
vendored
Normal file
@ -0,0 +1,77 @@
package sample;
/**
 * @file HoughCircles.java
 * @brief This program demonstrates circle finding with the Hough transform
 */

import org.opencv.core.*;
import org.opencv.core.Point;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class HoughCirclesRun {

    public void run(String[] args) {

        //! [load]
        String default_file = "../../../../data/smarties.png";
        String filename = ((args.length > 0) ? args[0] : default_file);

        // Load an image
        Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_COLOR);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image!");
            System.out.println("Program Arguments: [image_name -- default "
                    + default_file +"] \n");
            System.exit(-1);
        }
        //! [load]

        //! [convert_to_gray]
        Mat gray = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        //! [convert_to_gray]

        //![reduce_noise]
        Imgproc.medianBlur(gray, gray, 5);
        //![reduce_noise]

        //! [houghcircles]
        Mat circles = new Mat();
        Imgproc.HoughCircles(gray, circles, Imgproc.HOUGH_GRADIENT, 1.0,
                (double)gray.rows()/16, // change this value to detect circles with different distances to each other
                100.0, 30.0, 1, 30); // change the last two parameters
                // (min_radius & max_radius) to detect larger circles
        //! [houghcircles]

        //! [draw]
        for (int x = 0; x < circles.cols(); x++) {
            double[] c = circles.get(0, x);
            Point center = new Point(Math.round(c[0]), Math.round(c[1]));
            // circle center
            Imgproc.circle(src, center, 1, new Scalar(0,100,100), 3, 8, 0 );
            // circle outline
            int radius = (int) Math.round(c[2]);
            Imgproc.circle(src, center, radius, new Scalar(255,0,255), 3, 8, 0 );
        }
        //! [draw]

        //! [display]
        HighGui.imshow("detected circles", src);
        HighGui.waitKey();
        //! [display]

        System.exit(0);
    }
}

public class HoughCircles {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new HoughCirclesRun().run(args);
    }
}
96
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/HoughLine/HoughLines.java
vendored
Normal file
@ -0,0 +1,96 @@
/**
 * @file HoughLines.java
 * @brief This program demonstrates line finding with the Hough transform
 */

import org.opencv.core.*;
import org.opencv.core.Point;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class HoughLinesRun {

    public void run(String[] args) {
        // Declare the output variables
        Mat dst = new Mat(), cdst = new Mat(), cdstP;

        //! [load]
        String default_file = "../../../../data/sudoku.png";
        String filename = ((args.length > 0) ? args[0] : default_file);

        // Load an image
        Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image!");
            System.out.println("Program Arguments: [image_name -- default "
                    + default_file +"] \n");
            System.exit(-1);
        }
        //! [load]

        //! [edge_detection]
        // Edge detection
        Imgproc.Canny(src, dst, 50, 200, 3, false);
        //! [edge_detection]

        // Copy edges to the images that will display the results in BGR
        Imgproc.cvtColor(dst, cdst, Imgproc.COLOR_GRAY2BGR);
        cdstP = cdst.clone();

        //! [hough_lines]
        // Standard Hough Line Transform
        Mat lines = new Mat(); // will hold the results of the detection
        Imgproc.HoughLines(dst, lines, 1, Math.PI/180, 150); // runs the actual detection
        //! [hough_lines]
        //! [draw_lines]
        // Draw the lines
        for (int x = 0; x < lines.rows(); x++) {
            double rho = lines.get(x, 0)[0],
                    theta = lines.get(x, 0)[1];
||||
|
||||
double a = Math.cos(theta), b = Math.sin(theta);
|
||||
double x0 = a*rho, y0 = b*rho;
|
||||
Point pt1 = new Point(Math.round(x0 + 1000*(-b)), Math.round(y0 + 1000*(a)));
|
||||
Point pt2 = new Point(Math.round(x0 - 1000*(-b)), Math.round(y0 - 1000*(a)));
|
||||
Imgproc.line(cdst, pt1, pt2, new Scalar(0, 0, 255), 3, Imgproc.LINE_AA, 0);
|
||||
}
|
||||
//! [draw_lines]
|
||||
|
||||
//! [hough_lines_p]
|
||||
// Probabilistic Line Transform
|
||||
Mat linesP = new Mat(); // will hold the results of the detection
|
||||
Imgproc.HoughLinesP(dst, linesP, 1, Math.PI/180, 50, 50, 10); // runs the actual detection
|
||||
//! [hough_lines_p]
|
||||
//! [draw_lines_p]
|
||||
// Draw the lines
|
||||
for (int x = 0; x < linesP.rows(); x++) {
|
||||
double[] l = linesP.get(x, 0);
|
||||
Imgproc.line(cdstP, new Point(l[0], l[1]), new Point(l[2], l[3]), new Scalar(0, 0, 255), 3, Imgproc.LINE_AA, 0);
|
||||
}
|
||||
//! [draw_lines_p]
|
||||
|
||||
//! [imshow]
|
||||
// Show results
|
||||
HighGui.imshow("Source", src);
|
||||
HighGui.imshow("Detected Lines (in red) - Standard Hough Line Transform", cdst);
|
||||
HighGui.imshow("Detected Lines (in red) - Probabilistic Line Transform", cdstP);
|
||||
//! [imshow]
|
||||
|
||||
//! [exit]
|
||||
// Wait and Exit
|
||||
HighGui.waitKey();
|
||||
System.exit(0);
|
||||
//! [exit]
|
||||
}
|
||||
}
|
||||
|
||||
public class HoughLines {
|
||||
public static void main(String[] args) {
|
||||
// Load the native library.
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new HoughLinesRun().run(args);
|
||||
}
|
||||
}
|
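The drawing loop above converts each standard-Hough result from polar form (rho, theta) into two far-apart endpoints before calling Imgproc.line. A minimal sketch of just that conversion, with a made-up example line; PolarLineSketch and the sample values are illustrative only.

import org.opencv.core.Point;

public class PolarLineSketch {
    // Convert one Hough line in (rho, theta) form into two endpoints on that line,
    // mirroring the arithmetic used in the drawing loop above.
    static Point[] toSegment(double rho, double theta) {
        double a = Math.cos(theta), b = Math.sin(theta);
        double x0 = a * rho, y0 = b * rho;            // closest point on the line to the origin
        Point pt1 = new Point(x0 + 1000 * (-b), y0 + 1000 * (a));
        Point pt2 = new Point(x0 - 1000 * (-b), y0 - 1000 * (a));
        return new Point[] { pt1, pt2 };
    }

    public static void main(String[] args) {
        Point[] seg = toSegment(100.0, Math.PI / 4);  // example: rho = 100, theta = 45 degrees
        System.out.println(seg[0] + " -> " + seg[1]);
    }
}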
73
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/LaPlace/LaplaceDemo.java
vendored
Normal file
@ -0,0 +1,73 @@
/**
 * @file LaplaceDemo.java
 * @brief Sample code showing how to detect edges using the Laplace operator
 */

import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class LaplaceDemoRun {

    public void run(String[] args) {
        //! [variables]
        // Declare the variables we are going to use
        Mat src, src_gray = new Mat(), dst = new Mat();
        int kernel_size = 3;
        int scale = 1;
        int delta = 0;
        int ddepth = CvType.CV_16S;
        String window_name = "Laplace Demo";
        //! [variables]

        //! [load]
        String imageName = ((args.length > 0) ? args[0] : "../data/lena.jpg");

        src = Imgcodecs.imread(imageName, Imgcodecs.IMREAD_COLOR); // Load an image

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image");
            System.out.println("Program Arguments: [image_name -- default ../data/lena.jpg] \n");
            System.exit(-1);
        }
        //! [load]

        //! [reduce_noise]
        // Reduce noise by blurring with a Gaussian filter ( kernel size = 3 )
        Imgproc.GaussianBlur( src, src, new Size(3, 3), 0, 0, Core.BORDER_DEFAULT );
        //! [reduce_noise]

        //! [convert_to_gray]
        // Convert the image to grayscale
        Imgproc.cvtColor( src, src_gray, Imgproc.COLOR_RGB2GRAY );
        //! [convert_to_gray]

        /// Apply Laplace function
        Mat abs_dst = new Mat();
        //! [laplacian]
        Imgproc.Laplacian( src_gray, dst, ddepth, kernel_size, scale, delta, Core.BORDER_DEFAULT );
        //! [laplacian]

        //! [convert]
        // converting back to CV_8U
        Core.convertScaleAbs( dst, abs_dst );
        //! [convert]

        //! [display]
        HighGui.imshow( window_name, abs_dst );
        HighGui.waitKey(0);
        //! [display]

        System.exit(0);
    }
}

public class LaplaceDemo {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new LaplaceDemoRun().run(args);
    }
}
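The essential pipeline in LaplaceDemo is blur, convert to gray, run the Laplacian into a signed 16-bit image, and take absolute values back to 8-bit. A compressed sketch of the same steps without the GUI; the file names input.jpg and laplacian_edges.png are assumed placeholders.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class LaplacianSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("input.jpg");            // assumed path
        Mat gray = new Mat(), lap = new Mat(), edges = new Mat();
        Imgproc.GaussianBlur(src, src, new Size(3, 3), 0);  // reduce noise first
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);
        // CV_16S keeps the negative responses that CV_8U would clip away.
        Imgproc.Laplacian(gray, lap, CvType.CV_16S, 3, 1, 0, Core.BORDER_DEFAULT);
        Core.convertScaleAbs(lap, edges);                   // |value| scaled back to 8-bit
        Imgcodecs.imwrite("laplacian_edges.png", edges);    // assumed output name
    }
}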
94
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/MakeBorder/CopyMakeBorder.java
vendored
Normal file
@ -0,0 +1,94 @@
/**
 * @file CopyMakeBorder.java
 * @brief Sample code that shows the functionality of copyMakeBorder
 */

import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;

import java.util.Random;

class CopyMakeBorderRun {

    public void run(String[] args) {

        //! [variables]
        // Declare the variables
        Mat src, dst = new Mat();
        int top, bottom, left, right;
        int borderType = Core.BORDER_CONSTANT;
        String window_name = "copyMakeBorder Demo";
        Random rng;
        //! [variables]

        //! [load]
        String imageName = ((args.length > 0) ? args[0] : "../data/lena.jpg");

        // Load an image
        src = Imgcodecs.imread(imageName, Imgcodecs.IMREAD_COLOR);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image!");
            System.out.println("Program Arguments: [image_name -- default ../data/lena.jpg] \n");
            System.exit(-1);
        }
        //! [load]

        // Brief how-to for this program
        System.out.println("\n" +
                "\t copyMakeBorder Demo: \n" +
                "\t -------------------- \n" +
                " ** Press 'c' to set the border to a random constant value \n" +
                " ** Press 'r' to set the border to be replicated \n" +
                " ** Press 'ESC' to exit the program \n");

        //![create_window]
        HighGui.namedWindow( window_name, HighGui.WINDOW_AUTOSIZE );
        //![create_window]

        //! [init_arguments]
        // Initialize arguments for the filter
        top = (int) (0.05*src.rows()); bottom = top;
        left = (int) (0.05*src.cols()); right = left;
        //! [init_arguments]

        while( true ) {
            //! [update_value]
            rng = new Random();
            Scalar value = new Scalar( rng.nextInt(256),
                    rng.nextInt(256), rng.nextInt(256) );
            //! [update_value]

            //! [copymakeborder]
            Core.copyMakeBorder( src, dst, top, bottom, left, right, borderType, value);
            //! [copymakeborder]
            //! [display]
            HighGui.imshow( window_name, dst );
            //! [display]

            //![check_keypress]
            char c = (char) HighGui.waitKey(500);
            c = Character.toLowerCase(c);

            if( c == 27 )
            { break; }
            else if( c == 'c' )
            { borderType = Core.BORDER_CONSTANT;}
            else if( c == 'r' )
            { borderType = Core.BORDER_REPLICATE;}
            //![check_keypress]
        }

        System.exit(0);
    }
}

public class CopyMakeBorder {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new CopyMakeBorderRun().run(args);
    }
}
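Stripped of the interactive loop, the demo reduces to a single Core.copyMakeBorder call whose border width is 5% of each image dimension. A minimal sketch with a fixed fill color; BorderSketch and the file names are assumed, not part of the vendored code.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;

public class BorderSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("input.jpg");             // assumed path
        int vert = (int) (0.05 * src.rows());                // 5% border top and bottom
        int horiz = (int) (0.05 * src.cols());               // 5% border left and right
        Mat framed = new Mat();
        // BORDER_CONSTANT fills with the given color; BORDER_REPLICATE would copy edge pixels instead.
        Core.copyMakeBorder(src, framed, vert, vert, horiz, horiz,
                Core.BORDER_CONSTANT, new Scalar(0, 0, 255));
        Imgcodecs.imwrite("framed.png", framed);             // assumed output name
    }
}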
94
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/SobelDemo/SobelDemo.java
vendored
Normal file
@ -0,0 +1,94 @@
/**
 * @file SobelDemo.java
 * @brief Sample code using Sobel and/or Scharr OpenCV functions to make a simple Edge Detector
 */

import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class SobelDemoRun {

    public void run(String[] args) {

        //! [declare_variables]
        // First we declare the variables we are going to use
        Mat src, src_gray = new Mat();
        Mat grad = new Mat();
        String window_name = "Sobel Demo - Simple Edge Detector";
        int scale = 1;
        int delta = 0;
        int ddepth = CvType.CV_16S;
        //! [declare_variables]

        //! [load]
        // As usual we load our source image (src)
        // Check number of arguments
        if (args.length == 0){
            System.out.println("Not enough parameters!");
            System.out.println("Program Arguments: [image_path]");
            System.exit(-1);
        }

        // Load the image
        src = Imgcodecs.imread(args[0]);

        // Check if image is loaded fine
        if( src.empty() ) {
            System.out.println("Error opening image: " + args[0]);
            System.exit(-1);
        }
        //! [load]

        //! [reduce_noise]
        // Remove noise by blurring with a Gaussian filter ( kernel size = 3 )
        Imgproc.GaussianBlur( src, src, new Size(3, 3), 0, 0, Core.BORDER_DEFAULT );
        //! [reduce_noise]

        //! [convert_to_gray]
        // Convert the image to grayscale
        Imgproc.cvtColor( src, src_gray, Imgproc.COLOR_RGB2GRAY );
        //! [convert_to_gray]

        //! [sobel]
        /// Generate grad_x and grad_y
        Mat grad_x = new Mat(), grad_y = new Mat();
        Mat abs_grad_x = new Mat(), abs_grad_y = new Mat();

        /// Gradient X
        //Imgproc.Scharr( src_gray, grad_x, ddepth, 1, 0, scale, delta, Core.BORDER_DEFAULT );
        Imgproc.Sobel( src_gray, grad_x, ddepth, 1, 0, 3, scale, delta, Core.BORDER_DEFAULT );

        /// Gradient Y
        //Imgproc.Scharr( src_gray, grad_y, ddepth, 0, 1, scale, delta, Core.BORDER_DEFAULT );
        Imgproc.Sobel( src_gray, grad_y, ddepth, 0, 1, 3, scale, delta, Core.BORDER_DEFAULT );
        //! [sobel]

        //![convert]
        // converting back to CV_8U
        Core.convertScaleAbs( grad_x, abs_grad_x );
        Core.convertScaleAbs( grad_y, abs_grad_y );
        //![convert]

        //! [add_weighted]
        /// Total Gradient (approximate)
        Core.addWeighted( abs_grad_x, 0.5, abs_grad_y, 0.5, 0, grad );
        //! [add_weighted]

        //! [display]
        HighGui.imshow( window_name, grad );
        HighGui.waitKey(0);
        //! [display]

        System.exit(0);
    }
}

public class SobelDemo {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new SobelDemoRun().run(args);
    }
}
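SobelDemo approximates the gradient magnitude as 0.5*|Gx| + 0.5*|Gy| rather than computing the true Euclidean norm. A condensed sketch of just that computation, using the shorter Sobel overload; SobelSketch and the file names are placeholders.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class SobelSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat gray = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE); // assumed path
        Mat gx = new Mat(), gy = new Mat(), ax = new Mat(), ay = new Mat(), grad = new Mat();
        Imgproc.Sobel(gray, gx, CvType.CV_16S, 1, 0);   // derivative along x
        Imgproc.Sobel(gray, gy, CvType.CV_16S, 0, 1);   // derivative along y
        Core.convertScaleAbs(gx, ax);                   // back to CV_8U
        Core.convertScaleAbs(gy, ay);
        // Approximate |gradient| as the average of the two absolute derivatives.
        Core.addWeighted(ax, 0.5, ay, 0.5, 0, grad);
        Imgcodecs.imwrite("sobel_edges.png", grad);     // assumed output name
    }
}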
110
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java
vendored
Normal file
@ -0,0 +1,110 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class CannyDetectorDemo {
    private static final int MAX_LOW_THRESHOLD = 100;
    private static final int RATIO = 3;
    private static final int KERNEL_SIZE = 3;
    private static final Size BLUR_SIZE = new Size(3,3);
    private int lowThresh = 0;
    private Mat src;
    private Mat srcBlur = new Mat();
    private Mat detectedEdges = new Mat();
    private Mat dst = new Mat();
    private JFrame frame;
    private JLabel imgLabel;

    public CannyDetectorDemo(String[] args) {
        String imagePath = args.length > 0 ? args[0] : "../data/fruits.jpg";
        src = Imgcodecs.imread(imagePath);
        if (src.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }

        // Create and set up the window.
        frame = new JFrame("Edge Map (Canny detector demo)");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(src);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        sliderPanel.add(new JLabel("Min Threshold:"));
        JSlider slider = new JSlider(0, MAX_LOW_THRESHOLD, 0);
        slider.setMajorTickSpacing(10);
        slider.setMinorTickSpacing(5);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                lowThresh = source.getValue();
                update();
            }
        });
        sliderPanel.add(slider);

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    private void update() {
        Imgproc.blur(src, srcBlur, BLUR_SIZE);
        Imgproc.Canny(srcBlur, detectedEdges, lowThresh, lowThresh * RATIO, KERNEL_SIZE, false);
        dst = new Mat(src.size(), CvType.CV_8UC3, Scalar.all(0));
        src.copyTo(dst, detectedEdges);
        Image img = HighGui.toBufferedImage(dst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new CannyDetectorDemo(args);
            }
        });
    }
}
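In the Swing demo above only the lower threshold is user-tunable; the upper one is derived as lowThresh * RATIO, following the usual 1:2 to 1:3 recommendation for Canny. A headless sketch of the same detection step; CannySketch and the file names are assumed.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class CannySketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("fruits.jpg");       // assumed path
        Mat blurred = new Mat(), edges = new Mat();
        int low = 40;                                   // only the lower threshold is tuned
        Imgproc.blur(src, blurred, new Size(3, 3));     // same pre-blur as the demo
        // Upper threshold follows the 1:3 ratio used by the demo above.
        Imgproc.Canny(blurred, edges, low, low * 3, 3, false);
        Imgcodecs.imwrite("edges.png", edges);          // assumed output name
    }
}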
@ -0,0 +1,219 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

/**
 *
 * @brief Sample code showing how to segment overlapping objects using Laplacian filtering, in addition to Watershed
 * and Distance Transformation
 *
 */
class ImageSegmentation {
    public void run(String[] args) {
        //! [load_image]
        // Load the image
        String filename = args.length > 0 ? args[0] : "../data/cards.png";
        Mat srcOriginal = Imgcodecs.imread(filename);
        if (srcOriginal.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }

        // Show source image
        HighGui.imshow("Source Image", srcOriginal);
        //! [load_image]

        //! [black_bg]
        // Change the background from white to black, since that will help later to
        // extract
        // better results during the use of Distance Transform
        Mat src = srcOriginal.clone();
        byte[] srcData = new byte[(int) (src.total() * src.channels())];
        src.get(0, 0, srcData);
        for (int i = 0; i < src.rows(); i++) {
            for (int j = 0; j < src.cols(); j++) {
                if (srcData[(i * src.cols() + j) * 3] == (byte) 255 && srcData[(i * src.cols() + j) * 3 + 1] == (byte) 255
                        && srcData[(i * src.cols() + j) * 3 + 2] == (byte) 255) {
                    srcData[(i * src.cols() + j) * 3] = 0;
                    srcData[(i * src.cols() + j) * 3 + 1] = 0;
                    srcData[(i * src.cols() + j) * 3 + 2] = 0;
                }
            }
        }
        src.put(0, 0, srcData);

        // Show output image
        HighGui.imshow("Black Background Image", src);
        //! [black_bg]

        //! [sharp]
        // Create a kernel that we will use to sharpen our image
        Mat kernel = new Mat(3, 3, CvType.CV_32F);
        // an approximation of second derivative, a quite strong kernel
        float[] kernelData = new float[(int) (kernel.total() * kernel.channels())];
        kernelData[0] = 1; kernelData[1] = 1; kernelData[2] = 1;
        kernelData[3] = 1; kernelData[4] = -8; kernelData[5] = 1;
        kernelData[6] = 1; kernelData[7] = 1; kernelData[8] = 1;
        kernel.put(0, 0, kernelData);

        // do the laplacian filtering as it is
        // well, we need to convert everything in something more deeper then CV_8U
        // because the kernel has some negative values,
        // and we can expect in general to have a Laplacian image with negative values
        // BUT a 8bits unsigned int (the one we are working with) can contain values
        // from 0 to 255
        // so the possible negative number will be truncated
        Mat imgLaplacian = new Mat();
        Imgproc.filter2D(src, imgLaplacian, CvType.CV_32F, kernel);
        Mat sharp = new Mat();
        src.convertTo(sharp, CvType.CV_32F);
        Mat imgResult = new Mat();
        Core.subtract(sharp, imgLaplacian, imgResult);

        // convert back to 8bits gray scale
        imgResult.convertTo(imgResult, CvType.CV_8UC3);
        imgLaplacian.convertTo(imgLaplacian, CvType.CV_8UC3);

        // imshow( "Laplace Filtered Image", imgLaplacian );
        HighGui.imshow("New Sharped Image", imgResult);
        //! [sharp]

        //! [bin]
        // Create binary image from source image
        Mat bw = new Mat();
        Imgproc.cvtColor(imgResult, bw, Imgproc.COLOR_BGR2GRAY);
        Imgproc.threshold(bw, bw, 40, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        HighGui.imshow("Binary Image", bw);
        //! [bin]

        //! [dist]
        // Perform the distance transform algorithm
        Mat dist = new Mat();
        Imgproc.distanceTransform(bw, dist, Imgproc.DIST_L2, 3);

        // Normalize the distance image for range = {0.0, 1.0}
        // so we can visualize and threshold it
        Core.normalize(dist, dist, 0.0, 1.0, Core.NORM_MINMAX);
        Mat distDisplayScaled = new Mat();
        Core.multiply(dist, new Scalar(255), distDisplayScaled);
        Mat distDisplay = new Mat();
        distDisplayScaled.convertTo(distDisplay, CvType.CV_8U);
        HighGui.imshow("Distance Transform Image", distDisplay);
        //! [dist]

        //! [peaks]
        // Threshold to obtain the peaks
        // This will be the markers for the foreground objects
        Imgproc.threshold(dist, dist, 0.4, 1.0, Imgproc.THRESH_BINARY);

        // Dilate a bit the dist image
        Mat kernel1 = Mat.ones(3, 3, CvType.CV_8U);
        Imgproc.dilate(dist, dist, kernel1);
        Mat distDisplay2 = new Mat();
        dist.convertTo(distDisplay2, CvType.CV_8U);
        Core.multiply(distDisplay2, new Scalar(255), distDisplay2);
        HighGui.imshow("Peaks", distDisplay2);
        //! [peaks]

        //! [seeds]
        // Create the CV_8U version of the distance image
        // It is needed for findContours()
        Mat dist_8u = new Mat();
        dist.convertTo(dist_8u, CvType.CV_8U);

        // Find total markers
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(dist_8u, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

        // Create the marker image for the watershed algorithm
        Mat markers = Mat.zeros(dist.size(), CvType.CV_32S);

        // Draw the foreground markers
        for (int i = 0; i < contours.size(); i++) {
            Imgproc.drawContours(markers, contours, i, new Scalar(i + 1), -1);
        }

        // Draw the background marker
        Mat markersScaled = new Mat();
        markers.convertTo(markersScaled, CvType.CV_32F);
        Core.normalize(markersScaled, markersScaled, 0.0, 255.0, Core.NORM_MINMAX);
        Imgproc.circle(markersScaled, new Point(5, 5), 3, new Scalar(255, 255, 255), -1);
        Mat markersDisplay = new Mat();
        markersScaled.convertTo(markersDisplay, CvType.CV_8U);
        HighGui.imshow("Markers", markersDisplay);
        Imgproc.circle(markers, new Point(5, 5), 3, new Scalar(255, 255, 255), -1);
        //! [seeds]

        //! [watershed]
        // Perform the watershed algorithm
        Imgproc.watershed(imgResult, markers);

        Mat mark = Mat.zeros(markers.size(), CvType.CV_8U);
        markers.convertTo(mark, CvType.CV_8UC1);
        Core.bitwise_not(mark, mark);
        // imshow("Markers_v2", mark); // uncomment this if you want to see how the mark
        // image looks like at that point

        // Generate random colors
        Random rng = new Random(12345);
        List<Scalar> colors = new ArrayList<>(contours.size());
        for (int i = 0; i < contours.size(); i++) {
            int b = rng.nextInt(256);
            int g = rng.nextInt(256);
            int r = rng.nextInt(256);

            colors.add(new Scalar(b, g, r));
        }

        // Create the result image
        Mat dst = Mat.zeros(markers.size(), CvType.CV_8UC3);
        byte[] dstData = new byte[(int) (dst.total() * dst.channels())];
        dst.get(0, 0, dstData);

        // Fill labeled objects with random colors
        int[] markersData = new int[(int) (markers.total() * markers.channels())];
        markers.get(0, 0, markersData);
        for (int i = 0; i < markers.rows(); i++) {
            for (int j = 0; j < markers.cols(); j++) {
                int index = markersData[i * markers.cols() + j];
                if (index > 0 && index <= contours.size()) {
                    dstData[(i * dst.cols() + j) * 3 + 0] = (byte) colors.get(index - 1).val[0];
                    dstData[(i * dst.cols() + j) * 3 + 1] = (byte) colors.get(index - 1).val[1];
                    dstData[(i * dst.cols() + j) * 3 + 2] = (byte) colors.get(index - 1).val[2];
                } else {
                    dstData[(i * dst.cols() + j) * 3 + 0] = 0;
                    dstData[(i * dst.cols() + j) * 3 + 1] = 0;
                    dstData[(i * dst.cols() + j) * 3 + 2] = 0;
                }
            }
        }
        dst.put(0, 0, dstData);

        // Visualize the final image
        HighGui.imshow("Final Result", dst);
        //! [watershed]

        HighGui.waitKey();
        System.exit(0);
    }
}

public class ImageSegmentationDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new ImageSegmentation().run(args);
    }
}
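The heart of the segmentation sample is the marker preparation: binarize, take the distance transform, threshold the peaks, label each peak as a seed, add a background seed, then hand everything to Imgproc.watershed. A trimmed sketch of just that chain, skipping the sharpening and the visualization; WatershedSketch, the 0.4 peak threshold carried over from above, and the file name are assumptions.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class WatershedSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("cards.png");                 // assumed path
        Mat bw = new Mat(), dist = new Mat(), dist8u = new Mat();
        Imgproc.cvtColor(src, bw, Imgproc.COLOR_BGR2GRAY);
        Imgproc.threshold(bw, bw, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);

        // Distance to the background, then keep only the strong peaks as seeds.
        Imgproc.distanceTransform(bw, dist, Imgproc.DIST_L2, 3);
        Core.normalize(dist, dist, 0.0, 1.0, Core.NORM_MINMAX);
        Imgproc.threshold(dist, dist, 0.4, 1.0, Imgproc.THRESH_BINARY);
        dist.convertTo(dist8u, CvType.CV_8U);

        // One marker id per connected peak, plus a background seed in a corner.
        List<MatOfPoint> seeds = new ArrayList<>();
        Imgproc.findContours(dist8u, seeds, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
        Mat markers = Mat.zeros(dist.size(), CvType.CV_32S);
        for (int i = 0; i < seeds.size(); i++) {
            Imgproc.drawContours(markers, seeds, i, new Scalar(i + 1), -1);
        }
        Imgproc.circle(markers, new Point(5, 5), 3, new Scalar(255), -1);

        Imgproc.watershed(src, markers);                          // labels the basins in-place
        System.out.println("Foreground seeds: " + seeds.size());
    }
}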
98
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java
vendored
Normal file
@ -0,0 +1,98 @@
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class Remap {
    private Mat mapX = new Mat();
    private Mat mapY = new Mat();
    private Mat dst = new Mat();
    private int ind = 0;

    //! [Update]
    private void updateMap() {
        float buffX[] = new float[(int) (mapX.total() * mapX.channels())];
        mapX.get(0, 0, buffX);

        float buffY[] = new float[(int) (mapY.total() * mapY.channels())];
        mapY.get(0, 0, buffY);

        for (int i = 0; i < mapX.rows(); i++) {
            for (int j = 0; j < mapX.cols(); j++) {
                switch (ind) {
                case 0:
                    if( j > mapX.cols()*0.25 && j < mapX.cols()*0.75 && i > mapX.rows()*0.25 && i < mapX.rows()*0.75 ) {
                        buffX[i*mapX.cols() + j] = 2*( j - mapX.cols()*0.25f ) + 0.5f;
                        buffY[i*mapY.cols() + j] = 2*( i - mapX.rows()*0.25f ) + 0.5f;
                    } else {
                        buffX[i*mapX.cols() + j] = 0;
                        buffY[i*mapY.cols() + j] = 0;
                    }
                    break;
                case 1:
                    buffX[i*mapX.cols() + j] = j;
                    buffY[i*mapY.cols() + j] = mapY.rows() - i;
                    break;
                case 2:
                    buffX[i*mapX.cols() + j] = mapY.cols() - j;
                    buffY[i*mapY.cols() + j] = i;
                    break;
                case 3:
                    buffX[i*mapX.cols() + j] = mapY.cols() - j;
                    buffY[i*mapY.cols() + j] = mapY.rows() - i;
                    break;
                default:
                    break;
                }
            }
        }
        mapX.put(0, 0, buffX);
        mapY.put(0, 0, buffY);
        ind = (ind+1) % 4;
    }
    //! [Update]

    public void run(String[] args) {
        String filename = args.length > 0 ? args[0] : "../data/chicky_512.png";
        //! [Load]
        Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_COLOR);
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }
        //! [Load]

        //! [Create]
        mapX = new Mat(src.size(), CvType.CV_32F);
        mapY = new Mat(src.size(), CvType.CV_32F);
        //! [Create]

        //! [Window]
        final String winname = "Remap demo";
        HighGui.namedWindow(winname, HighGui.WINDOW_AUTOSIZE);
        //! [Window]

        //! [Loop]
        for (;;) {
            updateMap();
            Imgproc.remap(src, dst, mapX, mapY, Imgproc.INTER_LINEAR);
            HighGui.imshow(winname, dst);
            if (HighGui.waitKey(1000) == 27) {
                break;
            }
        }
        //! [Loop]
        System.exit(0);
    }
}

public class RemapDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new Remap().run(args);
    }
}
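updateMap above fills two CV_32F lookup tables, one per axis, that tell remap which source coordinate each destination pixel should sample. The simplest non-trivial map is a horizontal flip: mapX(i, j) = cols - 1 - j, mapY(i, j) = i. A sketch of that single case; RemapFlipSketch and the file names are placeholders.

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class RemapFlipSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("chicky_512.png");           // assumed path
        Mat mapX = new Mat(src.size(), CvType.CV_32F);
        Mat mapY = new Mat(src.size(), CvType.CV_32F);
        float[] bx = new float[(int) mapX.total()];
        float[] by = new float[(int) mapY.total()];
        for (int i = 0; i < src.rows(); i++) {
            for (int j = 0; j < src.cols(); j++) {
                bx[i * src.cols() + j] = src.cols() - 1 - j;    // read from the mirrored column
                by[i * src.cols() + j] = i;                     // keep the row unchanged
            }
        }
        mapX.put(0, 0, bx);
        mapY.put(0, 0, by);
        Mat dst = new Mat();
        Imgproc.remap(src, dst, mapX, mapY, Imgproc.INTER_LINEAR);
        Imgcodecs.imwrite("flipped.png", dst);                  // assumed output name
    }
}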
@ -0,0 +1,80 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

class GeometricTransforms {
    public void run(String[] args) {
        //! [Load the image]
        String filename = args.length > 0 ? args[0] : "../data/lena.jpg";
        Mat src = Imgcodecs.imread(filename);
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }
        //! [Load the image]

        //! [Set your 3 points to calculate the Affine Transform]
        Point[] srcTri = new Point[3];
        srcTri[0] = new Point( 0, 0 );
        srcTri[1] = new Point( src.cols() - 1, 0 );
        srcTri[2] = new Point( 0, src.rows() - 1 );

        Point[] dstTri = new Point[3];
        dstTri[0] = new Point( 0, src.rows()*0.33 );
        dstTri[1] = new Point( src.cols()*0.85, src.rows()*0.25 );
        dstTri[2] = new Point( src.cols()*0.15, src.rows()*0.7 );
        //! [Set your 3 points to calculate the Affine Transform]

        //! [Get the Affine Transform]
        Mat warpMat = Imgproc.getAffineTransform( new MatOfPoint2f(srcTri), new MatOfPoint2f(dstTri) );
        //! [Get the Affine Transform]

        //! [Apply the Affine Transform just found to the src image]
        Mat warpDst = Mat.zeros( src.rows(), src.cols(), src.type() );

        Imgproc.warpAffine( src, warpDst, warpMat, warpDst.size() );
        //! [Apply the Affine Transform just found to the src image]

        /** Rotating the image after Warp */

        //! [Compute a rotation matrix with respect to the center of the image]
        Point center = new Point(warpDst.cols() / 2, warpDst.rows() / 2);
        double angle = -50.0;
        double scale = 0.6;
        //! [Compute a rotation matrix with respect to the center of the image]

        //! [Get the rotation matrix with the specifications above]
        Mat rotMat = Imgproc.getRotationMatrix2D( center, angle, scale );
        //! [Get the rotation matrix with the specifications above]

        //! [Rotate the warped image]
        Mat warpRotateDst = new Mat();
        Imgproc.warpAffine( warpDst, warpRotateDst, rotMat, warpDst.size() );
        //! [Rotate the warped image]

        //! [Show what you got]
        HighGui.imshow( "Source image", src );
        HighGui.imshow( "Warp", warpDst );
        HighGui.imshow( "Warp + Rotate", warpRotateDst );
        //! [Show what you got]

        //! [Wait until user exits the program]
        HighGui.waitKey(0);
        //! [Wait until user exits the program]

        System.exit(0);
    }
}

public class GeometricTransformsDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new GeometricTransforms().run(args);
    }
}
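The sample builds its affine warp from three point correspondences; when only a rotation and scale about the image center are needed, getRotationMatrix2D produces the 2x3 matrix directly. A short sketch of that case; RotateSketch, the 30-degree angle, and the file names are assumptions.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class RotateSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat src = Imgcodecs.imread("input.jpg");                        // assumed path
        Point center = new Point(src.cols() / 2.0, src.rows() / 2.0);
        // 2x3 affine matrix: rotate 30 degrees counter-clockwise and shrink to 80%.
        Mat rot = Imgproc.getRotationMatrix2D(center, 30.0, 0.8);
        Mat dst = new Mat();
        Imgproc.warpAffine(src, dst, rot, src.size());
        Imgcodecs.imwrite("rotated.png", dst);                          // assumed output name
    }
}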
@ -0,0 +1,179 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.MatOfPoint2f;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Rect;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class GeneralContours1 {
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgSrcLabel;
|
||||
private JLabel imgContoursLabel;
|
||||
private static final int MAX_THRESHOLD = 255;
|
||||
private int threshold = 100;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public GeneralContours1(String[] args) {
|
||||
//! [setup]
|
||||
/// Load source image
|
||||
String filename = args.length > 0 ? args[0] : "../data/stuff.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Convert image to gray and blur it
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
Imgproc.blur(srcGray, srcGray, new Size(3, 3));
|
||||
//! [setup]
|
||||
|
||||
//! [createWindow]
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Creating Bounding boxes and circles for contours demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
//! [createWindow]
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
//! [trackbar]
|
||||
sliderPanel.add(new JLabel("Canny threshold: "));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
threshold = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
//! [trackbar]
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgSrcLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgSrcLabel);
|
||||
|
||||
Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
|
||||
imgContoursLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
|
||||
imgPanel.add(imgContoursLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
//! [Canny]
|
||||
/// Detect edges using Canny
|
||||
Mat cannyOutput = new Mat();
|
||||
Imgproc.Canny(srcGray, cannyOutput, threshold, threshold * 2);
|
||||
//! [Canny]
|
||||
|
||||
//! [findContours]
|
||||
/// Find contours
|
||||
List<MatOfPoint> contours = new ArrayList<>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(cannyOutput, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
//! [findContours]
|
||||
|
||||
//! [allthework]
|
||||
/// Approximate contours to polygons + get bounding rects and circles
|
||||
MatOfPoint2f[] contoursPoly = new MatOfPoint2f[contours.size()];
|
||||
Rect[] boundRect = new Rect[contours.size()];
|
||||
Point[] centers = new Point[contours.size()];
|
||||
float[][] radius = new float[contours.size()][1];
|
||||
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
contoursPoly[i] = new MatOfPoint2f();
|
||||
Imgproc.approxPolyDP(new MatOfPoint2f(contours.get(i).toArray()), contoursPoly[i], 3, true);
|
||||
boundRect[i] = Imgproc.boundingRect(new MatOfPoint(contoursPoly[i].toArray()));
|
||||
centers[i] = new Point();
|
||||
Imgproc.minEnclosingCircle(contoursPoly[i], centers[i], radius[i]);
|
||||
}
|
||||
//! [allthework]
|
||||
|
||||
//! [zeroMat]
|
||||
Mat drawing = Mat.zeros(cannyOutput.size(), CvType.CV_8UC3);
|
||||
//! [zeroMat]
|
||||
//! [forContour]
|
||||
/// Draw polygonal contour + bonding rects + circles
|
||||
List<MatOfPoint> contoursPolyList = new ArrayList<>(contoursPoly.length);
|
||||
for (MatOfPoint2f poly : contoursPoly) {
|
||||
contoursPolyList.add(new MatOfPoint(poly.toArray()));
|
||||
}
|
||||
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
Scalar color = new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256));
|
||||
Imgproc.drawContours(drawing, contoursPolyList, i, color);
|
||||
Imgproc.rectangle(drawing, boundRect[i].tl(), boundRect[i].br(), color, 2);
|
||||
Imgproc.circle(drawing, centers[i], (int) radius[i][0], color, 2);
|
||||
}
|
||||
//! [forContour]
|
||||
|
||||
//! [showDrawings]
|
||||
/// Show in a window
|
||||
imgContoursLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(drawing)));
|
||||
frame.repaint();
|
||||
//! [showDrawings]
|
||||
}
|
||||
}
|
||||
|
||||
public class GeneralContoursDemo1 {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new GeneralContours1(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
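Inside the demo above, the per-contour work in update() is approxPolyDP to simplify the contour, boundingRect for the axis-aligned box, and minEnclosingCircle for the smallest covering circle. A headless sketch of that loop without the Swing UI; BoundingSketch, the thresholds, and the file name are assumed.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class BoundingSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat gray = Imgcodecs.imread("stuff.jpg", Imgcodecs.IMREAD_GRAYSCALE); // assumed path
        Mat edges = new Mat();
        Imgproc.Canny(gray, edges, 100, 200);
        List<MatOfPoint> contours = new ArrayList<>();
        Imgproc.findContours(edges, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
        for (MatOfPoint contour : contours) {
            MatOfPoint2f poly = new MatOfPoint2f();
            Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), poly, 3, true);
            Rect box = Imgproc.boundingRect(new MatOfPoint(poly.toArray()));  // axis-aligned box
            Point center = new Point();
            float[] radius = new float[1];
            Imgproc.minEnclosingCircle(poly, center, radius);                 // smallest covering circle
            System.out.println(box + "  circle r=" + radius[0]);
        }
    }
}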
@ -0,0 +1,176 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.MatOfPoint2f;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.RotatedRect;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class GeneralContours2 {
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgSrcLabel;
|
||||
private JLabel imgContoursLabel;
|
||||
private static final int MAX_THRESHOLD = 255;
|
||||
private int threshold = 100;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public GeneralContours2(String[] args) {
|
||||
//! [setup]
|
||||
/// Load source image
|
||||
String filename = args.length > 0 ? args[0] : "../data/stuff.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Convert image to gray and blur it
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
Imgproc.blur(srcGray, srcGray, new Size(3, 3));
|
||||
//! [setup]
|
||||
|
||||
//! [createWindow]
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Creating Bounding rotated boxes and ellipses for contours demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
//! [createWindow]
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
//! [trackbar]
|
||||
sliderPanel.add(new JLabel("Canny threshold: "));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
threshold = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
//! [trackbar]
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgSrcLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgSrcLabel);
|
||||
|
||||
Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
|
||||
imgContoursLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
|
||||
imgPanel.add(imgContoursLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
//! [Canny]
|
||||
/// Detect edges using Canny
|
||||
Mat cannyOutput = new Mat();
|
||||
Imgproc.Canny(srcGray, cannyOutput, threshold, threshold * 2);
|
||||
//! [Canny]
|
||||
|
||||
//! [findContours]
|
||||
/// Find contours
|
||||
List<MatOfPoint> contours = new ArrayList<>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(cannyOutput, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
//! [findContours]
|
||||
|
||||
/// Find the rotated rectangles and ellipses for each contour
|
||||
RotatedRect[] minRect = new RotatedRect[contours.size()];
|
||||
RotatedRect[] minEllipse = new RotatedRect[contours.size()];
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
minRect[i] = Imgproc.minAreaRect(new MatOfPoint2f(contours.get(i).toArray()));
|
||||
minEllipse[i] = new RotatedRect();
|
||||
if (contours.get(i).rows() > 5) {
|
||||
minEllipse[i] = Imgproc.fitEllipse(new MatOfPoint2f(contours.get(i).toArray()));
|
||||
}
|
||||
}
|
||||
|
||||
//! [zeroMat]
|
||||
/// Draw contours + rotated rects + ellipses
|
||||
Mat drawing = Mat.zeros(cannyOutput.size(), CvType.CV_8UC3);
|
||||
//! [zeroMat]
|
||||
//! [forContour]
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
Scalar color = new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256));
|
||||
// contour
|
||||
Imgproc.drawContours(drawing, contours, i, color);
|
||||
// ellipse
|
||||
Imgproc.ellipse(drawing, minEllipse[i], color, 2);
|
||||
// rotated rectangle
|
||||
Point[] rectPoints = new Point[4];
|
||||
minRect[i].points(rectPoints);
|
||||
for (int j = 0; j < 4; j++) {
|
||||
Imgproc.line(drawing, rectPoints[j], rectPoints[(j+1) % 4], color);
|
||||
}
|
||||
}
|
||||
//! [forContour]
|
||||
|
||||
//! [showDrawings]
|
||||
/// Show in a window
|
||||
imgContoursLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(drawing)));
|
||||
frame.repaint();
|
||||
//! [showDrawings]
|
||||
}
|
||||
}
|
||||
|
||||
public class GeneralContoursDemo2 {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new GeneralContours2(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
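This variant swaps the axis-aligned boxes for rotated rectangles and ellipses: minAreaRect accepts any contour, while fitEllipse needs at least five points, which is why the demo checks rows() > 5. A headless sketch of that check and the two fits; RotatedRectSketch and the file name are assumed.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.RotatedRect;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class RotatedRectSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat gray = Imgcodecs.imread("stuff.jpg", Imgcodecs.IMREAD_GRAYSCALE); // assumed path
        Mat edges = new Mat();
        Imgproc.Canny(gray, edges, 100, 200);
        List<MatOfPoint> contours = new ArrayList<>();
        Imgproc.findContours(edges, contours, new Mat(), Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
        for (MatOfPoint contour : contours) {
            MatOfPoint2f pts = new MatOfPoint2f(contour.toArray());
            RotatedRect box = Imgproc.minAreaRect(pts);         // tightest rotated rectangle
            System.out.println("rect angle: " + box.angle);
            if (contour.rows() > 5) {                           // fitEllipse requires >= 5 points
                RotatedRect ell = Imgproc.fitEllipse(pts);
                System.out.println("ellipse size: " + ell.size);
            }
        }
    }
}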
@ -0,0 +1,137 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class FindContours {
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgSrcLabel;
|
||||
private JLabel imgContoursLabel;
|
||||
private static final int MAX_THRESHOLD = 255;
|
||||
private int threshold = 100;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public FindContours(String[] args) {
|
||||
/// Load source image
|
||||
String filename = args.length > 0 ? args[0] : "../data/HappyFish.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Convert image to gray and blur it
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
Imgproc.blur(srcGray, srcGray, new Size(3, 3));
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Finding contours in your image demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Canny threshold: "));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
threshold = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgSrcLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgSrcLabel);
|
||||
|
||||
Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
|
||||
imgContoursLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
|
||||
imgPanel.add(imgContoursLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
/// Detect edges using Canny
|
||||
Mat cannyOutput = new Mat();
|
||||
Imgproc.Canny(srcGray, cannyOutput, threshold, threshold * 2);
|
||||
|
||||
/// Find contours
|
||||
List<MatOfPoint> contours = new ArrayList<>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(cannyOutput, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
|
||||
/// Draw contours
|
||||
Mat drawing = Mat.zeros(cannyOutput.size(), CvType.CV_8UC3);
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
Scalar color = new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256));
|
||||
Imgproc.drawContours(drawing, contours, i, color, 2, Imgproc.LINE_8, hierarchy, 0, new Point());
|
||||
}
|
||||
|
||||
imgContoursLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(drawing)));
|
||||
frame.repaint();
|
||||
}
|
||||
}
|
||||
|
||||
public class FindContoursDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new FindContours(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
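findContours, as used throughout these demos, takes a binary or edge image and fills a list with one MatOfPoint per contour plus a hierarchy Mat holding a (next, previous, first-child, parent) index quadruple per contour. A minimal sketch that only counts the contours; ContourCountSketch and the file name are placeholders.

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class ContourCountSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat gray = Imgcodecs.imread("HappyFish.jpg", Imgcodecs.IMREAD_GRAYSCALE); // assumed path
        Mat edges = new Mat();
        Imgproc.Canny(gray, edges, 100, 200);
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();   // one 4-vector (next, prev, child, parent) per contour
        Imgproc.findContours(edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
        System.out.println("Contours: " + contours.size());
    }
}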
154
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ShapeDescriptors/hull/HullDemo.java
vendored
Normal file
@ -0,0 +1,154 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfInt;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class Hull {
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgSrcLabel;
|
||||
private JLabel imgContoursLabel;
|
||||
private static final int MAX_THRESHOLD = 255;
|
||||
private int threshold = 100;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public Hull(String[] args) {
|
||||
/// Load source image
|
||||
String filename = args.length > 0 ? args[0] : "../data/stuff.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
/// Convert image to gray and blur it
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
Imgproc.blur(srcGray, srcGray, new Size(3, 3));
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Convex Hull demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Canny threshold: "));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
threshold = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgSrcLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgSrcLabel);
|
||||
|
||||
Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
|
||||
imgContoursLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
|
||||
imgPanel.add(imgContoursLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
/// Detect edges using Canny
|
||||
Mat cannyOutput = new Mat();
|
||||
Imgproc.Canny(srcGray, cannyOutput, threshold, threshold * 2);
|
||||
|
||||
/// Find contours
|
||||
List<MatOfPoint> contours = new ArrayList<>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(cannyOutput, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
|
||||
/// Find the convex hull object for each contour
|
||||
List<MatOfPoint> hullList = new ArrayList<>();
|
||||
for (MatOfPoint contour : contours) {
|
||||
MatOfInt hull = new MatOfInt();
|
||||
Imgproc.convexHull(contour, hull);
|
||||
|
||||
Point[] contourArray = contour.toArray();
|
||||
Point[] hullPoints = new Point[hull.rows()];
|
||||
List<Integer> hullContourIdxList = hull.toList();
|
||||
for (int i = 0; i < hullContourIdxList.size(); i++) {
|
||||
hullPoints[i] = contourArray[hullContourIdxList.get(i)];
|
||||
}
|
||||
hullList.add(new MatOfPoint(hullPoints));
|
||||
}
|
||||
|
||||
/// Draw contours + hull results
|
||||
Mat drawing = Mat.zeros(cannyOutput.size(), CvType.CV_8UC3);
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
Scalar color = new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256));
|
||||
Imgproc.drawContours(drawing, contours, i, color);
|
||||
Imgproc.drawContours(drawing, hullList, i, color );
|
||||
}
|
||||
|
||||
imgContoursLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(drawing)));
|
||||
frame.repaint();
|
||||
}
|
||||
}
|
||||
|
||||
public class HullDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new Hull(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
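Imgproc.convexHull returns indices into the input contour rather than points, which is why the demo maps each hull index back through contour.toArray(). A compact sketch of that conversion plus a tiny synthetic test; HullSketch and the sample square are illustrative only.

import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;

public class HullSketch {
    // Turn the index-based hull returned by convexHull into an explicit polygon.
    static MatOfPoint hullPolygon(MatOfPoint contour) {
        MatOfInt hull = new MatOfInt();
        Imgproc.convexHull(contour, hull);              // hull holds indices into the contour
        Point[] contourPts = contour.toArray();
        List<Integer> idx = hull.toList();
        Point[] hullPts = new Point[idx.size()];
        for (int i = 0; i < idx.size(); i++) {
            hullPts[i] = contourPts[idx.get(i)];
        }
        return new MatOfPoint(hullPts);
    }

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Square plus one interior point; the interior point is dropped by the hull.
        MatOfPoint square = new MatOfPoint(new Point(0, 0), new Point(10, 0),
                new Point(10, 10), new Point(0, 10), new Point(5, 5));
        System.out.println(hullPolygon(square).toList());
    }
}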
178
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ShapeDescriptors/moments/MomentsDemo.java
vendored
Normal file
@ -0,0 +1,178 @@
|
||||
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgproc.Moments;

class MomentsClass {
    private Mat srcGray = new Mat();
    private JFrame frame;
    private JLabel imgSrcLabel;
    private JLabel imgContoursLabel;
    private static final int MAX_THRESHOLD = 255;
    private int threshold = 100;
    private Random rng = new Random(12345);

    public MomentsClass(String[] args) {
        //! [setup]
        /// Load source image
        String filename = args.length > 0 ? args[0] : "../data/stuff.jpg";
        Mat src = Imgcodecs.imread(filename);
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }

        /// Convert image to gray and blur it
        Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.blur(srcGray, srcGray, new Size(3, 3));
        //! [setup]

        //! [createWindow]
        // Create and set up the window.
        frame = new JFrame("Image Moments demo");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(src);
        addComponentsToPane(frame.getContentPane(), img);
        //! [createWindow]
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        update();
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        //! [trackbar]
        sliderPanel.add(new JLabel("Canny threshold: "));
        JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
        slider.setMajorTickSpacing(20);
        slider.setMinorTickSpacing(10);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                threshold = source.getValue();
                update();
            }
        });
        //! [trackbar]
        sliderPanel.add(slider);
        pane.add(sliderPanel, BorderLayout.PAGE_START);

        JPanel imgPanel = new JPanel();
        imgSrcLabel = new JLabel(new ImageIcon(img));
        imgPanel.add(imgSrcLabel);

        Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
        imgContoursLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
        imgPanel.add(imgContoursLabel);

        pane.add(imgPanel, BorderLayout.CENTER);
    }

    private void update() {
        //! [Canny]
        /// Detect edges using Canny
        Mat cannyOutput = new Mat();
        Imgproc.Canny(srcGray, cannyOutput, threshold, threshold * 2);
        //! [Canny]

        //! [findContours]
        /// Find contours
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(cannyOutput, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
        //! [findContours]

        /// Get the moments
        List<Moments> mu = new ArrayList<>(contours.size());
        for (int i = 0; i < contours.size(); i++) {
            mu.add(Imgproc.moments(contours.get(i)));
        }

        /// Get the mass centers
        List<Point> mc = new ArrayList<>(contours.size());
        for (int i = 0; i < contours.size(); i++) {
            //add 1e-5 to avoid division by zero
            mc.add(new Point(mu.get(i).m10 / (mu.get(i).m00 + 1e-5), mu.get(i).m01 / (mu.get(i).m00 + 1e-5)));
        }

        //! [zeroMat]
        /// Draw contours
        Mat drawing = Mat.zeros(cannyOutput.size(), CvType.CV_8UC3);
        //! [zeroMat]
        //! [forContour]
        for (int i = 0; i < contours.size(); i++) {
            Scalar color = new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256));
            Imgproc.drawContours(drawing, contours, i, color, 2);
            Imgproc.circle(drawing, mc.get(i), 4, color, -1);
        }
        //! [forContour]

        //! [showDrawings]
        /// Show in a window
        imgContoursLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(drawing)));
        frame.repaint();
        //! [showDrawings]

        /// Calculate the area with the moments 00 and compare with the result of the OpenCV function
        System.out.println("\t Info: Area and Contour Length \n");
        for (int i = 0; i < contours.size(); i++) {
            System.out.format(" * Contour[%d] - Area (M_00) = %.2f - Area OpenCV: %.2f - Length: %.2f\n", i,
                    mu.get(i).m00, Imgproc.contourArea(contours.get(i)),
                    Imgproc.arcLength(new MatOfPoint2f(contours.get(i).toArray()), true));
        }
    }
}

public class MomentsDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new MomentsClass(args);
            }
        });
    }
}
@ -0,0 +1,94 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Core.MinMaxLocResult;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.MatOfPoint2f;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class PointPolygonTest {
|
||||
public void run() {
|
||||
/// Create an image
|
||||
int r = 100;
|
||||
Mat src = Mat.zeros(new Size(4 * r, 4 * r), CvType.CV_8U);
|
||||
|
||||
/// Create a sequence of points to make a contour
|
||||
List<Point> vert = new ArrayList<>(6);
|
||||
vert.add(new Point(3 * r / 2, 1.34 * r));
|
||||
vert.add(new Point(1 * r, 2 * r));
|
||||
vert.add(new Point(3 * r / 2, 2.866 * r));
|
||||
vert.add(new Point(5 * r / 2, 2.866 * r));
|
||||
vert.add(new Point(3 * r, 2 * r));
|
||||
vert.add(new Point(5 * r / 2, 1.34 * r));
|
||||
|
||||
/// Draw it in src
|
||||
for (int i = 0; i < 6; i++) {
|
||||
Imgproc.line(src, vert.get(i), vert.get((i + 1) % 6), new Scalar(255), 3);
|
||||
}
|
||||
|
||||
/// Get the contours
|
||||
List<MatOfPoint> contours = new ArrayList<>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(src, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
|
||||
/// Calculate the distances to the contour
|
||||
Mat rawDist = new Mat(src.size(), CvType.CV_32F);
|
||||
float[] rawDistData = new float[(int) (rawDist.total() * rawDist.channels())];
|
||||
for (int i = 0; i < src.rows(); i++) {
|
||||
for (int j = 0; j < src.cols(); j++) {
|
||||
rawDistData[i * src.cols() + j] = (float) Imgproc
|
||||
.pointPolygonTest(new MatOfPoint2f(contours.get(0).toArray()), new Point(j, i), true);
|
||||
}
|
||||
}
|
||||
rawDist.put(0, 0, rawDistData);
|
||||
|
||||
MinMaxLocResult res = Core.minMaxLoc(rawDist);
|
||||
double minVal = Math.abs(res.minVal);
|
||||
double maxVal = Math.abs(res.maxVal);
|
||||
|
||||
/// Depicting the distances graphically
|
||||
Mat drawing = Mat.zeros(src.size(), CvType.CV_8UC3);
|
||||
byte[] drawingData = new byte[(int) (drawing.total() * drawing.channels())];
|
||||
for (int i = 0; i < src.rows(); i++) {
|
||||
for (int j = 0; j < src.cols(); j++) {
|
||||
if (rawDistData[i * src.cols() + j] < 0) {
|
||||
drawingData[(i * src.cols() + j) * 3] =
|
||||
(byte) (255 - Math.abs(rawDistData[i * src.cols() + j]) * 255 / minVal);
|
||||
} else if (rawDistData[i * src.cols() + j] > 0) {
|
||||
drawingData[(i * src.cols() + j) * 3 + 2] =
|
||||
(byte) (255 - rawDistData[i * src.cols() + j] * 255 / maxVal);
|
||||
} else {
|
||||
drawingData[(i * src.cols() + j) * 3] = (byte) 255;
|
||||
drawingData[(i * src.cols() + j) * 3 + 1] = (byte) 255;
|
||||
drawingData[(i * src.cols() + j) * 3 + 2] = (byte) 255;
|
||||
}
|
||||
}
|
||||
}
|
||||
drawing.put(0, 0, drawingData);
|
||||
Imgproc.circle(drawing, res.maxLoc, (int)res.maxVal, new Scalar(255, 255, 255), 2, 8, 0);
|
||||
|
||||
/// Show your results
|
||||
HighGui.imshow("Source", src);
|
||||
HighGui.imshow("Distance and inscribed circle", drawing);
|
||||
|
||||
HighGui.waitKey();
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class PointPolygonTestDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
new PointPolygonTest().run();
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,158 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.core.Size;
|
||||
import org.opencv.core.TermCriteria;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CornerSubPix {
|
||||
private Mat src = new Mat();
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgLabel;
|
||||
private static final int MAX_CORNERS = 25;
|
||||
private int maxCorners = 10;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public CornerSubPix(String[] args) {
|
||||
/// Load source image and convert it to gray
|
||||
String filename = args.length > 0 ? args[0] : "../data/pic3.png";
|
||||
src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Shi-Tomasi corner detector demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Max corners:"));
|
||||
JSlider slider = new JSlider(0, MAX_CORNERS, maxCorners);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
maxCorners = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
imgLabel = new JLabel(new ImageIcon(img));
|
||||
pane.add(imgLabel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
/// Parameters for Shi-Tomasi algorithm
|
||||
maxCorners = Math.max(maxCorners, 1);
|
||||
MatOfPoint corners = new MatOfPoint();
|
||||
double qualityLevel = 0.01;
|
||||
double minDistance = 10;
|
||||
int blockSize = 3, gradientSize = 3;
|
||||
boolean useHarrisDetector = false;
|
||||
double k = 0.04;
|
||||
|
||||
/// Copy the source image
|
||||
Mat copy = src.clone();
|
||||
|
||||
/// Apply corner detection
|
||||
Imgproc.goodFeaturesToTrack(srcGray, corners, maxCorners, qualityLevel, minDistance, new Mat(),
|
||||
blockSize, gradientSize, useHarrisDetector, k);
|
||||
|
||||
/// Draw corners detected
|
||||
System.out.println("** Number of corners detected: " + corners.rows());
|
||||
int[] cornersData = new int[(int) (corners.total() * corners.channels())];
|
||||
corners.get(0, 0, cornersData);
|
||||
int radius = 4;
|
||||
Mat matCorners = new Mat(corners.rows(), 2, CvType.CV_32F);
|
||||
float[] matCornersData = new float[(int) (matCorners.total() * matCorners.channels())];
|
||||
matCorners.get(0, 0, matCornersData);
|
||||
for (int i = 0; i < corners.rows(); i++) {
|
||||
Imgproc.circle(copy, new Point(cornersData[i * 2], cornersData[i * 2 + 1]), radius,
|
||||
new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Imgproc.FILLED);
|
||||
matCornersData[i * 2] = cornersData[i * 2];
|
||||
matCornersData[i * 2 + 1] = cornersData[i * 2 + 1];
|
||||
}
|
||||
matCorners.put(0, 0, matCornersData);
|
||||
|
||||
imgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(copy)));
|
||||
frame.repaint();
|
||||
|
||||
/// Set the needed parameters to find the refined corners
|
||||
Size winSize = new Size(5, 5);
|
||||
Size zeroZone = new Size(-1, -1);
|
||||
TermCriteria criteria = new TermCriteria(TermCriteria.EPS + TermCriteria.COUNT, 40, 0.001);
|
||||
|
||||
/// Calculate the refined corner locations
|
||||
Imgproc.cornerSubPix(srcGray, matCorners, winSize, zeroZone, criteria);
|
||||
|
||||
/// Write them down
|
||||
matCorners.get(0, 0, matCornersData);
|
||||
for (int i = 0; i < corners.rows(); i++) {
|
||||
System.out.println(
|
||||
" -- Refined Corner [" + i + "] (" + matCornersData[i * 2] + "," + matCornersData[i * 2 + 1] + ")");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public class CornerSubPixDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new CornerSubPix(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,190 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Core.MinMaxLocResult;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CornerDetector {
|
||||
private Mat src = new Mat();
|
||||
private Mat srcGray = new Mat();
|
||||
private Mat harrisDst = new Mat();
|
||||
private Mat shiTomasiDst = new Mat();
|
||||
private Mat harrisCopy = new Mat();
|
||||
private Mat shiTomasiCopy = new Mat();
|
||||
private Mat Mc = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel harrisImgLabel;
|
||||
private JLabel shiTomasiImgLabel;
|
||||
private static final int MAX_QUALITY_LEVEL = 100;
|
||||
private int qualityLevel = 50;
|
||||
private double harrisMinVal;
|
||||
private double harrisMaxVal;
|
||||
private double shiTomasiMinVal;
|
||||
private double shiTomasiMaxVal;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public CornerDetector(String[] args) {
|
||||
/// Load source image and convert it to gray
|
||||
String filename = args.length > 0 ? args[0] : "../data/building.jpg";
|
||||
src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Creating your own corner detector demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
|
||||
/// Set some parameters
|
||||
int blockSize = 3, apertureSize = 3;
|
||||
|
||||
/// My Harris matrix -- Using cornerEigenValsAndVecs
|
||||
Imgproc.cornerEigenValsAndVecs(srcGray, harrisDst, blockSize, apertureSize);
|
||||
|
||||
/* calculate Mc */
|
||||
Mc = Mat.zeros(srcGray.size(), CvType.CV_32F);
|
||||
|
||||
float[] harrisData = new float[(int) (harrisDst.total() * harrisDst.channels())];
|
||||
harrisDst.get(0, 0, harrisData);
|
||||
float[] McData = new float[(int) (Mc.total() * Mc.channels())];
|
||||
Mc.get(0, 0, McData);
|
||||
|
||||
for( int i = 0; i < srcGray.rows(); i++ ) {
|
||||
for( int j = 0; j < srcGray.cols(); j++ ) {
|
||||
float lambda1 = harrisData[(i*srcGray.cols() + j) * 6];
|
||||
float lambda2 = harrisData[(i*srcGray.cols() + j) * 6 + 1];
|
||||
McData[i*srcGray.cols()+j] = (float) (lambda1*lambda2 - 0.04f*Math.pow( ( lambda1 + lambda2 ), 2 ));
|
||||
}
|
||||
}
|
||||
Mc.put(0, 0, McData);
|
||||
|
||||
MinMaxLocResult res = Core.minMaxLoc(Mc);
|
||||
harrisMinVal = res.minVal;
|
||||
harrisMaxVal = res.maxVal;
|
||||
|
||||
/// My Shi-Tomasi -- Using cornerMinEigenVal
|
||||
Imgproc.cornerMinEigenVal(srcGray, shiTomasiDst, blockSize, apertureSize);
|
||||
res = Core.minMaxLoc(shiTomasiDst);
|
||||
shiTomasiMinVal = res.minVal;
|
||||
shiTomasiMaxVal = res.maxVal;
|
||||
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Max corners:"));
|
||||
JSlider slider = new JSlider(0, MAX_QUALITY_LEVEL, qualityLevel);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
qualityLevel = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
harrisImgLabel = new JLabel(new ImageIcon(img));
|
||||
shiTomasiImgLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(harrisImgLabel);
|
||||
imgPanel.add(shiTomasiImgLabel);
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
int qualityLevelVal = Math.max(qualityLevel, 1);
|
||||
|
||||
//Harris
|
||||
harrisCopy = src.clone();
|
||||
|
||||
float[] McData = new float[(int) (Mc.total() * Mc.channels())];
|
||||
Mc.get(0, 0, McData);
|
||||
for (int i = 0; i < srcGray.rows(); i++) {
|
||||
for (int j = 0; j < srcGray.cols(); j++) {
|
||||
if (McData[i * srcGray.cols() + j] > harrisMinVal
|
||||
+ (harrisMaxVal - harrisMinVal) * qualityLevelVal / MAX_QUALITY_LEVEL) {
|
||||
Imgproc.circle(harrisCopy, new Point(j, i), 4,
|
||||
new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Imgproc.FILLED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Shi-Tomasi
|
||||
shiTomasiCopy = src.clone();
|
||||
|
||||
float[] shiTomasiData = new float[(int) (shiTomasiDst.total() * shiTomasiDst.channels())];
|
||||
shiTomasiDst.get(0, 0, shiTomasiData);
|
||||
for (int i = 0; i < srcGray.rows(); i++) {
|
||||
for (int j = 0; j < srcGray.cols(); j++) {
|
||||
if (shiTomasiData[i * srcGray.cols() + j] > shiTomasiMinVal
|
||||
+ (shiTomasiMaxVal - shiTomasiMinVal) * qualityLevelVal / MAX_QUALITY_LEVEL) {
|
||||
Imgproc.circle(shiTomasiCopy, new Point(j, i), 4,
|
||||
new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Imgproc.FILLED);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
harrisImgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(harrisCopy)));
|
||||
shiTomasiImgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(shiTomasiCopy)));
|
||||
frame.repaint();
|
||||
}
|
||||
}
|
||||
|
||||
public class CornerDetectorDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new CornerDetector(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,134 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class GoodFeaturesToTrack {
|
||||
private Mat src = new Mat();
|
||||
private Mat srcGray = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgLabel;
|
||||
private static final int MAX_THRESHOLD = 100;
|
||||
private int maxCorners = 23;
|
||||
private Random rng = new Random(12345);
|
||||
|
||||
public GoodFeaturesToTrack(String[] args) {
|
||||
/// Load source image and convert it to gray
|
||||
String filename = args.length > 0 ? args[0] : "../data/pic3.png";
|
||||
src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Shi-Tomasi corner detector demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Max corners:"));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, maxCorners);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
maxCorners = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
imgLabel = new JLabel(new ImageIcon(img));
|
||||
pane.add(imgLabel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
/// Parameters for Shi-Tomasi algorithm
|
||||
maxCorners = Math.max(maxCorners, 1);
|
||||
MatOfPoint corners = new MatOfPoint();
|
||||
double qualityLevel = 0.01;
|
||||
double minDistance = 10;
|
||||
int blockSize = 3, gradientSize = 3;
|
||||
boolean useHarrisDetector = false;
|
||||
double k = 0.04;
|
||||
|
||||
/// Copy the source image
|
||||
Mat copy = src.clone();
|
||||
|
||||
/// Apply corner detection
|
||||
Imgproc.goodFeaturesToTrack(srcGray, corners, maxCorners, qualityLevel, minDistance, new Mat(),
|
||||
blockSize, gradientSize, useHarrisDetector, k);
|
||||
|
||||
/// Draw corners detected
|
||||
System.out.println("** Number of corners detected: " + corners.rows());
|
||||
int[] cornersData = new int[(int) (corners.total() * corners.channels())];
|
||||
corners.get(0, 0, cornersData);
|
||||
int radius = 4;
|
||||
for (int i = 0; i < corners.rows(); i++) {
|
||||
Imgproc.circle(copy, new Point(cornersData[i * 2], cornersData[i * 2 + 1]), radius,
|
||||
new Scalar(rng.nextInt(256), rng.nextInt(256), rng.nextInt(256)), Imgproc.FILLED);
|
||||
}
|
||||
|
||||
imgLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(copy)));
|
||||
frame.repaint();
|
||||
}
|
||||
}
|
||||
|
||||
public class GoodFeaturesToTrackDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new GoodFeaturesToTrack(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
@ -0,0 +1,142 @@
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Container;
|
||||
import java.awt.Image;
|
||||
|
||||
import javax.swing.BoxLayout;
|
||||
import javax.swing.ImageIcon;
|
||||
import javax.swing.JFrame;
|
||||
import javax.swing.JLabel;
|
||||
import javax.swing.JPanel;
|
||||
import javax.swing.JSlider;
|
||||
import javax.swing.event.ChangeEvent;
|
||||
import javax.swing.event.ChangeListener;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class CornerHarris {
|
||||
private Mat srcGray = new Mat();
|
||||
private Mat dst = new Mat();
|
||||
private Mat dstNorm = new Mat();
|
||||
private Mat dstNormScaled = new Mat();
|
||||
private JFrame frame;
|
||||
private JLabel imgLabel;
|
||||
private JLabel cornerLabel;
|
||||
private static final int MAX_THRESHOLD = 255;
|
||||
private int threshold = 200;
|
||||
|
||||
public CornerHarris(String[] args) {
|
||||
/// Load source image and convert it to gray
|
||||
String filename = args.length > 0 ? args[0] : "../data/building.jpg";
|
||||
Mat src = Imgcodecs.imread(filename);
|
||||
if (src.empty()) {
|
||||
System.err.println("Cannot read image: " + filename);
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
|
||||
|
||||
// Create and set up the window.
|
||||
frame = new JFrame("Harris corner detector demo");
|
||||
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
|
||||
// Set up the content pane.
|
||||
Image img = HighGui.toBufferedImage(src);
|
||||
addComponentsToPane(frame.getContentPane(), img);
|
||||
// Use the content pane's default BorderLayout. No need for
|
||||
// setLayout(new BorderLayout());
|
||||
// Display the window.
|
||||
frame.pack();
|
||||
frame.setVisible(true);
|
||||
update();
|
||||
}
|
||||
|
||||
private void addComponentsToPane(Container pane, Image img) {
|
||||
if (!(pane.getLayout() instanceof BorderLayout)) {
|
||||
pane.add(new JLabel("Container doesn't use BorderLayout!"));
|
||||
return;
|
||||
}
|
||||
|
||||
JPanel sliderPanel = new JPanel();
|
||||
sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
|
||||
|
||||
sliderPanel.add(new JLabel("Threshold: "));
|
||||
JSlider slider = new JSlider(0, MAX_THRESHOLD, threshold);
|
||||
slider.setMajorTickSpacing(20);
|
||||
slider.setMinorTickSpacing(10);
|
||||
slider.setPaintTicks(true);
|
||||
slider.setPaintLabels(true);
|
||||
slider.addChangeListener(new ChangeListener() {
|
||||
@Override
|
||||
public void stateChanged(ChangeEvent e) {
|
||||
JSlider source = (JSlider) e.getSource();
|
||||
threshold = source.getValue();
|
||||
update();
|
||||
}
|
||||
});
|
||||
sliderPanel.add(slider);
|
||||
pane.add(sliderPanel, BorderLayout.PAGE_START);
|
||||
|
||||
JPanel imgPanel = new JPanel();
|
||||
imgLabel = new JLabel(new ImageIcon(img));
|
||||
imgPanel.add(imgLabel);
|
||||
|
||||
Mat blackImg = Mat.zeros(srcGray.size(), CvType.CV_8U);
|
||||
cornerLabel = new JLabel(new ImageIcon(HighGui.toBufferedImage(blackImg)));
|
||||
imgPanel.add(cornerLabel);
|
||||
|
||||
pane.add(imgPanel, BorderLayout.CENTER);
|
||||
}
|
||||
|
||||
private void update() {
|
||||
dst = Mat.zeros(srcGray.size(), CvType.CV_32F);
|
||||
|
||||
/// Detector parameters
|
||||
int blockSize = 2;
|
||||
int apertureSize = 3;
|
||||
double k = 0.04;
|
||||
|
||||
/// Detecting corners
|
||||
Imgproc.cornerHarris(srcGray, dst, blockSize, apertureSize, k);
|
||||
|
||||
/// Normalizing
|
||||
Core.normalize(dst, dstNorm, 0, 255, Core.NORM_MINMAX);
|
||||
Core.convertScaleAbs(dstNorm, dstNormScaled);
|
||||
|
||||
/// Drawing a circle around corners
|
||||
float[] dstNormData = new float[(int) (dstNorm.total() * dstNorm.channels())];
|
||||
dstNorm.get(0, 0, dstNormData);
|
||||
|
||||
for (int i = 0; i < dstNorm.rows(); i++) {
|
||||
for (int j = 0; j < dstNorm.cols(); j++) {
|
||||
if ((int) dstNormData[i * dstNorm.cols() + j] > threshold) {
|
||||
Imgproc.circle(dstNormScaled, new Point(j, i), 5, new Scalar(0), 2, 8, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cornerLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(dstNormScaled)));
|
||||
frame.repaint();
|
||||
}
|
||||
}
|
||||
|
||||
public class CornerHarrisDemo {
|
||||
public static void main(String[] args) {
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
// Schedule a job for the event dispatch thread:
|
||||
// creating and showing this application's GUI.
|
||||
javax.swing.SwingUtilities.invokeLater(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
new CornerHarris(args);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
13
3rdparty/opencv-4.5.4/samples/java/tutorial_code/build.xml
vendored
Normal file
@ -0,0 +1,13 @@
<project default="compile">

    <property name="lib.dir" value="${ocvJarDir}"/>
    <path id="classpath">
        <fileset dir="${lib.dir}" includes="**/*.jar"/>
    </path>

    <target name="compile">
        <mkdir dir="${dstDir}"/>
        <javac includeantruntime="false" srcdir="${srcDir}" destdir="${dstDir}" classpathref="classpath"/>
    </target>

</project>
51
3rdparty/opencv-4.5.4/samples/java/tutorial_code/core/AddingImages/AddingImages.java
vendored
Normal file
@ -0,0 +1,51 @@
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;

import java.util.Locale;
import java.util.Scanner;

class AddingImagesRun{
    public void run() {
        double alpha = 0.5; double beta; double input;

        Mat src1, src2, dst = new Mat();

        System.out.println(" Simple Linear Blender ");
        System.out.println("-----------------------");
        System.out.println("* Enter alpha [0.0-1.0]: ");
        Scanner scan = new Scanner( System.in ).useLocale(Locale.US);
        input = scan.nextDouble();

        if( input >= 0.0 && input <= 1.0 )
            alpha = input;

        //! [load]
        src1 = Imgcodecs.imread("../../images/LinuxLogo.jpg");
        src2 = Imgcodecs.imread("../../images/WindowsLogo.jpg");
        //! [load]

        if( src1.empty() == true ){ System.out.println("Error loading src1"); return;}
        if( src2.empty() == true ){ System.out.println("Error loading src2"); return;}

        //! [blend_images]
        beta = ( 1.0 - alpha );
        Core.addWeighted( src1, alpha, src2, beta, 0.0, dst);
        //! [blend_images]

        //![display]
        HighGui.imshow("Linear Blend", dst);
        HighGui.waitKey(0);
        //![display]

        System.exit(0);
    }
}

public class AddingImages {
    public static void main(String[] args) {
        // Load the native library.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new AddingImagesRun().run();
    }
}
@ -0,0 +1,109 @@
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.*;
|
||||
|
||||
class DiscreteFourierTransformRun{
|
||||
private void help() {
|
||||
System.out.println("" +
|
||||
"This program demonstrated the use of the discrete Fourier transform (DFT). \n" +
|
||||
"The dft of an image is taken and it's power spectrum is displayed.\n" +
|
||||
"Usage:\n" +
|
||||
"./DiscreteFourierTransform [image_name -- default ../data/lena.jpg]");
|
||||
}
|
||||
|
||||
public void run(String[] args){
|
||||
|
||||
help();
|
||||
|
||||
String filename = ((args.length > 0) ? args[0] : "../data/lena.jpg");
|
||||
|
||||
Mat I = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
if( I.empty() ) {
|
||||
System.out.println("Error opening image");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
//! [expand]
|
||||
Mat padded = new Mat(); //expand input image to optimal size
|
||||
int m = Core.getOptimalDFTSize( I.rows() );
|
||||
int n = Core.getOptimalDFTSize( I.cols() ); // on the border add zero values
|
||||
Core.copyMakeBorder(I, padded, 0, m - I.rows(), 0, n - I.cols(), Core.BORDER_CONSTANT, Scalar.all(0));
|
||||
//! [expand]
|
||||
|
||||
//! [complex_and_real]
|
||||
List<Mat> planes = new ArrayList<Mat>();
|
||||
padded.convertTo(padded, CvType.CV_32F);
|
||||
planes.add(padded);
|
||||
planes.add(Mat.zeros(padded.size(), CvType.CV_32F));
|
||||
Mat complexI = new Mat();
|
||||
Core.merge(planes, complexI); // Add to the expanded another plane with zeros
|
||||
//! [complex_and_real]
|
||||
|
||||
//! [dft]
|
||||
Core.dft(complexI, complexI); // this way the result may fit in the source matrix
|
||||
//! [dft]
|
||||
|
||||
// compute the magnitude and switch to logarithmic scale
|
||||
// => log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
|
||||
//! [magnitude]
|
||||
Core.split(complexI, planes); // planes.get(0) = Re(DFT(I)
|
||||
// planes.get(1) = Im(DFT(I))
|
||||
Core.magnitude(planes.get(0), planes.get(1), planes.get(0));// planes.get(0) = magnitude
|
||||
Mat magI = planes.get(0);
|
||||
//! [magnitude]
|
||||
|
||||
//! [log]
|
||||
Mat matOfOnes = Mat.ones(magI.size(), magI.type());
|
||||
Core.add(matOfOnes, magI, magI); // switch to logarithmic scale
|
||||
Core.log(magI, magI);
|
||||
//! [log]
|
||||
|
||||
//! [crop_rearrange]
|
||||
// crop the spectrum, if it has an odd number of rows or columns
|
||||
magI = magI.submat(new Rect(0, 0, magI.cols() & -2, magI.rows() & -2));
|
||||
|
||||
// rearrange the quadrants of Fourier image so that the origin is at the image center
|
||||
int cx = magI.cols()/2;
|
||||
int cy = magI.rows()/2;
|
||||
|
||||
Mat q0 = new Mat(magI, new Rect(0, 0, cx, cy)); // Top-Left - Create a ROI per quadrant
|
||||
Mat q1 = new Mat(magI, new Rect(cx, 0, cx, cy)); // Top-Right
|
||||
Mat q2 = new Mat(magI, new Rect(0, cy, cx, cy)); // Bottom-Left
|
||||
Mat q3 = new Mat(magI, new Rect(cx, cy, cx, cy)); // Bottom-Right
|
||||
|
||||
Mat tmp = new Mat(); // swap quadrants (Top-Left with Bottom-Right)
|
||||
q0.copyTo(tmp);
|
||||
q3.copyTo(q0);
|
||||
tmp.copyTo(q3);
|
||||
|
||||
q1.copyTo(tmp); // swap quadrant (Top-Right with Bottom-Left)
|
||||
q2.copyTo(q1);
|
||||
tmp.copyTo(q2);
|
||||
//! [crop_rearrange]
|
||||
|
||||
magI.convertTo(magI, CvType.CV_8UC1);
|
||||
//! [normalize]
|
||||
Core.normalize(magI, magI, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1); // Transform the matrix with float values
|
||||
// into a viewable image form (float between
|
||||
// values 0 and 255).
|
||||
//! [normalize]
|
||||
|
||||
HighGui.imshow("Input Image" , I ); // Show the result
|
||||
HighGui.imshow("Spectrum Magnitude", magI);
|
||||
HighGui.waitKey();
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public class DiscreteFourierTransform {
|
||||
public static void main(String[] args) {
|
||||
// Load the native library.
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new DiscreteFourierTransformRun().run(args);
|
||||
}
|
||||
}
|
120
3rdparty/opencv-4.5.4/samples/java/tutorial_code/core/mat_mask_operations/MatMaskOperations.java
vendored
Normal file
@ -0,0 +1,120 @@
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
class MatMaskOperationsRun {
|
||||
|
||||
public void run(String[] args) {
|
||||
|
||||
String filename = "../data/lena.jpg";
|
||||
|
||||
int img_codec = Imgcodecs.IMREAD_COLOR;
|
||||
if (args.length != 0) {
|
||||
filename = args[0];
|
||||
if (args.length >= 2 && args[1].equals("G"))
|
||||
img_codec = Imgcodecs.IMREAD_GRAYSCALE;
|
||||
}
|
||||
|
||||
Mat src = Imgcodecs.imread(filename, img_codec);
|
||||
|
||||
if (src.empty()) {
|
||||
System.out.println("Can't open image [" + filename + "]");
|
||||
System.out.println("Program Arguments: [image_path -- default ../data/lena.jpg] [G -- grayscale]");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
HighGui.namedWindow("Input", HighGui.WINDOW_AUTOSIZE);
|
||||
HighGui.namedWindow("Output", HighGui.WINDOW_AUTOSIZE);
|
||||
|
||||
HighGui.imshow( "Input", src );
|
||||
double t = System.currentTimeMillis();
|
||||
|
||||
Mat dst0 = sharpen(src, new Mat());
|
||||
|
||||
t = ((double) System.currentTimeMillis() - t) / 1000;
|
||||
System.out.println("Hand written function time passed in seconds: " + t);
|
||||
|
||||
HighGui.imshow( "Output", dst0 );
|
||||
HighGui.moveWindow("Output", 400, 400);
|
||||
HighGui.waitKey();
|
||||
|
||||
//![kern]
|
||||
Mat kern = new Mat(3, 3, CvType.CV_8S);
|
||||
int row = 0, col = 0;
|
||||
kern.put(row, col, 0, -1, 0, -1, 5, -1, 0, -1, 0);
|
||||
//![kern]
|
||||
|
||||
t = System.currentTimeMillis();
|
||||
|
||||
Mat dst1 = new Mat();
|
||||
//![filter2D]
|
||||
Imgproc.filter2D(src, dst1, src.depth(), kern);
|
||||
//![filter2D]
|
||||
t = ((double) System.currentTimeMillis() - t) / 1000;
|
||||
System.out.println("Built-in filter2D time passed in seconds: " + t);
|
||||
|
||||
HighGui.imshow( "Output", dst1 );
|
||||
|
||||
HighGui.waitKey();
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
//! [basic_method]
|
||||
public static double saturate(double x) {
|
||||
return x > 255.0 ? 255.0 : (x < 0.0 ? 0.0 : x);
|
||||
}
|
||||
|
||||
public Mat sharpen(Mat myImage, Mat Result) {
|
||||
//! [8_bit]
|
||||
myImage.convertTo(myImage, CvType.CV_8U);
|
||||
//! [8_bit]
|
||||
|
||||
//! [create_channels]
|
||||
int nChannels = myImage.channels();
|
||||
Result.create(myImage.size(), myImage.type());
|
||||
//! [create_channels]
|
||||
|
||||
//! [basic_method_loop]
|
||||
for (int j = 1; j < myImage.rows() - 1; ++j) {
|
||||
for (int i = 1; i < myImage.cols() - 1; ++i) {
|
||||
double sum[] = new double[nChannels];
|
||||
|
||||
for (int k = 0; k < nChannels; ++k) {
|
||||
|
||||
double top = -myImage.get(j - 1, i)[k];
|
||||
double bottom = -myImage.get(j + 1, i)[k];
|
||||
double center = (5 * myImage.get(j, i)[k]);
|
||||
double left = -myImage.get(j, i - 1)[k];
|
||||
double right = -myImage.get(j, i + 1)[k];
|
||||
|
||||
sum[k] = saturate(top + bottom + center + left + right);
|
||||
}
|
||||
|
||||
Result.put(j, i, sum);
|
||||
}
|
||||
}
|
||||
//! [basic_method_loop]
|
||||
|
||||
//! [borders]
|
||||
Result.row(0).setTo(new Scalar(0));
|
||||
Result.row(Result.rows() - 1).setTo(new Scalar(0));
|
||||
Result.col(0).setTo(new Scalar(0));
|
||||
Result.col(Result.cols() - 1).setTo(new Scalar(0));
|
||||
//! [borders]
|
||||
|
||||
return Result;
|
||||
}
|
||||
//! [basic_method]
|
||||
}
|
||||
|
||||
public class MatMaskOperations {
|
||||
public static void main(String[] args) {
|
||||
// Load the native library.
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new MatMaskOperationsRun().run(args);
|
||||
}
|
||||
}
|
130
3rdparty/opencv-4.5.4/samples/java/tutorial_code/core/mat_operations/MatOperations.java
vendored
Normal file
@ -0,0 +1,130 @@
import java.util.Arrays;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Core.MinMaxLocResult;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.Rect;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
public class MatOperations {
|
||||
@SuppressWarnings("unused")
|
||||
public static void main(String[] args) {
|
||||
/* Snippet code for Operations with images tutorial (not intended to be run) */
|
||||
|
||||
// Load the native OpenCV library
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
|
||||
String filename = "";
|
||||
// Input/Output
|
||||
{
|
||||
//! [Load an image from a file]
|
||||
Mat img = Imgcodecs.imread(filename);
|
||||
//! [Load an image from a file]
|
||||
}
|
||||
{
|
||||
//! [Load an image from a file in grayscale]
|
||||
Mat img = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
//! [Load an image from a file in grayscale]
|
||||
}
|
||||
{
|
||||
Mat img = new Mat(4, 4, CvType.CV_8U);
|
||||
//! [Save image]
|
||||
Imgcodecs.imwrite(filename, img);
|
||||
//! [Save image]
|
||||
}
|
||||
// Accessing pixel intensity values
|
||||
{
|
||||
Mat img = new Mat(4, 4, CvType.CV_8U);
|
||||
int y = 0, x = 0;
|
||||
{
|
||||
//! [Pixel access 1]
|
||||
byte[] imgData = new byte[(int) (img.total() * img.channels())];
|
||||
img.get(0, 0, imgData);
|
||||
byte intensity = imgData[y * img.cols() + x];
|
||||
//! [Pixel access 1]
|
||||
}
|
||||
{
|
||||
//! [Pixel access 5]
|
||||
byte[] imgData = new byte[(int) (img.total() * img.channels())];
|
||||
imgData[y * img.cols() + x] = (byte) 128;
|
||||
img.put(0, 0, imgData);
|
||||
//! [Pixel access 5]
|
||||
}
|
||||
|
||||
}
|
||||
// Memory management and reference counting
|
||||
{
|
||||
//! [Reference counting 2]
|
||||
Mat img = Imgcodecs.imread("image.jpg");
|
||||
Mat img1 = img.clone();
|
||||
//! [Reference counting 2]
|
||||
}
|
||||
{
|
||||
//! [Reference counting 3]
|
||||
Mat img = Imgcodecs.imread("image.jpg");
|
||||
Mat sobelx = new Mat();
|
||||
Imgproc.Sobel(img, sobelx, CvType.CV_32F, 1, 0);
|
||||
//! [Reference counting 3]
|
||||
}
|
||||
// Primitive operations
|
||||
{
|
||||
Mat img = new Mat(400, 400, CvType.CV_8UC3);
|
||||
{
|
||||
//! [Set image to black]
|
||||
byte[] imgData = new byte[(int) (img.total() * img.channels())];
|
||||
Arrays.fill(imgData, (byte) 0);
|
||||
img.put(0, 0, imgData);
|
||||
//! [Set image to black]
|
||||
}
|
||||
{
|
||||
//! [Select ROI]
|
||||
Rect r = new Rect(10, 10, 100, 100);
|
||||
Mat smallImg = img.submat(r);
|
||||
//! [Select ROI]
|
||||
}
|
||||
}
|
||||
{
|
||||
//! [BGR to Gray]
|
||||
Mat img = Imgcodecs.imread("image.jpg"); // loading a 8UC3 image
|
||||
Mat grey = new Mat();
|
||||
Imgproc.cvtColor(img, grey, Imgproc.COLOR_BGR2GRAY);
|
||||
//! [BGR to Gray]
|
||||
}
|
||||
{
|
||||
Mat dst = new Mat(), src = new Mat();
|
||||
//! [Convert to CV_32F]
|
||||
src.convertTo(dst, CvType.CV_32F);
|
||||
//! [Convert to CV_32F]
|
||||
}
|
||||
// Visualizing images
|
||||
{
|
||||
//! [imshow 1]
|
||||
Mat img = Imgcodecs.imread("image.jpg");
|
||||
HighGui.namedWindow("image", HighGui.WINDOW_AUTOSIZE);
|
||||
HighGui.imshow("image", img);
|
||||
HighGui.waitKey();
|
||||
//! [imshow 1]
|
||||
}
|
||||
{
|
||||
//! [imshow 2]
|
||||
Mat img = Imgcodecs.imread("image.jpg");
|
||||
Mat grey = new Mat();
|
||||
Imgproc.cvtColor(img, grey, Imgproc.COLOR_BGR2GRAY);
|
||||
Mat sobelx = new Mat();
|
||||
Imgproc.Sobel(grey, sobelx, CvType.CV_32F, 1, 0);
|
||||
MinMaxLocResult res = Core.minMaxLoc(sobelx); // find minimum and maximum intensities
|
||||
Mat draw = new Mat();
|
||||
double maxVal = res.maxVal, minVal = res.minVal;
|
||||
sobelx.convertTo(draw, CvType.CV_8U, 255.0 / (maxVal - minVal), -minVal * 255.0 / (maxVal - minVal));
|
||||
HighGui.namedWindow("image", HighGui.WINDOW_AUTOSIZE);
|
||||
HighGui.imshow("image", draw);
|
||||
HighGui.waitKey();
|
||||
//! [imshow 2]
|
||||
}
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,89 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.core.Range;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
|
||||
class PanoramaStitchingRotatingCameraRun {
|
||||
void basicPanoramaStitching (String[] args) {
|
||||
String img1path = args[0], img2path = args[1];
|
||||
Mat img1 = new Mat(), img2 = new Mat();
|
||||
img1 = Imgcodecs.imread(img1path);
|
||||
img2 = Imgcodecs.imread(img2path);
|
||||
|
||||
//! [camera-pose-from-Blender-at-location-1]
|
||||
Mat c1Mo = new Mat( 4, 4, CvType.CV_64FC1 );
|
||||
c1Mo.put(0 ,0 ,0.9659258723258972, 0.2588190734386444, 0.0, 1.5529145002365112,
|
||||
0.08852133899927139, -0.3303661346435547, -0.9396926164627075, -0.10281121730804443,
|
||||
-0.24321036040782928, 0.9076734185218811, -0.342020183801651, 6.130080699920654,
|
||||
0, 0, 0, 1 );
|
||||
//! [camera-pose-from-Blender-at-location-1]
|
||||
|
||||
//! [camera-pose-from-Blender-at-location-2]
|
||||
Mat c2Mo = new Mat( 4, 4, CvType.CV_64FC1 );
|
||||
c2Mo.put(0, 0, 0.9659258723258972, -0.2588190734386444, 0.0, -1.5529145002365112,
|
||||
-0.08852133899927139, -0.3303661346435547, -0.9396926164627075, -0.10281121730804443,
|
||||
0.24321036040782928, 0.9076734185218811, -0.342020183801651, 6.130080699920654,
|
||||
0, 0, 0, 1);
|
||||
//! [camera-pose-from-Blender-at-location-2]
|
||||
|
||||
//! [camera-intrinsics-from-Blender]
|
||||
Mat cameraMatrix = new Mat(3, 3, CvType.CV_64FC1);
|
||||
cameraMatrix.put(0, 0, 700.0, 0.0, 320.0, 0.0, 700.0, 240.0, 0, 0, 1 );
|
||||
//! [camera-intrinsics-from-Blender]
|
||||
|
||||
//! [extract-rotation]
|
||||
Range rowRange = new Range(0,3);
|
||||
Range colRange = new Range(0,3);
|
||||
//! [extract-rotation]
|
||||
|
||||
//! [compute-rotation-displacement]
|
||||
//c1Mo * oMc2
|
||||
Mat R1 = new Mat(c1Mo, rowRange, colRange);
|
||||
Mat R2 = new Mat(c2Mo, rowRange, colRange);
|
||||
Mat R_2to1 = new Mat();
|
||||
Core.gemm(R1, R2.t(), 1, new Mat(), 0, R_2to1 );
|
||||
//! [compute-rotation-displacement]
|
||||
|
||||
//! [compute-homography]
|
||||
Mat tmp = new Mat(), H = new Mat();
|
||||
Core.gemm(cameraMatrix, R_2to1, 1, new Mat(), 0, tmp);
|
||||
Core.gemm(tmp, cameraMatrix.inv(), 1, new Mat(), 0, H);
|
||||
Scalar s = new Scalar(H.get(2, 2)[0]);
|
||||
Core.divide(H, s, H);
|
||||
System.out.println(H.dump());
|
||||
//! [compute-homography]
|
||||
|
||||
//! [stitch]
|
||||
Mat img_stitch = new Mat();
|
||||
Imgproc.warpPerspective(img2, img_stitch, H, new Size(img2.cols()*2, img2.rows()) );
|
||||
Mat half = new Mat();
|
||||
half = new Mat(img_stitch, new Rect(0, 0, img1.cols(), img1.rows()));
|
||||
img1.copyTo(half);
|
||||
//! [stitch]
|
||||
|
||||
Mat img_compare = new Mat();
|
||||
Mat img_space = Mat.zeros(new Size(50, img1.rows()), CvType.CV_8UC3);
|
||||
List<Mat>list = new ArrayList<>();
|
||||
list.add(img1);
|
||||
list.add(img_space);
|
||||
list.add(img2);
|
||||
Core.hconcat(list, img_compare);
|
||||
|
||||
HighGui.imshow("Compare Images", img_compare);
|
||||
HighGui.imshow("Panorama Stitching", img_stitch);
|
||||
HighGui.waitKey(0);
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
public class PanoramaStitchingRotatingCamera {
|
||||
public static void main(String[] args) {
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new PanoramaStitchingRotatingCameraRun().basicPanoramaStitching(args);
|
||||
}
|
||||
}
|
@ -0,0 +1,89 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
import org.opencv.core.*;
|
||||
import org.opencv.calib3d.Calib3d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
|
||||
class PerspectiveCorrectionRun {
|
||||
void perspectiveCorrection (String[] args) {
|
||||
String img1Path = args[0], img2Path = args[1];
|
||||
Mat img1 = Imgcodecs.imread(img1Path);
|
||||
Mat img2 = Imgcodecs.imread(img2Path);
|
||||
|
||||
//! [find-corners]
|
||||
MatOfPoint2f corners1 = new MatOfPoint2f(), corners2 = new MatOfPoint2f();
|
||||
boolean found1 = Calib3d.findChessboardCorners(img1, new Size(9, 6), corners1 );
|
||||
boolean found2 = Calib3d.findChessboardCorners(img2, new Size(9, 6), corners2 );
|
||||
//! [find-corners]
|
||||
|
||||
if (!found1 || !found2) {
|
||||
System.out.println("Error, cannot find the chessboard corners in both images.");
|
||||
System.exit(-1);
|
||||
}
|
||||
|
||||
//! [estimate-homography]
|
||||
Mat H = new Mat();
|
||||
H = Calib3d.findHomography(corners1, corners2);
|
||||
System.out.println(H.dump());
|
||||
//! [estimate-homography]
|
||||
|
||||
//! [warp-chessboard]
|
||||
Mat img1_warp = new Mat();
|
||||
Imgproc.warpPerspective(img1, img1_warp, H, img1.size());
|
||||
//! [warp-chessboard]
|
||||
|
||||
Mat img_draw_warp = new Mat();
|
||||
List<Mat> list1 = new ArrayList<>(), list2 = new ArrayList<>() ;
|
||||
list1.add(img2);
|
||||
list1.add(img1_warp);
|
||||
Core.hconcat(list1, img_draw_warp);
|
||||
HighGui.imshow("Desired chessboard view / Warped source chessboard view", img_draw_warp);
|
||||
|
||||
//! [compute-transformed-corners]
|
||||
Mat img_draw_matches = new Mat();
|
||||
list2.add(img1);
|
||||
list2.add(img2);
|
||||
Core.hconcat(list2, img_draw_matches);
|
||||
Point []corners1Arr = corners1.toArray();
|
||||
|
||||
for (int i = 0 ; i < corners1Arr.length; i++) {
|
||||
Mat pt1 = new Mat(3, 1, CvType.CV_64FC1), pt2 = new Mat();
|
||||
pt1.put(0, 0, corners1Arr[i].x, corners1Arr[i].y, 1 );
|
||||
|
||||
Core.gemm(H, pt1, 1, new Mat(), 0, pt2);
|
||||
double[] data = pt2.get(2, 0);
|
||||
Core.divide(pt2, new Scalar(data[0]), pt2);
|
||||
|
||||
double[] data1 =pt2.get(0, 0);
|
||||
double[] data2 = pt2.get(1, 0);
|
||||
Point end = new Point((int)(img1.cols()+ data1[0]), (int)data2[0]);
|
||||
Imgproc.line(img_draw_matches, corners1Arr[i], end, RandomColor(), 2);
|
||||
}
|
||||
|
||||
HighGui.imshow("Draw matches", img_draw_matches);
|
||||
HighGui.waitKey(0);
|
||||
//! [compute-transformed-corners]
|
||||
|
||||
System.exit(0);
|
||||
}
|
||||
|
||||
Scalar RandomColor () {
|
||||
Random rng = new Random();
|
||||
int r = rng.nextInt(256);
|
||||
int g = rng.nextInt(256);
|
||||
int b = rng.nextInt(256);
|
||||
return new Scalar(r, g, b);
|
||||
}
|
||||
}
|
||||
|
||||
public class PerspectiveCorrection {
|
||||
public static void main (String[] args) {
|
||||
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
|
||||
new PerspectiveCorrectionRun().perspectiveCorrection(args);
|
||||
}
|
||||
}
|
163
3rdparty/opencv-4.5.4/samples/java/tutorial_code/features2D/akaze_matching/AKAZEMatchDemo.java
vendored
Normal file
@ -0,0 +1,163 @@
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.CvType;
|
||||
import org.opencv.core.DMatch;
|
||||
import org.opencv.core.KeyPoint;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfDMatch;
|
||||
import org.opencv.core.MatOfKeyPoint;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.features2d.AKAZE;
|
||||
import org.opencv.features2d.DescriptorMatcher;
|
||||
import org.opencv.features2d.Features2d;
|
||||
import org.opencv.highgui.HighGui;
|
||||
import org.opencv.imgcodecs.Imgcodecs;
|
||||
import org.w3c.dom.Document;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
class AKAZEMatch {
|
||||
public void run(String[] args) {
|
||||
//! [load]
|
||||
String filename1 = args.length > 2 ? args[0] : "../data/graf1.png";
|
||||
String filename2 = args.length > 2 ? args[1] : "../data/graf3.png";
|
||||
String filename3 = args.length > 2 ? args[2] : "../data/H1to3p.xml";
|
||||
Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
|
||||
        Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
        if (img1.empty() || img2.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        File file = new File(filename3);
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder documentBuilder;
        Document document;
        Mat homography = new Mat(3, 3, CvType.CV_64F);
        double[] homographyData = new double[(int) (homography.total()*homography.channels())];
        try {
            documentBuilder = documentBuilderFactory.newDocumentBuilder();
            document = documentBuilder.parse(file);
            String homographyStr = document.getElementsByTagName("data").item(0).getTextContent();
            String[] splited = homographyStr.split("\\s+");
            int idx = 0;
            for (String s : splited) {
                if (!s.isEmpty()) {
                    homographyData[idx] = Double.parseDouble(s);
                    idx++;
                }
            }
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
            System.exit(0);
        } catch (SAXException e) {
            e.printStackTrace();
            System.exit(0);
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(0);
        }
        homography.put(0, 0, homographyData);
        //! [load]

        //! [AKAZE]
        AKAZE akaze = AKAZE.create();
        MatOfKeyPoint kpts1 = new MatOfKeyPoint(), kpts2 = new MatOfKeyPoint();
        Mat desc1 = new Mat(), desc2 = new Mat();
        akaze.detectAndCompute(img1, new Mat(), kpts1, desc1);
        akaze.detectAndCompute(img2, new Mat(), kpts2, desc2);
        //! [AKAZE]

        //! [2-nn matching]
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        List<MatOfDMatch> knnMatches = new ArrayList<>();
        matcher.knnMatch(desc1, desc2, knnMatches, 2);
        //! [2-nn matching]

        //! [ratio test filtering]
        float ratioThreshold = 0.8f; // Nearest neighbor matching ratio
        List<KeyPoint> listOfMatched1 = new ArrayList<>();
        List<KeyPoint> listOfMatched2 = new ArrayList<>();
        List<KeyPoint> listOfKeypoints1 = kpts1.toList();
        List<KeyPoint> listOfKeypoints2 = kpts2.toList();
        for (int i = 0; i < knnMatches.size(); i++) {
            DMatch[] matches = knnMatches.get(i).toArray();
            float dist1 = matches[0].distance;
            float dist2 = matches[1].distance;
            if (dist1 < ratioThreshold * dist2) {
                listOfMatched1.add(listOfKeypoints1.get(matches[0].queryIdx));
                listOfMatched2.add(listOfKeypoints2.get(matches[0].trainIdx));
            }
        }
        //! [ratio test filtering]

        //! [homography check]
        double inlierThreshold = 2.5; // Distance threshold to identify inliers with homography check
        List<KeyPoint> listOfInliers1 = new ArrayList<>();
        List<KeyPoint> listOfInliers2 = new ArrayList<>();
        List<DMatch> listOfGoodMatches = new ArrayList<>();
        for (int i = 0; i < listOfMatched1.size(); i++) {
            Mat col = new Mat(3, 1, CvType.CV_64F);
            double[] colData = new double[(int) (col.total() * col.channels())];
            colData[0] = listOfMatched1.get(i).pt.x;
            colData[1] = listOfMatched1.get(i).pt.y;
            colData[2] = 1.0;
            col.put(0, 0, colData);

            Mat colRes = new Mat();
            Core.gemm(homography, col, 1.0, new Mat(), 0.0, colRes);
            colRes.get(0, 0, colData);
            Core.multiply(colRes, new Scalar(1.0 / colData[2]), col);
            col.get(0, 0, colData);

            double dist = Math.sqrt(Math.pow(colData[0] - listOfMatched2.get(i).pt.x, 2) +
                    Math.pow(colData[1] - listOfMatched2.get(i).pt.y, 2));

            if (dist < inlierThreshold) {
                listOfGoodMatches.add(new DMatch(listOfInliers1.size(), listOfInliers2.size(), 0));
                listOfInliers1.add(listOfMatched1.get(i));
                listOfInliers2.add(listOfMatched2.get(i));
            }
        }
        //! [homography check]

        //! [draw final matches]
        Mat res = new Mat();
        MatOfKeyPoint inliers1 = new MatOfKeyPoint(listOfInliers1.toArray(new KeyPoint[listOfInliers1.size()]));
        MatOfKeyPoint inliers2 = new MatOfKeyPoint(listOfInliers2.toArray(new KeyPoint[listOfInliers2.size()]));
        MatOfDMatch goodMatches = new MatOfDMatch(listOfGoodMatches.toArray(new DMatch[listOfGoodMatches.size()]));
        Features2d.drawMatches(img1, inliers1, img2, inliers2, goodMatches, res);
        Imgcodecs.imwrite("akaze_result.png", res);

        double inlierRatio = listOfInliers1.size() / (double) listOfMatched1.size();
        System.out.println("A-KAZE Matching Results");
        System.out.println("*******************************");
        System.out.println("# Keypoints 1: \t" + listOfKeypoints1.size());
        System.out.println("# Keypoints 2: \t" + listOfKeypoints2.size());
        System.out.println("# Matches: \t" + listOfMatched1.size());
        System.out.println("# Inliers: \t" + listOfInliers1.size());
        System.out.println("# Inliers Ratio: \t" + inlierRatio);

        HighGui.imshow("result", res);
        HighGui.waitKey();
        //! [draw final matches]

        System.exit(0);
    }
}

public class AKAZEMatchDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new AKAZEMatch().run(args);
    }
}
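The homography check above is a point-transfer test: each matched keypoint of the first image is mapped through the ground-truth homography, and the pair is kept only if it lands within `inlierThreshold` pixels of its match in the second image. A minimal sketch of that transfer, assuming a 3x3 CV_64F homography; the `HomographyTransfer`/`transfer` names are illustrative and not part of the sample:

    import org.opencv.core.Core;
    import org.opencv.core.CvType;
    import org.opencv.core.Mat;
    import org.opencv.core.Point;

    class HomographyTransfer {
        // Map p1 from image 1 into image 2 with a 3x3 homography H.
        static Point transfer(Mat H, Point p1) {
            Mat col = new Mat(3, 1, CvType.CV_64F);
            col.put(0, 0, p1.x, p1.y, 1.0);          // homogeneous coordinates [x y 1]^T
            Mat res = new Mat();
            Core.gemm(H, col, 1.0, new Mat(), 0.0, res); // res = H * col
            double[] v = new double[3];
            res.get(0, 0, v);
            return new Point(v[0] / v[2], v[1] / v[2]);  // de-homogenize
        }
    }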
@ -0,0 +1,56 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;

class SURFMatching {
    public void run(String[] args) {
        String filename1 = args.length > 1 ? args[0] : "../data/box.png";
        String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
        Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
        Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
        if (img1.empty() || img2.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
        double hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
        MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
        Mat descriptors1 = new Mat(), descriptors2 = new Mat();
        detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
        detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);

        //-- Step 2: Matching descriptor vectors with a brute force matcher
        // Since SURF is a floating-point descriptor NORM_L2 is used
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(descriptors1, descriptors2, matches);

        //-- Draw matches
        Mat imgMatches = new Mat();
        Features2d.drawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);

        HighGui.imshow("Matches", imgMatches);
        HighGui.waitKey(0);

        System.exit(0);
    }
}

public class SURFMatchingDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new SURFMatching().run(args);
    }
}
@ -0,0 +1,44 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;

class SURFDetection {
    public void run(String[] args) {
        String filename = args.length > 0 ? args[0] : "../data/box.png";
        Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_GRAYSCALE);
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SURF Detector
        double hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        detector.detect(src, keypoints);

        //-- Draw keypoints
        Features2d.drawKeypoints(src, keypoints, src);

        //-- Show detected (drawn) keypoints
        HighGui.imshow("SURF Keypoints", src);
        HighGui.waitKey(0);

        System.exit(0);
    }
}

public class SURFDetectionDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new SURFDetection().run(args);
    }
}
@ -0,0 +1,78 @@
import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.DMatch;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.xfeatures2d.SURF;

class SURFFLANNMatching {
    public void run(String[] args) {
        String filename1 = args.length > 1 ? args[0] : "../data/box.png";
        String filename2 = args.length > 1 ? args[1] : "../data/box_in_scene.png";
        Mat img1 = Imgcodecs.imread(filename1, Imgcodecs.IMREAD_GRAYSCALE);
        Mat img2 = Imgcodecs.imread(filename2, Imgcodecs.IMREAD_GRAYSCALE);
        if (img1.empty() || img2.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
        double hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
        MatOfKeyPoint keypoints1 = new MatOfKeyPoint(), keypoints2 = new MatOfKeyPoint();
        Mat descriptors1 = new Mat(), descriptors2 = new Mat();
        detector.detectAndCompute(img1, new Mat(), keypoints1, descriptors1);
        detector.detectAndCompute(img2, new Mat(), keypoints2, descriptors2);

        //-- Step 2: Matching descriptor vectors with a FLANN based matcher
        // Since SURF is a floating-point descriptor NORM_L2 is used
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
        List<MatOfDMatch> knnMatches = new ArrayList<>();
        matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2);

        //-- Filter matches using the Lowe's ratio test
        float ratioThresh = 0.7f;
        List<DMatch> listOfGoodMatches = new ArrayList<>();
        for (int i = 0; i < knnMatches.size(); i++) {
            if (knnMatches.get(i).rows() > 1) {
                DMatch[] matches = knnMatches.get(i).toArray();
                if (matches[0].distance < ratioThresh * matches[1].distance) {
                    listOfGoodMatches.add(matches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat imgMatches = new Mat();
        Features2d.drawMatches(img1, keypoints1, img2, keypoints2, goodMatches, imgMatches, Scalar.all(-1),
                Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);

        //-- Show detected matches
        HighGui.imshow("Good Matches", imgMatches);
        HighGui.waitKey(0);

        System.exit(0);
    }
}

public class SURFFLANNMatchingDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new SURFFLANNMatching().run(args);
    }
}
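A note on the matcher choices used across these samples: the AKAZE sample matches binary descriptors with Hamming distance (`BRUTEFORCE_HAMMING`), while the SURF samples use L2-based matchers (`BRUTEFORCE` or `FLANNBASED`) because SURF descriptors are floating point. A small illustrative helper, assuming only the stock `DescriptorMatcher` factory constants (the `MatcherChoice` class itself is hypothetical, not part of the samples):

    import org.opencv.features2d.DescriptorMatcher;

    class MatcherChoice {
        // Pick a matcher that fits the descriptor type.
        static DescriptorMatcher forDescriptor(boolean binaryDescriptor) {
            return binaryDescriptor
                    ? DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING) // AKAZE, ORB, BRISK
                    : DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);        // SURF, SIFT (float, L2)
        }
    }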
@ -0,0 +1,130 @@
import java.util.ArrayList;
import java.util.List;

import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.xfeatures2d.SURF;

class SURFFLANNMatchingHomography {
    public void run(String[] args) {
        String filenameObject = args.length > 1 ? args[0] : "../data/box.png";
        String filenameScene = args.length > 1 ? args[1] : "../data/box_in_scene.png";
        Mat imgObject = Imgcodecs.imread(filenameObject, Imgcodecs.IMREAD_GRAYSCALE);
        Mat imgScene = Imgcodecs.imread(filenameScene, Imgcodecs.IMREAD_GRAYSCALE);
        if (imgObject.empty() || imgScene.empty()) {
            System.err.println("Cannot read images!");
            System.exit(0);
        }

        //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors
        double hessianThreshold = 400;
        int nOctaves = 4, nOctaveLayers = 3;
        boolean extended = false, upright = false;
        SURF detector = SURF.create(hessianThreshold, nOctaves, nOctaveLayers, extended, upright);
        MatOfKeyPoint keypointsObject = new MatOfKeyPoint(), keypointsScene = new MatOfKeyPoint();
        Mat descriptorsObject = new Mat(), descriptorsScene = new Mat();
        detector.detectAndCompute(imgObject, new Mat(), keypointsObject, descriptorsObject);
        detector.detectAndCompute(imgScene, new Mat(), keypointsScene, descriptorsScene);

        //-- Step 2: Matching descriptor vectors with a FLANN based matcher
        // Since SURF is a floating-point descriptor NORM_L2 is used
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
        List<MatOfDMatch> knnMatches = new ArrayList<>();
        matcher.knnMatch(descriptorsObject, descriptorsScene, knnMatches, 2);

        //-- Filter matches using the Lowe's ratio test
        float ratioThresh = 0.75f;
        List<DMatch> listOfGoodMatches = new ArrayList<>();
        for (int i = 0; i < knnMatches.size(); i++) {
            if (knnMatches.get(i).rows() > 1) {
                DMatch[] matches = knnMatches.get(i).toArray();
                if (matches[0].distance < ratioThresh * matches[1].distance) {
                    listOfGoodMatches.add(matches[0]);
                }
            }
        }
        MatOfDMatch goodMatches = new MatOfDMatch();
        goodMatches.fromList(listOfGoodMatches);

        //-- Draw matches
        Mat imgMatches = new Mat();
        Features2d.drawMatches(imgObject, keypointsObject, imgScene, keypointsScene, goodMatches, imgMatches, Scalar.all(-1),
                Scalar.all(-1), new MatOfByte(), Features2d.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS);

        //-- Localize the object
        List<Point> obj = new ArrayList<>();
        List<Point> scene = new ArrayList<>();

        List<KeyPoint> listOfKeypointsObject = keypointsObject.toList();
        List<KeyPoint> listOfKeypointsScene = keypointsScene.toList();
        for (int i = 0; i < listOfGoodMatches.size(); i++) {
            //-- Get the keypoints from the good matches
            obj.add(listOfKeypointsObject.get(listOfGoodMatches.get(i).queryIdx).pt);
            scene.add(listOfKeypointsScene.get(listOfGoodMatches.get(i).trainIdx).pt);
        }

        MatOfPoint2f objMat = new MatOfPoint2f(), sceneMat = new MatOfPoint2f();
        objMat.fromList(obj);
        sceneMat.fromList(scene);
        double ransacReprojThreshold = 3.0;
        Mat H = Calib3d.findHomography( objMat, sceneMat, Calib3d.RANSAC, ransacReprojThreshold );

        //-- Get the corners from the image_1 ( the object to be "detected" )
        Mat objCorners = new Mat(4, 1, CvType.CV_32FC2), sceneCorners = new Mat();
        float[] objCornersData = new float[(int) (objCorners.total() * objCorners.channels())];
        objCorners.get(0, 0, objCornersData);
        objCornersData[0] = 0;
        objCornersData[1] = 0;
        objCornersData[2] = imgObject.cols();
        objCornersData[3] = 0;
        objCornersData[4] = imgObject.cols();
        objCornersData[5] = imgObject.rows();
        objCornersData[6] = 0;
        objCornersData[7] = imgObject.rows();
        objCorners.put(0, 0, objCornersData);

        Core.perspectiveTransform(objCorners, sceneCorners, H);
        float[] sceneCornersData = new float[(int) (sceneCorners.total() * sceneCorners.channels())];
        sceneCorners.get(0, 0, sceneCornersData);

        //-- Draw lines between the corners (the mapped object in the scene - image_2 )
        Imgproc.line(imgMatches, new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]),
                new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[2] + imgObject.cols(), sceneCornersData[3]),
                new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[4] + imgObject.cols(), sceneCornersData[5]),
                new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]), new Scalar(0, 255, 0), 4);
        Imgproc.line(imgMatches, new Point(sceneCornersData[6] + imgObject.cols(), sceneCornersData[7]),
                new Point(sceneCornersData[0] + imgObject.cols(), sceneCornersData[1]), new Scalar(0, 255, 0), 4);

        //-- Show detected matches
        HighGui.imshow("Good Matches & Object detection", imgMatches);
        HighGui.waitKey(0);

        System.exit(0);
    }
}

public class SURFFLANNMatchingHomographyDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new SURFFLANNMatchingHomography().run(args);
    }
}
119
3rdparty/opencv-4.5.4/samples/java/tutorial_code/highgui/trackbar/AddingImagesTrackbar.java
vendored
Normal file
@ -0,0 +1,119 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;

import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;

public class AddingImagesTrackbar {
    private static final int ALPHA_SLIDER_MAX = 100;
    private int alphaVal = 0;
    private Mat matImgSrc1;
    private Mat matImgSrc2;
    private Mat matImgDst = new Mat();
    private JFrame frame;
    private JLabel imgLabel;

    public AddingImagesTrackbar(String[] args) {
        //! [load]
        String imagePath1 = "../data/LinuxLogo.jpg";
        String imagePath2 = "../data/WindowsLogo.jpg";
        if (args.length > 1) {
            imagePath1 = args[0];
            imagePath2 = args[1];
        }
        matImgSrc1 = Imgcodecs.imread(imagePath1);
        matImgSrc2 = Imgcodecs.imread(imagePath2);
        //! [load]
        if (matImgSrc1.empty()) {
            System.out.println("Empty image: " + imagePath1);
            System.exit(0);
        }
        if (matImgSrc2.empty()) {
            System.out.println("Empty image: " + imagePath2);
            System.exit(0);
        }

        //! [window]
        // Create and set up the window.
        frame = new JFrame("Linear Blend");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matImgSrc2);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        //! [window]
    }

    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }

        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));

        //! [create_trackbar]
        sliderPanel.add(new JLabel(String.format("Alpha x %d", ALPHA_SLIDER_MAX)));
        JSlider slider = new JSlider(0, ALPHA_SLIDER_MAX, 0);
        slider.setMajorTickSpacing(20);
        slider.setMinorTickSpacing(5);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        //! [create_trackbar]
        //! [on_trackbar]
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                alphaVal = source.getValue();
                update();
            }
        });
        //! [on_trackbar]
        sliderPanel.add(slider);

        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    private void update() {
        double alpha = alphaVal / (double) ALPHA_SLIDER_MAX;
        double beta = 1.0 - alpha;
        Core.addWeighted(matImgSrc1, alpha, matImgSrc2, beta, 0, matImgDst);
        Image img = HighGui.toBufferedImage(matImgDst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new AddingImagesTrackbar(args);
            }
        });
    }
}
@ -0,0 +1,9 @@
public class Documentation {

    public static void main (String[] args) {

        //! [hello_world]
        System.out.println ("Hello World!");
        //! [hello_world]
    }
}
144
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ml/introduction_to_pca/IntroductionToPCADemo.java
vendored
Normal file
@ -0,0 +1,144 @@
import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

//This program demonstrates how to use OpenCV PCA to extract the orientation of an object.
class IntroductionToPCA {
    private void drawAxis(Mat img, Point p_, Point q_, Scalar colour, float scale) {
        Point p = new Point(p_.x, p_.y);
        Point q = new Point(q_.x, q_.y);
        //! [visualization1]
        double angle = Math.atan2(p.y - q.y, p.x - q.x); // angle in radians
        double hypotenuse = Math.sqrt((p.y - q.y) * (p.y - q.y) + (p.x - q.x) * (p.x - q.x));

        // Here we lengthen the arrow by a factor of scale
        q.x = (int) (p.x - scale * hypotenuse * Math.cos(angle));
        q.y = (int) (p.y - scale * hypotenuse * Math.sin(angle));
        Imgproc.line(img, p, q, colour, 1, Imgproc.LINE_AA, 0);

        // create the arrow hooks
        p.x = (int) (q.x + 9 * Math.cos(angle + Math.PI / 4));
        p.y = (int) (q.y + 9 * Math.sin(angle + Math.PI / 4));
        Imgproc.line(img, p, q, colour, 1, Imgproc.LINE_AA, 0);

        p.x = (int) (q.x + 9 * Math.cos(angle - Math.PI / 4));
        p.y = (int) (q.y + 9 * Math.sin(angle - Math.PI / 4));
        Imgproc.line(img, p, q, colour, 1, Imgproc.LINE_AA, 0);
        //! [visualization1]
    }

    private double getOrientation(MatOfPoint ptsMat, Mat img) {
        List<Point> pts = ptsMat.toList();
        //! [pca]
        // Construct a buffer used by the pca analysis
        int sz = pts.size();
        Mat dataPts = new Mat(sz, 2, CvType.CV_64F);
        double[] dataPtsData = new double[(int) (dataPts.total() * dataPts.channels())];
        for (int i = 0; i < dataPts.rows(); i++) {
            dataPtsData[i * dataPts.cols()] = pts.get(i).x;
            dataPtsData[i * dataPts.cols() + 1] = pts.get(i).y;
        }
        dataPts.put(0, 0, dataPtsData);

        // Perform PCA analysis
        Mat mean = new Mat();
        Mat eigenvectors = new Mat();
        Mat eigenvalues = new Mat();
        Core.PCACompute2(dataPts, mean, eigenvectors, eigenvalues);
        double[] meanData = new double[(int) (mean.total() * mean.channels())];
        mean.get(0, 0, meanData);

        // Store the center of the object
        Point cntr = new Point(meanData[0], meanData[1]);

        // Store the eigenvalues and eigenvectors
        double[] eigenvectorsData = new double[(int) (eigenvectors.total() * eigenvectors.channels())];
        double[] eigenvaluesData = new double[(int) (eigenvalues.total() * eigenvalues.channels())];
        eigenvectors.get(0, 0, eigenvectorsData);
        eigenvalues.get(0, 0, eigenvaluesData);
        //! [pca]

        //! [visualization]
        // Draw the principal components
        Imgproc.circle(img, cntr, 3, new Scalar(255, 0, 255), 2);
        Point p1 = new Point(cntr.x + 0.02 * eigenvectorsData[0] * eigenvaluesData[0],
                cntr.y + 0.02 * eigenvectorsData[1] * eigenvaluesData[0]);
        Point p2 = new Point(cntr.x - 0.02 * eigenvectorsData[2] * eigenvaluesData[1],
                cntr.y - 0.02 * eigenvectorsData[3] * eigenvaluesData[1]);
        drawAxis(img, cntr, p1, new Scalar(0, 255, 0), 1);
        drawAxis(img, cntr, p2, new Scalar(255, 255, 0), 5);

        double angle = Math.atan2(eigenvectorsData[1], eigenvectorsData[0]); // orientation in radians
        //! [visualization]

        return angle;
    }

    public void run(String[] args) {
        //! [pre-process]
        // Load image
        String filename = args.length > 0 ? args[0] : "../data/pca_test1.jpg";
        Mat src = Imgcodecs.imread(filename);

        // Check if image is loaded successfully
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }

        Mat srcOriginal = src.clone();
        HighGui.imshow("src", srcOriginal);

        // Convert image to grayscale
        Mat gray = new Mat();
        Imgproc.cvtColor(src, gray, Imgproc.COLOR_BGR2GRAY);

        // Convert image to binary
        Mat bw = new Mat();
        Imgproc.threshold(gray, bw, 50, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        //! [pre-process]

        //! [contours]
        // Find all the contours in the thresholded image
        List<MatOfPoint> contours = new ArrayList<>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(bw, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);

        for (int i = 0; i < contours.size(); i++) {
            // Calculate the area of each contour
            double area = Imgproc.contourArea(contours.get(i));
            // Ignore contours that are too small or too large
            if (area < 1e2 || 1e5 < area)
                continue;

            // Draw each contour only for visualisation purposes
            Imgproc.drawContours(src, contours, i, new Scalar(0, 0, 255), 2);
            // Find the orientation of each shape
            getOrientation(contours.get(i), src);
        }
        //! [contours]

        HighGui.imshow("output", src);

        HighGui.waitKey();
        System.exit(0);
    }
}

public class IntroductionToPCADemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new IntroductionToPCA().run(args);
    }
}
@ -0,0 +1,99 @@
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.TermCriteria;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.ml.Ml;
import org.opencv.ml.SVM;

public class IntroductionToSVMDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Set up training data
        //! [setup1]
        int[] labels = { 1, -1, -1, -1 };
        float[] trainingData = { 501, 10, 255, 10, 501, 255, 10, 501 };
        //! [setup1]
        //! [setup2]
        Mat trainingDataMat = new Mat(4, 2, CvType.CV_32FC1);
        trainingDataMat.put(0, 0, trainingData);
        Mat labelsMat = new Mat(4, 1, CvType.CV_32SC1);
        labelsMat.put(0, 0, labels);
        //! [setup2]

        // Train the SVM
        //! [init]
        SVM svm = SVM.create();
        svm.setType(SVM.C_SVC);
        svm.setKernel(SVM.LINEAR);
        svm.setTermCriteria(new TermCriteria(TermCriteria.MAX_ITER, 100, 1e-6));
        //! [init]
        //! [train]
        svm.train(trainingDataMat, Ml.ROW_SAMPLE, labelsMat);
        //! [train]

        // Data for visual representation
        int width = 512, height = 512;
        Mat image = Mat.zeros(height, width, CvType.CV_8UC3);

        // Show the decision regions given by the SVM
        //! [show]
        byte[] imageData = new byte[(int) (image.total() * image.channels())];
        Mat sampleMat = new Mat(1, 2, CvType.CV_32F);
        float[] sampleMatData = new float[(int) (sampleMat.total() * sampleMat.channels())];
        for (int i = 0; i < image.rows(); i++) {
            for (int j = 0; j < image.cols(); j++) {
                sampleMatData[0] = j;
                sampleMatData[1] = i;
                sampleMat.put(0, 0, sampleMatData);
                float response = svm.predict(sampleMat);

                if (response == 1) {
                    imageData[(i * image.cols() + j) * image.channels()] = 0;
                    imageData[(i * image.cols() + j) * image.channels() + 1] = (byte) 255;
                    imageData[(i * image.cols() + j) * image.channels() + 2] = 0;
                } else if (response == -1) {
                    imageData[(i * image.cols() + j) * image.channels()] = (byte) 255;
                    imageData[(i * image.cols() + j) * image.channels() + 1] = 0;
                    imageData[(i * image.cols() + j) * image.channels() + 2] = 0;
                }
            }
        }
        image.put(0, 0, imageData);
        //! [show]

        // Show the training data
        //! [show_data]
        int thickness = -1;
        int lineType = Imgproc.LINE_8;
        Imgproc.circle(image, new Point(501, 10), 5, new Scalar(0, 0, 0), thickness, lineType, 0);
        Imgproc.circle(image, new Point(255, 10), 5, new Scalar(255, 255, 255), thickness, lineType, 0);
        Imgproc.circle(image, new Point(501, 255), 5, new Scalar(255, 255, 255), thickness, lineType, 0);
        Imgproc.circle(image, new Point(10, 501), 5, new Scalar(255, 255, 255), thickness, lineType, 0);
        //! [show_data]

        // Show support vectors
        //! [show_vectors]
        thickness = 2;
        Mat sv = svm.getUncompressedSupportVectors();
        float[] svData = new float[(int) (sv.total() * sv.channels())];
        sv.get(0, 0, svData);
        for (int i = 0; i < sv.rows(); ++i) {
            Imgproc.circle(image, new Point(svData[i * sv.cols()], svData[i * sv.cols() + 1]), 6,
                    new Scalar(128, 128, 128), thickness, lineType, 0);
        }
        //! [show_vectors]

        Imgcodecs.imwrite("result.png", image); // save the image

        HighGui.imshow("SVM Simple Example", image); // show it to the user
        HighGui.waitKey();
        System.exit(0);
    }
}
186
3rdparty/opencv-4.5.4/samples/java/tutorial_code/ml/non_linear_svms/NonLinearSVMsDemo.java
vendored
Normal file
@ -0,0 +1,186 @@
import java.util.Random;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.TermCriteria;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.ml.Ml;
import org.opencv.ml.SVM;

public class NonLinearSVMsDemo {
    public static final int NTRAINING_SAMPLES = 100;
    public static final float FRAC_LINEAR_SEP = 0.9f;

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        System.out.println("\n--------------------------------------------------------------------------");
        System.out.println("This program shows Support Vector Machines for Non-Linearly Separable Data. ");
        System.out.println("--------------------------------------------------------------------------\n");

        // Data for visual representation
        int width = 512, height = 512;
        Mat I = Mat.zeros(height, width, CvType.CV_8UC3);

        // --------------------- 1. Set up training data randomly---------------------------------------
        Mat trainData = new Mat(2 * NTRAINING_SAMPLES, 2, CvType.CV_32F);
        Mat labels = new Mat(2 * NTRAINING_SAMPLES, 1, CvType.CV_32S);

        Random rng = new Random(100); // Random value generation class

        // Set up the linearly separable part of the training data
        int nLinearSamples = (int) (FRAC_LINEAR_SEP * NTRAINING_SAMPLES);

        //! [setup1]
        // Generate random points for the class 1
        Mat trainClass = trainData.rowRange(0, nLinearSamples);
        // The x coordinate of the points is in [0, 0.4)
        Mat c = trainClass.colRange(0, 1);
        float[] cData = new float[(int) (c.total() * c.channels())];
        double[] cDataDbl = rng.doubles(cData.length, 0, 0.4f * width).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);
        // The y coordinate of the points is in [0, 1)
        c = trainClass.colRange(1, 2);
        cData = new float[(int) (c.total() * c.channels())];
        cDataDbl = rng.doubles(cData.length, 0, height).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);

        // Generate random points for the class 2
        trainClass = trainData.rowRange(2 * NTRAINING_SAMPLES - nLinearSamples, 2 * NTRAINING_SAMPLES);
        // The x coordinate of the points is in [0.6, 1]
        c = trainClass.colRange(0, 1);
        cData = new float[(int) (c.total() * c.channels())];
        cDataDbl = rng.doubles(cData.length, 0.6 * width, width).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);
        // The y coordinate of the points is in [0, 1)
        c = trainClass.colRange(1, 2);
        cData = new float[(int) (c.total() * c.channels())];
        cDataDbl = rng.doubles(cData.length, 0, height).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);
        //! [setup1]

        // ------------------ Set up the non-linearly separable part of the training data ---------------
        //! [setup2]
        // Generate random points for the classes 1 and 2
        trainClass = trainData.rowRange(nLinearSamples, 2 * NTRAINING_SAMPLES - nLinearSamples);
        // The x coordinate of the points is in [0.4, 0.6)
        c = trainClass.colRange(0, 1);
        cData = new float[(int) (c.total() * c.channels())];
        cDataDbl = rng.doubles(cData.length, 0.4 * width, 0.6 * width).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);
        // The y coordinate of the points is in [0, 1)
        c = trainClass.colRange(1, 2);
        cData = new float[(int) (c.total() * c.channels())];
        cDataDbl = rng.doubles(cData.length, 0, height).toArray();
        for (int i = 0; i < cData.length; i++) {
            cData[i] = (float) cDataDbl[i];
        }
        c.put(0, 0, cData);
        //! [setup2]

        // ------------------------- Set up the labels for the classes---------------------------------
        labels.rowRange(0, NTRAINING_SAMPLES).setTo(new Scalar(1)); // Class 1
        labels.rowRange(NTRAINING_SAMPLES, 2 * NTRAINING_SAMPLES).setTo(new Scalar(2)); // Class 2

        // ------------------------ 2. Set up the support vector machines parameters--------------------
        System.out.println("Starting training process");
        //! [init]
        SVM svm = SVM.create();
        svm.setType(SVM.C_SVC);
        svm.setC(0.1);
        svm.setKernel(SVM.LINEAR);
        svm.setTermCriteria(new TermCriteria(TermCriteria.MAX_ITER, (int) 1e7, 1e-6));
        //! [init]

        // ------------------------ 3. Train the svm----------------------------------------------------
        //! [train]
        svm.train(trainData, Ml.ROW_SAMPLE, labels);
        //! [train]
        System.out.println("Finished training process");

        // ------------------------ 4. Show the decision regions----------------------------------------
        //! [show]
        byte[] IData = new byte[(int) (I.total() * I.channels())];
        Mat sampleMat = new Mat(1, 2, CvType.CV_32F);
        float[] sampleMatData = new float[(int) (sampleMat.total() * sampleMat.channels())];
        for (int i = 0; i < I.rows(); i++) {
            for (int j = 0; j < I.cols(); j++) {
                sampleMatData[0] = j;
                sampleMatData[1] = i;
                sampleMat.put(0, 0, sampleMatData);
                float response = svm.predict(sampleMat);

                if (response == 1) {
                    IData[(i * I.cols() + j) * I.channels()] = 0;
                    IData[(i * I.cols() + j) * I.channels() + 1] = 100;
                    IData[(i * I.cols() + j) * I.channels() + 2] = 0;
                } else if (response == 2) {
                    IData[(i * I.cols() + j) * I.channels()] = 100;
                    IData[(i * I.cols() + j) * I.channels() + 1] = 0;
                    IData[(i * I.cols() + j) * I.channels() + 2] = 0;
                }
            }
        }
        I.put(0, 0, IData);
        //! [show]

        // ----------------------- 5. Show the training data--------------------------------------------
        //! [show_data]
        int thick = -1;
        int lineType = Imgproc.LINE_8;
        float px, py;
        // Class 1
        float[] trainDataData = new float[(int) (trainData.total() * trainData.channels())];
        trainData.get(0, 0, trainDataData);
        for (int i = 0; i < NTRAINING_SAMPLES; i++) {
            px = trainDataData[i * trainData.cols()];
            py = trainDataData[i * trainData.cols() + 1];
            Imgproc.circle(I, new Point(px, py), 3, new Scalar(0, 255, 0), thick, lineType, 0);
        }
        // Class 2
        for (int i = NTRAINING_SAMPLES; i < 2 * NTRAINING_SAMPLES; ++i) {
            px = trainDataData[i * trainData.cols()];
            py = trainDataData[i * trainData.cols() + 1];
            Imgproc.circle(I, new Point(px, py), 3, new Scalar(255, 0, 0), thick, lineType, 0);
        }
        //! [show_data]

        // ------------------------- 6. Show support vectors--------------------------------------------
        //! [show_vectors]
        thick = 2;
        Mat sv = svm.getUncompressedSupportVectors();
        float[] svData = new float[(int) (sv.total() * sv.channels())];
        sv.get(0, 0, svData);
        for (int i = 0; i < sv.rows(); i++) {
            Imgproc.circle(I, new Point(svData[i * sv.cols()], svData[i * sv.cols() + 1]), 6, new Scalar(128, 128, 128),
                    thick, lineType, 0);
        }
        //! [show_vectors]

        Imgcodecs.imwrite("result.png", I); // save the Image
        HighGui.imshow("SVM for Non-Linear Training Data", I); // show it to the user
        HighGui.waitKey();
        System.exit(0);
    }
}
@ -0,0 +1,98 @@
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.videoio.VideoCapture;

class ObjectDetection {
    public void detectAndDisplay(Mat frame, CascadeClassifier faceCascade, CascadeClassifier eyesCascade) {
        Mat frameGray = new Mat();
        Imgproc.cvtColor(frame, frameGray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.equalizeHist(frameGray, frameGray);

        // -- Detect faces
        MatOfRect faces = new MatOfRect();
        faceCascade.detectMultiScale(frameGray, faces);

        List<Rect> listOfFaces = faces.toList();
        for (Rect face : listOfFaces) {
            Point center = new Point(face.x + face.width / 2, face.y + face.height / 2);
            Imgproc.ellipse(frame, center, new Size(face.width / 2, face.height / 2), 0, 0, 360,
                    new Scalar(255, 0, 255));

            Mat faceROI = frameGray.submat(face);

            // -- In each face, detect eyes
            MatOfRect eyes = new MatOfRect();
            eyesCascade.detectMultiScale(faceROI, eyes);

            List<Rect> listOfEyes = eyes.toList();
            for (Rect eye : listOfEyes) {
                Point eyeCenter = new Point(face.x + eye.x + eye.width / 2, face.y + eye.y + eye.height / 2);
                int radius = (int) Math.round((eye.width + eye.height) * 0.25);
                Imgproc.circle(frame, eyeCenter, radius, new Scalar(255, 0, 0), 4);
            }
        }

        //-- Show what you got
        HighGui.imshow("Capture - Face detection", frame );
    }

    public void run(String[] args) {
        String filenameFaceCascade = args.length > 2 ? args[0] : "../../data/haarcascades/haarcascade_frontalface_alt.xml";
        String filenameEyesCascade = args.length > 2 ? args[1] : "../../data/haarcascades/haarcascade_eye_tree_eyeglasses.xml";
        int cameraDevice = args.length > 2 ? Integer.parseInt(args[2]) : 0;

        CascadeClassifier faceCascade = new CascadeClassifier();
        CascadeClassifier eyesCascade = new CascadeClassifier();

        if (!faceCascade.load(filenameFaceCascade)) {
            System.err.println("--(!)Error loading face cascade: " + filenameFaceCascade);
            System.exit(0);
        }
        if (!eyesCascade.load(filenameEyesCascade)) {
            System.err.println("--(!)Error loading eyes cascade: " + filenameEyesCascade);
            System.exit(0);
        }

        VideoCapture capture = new VideoCapture(cameraDevice);
        if (!capture.isOpened()) {
            System.err.println("--(!)Error opening video capture");
            System.exit(0);
        }

        Mat frame = new Mat();
        while (capture.read(frame)) {
            if (frame.empty()) {
                System.err.println("--(!) No captured frame -- Break!");
                break;
            }

            //-- 3. Apply the classifier to the frame
            detectAndDisplay(frame, faceCascade, eyesCascade);

            if (HighGui.waitKey(10) == 27) {
                break;// escape
            }
        }

        System.exit(0);
    }
}

public class ObjectDetectionDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new ObjectDetection().run(args);
    }
}
103
3rdparty/opencv-4.5.4/samples/java/tutorial_code/photo/hdr_imaging/HDRImagingDemo.java
vendored
Normal file
@ -0,0 +1,103 @@
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.photo.CalibrateDebevec;
import org.opencv.photo.MergeDebevec;
import org.opencv.photo.MergeMertens;
import org.opencv.photo.Photo;
import org.opencv.photo.Tonemap;

class HDRImaging {
    public void loadExposureSeq(String path, List<Mat> images, List<Float> times) {
        path += "/";

        List<String> lines;
        try {
            lines = Files.readAllLines(Paths.get(path + "list.txt"));

            for (String line : lines) {
                String[] splitStr = line.split("\\s+");
                if (splitStr.length == 2) {
                    String name = splitStr[0];
                    Mat img = Imgcodecs.imread(path + name);
                    images.add(img);
                    float val = Float.parseFloat(splitStr[1]);
                    times.add(1 / val);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void run(String[] args) {
        String path = args.length > 0 ? args[0] : "";
        if (path.isEmpty()) {
            System.out.println("Path is empty. Use the directory that contains images and exposure times.");
            System.exit(0);
        }

        //! [Load images and exposure times]
        List<Mat> images = new ArrayList<>();
        List<Float> times = new ArrayList<>();
        loadExposureSeq(path, images, times);
        //! [Load images and exposure times]

        //! [Estimate camera response]
        Mat response = new Mat();
        CalibrateDebevec calibrate = Photo.createCalibrateDebevec();
        Mat matTimes = new Mat(times.size(), 1, CvType.CV_32F);
        float[] arrayTimes = new float[(int) (matTimes.total()*matTimes.channels())];
        for (int i = 0; i < times.size(); i++) {
            arrayTimes[i] = times.get(i);
        }
        matTimes.put(0, 0, arrayTimes);
        calibrate.process(images, response, matTimes);
        //! [Estimate camera response]

        //! [Make HDR image]
        Mat hdr = new Mat();
        MergeDebevec mergeDebevec = Photo.createMergeDebevec();
        mergeDebevec.process(images, hdr, matTimes);
        //! [Make HDR image]

        //! [Tonemap HDR image]
        Mat ldr = new Mat();
        Tonemap tonemap = Photo.createTonemap(2.2f);
        tonemap.process(hdr, ldr);
        //! [Tonemap HDR image]

        //! [Perform exposure fusion]
        Mat fusion = new Mat();
        MergeMertens mergeMertens = Photo.createMergeMertens();
        mergeMertens.process(images, fusion);
        //! [Perform exposure fusion]

        //! [Write results]
        Core.multiply(fusion, new Scalar(255,255,255), fusion);
        Core.multiply(ldr, new Scalar(255,255,255), ldr);
        Imgcodecs.imwrite("fusion.png", fusion);
        Imgcodecs.imwrite("ldr.png", ldr);
        Imgcodecs.imwrite("hdr.hdr", hdr);
        //! [Write results]

        System.exit(0);
    }
}

public class HDRImagingDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new HDRImaging().run(args);
    }
}
@ -0,0 +1,79 @@
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.BackgroundSubtractor;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;

class BackgroundSubtraction {
    public void run(String[] args) {
        String input = args.length > 0 ? args[0] : "../data/vtest.avi";
        // use equals() for string comparison rather than reference equality
        boolean useMOG2 = args.length > 1 ? args[1].equals("MOG2") : true;

        //! [create]
        BackgroundSubtractor backSub;
        if (useMOG2) {
            backSub = Video.createBackgroundSubtractorMOG2();
        } else {
            backSub = Video.createBackgroundSubtractorKNN();
        }
        //! [create]

        //! [capture]
        VideoCapture capture = new VideoCapture(input);
        if (!capture.isOpened()) {
            System.err.println("Unable to open: " + input);
            System.exit(0);
        }
        //! [capture]

        Mat frame = new Mat(), fgMask = new Mat();
        while (true) {
            capture.read(frame);
            if (frame.empty()) {
                break;
            }

            //! [apply]
            // update the background model
            backSub.apply(frame, fgMask);
            //! [apply]

            //! [display_frame_number]
            // get the frame number and write it on the current frame
            Imgproc.rectangle(frame, new Point(10, 2), new Point(100, 20), new Scalar(255, 255, 255), -1);
            String frameNumberString = String.format("%d", (int)capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Imgproc.putText(frame, frameNumberString, new Point(15, 15), Core.FONT_HERSHEY_SIMPLEX, 0.5,
                    new Scalar(0, 0, 0));
            //! [display_frame_number]

            //! [show]
            // show the current frame and the fg masks
            HighGui.imshow("Frame", frame);
            HighGui.imshow("FG Mask", fgMask);
            //! [show]

            // get the input from the keyboard
            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
        }

        HighGui.waitKey();
        System.exit(0);
    }
}

public class BackgroundSubtractionDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        new BackgroundSubtraction().run(args);
    }
}
75
3rdparty/opencv-4.5.4/samples/java/tutorial_code/video/meanshift/CamshiftDemo.java
vendored
Normal file
@ -0,0 +1,75 @@
import java.util.Arrays;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;


class Camshift {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open file!");
            System.exit(-1);
        }

        Mat frame = new Mat(), hsv_roi = new Mat(), mask = new Mat(), roi;

        // take the first frame of the video
        capture.read(frame);

        //setup initial location of window
        Rect track_window = new Rect(300, 200, 100, 50);

        // set up the ROI for tracking
        roi = new Mat(frame, track_window);
        Imgproc.cvtColor(roi, hsv_roi, Imgproc.COLOR_BGR2HSV);
        Core.inRange(hsv_roi, new Scalar(0, 60, 32), new Scalar(180, 255, 255), mask);

        MatOfFloat range = new MatOfFloat(0, 256);
        Mat roi_hist = new Mat();
        MatOfInt histSize = new MatOfInt(180);
        MatOfInt channels = new MatOfInt(0);
        Imgproc.calcHist(Arrays.asList(hsv_roi), channels, mask, roi_hist, histSize, range);
        Core.normalize(roi_hist, roi_hist, 0, 255, Core.NORM_MINMAX);

        // Setup the termination criteria, either 10 iteration or move by atleast 1 pt
        TermCriteria term_crit = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

        while (true) {
            Mat hsv = new Mat() , dst = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }
            Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_BGR2HSV);
            Imgproc.calcBackProject(Arrays.asList(hsv), channels, roi_hist, dst, range, 1);

            // apply camshift to get the new location
            RotatedRect rot_rect = Video.CamShift(dst, track_window, term_crit);

            // Draw it on image
            Point[] points = new Point[4];
            rot_rect.points(points);
            for (int i = 0; i < 4 ;i++) {
                Imgproc.line(frame, points[i], points[(i+1)%4], new Scalar(255, 0, 0),2);
            }

            HighGui.imshow("img2", frame);
            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q'|| keyboard == 27) {
                break;
            }
        }
        System.exit(0);
    }
}

public class CamshiftDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Camshift().run(args);
    }
}
70
3rdparty/opencv-4.5.4/samples/java/tutorial_code/video/meanshift/MeanshiftDemo.java
vendored
Normal file
@ -0,0 +1,70 @@
import java.util.Arrays;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;


class Meanshift {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open file!");
            System.exit(-1);
        }
        Mat frame = new Mat(), hsv_roi = new Mat(), mask = new Mat(), roi;

        // take the first frame of the video
        capture.read(frame);

        //setup initial location of window
        Rect track_window = new Rect(300, 200, 100, 50);

        // setup initial location of window
        roi = new Mat(frame, track_window);
        Imgproc.cvtColor(roi, hsv_roi, Imgproc.COLOR_BGR2HSV);
        Core.inRange(hsv_roi, new Scalar(0, 60, 32), new Scalar(180, 255, 255), mask);

        MatOfFloat range = new MatOfFloat(0, 256);
        Mat roi_hist = new Mat();
        MatOfInt histSize = new MatOfInt(180);
        MatOfInt channels = new MatOfInt(0);
        Imgproc.calcHist(Arrays.asList(hsv_roi), channels, mask, roi_hist, histSize, range);
        Core.normalize(roi_hist, roi_hist, 0, 255, Core.NORM_MINMAX);

        // Setup the termination criteria, either 10 iteration or move by atleast 1 pt
        TermCriteria term_crit = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

        while (true) {
            Mat hsv = new Mat() , dst = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }
            Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_BGR2HSV);
            Imgproc.calcBackProject(Arrays.asList(hsv), channels, roi_hist, dst, range, 1);

            // apply meanshift to get the new location
            Video.meanShift(dst, track_window, term_crit);

            // Draw it on image
            Imgproc.rectangle(frame, track_window, new Scalar(255, 0, 0), 2);
            HighGui.imshow("img2", frame);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
        }
        System.exit(0);
    }
}

public class MeanshiftDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Meanshift().run(args);
    }
}
96
3rdparty/opencv-4.5.4/samples/java/tutorial_code/video/optical_flow/OpticalFlowDemo.java
vendored
Normal file
@ -0,0 +1,96 @@
import java.util.ArrayList;
import java.util.Random;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;

class OptFlow {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open this file");
            System.exit(-1);
        }


        // Create some random colors
        Scalar[] colors = new Scalar[100];
        Random rng = new Random();
        for (int i = 0 ; i < 100 ; i++) {
            int r = rng.nextInt(256);
            int g = rng.nextInt(256);
            int b = rng.nextInt(256);
            colors[i] = new Scalar(r, g, b);
        }

        Mat old_frame = new Mat() , old_gray = new Mat();

        // Since the function Imgproc.goodFeaturesToTrack requires MatofPoint
        // therefore first p0MatofPoint is passed to the function and then converted to MatOfPoint2f
        MatOfPoint p0MatofPoint = new MatOfPoint();
        capture.read(old_frame);
        Imgproc.cvtColor(old_frame, old_gray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.goodFeaturesToTrack(old_gray, p0MatofPoint,100,0.3,7, new Mat(),7,false,0.04);

        MatOfPoint2f p0 = new MatOfPoint2f(p0MatofPoint.toArray()) , p1 = new MatOfPoint2f();

        // Create a mask image for drawing purposes
        Mat mask = Mat.zeros(old_frame.size(), old_frame.type());

        while (true) {
            Mat frame = new Mat(), frame_gray = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }

            Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);

            // calculate optical flow
            MatOfByte status = new MatOfByte();
            MatOfFloat err = new MatOfFloat();
            TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS,10,0.03);
            Video.calcOpticalFlowPyrLK(old_gray, frame_gray, p0, p1, status, err, new Size(15,15),2, criteria);

            byte StatusArr[] = status.toArray();
            Point p0Arr[] = p0.toArray();
            Point p1Arr[] = p1.toArray();
            ArrayList<Point> good_new = new ArrayList<>();

            for (int i = 0; i<StatusArr.length ; i++ ) {
                if (StatusArr[i] == 1) {
                    good_new.add(p1Arr[i]);
                    Imgproc.line(mask, p1Arr[i], p0Arr[i], colors[i],2);
                    Imgproc.circle(frame, p1Arr[i],5, colors[i],-1);
                }
            }

            Mat img = new Mat();
            Core.add(frame, mask, img);

            HighGui.imshow("Frame", img);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }

            // Now update the previous frame and previous points
            old_gray = frame_gray.clone();
            Point[] good_new_arr = new Point[good_new.size()];
            good_new_arr = good_new.toArray(good_new_arr);
            p0 = new MatOfPoint2f(good_new_arr);
        }
        System.exit(0);
    }
}

public class OpticalFlowDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new OptFlow().run(args);
    }
}
72
3rdparty/opencv-4.5.4/samples/java/tutorial_code/video/optical_flow/OpticalFlowDenseDemo.java
vendored
Normal file
@ -0,0 +1,72 @@
import java.util.ArrayList;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;


class OptFlowDense {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            //error in opening the video input
            System.out.println("Unable to open file!");
            System.exit(-1);
        }

        Mat frame1 = new Mat() , prvs = new Mat();
        capture.read(frame1);
        Imgproc.cvtColor(frame1, prvs, Imgproc.COLOR_BGR2GRAY);

        while (true) {
            Mat frame2 = new Mat(), next = new Mat();
            capture.read(frame2);
            if (frame2.empty()) {
                break;
            }
            Imgproc.cvtColor(frame2, next, Imgproc.COLOR_BGR2GRAY);

            Mat flow = new Mat(prvs.size(), CvType.CV_32FC2);
            Video.calcOpticalFlowFarneback(prvs, next, flow,0.5,3,15,3,5,1.2,0);

            // visualization
            ArrayList<Mat> flow_parts = new ArrayList<>(2);
            Core.split(flow, flow_parts);
            Mat magnitude = new Mat(), angle = new Mat(), magn_norm = new Mat();
            Core.cartToPolar(flow_parts.get(0), flow_parts.get(1), magnitude, angle,true);
            Core.normalize(magnitude, magn_norm,0.0,1.0, Core.NORM_MINMAX);
            float factor = (float) ((1.0/360.0)*(180.0/255.0));
            Mat new_angle = new Mat();
            Core.multiply(angle, new Scalar(factor), new_angle);

            //build hsv image
            ArrayList<Mat> _hsv = new ArrayList<>() ;
            Mat hsv = new Mat(), hsv8 = new Mat(), bgr = new Mat();

            _hsv.add(new_angle);
            _hsv.add(Mat.ones(angle.size(), CvType.CV_32F));
            _hsv.add(magn_norm);
            Core.merge(_hsv, hsv);
            hsv.convertTo(hsv8, CvType.CV_8U, 255.0);
            Imgproc.cvtColor(hsv8, bgr, Imgproc.COLOR_HSV2BGR);

            HighGui.imshow("frame2", bgr);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
            prvs = next;
        }
        System.exit(0);
    }
}

public class OpticalFlowDenseDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new OptFlowDense().run(args);
    }
}
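A brief note on the dense-flow visualization above, as I read it: `cartToPolar(..., true)` returns the flow angle in degrees (0 to 360). Multiplying by `(1.0/360.0)*(180.0/255.0)` and then by 255.0 in `convertTo` maps it into OpenCV's 8-bit hue range, since 360 * (1/360) * (180/255) * 255 = 180. The normalized magnitude becomes the value channel, so direction is encoded as color and speed as brightness.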