Commit 38e6d668 authored by Gourav Roy, committed by Alexander Alekhin

Merge pull request #16276 from themechanicalcoder:video-tutorial

* Added Java code for meanshift and optical_flow

* Added Java code for the video module

* Added appropriate spaces in code

* Converted absolute paths to command line arguments

* Added spaces at appropriate places
Parent 2c21ea2d
@@ -57,6 +57,14 @@ low light, low light values are discarded using **cv.inRange()** function.
@include samples/python/tutorial_code/video/meanshift/meanshift.py
@end_toggle
@add_toggle_java
- **Downloadable code**: Click
[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/video/meanshift/MeanshiftDemo.java)
- **Code at a glance:**
@include samples/java/tutorial_code/video/meanshift/MeanshiftDemo.java
@end_toggle
Three frames of the video I used are given below:
![image](images/meanshift_result.jpg)
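For reference, the low-light masking described above amounts to only a few lines of the Java sample: the ROI is converted to HSV, dark and low-saturation pixels are masked out with inRange, and the hue histogram used for backprojection is computed under that mask. This is condensed from the included MeanshiftDemo.java, assuming `roi` already holds the pixels of the initial tracking window as in the sample:

@code{.java}
Mat hsv_roi = new Mat(), mask = new Mat(), roi_hist = new Mat();
Imgproc.cvtColor(roi, hsv_roi, Imgproc.COLOR_BGR2HSV);
// discard low-light values: keep only pixels with S >= 60 and V >= 32
Core.inRange(hsv_roi, new Scalar(0, 60, 32), new Scalar(180, 255, 255), mask);
Imgproc.calcHist(Arrays.asList(hsv_roi), new MatOfInt(0), mask, roi_hist,
        new MatOfInt(180), new MatOfFloat(0, 256));
Core.normalize(roi_hist, roi_hist, 0, 255, Core.NORM_MINMAX);
@endcode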
@@ -98,6 +106,14 @@ parameters (used to be passed as search window in next iteration). See the code
@include samples/python/tutorial_code/video/meanshift/camshift.py
@end_toggle
@add_toggle_java
- **Downloadable code**: Click
[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/video/meanshift/CamshiftDemo.java)
- **Code at a glance:**
@include samples/java/tutorial_code/video/meanshift/CamshiftDemo.java
@end_toggle
Three frames of the result are shown below:
![image](images/camshift_result.jpg)
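Unlike meanShift, which only moves the axis-aligned window, Video.CamShift returns a RotatedRect, and the Java binding also writes the updated box back into `track_window` so it can serve as the search window for the next frame. The rotated box is drawn from its four corner points, condensed here from the included CamshiftDemo.java:

@code{.java}
RotatedRect rot_rect = Video.CamShift(dst, track_window, term_crit);
// track_window is updated in place and reused as the search window
// in the next iteration; the rotated rectangle is what gets drawn
Point[] points = new Point[4];
rot_rect.points(points);
for (int i = 0; i < 4; i++) {
    Imgproc.line(frame, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0), 2);
}
@endcode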
@@ -109,6 +109,15 @@ below:
@include samples/python/tutorial_code/video/optical_flow/optical_flow.py
@end_toggle
@add_toggle_java
- **Downloadable code**: Click
[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/video/optical_flow/OpticalFlowDemo.java)
- **Code at a glance:**
@include samples/java/tutorial_code/video/optical_flow/OpticalFlowDemo.java
@end_toggle
(This code doesn't check how correct the next keypoints are. So even if a feature point disappears
from the image, there is a chance that optical flow finds a next point which may look close to it. So
for robust tracking, corner points should actually be detected at regular intervals. OpenCV
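That periodic re-detection is not implemented in the sample above. A minimal, hypothetical sketch of how the tracking loop could refresh its points is shown below; the `frameCount` counter and the interval of 5 frames are assumptions for illustration, not part of the tutorial code:

@code{.java}
// Hypothetical extension, not part of the sample: re-detect corners
// every few frames so that lost or drifted features are replaced.
frameCount++;
if (frameCount % 5 == 0 || p0.empty()) {
    MatOfPoint fresh = new MatOfPoint();
    Imgproc.goodFeaturesToTrack(old_gray, fresh, 100, 0.3, 7, new Mat(), 7, false, 0.04);
    p0 = new MatOfPoint2f(fresh.toArray());
}
@endcode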
@@ -151,6 +160,15 @@ corresponds to Value plane. See the code below:
@end_toggle
@add_toggle_java
- **Downloadable code**: Click
[here](https://github.com/opencv/opencv/tree/3.4/samples/java/tutorial_code/video/optical_flow/OpticalFlowDenseDemo.java)
- **Code at a glance:**
@include samples/java/tutorial_code/video/optical_flow/OpticalFlowDenseDemo.java
@end_toggle
See the result below:
![image](images/opticalfb.jpg)
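The scaling constant in the Java visualization is easy to misread: Core.cartToPolar returns the angle in degrees (0..360), 8-bit hue only spans 0..180, and the whole HSV image is later multiplied by 255 in convertTo. Condensed from the included OpticalFlowDenseDemo.java, with the resulting ranges spelled out:

@code{.java}
// angle is in degrees (0..360); after *factor and the later *255 in
// convertTo it lands in 0..180, the hue range expected by COLOR_HSV2BGR
float factor = (float) ((1.0 / 360.0) * (180.0 / 255.0));
Mat new_angle = new Mat(), magn_norm = new Mat();
Core.multiply(angle, new Scalar(factor), new_angle);
// magnitude is normalized to 0..1 and becomes 0..255 after the same *255
Core.normalize(magnitude, magn_norm, 0.0, 1.0, Core.NORM_MINMAX);
@endcode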
@@ -17,12 +17,12 @@ tracking and foreground extractions.
- @subpage tutorial_meanshift
*Languages:* C++, Python
*Languages:* C++, Java, Python
Learn how to use the Meanshift and Camshift algorithms to track objects in videos.
- @subpage tutorial_optical_flow
*Languages:* C++, Python
*Languages:* C++, Java, Python
We will learn how to use optical flow methods to track sparse features or to create a dense representation.
import java.util.Arrays;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;
class Camshift {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open file!");
            System.exit(-1);
        }

        Mat frame = new Mat(), hsv_roi = new Mat(), mask = new Mat(), roi;

        // take the first frame of the video
        capture.read(frame);

        // set up the initial location of the tracking window
        Rect track_window = new Rect(300, 200, 100, 50);

        // set up the ROI for tracking
        roi = new Mat(frame, track_window);
        Imgproc.cvtColor(roi, hsv_roi, Imgproc.COLOR_BGR2HSV);
        Core.inRange(hsv_roi, new Scalar(0, 60, 32), new Scalar(180, 255, 255), mask);

        MatOfFloat range = new MatOfFloat(0, 256);
        Mat roi_hist = new Mat();
        MatOfInt histSize = new MatOfInt(180);
        MatOfInt channels = new MatOfInt(0);
        Imgproc.calcHist(Arrays.asList(hsv_roi), channels, mask, roi_hist, histSize, range);
        Core.normalize(roi_hist, roi_hist, 0, 255, Core.NORM_MINMAX);

        // Set up the termination criteria: either 10 iterations or a move by at least 1 pt
        TermCriteria term_crit = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

        while (true) {
            Mat hsv = new Mat(), dst = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }
            Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_BGR2HSV);
            Imgproc.calcBackProject(Arrays.asList(hsv), channels, roi_hist, dst, range, 1);

            // apply camshift to get the new location
            RotatedRect rot_rect = Video.CamShift(dst, track_window, term_crit);

            // draw it on the image
            Point[] points = new Point[4];
            rot_rect.points(points);
            for (int i = 0; i < 4; i++) {
                Imgproc.line(frame, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0), 2);
            }
            HighGui.imshow("img2", frame);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
        }
        System.exit(0);
    }
}

public class CamshiftDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Camshift().run(args);
    }
}
import java.util.Arrays;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;
class Meanshift {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open file!");
            System.exit(-1);
        }

        Mat frame = new Mat(), hsv_roi = new Mat(), mask = new Mat(), roi;

        // take the first frame of the video
        capture.read(frame);

        // set up the initial location of the tracking window
        Rect track_window = new Rect(300, 200, 100, 50);

        // set up the ROI for tracking
        roi = new Mat(frame, track_window);
        Imgproc.cvtColor(roi, hsv_roi, Imgproc.COLOR_BGR2HSV);
        Core.inRange(hsv_roi, new Scalar(0, 60, 32), new Scalar(180, 255, 255), mask);

        MatOfFloat range = new MatOfFloat(0, 256);
        Mat roi_hist = new Mat();
        MatOfInt histSize = new MatOfInt(180);
        MatOfInt channels = new MatOfInt(0);
        Imgproc.calcHist(Arrays.asList(hsv_roi), channels, mask, roi_hist, histSize, range);
        Core.normalize(roi_hist, roi_hist, 0, 255, Core.NORM_MINMAX);

        // Set up the termination criteria: either 10 iterations or a move by at least 1 pt
        TermCriteria term_crit = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

        while (true) {
            Mat hsv = new Mat(), dst = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }
            Imgproc.cvtColor(frame, hsv, Imgproc.COLOR_BGR2HSV);
            Imgproc.calcBackProject(Arrays.asList(hsv), channels, roi_hist, dst, range, 1);

            // apply meanshift to get the new location
            Video.meanShift(dst, track_window, term_crit);

            // draw it on the image
            Imgproc.rectangle(frame, track_window, new Scalar(255, 0, 0), 2);
            HighGui.imshow("img2", frame);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
        }
        System.exit(0);
    }
}

public class MeanshiftDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Meanshift().run(args);
    }
}
import java.util.ArrayList;
import java.util.Random;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;
class OptFlow {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            System.out.println("Unable to open this file");
            System.exit(-1);
        }

        // Create some random colors for drawing the tracks
        Scalar[] colors = new Scalar[100];
        Random rng = new Random();
        for (int i = 0; i < 100; i++) {
            int r = rng.nextInt(256);
            int g = rng.nextInt(256);
            int b = rng.nextInt(256);
            colors[i] = new Scalar(r, g, b);
        }

        Mat old_frame = new Mat(), old_gray = new Mat();

        // Imgproc.goodFeaturesToTrack expects a MatOfPoint, so the corners are
        // detected into p0MatofPoint first and then converted to MatOfPoint2f
        MatOfPoint p0MatofPoint = new MatOfPoint();
        capture.read(old_frame);
        Imgproc.cvtColor(old_frame, old_gray, Imgproc.COLOR_BGR2GRAY);
        Imgproc.goodFeaturesToTrack(old_gray, p0MatofPoint, 100, 0.3, 7, new Mat(), 7, false, 0.04);
        MatOfPoint2f p0 = new MatOfPoint2f(p0MatofPoint.toArray()), p1 = new MatOfPoint2f();

        // Create a mask image for drawing purposes
        Mat mask = Mat.zeros(old_frame.size(), old_frame.type());

        while (true) {
            Mat frame = new Mat(), frame_gray = new Mat();
            capture.read(frame);
            if (frame.empty()) {
                break;
            }
            Imgproc.cvtColor(frame, frame_gray, Imgproc.COLOR_BGR2GRAY);

            // calculate optical flow
            MatOfByte status = new MatOfByte();
            MatOfFloat err = new MatOfFloat();
            TermCriteria criteria = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 10, 0.03);
            Video.calcOpticalFlowPyrLK(old_gray, frame_gray, p0, p1, status, err, new Size(15, 15), 2, criteria);

            byte StatusArr[] = status.toArray();
            Point p0Arr[] = p0.toArray();
            Point p1Arr[] = p1.toArray();
            ArrayList<Point> good_new = new ArrayList<>();

            // keep and draw only the points whose flow was actually found
            for (int i = 0; i < StatusArr.length; i++) {
                if (StatusArr[i] == 1) {
                    good_new.add(p1Arr[i]);
                    Imgproc.line(mask, p1Arr[i], p0Arr[i], colors[i], 2);
                    Imgproc.circle(frame, p1Arr[i], 5, colors[i], -1);
                }
            }

            Mat img = new Mat();
            Core.add(frame, mask, img);
            HighGui.imshow("Frame", img);

            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }

            // Now update the previous frame and previous points
            old_gray = frame_gray.clone();
            Point[] good_new_arr = new Point[good_new.size()];
            good_new_arr = good_new.toArray(good_new_arr);
            p0 = new MatOfPoint2f(good_new_arr);
        }
        System.exit(0);
    }
}

public class OpticalFlowDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new OptFlow().run(args);
    }
}
import java.util.ArrayList;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.video.Video;
import org.opencv.videoio.VideoCapture;
class OptFlowDense {
    public void run(String[] args) {
        String filename = args[0];
        VideoCapture capture = new VideoCapture(filename);
        if (!capture.isOpened()) {
            // error in opening the video input
            System.out.println("Unable to open file!");
            System.exit(-1);
        }

        Mat frame1 = new Mat(), prvs = new Mat();
        capture.read(frame1);
        Imgproc.cvtColor(frame1, prvs, Imgproc.COLOR_BGR2GRAY);

        while (true) {
            Mat frame2 = new Mat(), next = new Mat();
            capture.read(frame2);
            if (frame2.empty()) {
                break;
            }
            Imgproc.cvtColor(frame2, next, Imgproc.COLOR_BGR2GRAY);

            // dense optical flow: one 2D flow vector per pixel
            Mat flow = new Mat(prvs.size(), CvType.CV_32FC2);
            Video.calcOpticalFlowFarneback(prvs, next, flow, 0.5, 3, 15, 3, 5, 1.2, 0);

            // visualization
            ArrayList<Mat> flow_parts = new ArrayList<>(2);
            Core.split(flow, flow_parts);
            Mat magnitude = new Mat(), angle = new Mat(), magn_norm = new Mat();
            Core.cartToPolar(flow_parts.get(0), flow_parts.get(1), magnitude, angle, true);
            Core.normalize(magnitude, magn_norm, 0.0, 1.0, Core.NORM_MINMAX);
            float factor = (float) ((1.0 / 360.0) * (180.0 / 255.0));
            Mat new_angle = new Mat();
            Core.multiply(angle, new Scalar(factor), new_angle);

            // build the HSV image: hue encodes flow direction, value encodes magnitude
            ArrayList<Mat> _hsv = new ArrayList<>();
            Mat hsv = new Mat(), hsv8 = new Mat(), bgr = new Mat();
            _hsv.add(new_angle);
            _hsv.add(Mat.ones(angle.size(), CvType.CV_32F));
            _hsv.add(magn_norm);
            Core.merge(_hsv, hsv);
            hsv.convertTo(hsv8, CvType.CV_8U, 255.0);
            Imgproc.cvtColor(hsv8, bgr, Imgproc.COLOR_HSV2BGR);

            HighGui.imshow("frame2", bgr);
            int keyboard = HighGui.waitKey(30);
            if (keyboard == 'q' || keyboard == 27) {
                break;
            }
            prvs = next;
        }
        System.exit(0);
    }
}

public class OpticalFlowDenseDemo {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new OptFlowDense().run(args);
    }
}