提交 fae67e0b 编写于 作者: A Andrey Kamaev

Refactored Android samples

上级 72a55a41
...@@ -8,31 +8,38 @@ import android.view.MenuItem; ...@@ -8,31 +8,38 @@ import android.view.MenuItem;
import android.view.Window; import android.view.Window;
public class Sample0Base extends Activity { public class Sample0Base extends Activity {
private static final String TAG = "Sample0Base::Activity"; private static final String TAG = "Sample::Activity";
public static final int VIEW_MODE_RGBA = 0; public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1; public static final int VIEW_MODE_GRAY = 1;
private MenuItem mItemPreviewRGBA; private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray; private MenuItem mItemPreviewGray;
public int viewMode; public static int viewMode = VIEW_MODE_RGBA;
public Sample0Base() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample0View(this)); setContentView(new Sample0View(this));
viewMode = VIEW_MODE_RGBA;
} }
@Override
public boolean onCreateOptionsMenu(Menu menu) { public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA"); mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY"); mItemPreviewGray = menu.add("Preview GRAY");
return true; return true;
} }
@Override
public boolean onOptionsItemSelected(MenuItem item) { public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item); Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) if (item == mItemPreviewRGBA)
......
...@@ -2,132 +2,45 @@ package org.opencv.samples.s0; ...@@ -2,132 +2,45 @@ package org.opencv.samples.s0;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.util.List;
class Sample0View extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample0Base::View";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
class Sample0View extends SampleViewBase {
public Sample0View(Context context) { public Sample0View(Context context) {
super(context); super(context);
mHolder = getHolder();
mHolder.addCallback(this);
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
if ( mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
//selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(mFrameWidth, mFrameHeight);
mCamera.setParameters(params);
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
mCamera = Camera.open();
mCamera.setPreviewCallback(
new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized(Sample0View.this) {
mFrame = data;
Sample0View.this.notify();
}
}
}
);
(new Thread(this)).start();
} }
public void surfaceDestroyed(SurfaceHolder holder) { @Override
mThreadRun = false; protected Bitmap processFrame(byte[] data) {
if(mCamera != null) { int frameSize = getFrameWidth() * getFrameHeight();
synchronized(Sample0View.this) { int[] rgba = new int[frameSize];
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
}
public void run() { int view_mode = Sample0Base.viewMode;
mThreadRun = true; if (view_mode == Sample0Base.VIEW_MODE_GRAY) {
Log.i(TAG, "Starting thread"); for (int i = 0; i < frameSize; i++) {
while(mThreadRun) { int y = (0xff & ((int) data[i]));
byte[] data = null; rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y;
synchronized(this) {
try {
this.wait();
data = mFrame;
} catch (InterruptedException e) {
e.printStackTrace();
}
} }
} else if (view_mode == Sample0Base.VIEW_MODE_RGBA) {
int frameSize = mFrameWidth*mFrameHeight; for (int i = 0; i < getFrameHeight(); i++)
int[] rgba = new int[frameSize]; for (int j = 0; j < getFrameWidth(); j++) {
int y = (0xff & ((int) data[i * getFrameWidth() + j]));
Sample0Base a = (Sample0Base)getContext(); int u = (0xff & ((int) data[frameSize + (i >> 1) * getFrameWidth() + (j & ~1) + 0]));
int view_mode = a.viewMode; int v = (0xff & ((int) data[frameSize + (i >> 1) * getFrameWidth() + (j & ~1) + 1]));
if(view_mode == Sample0Base.VIEW_MODE_GRAY) { y = y < 16 ? 16 : y;
for(int i = 0; i < frameSize; i++) {
int y = (0xff & ((int)data[i])); int r = Math.round(1.164f * (y - 16) + 1.596f * (v - 128));
rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y; int g = Math.round(1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = Math.round(1.164f * (y - 16) + 2.018f * (u - 128));
r = r < 0 ? 0 : (r > 255 ? 255 : r);
g = g < 0 ? 0 : (g > 255 ? 255 : g);
b = b < 0 ? 0 : (b > 255 ? 255 : b);
rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r;
} }
} else if (view_mode == Sample0Base.VIEW_MODE_RGBA) {
for(int i = 0; i < mFrameHeight; i++)
for(int j = 0; j < mFrameWidth; j++) {
int y = (0xff & ((int)data[i*mFrameWidth+j]));
int u = (0xff & ((int)data[frameSize + (i >> 1) * mFrameWidth + (j & ~1) + 0]));
int v = (0xff & ((int)data[frameSize + (i >> 1) * mFrameWidth + (j & ~1) + 1]));
if (y < 16) y = 16;
int r = Math.round(1.164f * (y - 16) + 1.596f * (v - 128) );
int g = Math.round(1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = Math.round(1.164f * (y - 16) + 2.018f * (u - 128));
if (r < 0) r = 0; if (r > 255) r = 255;
if (g < 0) g = 0; if (g > 255) g = 255;
if (b < 0) b = 0; if (b > 255) b = 255;
rgba[i*mFrameWidth+j] = 0xff000000 + (b << 16) + (g << 8) + r;
}
}
Bitmap bmp = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
bmp.setPixels(rgba, 0/*offset*/, mFrameWidth /*stride*/, 0, 0, mFrameWidth, mFrameHeight);
Canvas canvas = mHolder.lockCanvas();
canvas.drawBitmap(bmp, (canvas.getWidth()-mFrameWidth)/2, (canvas.getHeight()-mFrameHeight)/2, null);
mHolder.unlockCanvasAndPost(canvas);
} }
Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;
} }
} }
\ No newline at end of file
package org.opencv.samples.s0;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Base class for the OpenCV Android camera samples. It owns the camera and the
 * preview surface, and runs a background thread that passes every captured
 * preview frame to the subclass hook {@link #processFrame(byte[])}, then draws
 * the returned bitmap centered on the surface.
 *
 * Frame hand-off uses wait/notify on this object: the camera preview callback
 * stores the latest frame buffer and notifies; the processing thread waits for
 * that notification before processing.
 */
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "Sample::SurfaceView";

    private Camera        mCamera;       // released and nulled in surfaceDestroyed
    private SurfaceHolder mHolder;
    private int           mFrameWidth;   // selected preview width, set in surfaceChanged
    private int           mFrameHeight;  // selected preview height, set in surfaceChanged
    private byte[]        mFrame;        // most recent preview frame (YUV buffer from the camera)
    private boolean       mThreadRun;    // processing-thread loop flag

    public SampleViewBase(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Width of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameWidth() {
        return mFrameWidth;
    }

    /** Height of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameHeight() {
        return mFrameHeight;
    }

    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        Log.i(TAG, "surfaceChanged"); // fixed: previously logged "surfaceCreated" (copy-paste)
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // Select the supported preview size whose height is closest to the
            // surface height.
            {
                double minDiff = Double.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());
            mCamera.setParameters(params);
            mCamera.startPreview();
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "surfaceCreated");
        mCamera = Camera.open();
        mCamera.setPreviewCallback(new PreviewCallback() {
            public void onPreviewFrame(byte[] data, Camera camera) {
                // Publish the latest frame and wake the processing thread.
                synchronized (SampleViewBase.this) {
                    mFrame = data;
                    SampleViewBase.this.notify();
                }
            }
        });
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "surfaceDestroyed");
        mThreadRun = false;
        if (mCamera != null) {
            // Take the same lock the processing thread holds while waiting, so
            // the camera is not torn down mid-frame.
            synchronized (this) {
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
                mCamera.release();
                mCamera = null;
            }
        }
    }

    /**
     * Subclass hook: convert one camera preview frame (raw byte buffer of the
     * current preview size) into a bitmap to draw, or return null to skip drawing.
     */
    protected abstract Bitmap processFrame(byte[] data);

    public void run() {
        mThreadRun = true;
        Log.i(TAG, "Starting processing thread");
        while (mThreadRun) {
            Bitmap bmp = null;

            synchronized (this) {
                try {
                    // Wait for the camera callback to publish a frame.
                    // NOTE(review): a spurious wakeup before the first frame would
                    // pass a null buffer to processFrame — confirm subclasses tolerate it.
                    this.wait();
                    bmp = processFrame(mFrame);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Processing thread interrupted", e); // was e.printStackTrace()
                }
            }

            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle(); // one bitmap per frame; free native memory promptly
            }
        }
    }
}
\ No newline at end of file
...@@ -7,10 +7,10 @@ import android.graphics.Bitmap; ...@@ -7,10 +7,10 @@ import android.graphics.Bitmap;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
class Sample1View extends SampleViewBase { class Sample1View extends SampleViewBase {
Mat mYuv; private Mat mYuv;
Mat mRgba; private Mat mRgba;
Mat mGraySubmat; private Mat mGraySubmat;
Mat mIntermediateMat; private Mat mIntermediateMat;
public Sample1View(Context context) { public Sample1View(Context context) {
super(context); super(context);
......
...@@ -2,12 +2,20 @@ package org.opencv.samples.s2; ...@@ -2,12 +2,20 @@ package org.opencv.samples.s2;
import android.app.Activity; import android.app.Activity;
import android.os.Bundle; import android.os.Bundle;
import android.util.Log;
import android.view.Window; import android.view.Window;
public class Sample2Native extends Activity { public class Sample2Native extends Activity {
private static final String TAG = "Sample::Activity";
public Sample2Native() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample2View(this)); setContentView(new Sample2View(this));
......
...@@ -2,108 +2,23 @@ package org.opencv.samples.s2; ...@@ -2,108 +2,23 @@ package org.opencv.samples.s2;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.util.List; class Sample2View extends SampleViewBase {
class Sample2View extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample2Native::View";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
public Sample2View(Context context) { public Sample2View(Context context) {
super(context); super(context);
mHolder = getHolder();
mHolder.addCallback(this);
} }
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { @Override
if ( mCamera != null) { protected Bitmap processFrame(byte[] data) {
Camera.Parameters params = mCamera.getParameters(); int frameSize = getFrameWidth() * getFrameHeight();
List<Camera.Size> sizes = params.getSupportedPreviewSizes(); int[] rgba = new int[frameSize];
mFrameWidth = width;
mFrameHeight = height;
//selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(mFrameWidth, mFrameHeight);
mCamera.setParameters(params);
mCamera.startPreview();
}
}
public void surfaceCreated(SurfaceHolder holder) {
mCamera = Camera.open();
mCamera.setPreviewCallback(
new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized(Sample2View.this) {
mFrame = data;
Sample2View.this.notify();
}
}
}
);
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) { FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba);
mThreadRun = false;
if(mCamera != null) {
synchronized(Sample2View.this) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
}
public void run() { Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
mThreadRun = true; bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
Log.i(TAG, "Starting thread"); return bmp;
while(mThreadRun) {
byte[] data = null;
synchronized(this) {
try {
this.wait();
data = mFrame;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
int frameSize = mFrameWidth*mFrameHeight;
int[] rgba = new int[frameSize];
FindFeatures(mFrameWidth, mFrameHeight, data, rgba);
Bitmap bmp = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
bmp.setPixels(rgba, 0/*offset*/, mFrameWidth /*stride*/, 0, 0, mFrameWidth, mFrameHeight);
Canvas canvas = mHolder.lockCanvas();
canvas.drawBitmap(bmp, (canvas.getWidth()-mFrameWidth)/2, (canvas.getHeight()-mFrameHeight)/2, null);
mHolder.unlockCanvasAndPost(canvas);
}
} }
public native void FindFeatures(int width, int height, byte yuv[], int[] rgba); public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
......
package org.opencv.samples.s2;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Base class for the OpenCV Android camera samples. It owns the camera and the
 * preview surface, and runs a background thread that passes every captured
 * preview frame to the subclass hook {@link #processFrame(byte[])}, then draws
 * the returned bitmap centered on the surface.
 *
 * Frame hand-off uses wait/notify on this object: the camera preview callback
 * stores the latest frame buffer and notifies; the processing thread waits for
 * that notification before processing.
 */
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "Sample::SurfaceView";

    private Camera        mCamera;       // released and nulled in surfaceDestroyed
    private SurfaceHolder mHolder;
    private int           mFrameWidth;   // selected preview width, set in surfaceChanged
    private int           mFrameHeight;  // selected preview height, set in surfaceChanged
    private byte[]        mFrame;        // most recent preview frame (YUV buffer from the camera)
    private boolean       mThreadRun;    // processing-thread loop flag

    public SampleViewBase(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Width of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameWidth() {
        return mFrameWidth;
    }

    /** Height of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameHeight() {
        return mFrameHeight;
    }

    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        Log.i(TAG, "surfaceChanged"); // fixed: previously logged "surfaceCreated" (copy-paste)
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // Select the supported preview size whose height is closest to the
            // surface height.
            {
                double minDiff = Double.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());
            mCamera.setParameters(params);
            mCamera.startPreview();
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "surfaceCreated");
        mCamera = Camera.open();
        mCamera.setPreviewCallback(new PreviewCallback() {
            public void onPreviewFrame(byte[] data, Camera camera) {
                // Publish the latest frame and wake the processing thread.
                synchronized (SampleViewBase.this) {
                    mFrame = data;
                    SampleViewBase.this.notify();
                }
            }
        });
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "surfaceDestroyed");
        mThreadRun = false;
        if (mCamera != null) {
            // Take the same lock the processing thread holds while waiting, so
            // the camera is not torn down mid-frame.
            synchronized (this) {
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
                mCamera.release();
                mCamera = null;
            }
        }
    }

    /**
     * Subclass hook: convert one camera preview frame (raw byte buffer of the
     * current preview size) into a bitmap to draw, or return null to skip drawing.
     */
    protected abstract Bitmap processFrame(byte[] data);

    public void run() {
        mThreadRun = true;
        Log.i(TAG, "Starting processing thread");
        while (mThreadRun) {
            Bitmap bmp = null;

            synchronized (this) {
                try {
                    // Wait for the camera callback to publish a frame.
                    // NOTE(review): a spurious wakeup before the first frame would
                    // pass a null buffer to processFrame — confirm subclasses tolerate it.
                    this.wait();
                    bmp = processFrame(mFrame);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Processing thread interrupted", e); // was e.printStackTrace()
                }
            }

            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle(); // one bitmap per frame; free native memory promptly
            }
        }
    }
}
\ No newline at end of file
...@@ -8,40 +8,45 @@ import android.view.MenuItem; ...@@ -8,40 +8,45 @@ import android.view.MenuItem;
import android.view.Window; import android.view.Window;
public class Sample4Mixed extends Activity { public class Sample4Mixed extends Activity {
private static final String TAG = "Sample4Mixed::Activity"; private static final String TAG = "Sample::Activity";
public static final int VIEW_MODE_RGBA = 0; public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1; public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2; public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_SOBEL = 3; public static final int VIEW_MODE_SOBEL = 3;
public static final int VIEW_MODE_BLUR = 4; public static final int VIEW_MODE_BLUR = 4;
public static final int VIEW_MODE_FEATURES = 5; public static final int VIEW_MODE_FEATURES = 5;
private MenuItem mItemPreviewRGBA; private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray; private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny; private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewSobel; private MenuItem mItemPreviewSobel;
private MenuItem mItemPreviewBlur; private MenuItem mItemPreviewBlur;
private MenuItem mItemPreviewFeatures; private MenuItem mItemPreviewFeatures;
public int viewMode; public static int viewMode = VIEW_MODE_RGBA;
public Sample4Mixed() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
Log.i(TAG, "onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample4View(this)); setContentView(new Sample4View(this));
viewMode = VIEW_MODE_RGBA;
} }
public boolean onCreateOptionsMenu(Menu menu) { public boolean onCreateOptionsMenu(Menu menu) {
mItemPreviewRGBA = menu.add("Preview RGBA"); Log.i(TAG, "onCreateOptionsMenu");
mItemPreviewGray = menu.add("Preview GRAY"); mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny"); mItemPreviewCanny = menu.add("Canny");
mItemPreviewSobel = menu.add("Sobel"); mItemPreviewSobel = menu.add("Sobel");
mItemPreviewBlur = menu.add("Blur"); mItemPreviewBlur = menu.add("Blur");
mItemPreviewFeatures = menu.add("Find features"); mItemPreviewFeatures = menu.add("Find features");
return true; return true;
} }
......
package org.opencv.samples.s4; package org.opencv.samples.s4;
import org.opencv.*;
import android.content.Context; import android.content.Context;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.view.SurfaceView;
class Sample4View extends SampleViewBase {
import org.opencv.CvType;
import org.opencv.Mat;
import org.opencv.Size;
import org.opencv.core;
import org.opencv.imgproc;
import org.opencv.android;
import java.util.List;
class Sample4View extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample4Mixed::View";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private Mat mYuv; private Mat mYuv;
private Mat mRgba; private Mat mRgba;
private Mat mGraySubmat; private Mat mGraySubmat;
...@@ -35,145 +14,85 @@ class Sample4View extends SurfaceView implements SurfaceHolder.Callback, Runnabl ...@@ -35,145 +14,85 @@ class Sample4View extends SurfaceView implements SurfaceHolder.Callback, Runnabl
public Sample4View(Context context) { public Sample4View(Context context) {
super(context); super(context);
mHolder = getHolder();
mHolder.addCallback(this);
} }
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
if ( mCamera != null) { super.surfaceChanged(_holder, format, width, height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes(); synchronized (this) {
mFrameWidth = width; // initialize Mats before usage
mFrameHeight = height; mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
//selecting optimal camera preview size
{ mRgba = new Mat();
double minDiff = Double.MAX_VALUE; mIntermediateMat = new Mat();
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(mFrameWidth, mFrameHeight);
mCamera.setParameters(params);
mCamera.startPreview();
// initialize all required Mats before usage to minimize number of auxiliary jni calls
if(mYuv != null) mYuv.dispose();
mYuv = new Mat(mFrameHeight+mFrameHeight/2, mFrameWidth, CvType.CV_8UC1);
if(mRgba != null) mRgba.dispose();
mRgba = new Mat(mFrameHeight, mFrameWidth, CvType.CV_8UC4);
if(mGraySubmat != null) mGraySubmat.dispose();
mGraySubmat = mYuv.submat(0, mFrameHeight, 0, mFrameWidth);
if(mIntermediateMat != null) mIntermediateMat.dispose();
mIntermediateMat = new Mat(mFrameHeight, mFrameWidth, CvType.CV_8UC1);
} }
} }
public void surfaceCreated(SurfaceHolder holder) { @Override
mCamera = Camera.open(); protected Bitmap processFrame(byte[] data) {
mCamera.setPreviewCallback( mYuv.put(0, 0, data);
new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized(Sample4View.this) {
mFrame = data;
Sample4View.this.notify();
}
}
}
);
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) { switch (Sample4Mixed.viewMode) {
mThreadRun = false; case Sample4Mixed.VIEW_MODE_GRAY:
if(mCamera != null) { imgproc.cvtColor(mGraySubmat, mRgba, imgproc.CV_GRAY2RGBA, 4);
synchronized(Sample4View.this) { break;
mCamera.stopPreview(); case Sample4Mixed.VIEW_MODE_RGBA:
mCamera.setPreviewCallback(null); imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
mCamera.release(); break;
mCamera = null; case Sample4Mixed.VIEW_MODE_CANNY:
} imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_SOBEL:
imgproc.Sobel(mGraySubmat, mIntermediateMat, CvType.CV_8U, 1, 1);
core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 8);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_BLUR:
imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
imgproc.blur(mRgba, mRgba, new Size(15, 15));
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
} }
// Explicitly dispose Mats Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
if(mYuv != null) {
mYuv.dispose(); if (android.MatToBitmap(mRgba, bmp))
return bmp;
bmp.recycle();
return null;
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.dispose();
if (mRgba != null)
mRgba.dispose();
if (mGraySubmat != null)
mGraySubmat.dispose();
if (mIntermediateMat != null)
mIntermediateMat.dispose();
mYuv = null; mYuv = null;
}
if(mRgba != null) {
mRgba.dispose();
mRgba = null; mRgba = null;
}
if(mGraySubmat != null) {
mGraySubmat.dispose();
mGraySubmat = null; mGraySubmat = null;
}
if(mIntermediateMat != null) {
mIntermediateMat.dispose();
mIntermediateMat = null; mIntermediateMat = null;
} }
} }
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting thread");
while(mThreadRun) {
synchronized(this) {
try {
this.wait();
mYuv.put(0, 0, mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Sample4Mixed a = (Sample4Mixed)getContext();
switch(a.viewMode)
{
case Sample4Mixed.VIEW_MODE_GRAY:
imgproc.cvtColor(mGraySubmat, mRgba, imgproc.CV_GRAY2RGBA, 4);
break;
case Sample4Mixed.VIEW_MODE_RGBA:
imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
break;
case Sample4Mixed.VIEW_MODE_CANNY:
imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_SOBEL:
imgproc.Sobel(mGraySubmat, mIntermediateMat, CvType.CV_8U, 1, 1);
core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 8);
imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_BLUR:
imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
imgproc.blur(mRgba, mRgba, new Size(15, 15));
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}
Bitmap bmp = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
android.MatToBitmap(mRgba, bmp);
Canvas canvas = mHolder.lockCanvas();
canvas.drawBitmap(bmp, (canvas.getWidth()-mFrameWidth)/2, (canvas.getHeight()-mFrameHeight)/2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
public native void FindFeatures(long matAddrGr, long matAddrRgba); public native void FindFeatures(long matAddrGr, long matAddrRgba);
static { static {
System.loadLibrary("mixed_sample"); System.loadLibrary("mixed_sample");
} }
......
package org.opencv.samples.s4;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Base class for the OpenCV Android camera samples. It owns the camera and the
 * preview surface, and runs a background thread that passes every captured
 * preview frame to the subclass hook {@link #processFrame(byte[])}, then draws
 * the returned bitmap centered on the surface.
 *
 * Frame hand-off uses wait/notify on this object: the camera preview callback
 * stores the latest frame buffer and notifies; the processing thread waits for
 * that notification before processing.
 */
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "Sample::SurfaceView";

    private Camera        mCamera;       // released and nulled in surfaceDestroyed
    private SurfaceHolder mHolder;
    private int           mFrameWidth;   // selected preview width, set in surfaceChanged
    private int           mFrameHeight;  // selected preview height, set in surfaceChanged
    private byte[]        mFrame;        // most recent preview frame (YUV buffer from the camera)
    private boolean       mThreadRun;    // processing-thread loop flag

    public SampleViewBase(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /** Width of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameWidth() {
        return mFrameWidth;
    }

    /** Height of the camera preview size chosen in surfaceChanged (0 before that). */
    public int getFrameHeight() {
        return mFrameHeight;
    }

    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        Log.i(TAG, "surfaceChanged"); // fixed: previously logged "surfaceCreated" (copy-paste)
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // Select the supported preview size whose height is closest to the
            // surface height.
            {
                double minDiff = Double.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());
            mCamera.setParameters(params);
            mCamera.startPreview();
        }
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "surfaceCreated");
        mCamera = Camera.open();
        mCamera.setPreviewCallback(new PreviewCallback() {
            public void onPreviewFrame(byte[] data, Camera camera) {
                // Publish the latest frame and wake the processing thread.
                synchronized (SampleViewBase.this) {
                    mFrame = data;
                    SampleViewBase.this.notify();
                }
            }
        });
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "surfaceDestroyed");
        mThreadRun = false;
        if (mCamera != null) {
            // Take the same lock the processing thread holds while waiting, so
            // the camera is not torn down mid-frame.
            synchronized (this) {
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
                mCamera.release();
                mCamera = null;
            }
        }
    }

    /**
     * Subclass hook: convert one camera preview frame (raw byte buffer of the
     * current preview size) into a bitmap to draw, or return null to skip drawing.
     */
    protected abstract Bitmap processFrame(byte[] data);

    public void run() {
        mThreadRun = true;
        Log.i(TAG, "Starting processing thread");
        while (mThreadRun) {
            Bitmap bmp = null;

            synchronized (this) {
                try {
                    // Wait for the camera callback to publish a frame.
                    // NOTE(review): a spurious wakeup before the first frame would
                    // pass a null buffer to processFrame — confirm subclasses tolerate it.
                    this.wait();
                    bmp = processFrame(mFrame);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Processing thread interrupted", e); // was e.printStackTrace()
                }
            }

            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle(); // one bitmap per frame; free native memory promptly
            }
        }
    }
}
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册