Commit 067744d0 authored by Andrey Kamaev

Fix Android samples for devices having front camera only (Nexus 7)

Parent fc935184
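
For context, the functional core of this fix lives in SampleViewBase.openCamera() of the Java-camera samples: Camera.open() only returns the default back-facing camera and yields null on devices that have a front camera only (such as the Nexus 7), so the samples now additionally try every camera index on API level 9 and above. Below is a minimal, self-contained sketch of that fallback pattern; the class and method names (CameraOpener, openAnyCamera) are illustrative, not part of the commit.

import android.hardware.Camera;
import android.os.Build;
import android.util.Log;

public class CameraOpener {
    private static final String TAG = "OCVSample::CameraOpener"; // hypothetical tag

    /** Returns an opened Camera, or null if no camera could be opened. */
    public static Camera openAnyCamera() {
        Camera camera = null;
        try {
            camera = Camera.open(); // default (back-facing) camera; null if none exists
        } catch (Exception e) {
            Log.e(TAG, "Default camera is not available: " + e.getLocalizedMessage());
        }
        if (camera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
            // Fall back to enumerating all cameras; this covers front-camera-only devices.
            for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                try {
                    camera = Camera.open(camIdx);
                    if (camera != null)
                        break; // stop at the first camera that opens
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera #" + camIdx + " failed to open: " + e.getLocalizedMessage());
                }
            }
        }
        return camera;
    }
}
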
......@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample-15puzzle::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
......@@ -26,76 +26,67 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
......@@ -120,7 +111,6 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
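
A recurring, behavior-preserving cleanup in this commit is replacing methods whose entire body sits inside a synchronized (this) block with synchronized methods, as in the openCamera/releaseCamera/setupCamera changes above. A reduced sketch of the pattern follows; the class name CameraHolderSketch is illustrative, while the field and method mirror the samples.

import org.opencv.highgui.VideoCapture;

public class CameraHolderSketch {
    private VideoCapture mCamera;

    // Before the commit: explicit synchronization on "this" around the whole body.
    public void releaseCameraOld() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
        }
    }

    // After the commit: an equivalent synchronized method; it takes the same lock on "this".
    public synchronized void releaseCamera() {
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }
}
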
......@@ -15,9 +15,8 @@ import android.view.Window;
import android.view.WindowManager;
/** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/
public class puzzle15Activity extends Activity
{
private static final String TAG = "Sample::Activity";
public class puzzle15Activity extends Activity {
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemNewGame;
private MenuItem mItemToggleNumbers;
......@@ -33,6 +32,7 @@ public class puzzle15Activity extends Activity
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -40,13 +40,14 @@ public class puzzle15Activity extends Activity
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -76,7 +77,7 @@ public class puzzle15Activity extends Activity
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -84,12 +85,11 @@ public class puzzle15Activity extends Activity
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -97,16 +97,15 @@ public class puzzle15Activity extends Activity
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemNewGame = menu.add("Start new game");
mItemToggleNumbers = menu.add("Show/hide tile numbers");
return true;
......@@ -114,12 +113,10 @@ public class puzzle15Activity extends Activity
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemNewGame) {
synchronized (mView) {
mView.startNewGame();
}
} else if (item == mItemToggleNumbers)
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemNewGame)
mView.startNewGame();
else if (item == mItemToggleNumbers)
mView.tolggleTileNumbers();
return true;
}
......
......@@ -18,6 +18,8 @@ import android.view.View;
import android.view.View.OnTouchListener;
public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mRgba15;
private Mat[] mCells;
......@@ -42,14 +44,17 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
......@@ -102,7 +107,14 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
startNewGame();
}
public void startNewGame() {
private void drawGrid(int cols, int rows) {
for (int i = 1; i < gridSize; i++) {
Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);
Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);
}
}
public synchronized void startNewGame() {
do {
shuffle(mIndexses);
} while (!isPuzzleSolvable());
......@@ -154,13 +166,6 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
}
}
private void drawGrid(int cols, int rows) {
for (int i = 1; i < gridSize; i++) {
Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);
Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);
}
}
@Override
public void run() {
super.run();
......
......@@ -13,9 +13,9 @@ import android.view.Window;
import android.view.WindowManager;
public class ColorBlobDetectionActivity extends Activity {
private static final String TAG = "OCVSample::Activity";
private static final String TAG = "Sample-ColorBlobDetection::Activity";
private ColorBlobDetectionView mView;
private ColorBlobDetectionView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
......@@ -27,6 +27,7 @@ public class ColorBlobDetectionActivity extends Activity {
// Create and set View
mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -34,13 +35,14 @@ public class ColorBlobDetectionActivity extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -70,7 +72,7 @@ public class ColorBlobDetectionActivity extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -78,12 +80,11 @@ public class ColorBlobDetectionActivity extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -91,7 +92,7 @@ public class ColorBlobDetectionActivity extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......
......@@ -22,29 +22,27 @@ import android.view.View;
import android.view.View.OnTouchListener;
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
// Logcat tag
private static final String TAG = "Sample-ColorBlobDetection::View";
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context) {
super(context);
setOnTouchListener(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
......@@ -53,6 +51,14 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
super.surfaceCreated(holder);
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
......@@ -110,8 +116,8 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba);
......@@ -130,14 +136,6 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
return bmp;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
@Override
public void run() {
super.run();
......
......@@ -12,6 +12,16 @@ import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}
......@@ -94,14 +104,4 @@ public class ColorBlobDetector {
public List<MatOfPoint> getContours() {
return mContours;
}
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
}
......@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
......@@ -26,76 +26,67 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
......@@ -121,7 +112,6 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
......@@ -38,8 +38,4 @@ public class DetectionBasedTracker
private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces);
static {
System.loadLibrary("detection_based_tracker");
}
}
......@@ -15,7 +15,7 @@ import android.view.Window;
import android.view.WindowManager;
public class FdActivity extends Activity {
private static final String TAG = "Sample-FD::Activity";
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemFace50;
private MenuItem mItemFace40;
......@@ -42,6 +42,7 @@ public class FdActivity extends Activity {
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -49,13 +50,14 @@ public class FdActivity extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -80,28 +82,27 @@ public class FdActivity extends Activity {
};
public FdActivity() {
Log.i(TAG, "Instantiated new " + this.getClass());
mDetectorName = new String[2];
mDetectorName[FdView.JAVA_DETECTOR] = "Java";
mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "onPause");
if (mView != null)
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -109,7 +110,7 @@ public class FdActivity extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......@@ -117,7 +118,7 @@ public class FdActivity extends Activity {
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemFace50 = menu.add("Face size 50%");
mItemFace40 = menu.add("Face size 40%");
mItemFace30 = menu.add("Face size 30%");
......@@ -128,7 +129,7 @@ public class FdActivity extends Activity {
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemFace50)
mView.setMinFaceSize(0.5f);
else if (item == mItemFace40)
......@@ -137,8 +138,7 @@ public class FdActivity extends Activity {
mView.setMinFaceSize(0.3f);
else if (item == mItemFace20)
mView.setMinFaceSize(0.2f);
else if (item == mItemType)
{
else if (item == mItemType) {
mDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[mDetectorType]);
mView.setDetectorType(mDetectorType);
......
......@@ -22,22 +22,22 @@ import android.util.Log;
import android.view.SurfaceHolder;
class FdView extends SampleCvViewBase {
private static final String TAG = "Sample-FD::View";
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private int mDetectorType = JAVA_DETECTOR;
private int mDetectorType = JAVA_DETECTOR;
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
public void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
......@@ -62,6 +62,7 @@ class FdView extends SampleCvViewBase {
super(context);
try {
// load cascade file from application resources
InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
......@@ -90,10 +91,13 @@ class FdView extends SampleCvViewBase {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
......
......@@ -10,7 +10,7 @@ import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "Sample::FpsMeter";
private static final String TAG = "OCVSample::FpsMeter";
int step;
int framesCouner;
double freq;
......
......@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
......@@ -28,77 +28,66 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
Log.e(TAG, "Failed to open native camera");
releaseCamera();
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "surfaceDestroyed2");
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
mFps.init();
while (true) {
......@@ -128,7 +117,6 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
......@@ -10,7 +10,7 @@ import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "Sample::FpsMeter";
private static final String TAG = "OCVSample::FpsMeter";
int step;
int framesCouner;
double freq;
......
......@@ -15,8 +15,7 @@ import android.view.Window;
import android.view.WindowManager;
public class ImageManipulationsActivity extends Activity {
private static final String TAG = "Sample-ImageManipulations::Activity";
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
......@@ -35,11 +34,10 @@ public class ImageManipulationsActivity extends Activity {
private MenuItem mItemPreviewZoom;
private MenuItem mItemPreviewPixelize;
private MenuItem mItemPreviewPosterize;
private ImageManipulationsView mView;
public static int viewMode = VIEW_MODE_RGBA;
private ImageManipulationsView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
......@@ -50,6 +48,7 @@ public class ImageManipulationsActivity extends Activity {
// Create and set View
mView = new ImageManipulationsView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -57,13 +56,14 @@ public class ImageManipulationsActivity extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -93,7 +93,7 @@ public class ImageManipulationsActivity extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -101,12 +101,11 @@ public class ImageManipulationsActivity extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -114,7 +113,7 @@ public class ImageManipulationsActivity extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......@@ -122,7 +121,7 @@ public class ImageManipulationsActivity extends Activity {
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewHist = menu.add("Histograms");
mItemPreviewCanny = menu.add("Canny");
......@@ -136,7 +135,7 @@ public class ImageManipulationsActivity extends Activity {
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
viewMode = VIEW_MODE_RGBA;
if (item == mItemPreviewHist)
......
......@@ -21,28 +21,32 @@ import android.util.Log;
import android.view.SurfaceHolder;
class ImageManipulationsView extends SampleCvViewBase {
private Size mSize0;
private Size mSizeRgba;
private Size mSizeRgbaInner;
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mHist, mMat0;
private MatOfInt mChannels[], mHistSize;
private int mHistSizeNum;
private MatOfFloat mRanges;
private Scalar mColorsRGB[], mColorsHue[], mWhilte;
private Point mP1, mP2;
float mBuff[];
private Mat mRgbaInnerWindow;
private Mat mGrayInnerWindow;
private Mat mBlurWindow;
private Mat mZoomWindow;
private Mat mZoomCorner;
private Mat mSepiaKernel;
private static final String TAG = "OCVSample::View";
private Size mSize0;
private Size mSizeRgba;
private Size mSizeRgbaInner;
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mHist;
private Mat mMat0;
private MatOfInt mChannels[];
private MatOfInt mHistSize;
private int mHistSizeNum;
private MatOfFloat mRanges;
private Scalar mColorsRGB[];
private Scalar mColorsHue[];
private Scalar mWhilte;
private Point mP1;
private Point mP2;
private float mBuff[];
private Mat mRgbaInnerWindow;
private Mat mGrayInnerWindow;
private Mat mBlurWindow;
private Mat mZoomWindow;
private Mat mZoomCorner;
private Mat mSepiaKernel;
public ImageManipulationsView(Context context) {
super(context);
......@@ -52,10 +56,13 @@ class ImageManipulationsView extends SampleCvViewBase {
mSepiaKernel.put(1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
mSepiaKernel.put(2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
mSepiaKernel.put(3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
......@@ -234,7 +241,7 @@ class ImageManipulationsView extends SampleCvViewBase {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
......
......@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample-ImageManipulations::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
......@@ -28,86 +28,75 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
mFps.init();
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null) {
Log.i(TAG, "mCamera == null");
if (mCamera == null)
break;
}
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
......@@ -129,7 +118,6 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
......@@ -11,7 +11,7 @@ import android.view.Window;
public class Sample0Base extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
......@@ -23,14 +23,14 @@ public class Sample0Base extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
super.onPause();
mView.releaseCamera();
}
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
......@@ -38,8 +38,8 @@ public class Sample0Base extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
......@@ -49,7 +49,7 @@ public class Sample0Base extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
mView = new Sample0View(this);
......@@ -58,7 +58,7 @@ public class Sample0Base extends Activity {
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
return true;
......@@ -66,7 +66,7 @@ public class Sample0Base extends Activity {
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
mView.setViewMode(Sample0View.VIEW_MODE_RGBA);
else if (item == mItemPreviewGray)
......
......@@ -6,20 +6,21 @@ import android.util.Log;
class Sample0View extends SampleViewBase {
private static final String TAG = "Sample::View";
int mSize;
int[] mRGBA;
private Bitmap mBitmap;
private int mViewMode;
private static final String TAG = "OCVSample::View";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
int mSize;
int[] mRGBA;
private Bitmap mBitmap;
private int mViewMode;
public Sample0View(Context context) {
super(context);
mSize = 0;
mViewMode = VIEW_MODE_RGBA;
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
......@@ -64,7 +65,7 @@ class Sample0View extends SampleViewBase {
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "onPreviewStarted("+previewWidth+", "+previewHeight+")");
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
/* Create a bitmap that will be used through to calculate the image to */
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
mRGBA = new int[previewWidth * previewHeight];
......@@ -72,7 +73,7 @@ class Sample0View extends SampleViewBase {
@Override
protected void onPreviewStopped() {
Log.i(TAG, "onPreviewStopped");
Log.i(TAG, "called onPreviewStopped");
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
......@@ -84,7 +85,7 @@ class Sample0View extends SampleViewBase {
}
public void setViewMode(int viewMode) {
Log.i(TAG, "setViewMode("+viewMode+")");
Log.i(TAG, "called setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}
\ No newline at end of file
......@@ -16,7 +16,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
......@@ -52,19 +52,30 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist)");
e.printStackTrace();
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Failed to open camera");
Log.e(TAG, "Can't open any camera");
return false;
}
......@@ -77,11 +88,12 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
......@@ -93,64 +105,61 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Log.i(TAG, "setupCamera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
mCamera.setParameters(params);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setParameters(params);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
......@@ -163,12 +172,12 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
......@@ -191,7 +200,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
......@@ -214,6 +223,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
......@@ -15,7 +15,7 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample1Java extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
......@@ -32,6 +32,7 @@ public class Sample1Java extends Activity {
// Create and set View
mView = new Sample1View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -39,8 +40,8 @@ public class Sample1Java extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
......@@ -76,7 +77,7 @@ public class Sample1Java extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -84,12 +85,11 @@ public class Sample1Java extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -97,7 +97,7 @@ public class Sample1Java extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......@@ -105,7 +105,7 @@ public class Sample1Java extends Activity {
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny");
......@@ -114,7 +114,7 @@ public class Sample1Java extends Activity {
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample1View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
......
......@@ -13,41 +13,46 @@ import android.graphics.Bitmap;
import android.util.Log;
class Sample1View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
public Sample1View(Context context) {
super(context);
mViewMode = VIEW_MODE_RGBA;
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
synchronized (this) {
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
mRgba = new Mat();
mIntermediateMat = new Mat();
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
Log.i(TAG, "called onPreviewStopped");
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
synchronized (this) {
......@@ -101,6 +106,7 @@ class Sample1View extends SampleViewBase {
}
public void setViewMode(int viewMode) {
Log.i(TAG, "called setViewMode("+viewMode+")");
mViewMode = viewMode;
}
......
......@@ -16,14 +16,14 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
......@@ -53,11 +53,29 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
Log.e(TAG, "Can't open any camera");
return false;
}
......@@ -70,16 +88,16 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
......@@ -87,74 +105,79 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
......@@ -177,13 +200,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
......@@ -198,5 +223,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
}
}
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
......@@ -15,7 +15,7 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample2NativeCamera extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
......@@ -24,11 +24,10 @@ public class Sample2NativeCamera extends Activity {
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private Sample2View mView;
public static int viewMode = VIEW_MODE_RGBA;
private Sample2View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
......@@ -39,6 +38,7 @@ public class Sample2NativeCamera extends Activity {
// Create and set View
mView = new Sample2View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
......@@ -46,13 +46,14 @@ public class Sample2NativeCamera extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -82,7 +83,7 @@ public class Sample2NativeCamera extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -90,12 +91,11 @@ public class Sample2NativeCamera extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
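
onResume() above defers all OpenCV work until OpenCV Manager reports success through the loader callback. A minimal sketch of that pattern, assuming the 2.4.2 Java API used in this diff; the activity and tag names are illustrative.

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import android.app.Activity;
import android.util.Log;

public class MinimalOcvActivity extends Activity {
    private static final String TAG = "OCVSample::Minimal";

    // Invoked once the OpenCV Manager service has loaded the native libraries.
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                Log.i(TAG, "OpenCV loaded successfully");
                // Safe to create views and open the camera from here on.
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Asynchronous initialization: returns immediately, result arrives in the callback.
        if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mLoaderCallback)) {
            Log.e(TAG, "Cannot connect to OpenCV Manager");
        }
    }
}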
......@@ -103,7 +103,7 @@ public class Sample2NativeCamera extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......@@ -111,7 +111,7 @@ public class Sample2NativeCamera extends Activity {
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny");
......@@ -120,7 +120,7 @@ public class Sample2NativeCamera extends Activity {
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
viewMode = VIEW_MODE_RGBA;
else if (item == mItemPreviewGray)
......
......@@ -15,16 +15,20 @@ import android.util.Log;
import android.view.SurfaceHolder;
class Sample2View extends SampleCvViewBase {
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
public Sample2View(Context context) {
super(context);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
......@@ -59,7 +63,7 @@ class Sample2View extends SampleCvViewBase {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
......
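
processFrame() above converts the RGBA result Mat into a Bitmap and recycles the Bitmap if the conversion throws. A standalone sketch of that conversion, assuming a CV_8UC4 Mat; the helper name is illustrative.

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import android.graphics.Bitmap;
import android.util.Log;

final class MatToBitmapHelper {
    private static final String TAG = "OCVSample::MatToBitmap";

    // Illustrative helper: converts an RGBA Mat to an ARGB_8888 Bitmap and,
    // like the sample, recycles the Bitmap if the conversion fails.
    static Bitmap toBitmap(Mat rgba) {
        Bitmap bmp = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
        try {
            Utils.matToBitmap(rgba, bmp);
            return bmp;
        } catch (Exception e) {
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            return null;
        }
    }
}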
......@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
......@@ -26,76 +26,67 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
......@@ -115,13 +106,12 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth()-bmp.getWidth()) / 2, (canvas.getHeight()-bmp.getHeight()) / 2, null);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
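
SampleCvViewBase above drives the OpenCV native camera through VideoCapture instead of android.hardware.Camera. A minimal capture-loop sketch under that assumption, using the 2.4.x highgui Java API seen in this diff; the RGBA retrieve constant is assumed from that API and the class name is illustrative.

import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

final class NativeCameraSketch {
    // Illustrative loop: grab a few frames from the OpenCV native camera
    // and retrieve them as RGBA Mats, releasing everything afterwards.
    static void captureFrames(int width, int height, int frameCount) {
        VideoCapture camera = new VideoCapture(Highgui.CV_CAP_ANDROID);
        if (!camera.isOpened())
            return;
        camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, width);
        camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, height);
        Mat rgba = new Mat();
        for (int i = 0; i < frameCount; i++) {
            if (!camera.grab())
                break;
            camera.retrieve(rgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            // process rgba here
        }
        rgba.release();
        camera.release();
    }
}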
......@@ -13,8 +13,9 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample3Native extends Activity {
private static final String TAG = "Sample::Activity";
private Sample3View mView;
private static final String TAG = "OCVSample::Activity";
private Sample3View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
......@@ -37,8 +38,8 @@ public class Sample3Native extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
......@@ -73,7 +74,7 @@ public class Sample3Native extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -81,12 +82,11 @@ public class Sample3Native extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -94,7 +94,7 @@ public class Sample3Native extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
......
......@@ -2,22 +2,27 @@ package org.opencv.samples.tutorial3;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
class Sample3View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
public Sample3View(Context context) {
super(context);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) {
mFrameSize = previewWidtd * previewHeight;
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
mFrameSize = previewWidth * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
......
......@@ -15,14 +15,14 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
......@@ -52,11 +52,29 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
Log.e(TAG, "Can't open any camera");
return false;
}
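
Camera.open() with no arguments only returns the first back-facing camera, so on a device that has a front camera only it yields nothing; the loop above falls back to opening whichever camera the device reports through the API level 9 interface. A condensed sketch of the same fallback; the early break on success and the class name are additions for this sketch, not part of the diff.

import android.hardware.Camera;
import android.os.Build;
import android.util.Log;

final class CameraOpener {
    private static final String TAG = "OCVSample::CameraOpener";

    // Illustrative fallback mirroring openCamera() above: try the default
    // (back-facing) camera first, then enumerate every camera the device reports.
    static Camera openAnyCamera() {
        Camera camera = null;
        try {
            camera = Camera.open();               // back-facing camera, if any
        } catch (Exception e) {
            Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
        }
        if (camera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
            for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                try {
                    camera = Camera.open(camIdx); // front camera on front-only devices
                    break;                        // sketch-only addition: stop at the first camera that opens
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera #" + camIdx + " failed to open: " + e.getLocalizedMessage());
                }
            }
        }
        if (camera == null)
            Log.e(TAG, "Can't open any camera");
        return camera;
    }
}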
......@@ -69,11 +87,12 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
......@@ -86,77 +105,81 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
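
The buffer allocated above holds exactly one preview frame: width × height pixels at getBitsPerPixel(previewFormat) bits each, divided by 8 to get bytes. For the default NV21 preview format (12 bits per pixel), a 640x480 preview needs 640 * 480 * 12 / 8 = 460800 bytes. A small sketch of the same computation; the helper name is illustrative.

import android.graphics.ImageFormat;

final class PreviewBufferSize {
    // Illustrative computation matching the allocation in setupCamera() above.
    // For ImageFormat.NV21 (12 bits per pixel), 640x480 -> 640 * 480 * 12 / 8 = 460800 bytes.
    static int bufferBytes(int width, int height, int previewFormat) {
        return width * height * ImageFormat.getBitsPerPixel(previewFormat) / 8;
    }
}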
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
......@@ -177,13 +200,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
......@@ -198,5 +223,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
}
}
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file
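
run() above is the consumer half of a simple producer/consumer handoff: the camera preview callback copies the latest frame into mFrame and calls notify(), while the processing thread waits on the same monitor, and mThreadRun is now volatile so releaseCamera() can stop the loop from another thread. A condensed sketch of that handoff under those assumptions; the explicit notify() in stop() and all names outside the diff are additions for this sketch.

final class FrameHandoff implements Runnable {
    private final byte[] mFrame;
    private volatile boolean mThreadRun;

    FrameHandoff(int bufferSize) {
        mFrame = new byte[bufferSize];
    }

    // Producer side: called from the camera preview callback thread.
    synchronized void onFrame(byte[] data) {
        System.arraycopy(data, 0, mFrame, 0, Math.min(data.length, mFrame.length));
        notify();                       // wake the processing thread
    }

    void stop() {
        mThreadRun = false;
        synchronized (this) {
            notify();                   // sketch-only: let a waiting run() observe the flag and exit
        }
    }

    // Consumer side: the processing thread.
    public void run() {
        mThreadRun = true;
        while (mThreadRun) {
            synchronized (this) {
                try {
                    wait();             // monitor is released while waiting, reacquired on notify()
                    if (!mThreadRun)
                        break;
                    // processFrame(mFrame) would go here
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}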
......@@ -15,7 +15,7 @@ import android.view.Window;
import android.view.WindowManager;
public class Sample4Mixed extends Activity {
private static final String TAG = "Sample::Activity";
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
......@@ -45,13 +45,14 @@ public class Sample4Mixed extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
......@@ -81,7 +82,7 @@ public class Sample4Mixed extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
......@@ -89,12 +90,11 @@ public class Sample4Mixed extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
......@@ -102,14 +102,15 @@ public class Sample4Mixed extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
Log.i(TAG, "onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu");
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny");
......@@ -117,8 +118,9 @@ public class Sample4Mixed extends Activity {
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
......
......@@ -10,26 +10,28 @@ import android.graphics.Bitmap;
import android.util.Log;
class Sample4View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_FEATURES = 5;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private int mViewMode;
private Bitmap mBitmap;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
public Sample4View(Context context) {
super(context);
}
@Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) {
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
......@@ -37,31 +39,34 @@ class Sample4View extends SampleViewBase {
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
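
onPreviewStarted() above wraps each NV21 preview frame in a single-channel Mat of (height + height/2) rows: the first height rows are the Y (luma) plane, so the gray image is just a submat, and the trailing height/2 rows hold the interleaved VU chroma. A short sketch of the same wrapping plus the RGBA conversion the sample performs later; the cvtColor constant is assumed from the 2.4.x API since the conversion itself is outside this hunk, and the class name is illustrative.

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

final class Nv21Frame {
    // Illustrative wrapping of one NV21 preview frame, mirroring onPreviewStarted().
    static Mat toRgba(byte[] nv21, int width, int height) {
        Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
        yuv.put(0, 0, nv21);                                          // copy the raw frame into the Mat
        Mat gray = yuv.submat(0, height, 0, width);                   // Y plane only, no copy
        Mat rgba = new Mat();
        Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);   // 4 output channels -> RGBA
        gray.release();
        yuv.release();
        return rgba;
    }
}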
@Override
protected void onPreviewStopped() {
Log.i(TAG, "called onPreviewStopped");
if (mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
}
......@@ -105,6 +110,7 @@ class Sample4View extends SampleViewBase {
public native void FindFeatures(long matAddrGr, long matAddrRgba);
public void setViewMode(int viewMode) {
Log.i(TAG, "called setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}
......@@ -2,6 +2,7 @@ package org.opencv.samples.tutorial4;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
......@@ -15,14 +16,14 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
......@@ -52,11 +53,29 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open();
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open camera!");
Log.e(TAG, "Can't open any camera");
return false;
}
......@@ -69,16 +88,16 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
......@@ -86,78 +105,81 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
onPreviewStopped();
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera");
synchronized (this) {
if (mCamera != null) {
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
mCamera.setParameters(params);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setParameters(params);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
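
The focus-mode block above only requests FOCUS_MODE_CONTINUOUS_VIDEO when getSupportedFocusModes() lists it, because not every camera supports that mode. A minimal sketch of the same guard; the helper name is illustrative.

import java.util.List;
import android.hardware.Camera;

final class FocusModeHelper {
    // Illustrative guard mirroring setupCamera() above: enable continuous video
    // focus only when the camera actually advertises it.
    static void enableContinuousFocus(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        List<String> modes = params.getSupportedFocusModes();
        if (modes != null && modes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            camera.setParameters(params);
        }
    }
}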
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
......@@ -178,13 +200,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
......@@ -199,5 +223,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
}
}
Log.i(TAG, "Finished processing thread");
}
}
\ No newline at end of file