提交 76c5c707 编写于 作者: 杨时权

【修复】修复OpenGL示例代码存在的问题。

上级 b7a19da2
package com.ly.avfoundation.avfoundation.common;
import android.util.Log;

/**
 * Shared status constants and a debug-gated logging helper for the
 * avfoundation module.
 */
public class Define {

    /**
     * Status / error codes shared by capture and encoder components.
     * <p>
     * The underscore-prefixed names are legacy duplicates of the plain
     * names (identical values); they are kept for source compatibility but
     * new code should use {@link #SUCCESS}, {@link #BUFFERING},
     * {@link #ENC_CREATE} and {@link #ENC_OUTBUF}.
     */
    public static class Status {
        /** @deprecated duplicate of {@link #SUCCESS}; use that instead. */
        @Deprecated
        public static final int _SUCCESS = 0;
        /** @deprecated duplicate of {@link #BUFFERING}; use that instead. */
        @Deprecated
        public static final int _BUFFERING = 1;
        /** Operation completed successfully. */
        public static final int SUCCESS = 0;
        /** Pipeline is buffering. */
        public static final int BUFFERING = 1;
        /** @deprecated duplicate of {@link #ENC_CREATE}; use that instead. */
        @Deprecated
        public static final int _ENC_CREATE = -1000;
        /** @deprecated duplicate of {@link #ENC_OUTBUF}; use that instead. */
        @Deprecated
        public static final int _ENC_OUTBUF = -1001;
        // Negative values are encoder errors; names are the error sites.
        /** Encoder-create error code. */
        public static final int ENC_CREATE = -1000;
        /** Encoder output-buffer error code. */
        public static final int ENC_OUTBUF = -1001;
    }

    /** Global switch: when false, {@link #logWithDebug} is a no-op. */
    public static final boolean DEBUG = true;

    /**
     * Logs an info-level message, but only while {@link #DEBUG} is enabled.
     *
     * @param tag     log tag identifying the calling component
     * @param logInfo message text to log
     */
    public static void logWithDebug(String tag, String logInfo) {
        if (DEBUG) {
            Log.i(tag, logInfo);
        }
    }
}
......@@ -21,226 +21,24 @@ import javax.microedition.khronos.opengles.GL10;
/**
 * A GLSurfaceView that is also its own GLSurfaceView.Renderer: it draws
 * camera preview frames arriving through an external-OES SurfaceTexture,
 * optionally through one of a fixed set of fragment-shader filters.
 */
public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
private final static String TAG = "[CameraGLSurfaceView]";
// Camera filters; must match up with cameraFilterNames in strings.xml
private static final int FILTER_NONE = 0;
private static final int FILTER_BLACK_WHITE = 1;
private static final int FILTER_BLUR = 2;
private static final int FILTER_SHARPEN = 3;
private static final int FILTER_EDGE_DETECT = 4;
private static final int FILTER_EMBOSS = 5;
// CameraHandler message: deliver the SurfaceTexture created on the GL thread.
private static final int MSG_SET_SURFACE_TEXTURE = 0;
// Scratch transform matrix refilled from the SurfaceTexture every frame.
// NOTE(review): static, so it is shared by ALL instances of this view —
// safe only while a single instance renders at a time; confirm intent.
private static final float mSTMatrix[] = new float[16];
// GL texture name of the external (OES) texture backing mSurfaceTexture.
private int mTextureid;
// On-screen blitter; created on the GL thread in onSurfaceCreated().
private FullFrameRect mFullScreen = null;
// Receives camera frames; created with mTextureid on the GL thread.
private SurfaceTexture mSurfaceTexture = null;
// Posts MSG_SET_SURFACE_TEXTURE back to the thread that built this view.
private CameraHandler mCameraHandler = null;
// width/height of the incoming camera preview frames
private boolean mIncomingSizeUpdated = false;
private int mIncomingWidth = -1;
private int mIncomingHeight = -1;
// Filter currently compiled into mFullScreen vs. the one requested next;
// onDrawFrame() calls updateFilter() when they differ.
private int mCurrentFilter = -1;
private int mNewFilter = FILTER_NONE;
// Notified (via mCameraHandler) once the SurfaceTexture exists.
private OnSetSurfaceTextureListener mAvaildableListener = null;
/** Programmatic-construction constructor. */
public CameraGLSurfaceView(Context context) {
super(context);
this.initialize();
}
/** XML-inflation constructor. */
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
this.initialize();
}
// Resets all state; called from both constructors, so mCameraHandler binds
// to the Looper of the thread constructing the view (normally the main thread).
private void initialize() {
// initialize all val
mIncomingSizeUpdated = false;
mIncomingWidth = -1;
mIncomingHeight = -1;
mCurrentFilter = -1;
// NOTE(review): uses CameraSurfaceRenderer.FILTER_NONE although this class
// declares its own FILTER_NONE (the field initializer uses the local one);
// presumably both are 0 — confirm and unify on one constant family.
mNewFilter = CameraSurfaceRenderer.FILTER_NONE;
mTextureid = -1;
mCameraHandler = new CameraHandler(this);
}
/**
 * Renderer callback (GL thread): builds the on-screen blitter, creates the
 * external texture plus its SurfaceTexture, then hands the SurfaceTexture
 * to the listener's thread via mCameraHandler.
 */
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
// Set up the texture blitter that will be used for on-screen display. This
// is *not* applied to the recording, because that uses a separate shader.
mFullScreen = new FullFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
mTextureid = mFullScreen.createTextureObject();
// Create a SurfaceTexture, with an external texture, in this EGL context. We don't
// have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame
// available messages will arrive on the main thread.
mSurfaceTexture = new SurfaceTexture(mTextureid);
// send message to handle
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
}
/** Renderer callback (GL thread): match the GL viewport to the new surface size. */
@Override
public void onSurfaceChanged(GL10 gl10, int i, int i1) {
// update preview size
GLES20.glViewport(0, 0, i, i1);
}
/**
 * Renderer callback (GL thread): latches the newest camera frame, applies
 * any pending filter/texture-size change, then blits the frame on screen.
 */
@Override
public void onDrawFrame(GL10 gl10) {
// update surface texture
mSurfaceTexture.updateTexImage();
// Both sizes start at -1 and are only set by setCameraPreviewSize().
if (mIncomingHeight <= -1 || mIncomingWidth <= -1) {
// Texture size isn't set yet. This is only used for the filters, but to be
// safe we can just skip drawing while we wait for the various races to resolve.
// (This seems to happen if you toggle the screen off/on with power button.)
Log.i(TAG, "Drawing before incoming texture size set; skipping");
return;
}
// TODO .encoder
// Update the filter, if necessary.
if (mCurrentFilter != mNewFilter) {
this.updateFilter();
}
if (mIncomingSizeUpdated) {
// Tell the shader the texel size (used by the kernel-based filters).
mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
mIncomingSizeUpdated = false;
}
// Draw the video frame.
mSurfaceTexture.getTransformMatrix(mSTMatrix);
mFullScreen.drawFrame(mTextureid, mSTMatrix);
// TODO
}
/**
 * Records the size of the incoming camera preview frames.
 * <p>
 * It's not clear whether this is guaranteed to execute before or after onSurfaceCreated(),
 * so we assume it could go either way. (Fortunately they both run on the same thread,
 * so we at least know that they won't execute concurrently.)
 */
public void setCameraPreviewSize(int width, int height) {
Log.d(TAG, "setCameraPreviewSize (" + width + "x" + height + ");");
mIncomingWidth = width;
mIncomingHeight = height;
mIncomingSizeUpdated = true;
}
/** Registers the listener told when the camera SurfaceTexture becomes available. */
public void setSurfaceTextureListener(OnSetSurfaceTextureListener listener) {
mAvaildableListener = listener;
}
/**
 * Rebuilds the blit program (GL thread) to match mNewFilter, supplying a
 * 3x3 convolution kernel for the filter variants that need one.
 * <p>
 * NOTE(review): the cases reference CameraSurfaceRenderer.FILTER_* rather
 * than this class's identically named constants — presumably equal values;
 * confirm and unify.
 */
public void updateFilter() {
Texture2dProgram.ProgramType programType;
float[] kernel = null;
float colorAdj = 0.0f;
Log.d(TAG, "updating filter to " + mNewFilter);
switch (mNewFilter) {
case CameraSurfaceRenderer.FILTER_NONE:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT;
break;
case CameraSurfaceRenderer.FILTER_BLACK_WHITE:
// (In a previous version the TEXTURE_EXT_BW variant was enabled by a flag called
// ROSE_COLORED_GLASSES, because the shader set the red channel to the B&W color
// and green/blue to zero.)
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BW;
break;
case CameraSurfaceRenderer.FILTER_BLUR:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
// 3x3 Gaussian-style blur kernel (weights sum to 1).
kernel = new float[] {
1f/16f, 2f/16f, 1f/16f,
2f/16f, 4f/16f, 2f/16f,
1f/16f, 2f/16f, 1f/16f };
break;
case CameraSurfaceRenderer.FILTER_SHARPEN:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
// 3x3 sharpen kernel.
kernel = new float[] {
0f, -1f, 0f,
-1f, 5f, -1f,
0f, -1f, 0f };
break;
case CameraSurfaceRenderer.FILTER_EDGE_DETECT:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
// 3x3 edge-detect (Laplacian-style) kernel.
kernel = new float[] {
-1f, -1f, -1f,
-1f, 8f, -1f,
-1f, -1f, -1f };
break;
case CameraSurfaceRenderer.FILTER_EMBOSS:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
// 3x3 emboss kernel plus a +0.5 color offset.
kernel = new float[] {
2f, 0f, 0f,
0f, -1f, 0f,
0f, 0f, -1f };
colorAdj = 0.5f;
break;
default:
throw new RuntimeException("Unknown filter mode " + mNewFilter);
}
// Do we need a whole new program? (We want to avoid doing this if we don't have
// too -- compiling a program could be expensive.)
if (programType != mFullScreen.getProgram().getProgramType()) {
mFullScreen.changeProgram(new Texture2dProgram(programType));
// If we created a new program, we need to initialize the texture width/height.
mIncomingSizeUpdated = true;
}
if (kernel != null) {
mFullScreen.getProgram().setKernel(kernel, colorAdj);
}
mCurrentFilter = mNewFilter;
}
/** Forwards the freshly created SurfaceTexture to the registered listener, if any. */
public void handleSetSurfaceTexture(SurfaceTexture st) {
if (mAvaildableListener != null)
mAvaildableListener.onSetSurfaceTexture(st);
}
/** Callback fired once the GL thread has created the camera SurfaceTexture. */
public interface OnSetSurfaceTextureListener {
void onSetSurfaceTexture(SurfaceTexture st);
}
/**
 * Handler holding only a weak reference to the view, so queued messages
 * cannot leak the view after it is torn down.
 */
public static class CameraHandler extends Handler {
private WeakReference<CameraGLSurfaceView> mWeaker;
public CameraHandler(CameraGLSurfaceView surfaceView) {
mWeaker = new WeakReference<CameraGLSurfaceView>(surfaceView);
}
@Override
public void handleMessage(Message msg) {
CameraGLSurfaceView surfaceView = mWeaker.get();
if (surfaceView == null) {
// View already collected; log and drop the message.
Log.e(TAG, "handleMessage msg.what:" + msg.what);
return;
}
switch (msg.what) {
case MSG_SET_SURFACE_TEXTURE:
surfaceView.handleSetSurfaceTexture((SurfaceTexture) msg.obj);
break;
}
}
}
}
......@@ -118,11 +118,6 @@ public class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
this.updateFilter();
}
if (mIncomingSizeUpdated) {
mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
mIncomingSizeUpdated = false;
}
// Draw the video frame.
mSurfaceTexture.getTransformMatrix(mSTMatrix);
mFullScreen.drawFrame(mTextureId, mSTMatrix);
......@@ -195,41 +190,6 @@ public class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
case CameraSurfaceRenderer.FILTER_NONE:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT;
break;
case CameraSurfaceRenderer.FILTER_BLACK_WHITE:
// (In a previous version the TEXTURE_EXT_BW variant was enabled by a flag called
// ROSE_COLORED_GLASSES, because the shader set the red channel to the B&W color
// and green/blue to zero.)
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BW;
break;
case CameraSurfaceRenderer.FILTER_BLUR:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
kernel = new float[] {
1f/16f, 2f/16f, 1f/16f,
2f/16f, 4f/16f, 2f/16f,
1f/16f, 2f/16f, 1f/16f };
break;
case CameraSurfaceRenderer.FILTER_SHARPEN:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
kernel = new float[] {
0f, -1f, 0f,
-1f, 5f, -1f,
0f, -1f, 0f };
break;
case CameraSurfaceRenderer.FILTER_EDGE_DETECT:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
kernel = new float[] {
-1f, -1f, -1f,
-1f, 8f, -1f,
-1f, -1f, -1f };
break;
case CameraSurfaceRenderer.FILTER_EMBOSS:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
kernel = new float[] {
2f, 0f, 0f,
0f, -1f, 0f,
0f, 0f, -1f };
colorAdj = 0.5f;
break;
default:
throw new RuntimeException("Unknown filter mode " + mNewFilter);
}
......@@ -242,9 +202,6 @@ public class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
mIncomingSizeUpdated = true;
}
if (kernel != null) {
mFullScreen.getProgram().setKernel(kernel, colorAdj);
}
mCurrentFilter = mNewFilter;
}
......
......@@ -3,6 +3,7 @@ package com.ly.avfoundation.avfoundation.render;
import android.content.Context;
import android.graphics.PixelFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
......@@ -17,6 +18,8 @@ import com.ly.avfoundation.avfoundation.gles.WindowSurface;
import java.lang.ref.WeakReference;
import static android.os.Process.THREAD_PRIORITY_URGENT_AUDIO;
public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
public static final String TAG = "[TriangleRender2]";
......@@ -24,8 +27,8 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
private WindowSurface mWindowSurface;
private TriangleProgram mProgram;
private Handler mHandler = null;
private HandlerThread mRenderThread = null;
private Thread mThread = null;
private boolean mInitialized = false;
private boolean mQuit;
public static final int MSG_SURFACE_CREATE = 0;
......@@ -52,8 +55,16 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
}
public TriangleRender2() {
mThread = new Thread(this);
mHandler = new Handler() {
mRenderThread = new HandlerThread("TriangleRender2", THREAD_PRIORITY_URGENT_AUDIO);
mRenderThread.start();
mRenderThread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread thread, Throwable ex) {
Log.e(TAG, "uncaughtException: " + ex);
}
});
mHandler = new Handler(mRenderThread.getLooper()) {
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_SURFACE_CREATE:
......@@ -77,6 +88,7 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
/**
 * SurfaceHolder.Callback: the drawing surface now exists. Starts the
 * render-driver thread (run()) and asks the handler to create the EGL
 * window surface for this holder.
 *
 * @param surfaceHolder holder of the newly created surface
 */
@Override
public void surfaceCreated(@NonNull SurfaceHolder surfaceHolder) {
mQuit = false;
// A fresh driver thread per surface lifetime; it exits once mQuit is set.
mThread = new Thread(this);
mThread.start();
mHandler.sendMessage(mHandler.obtainMessage(MSG_SURFACE_CREATE, surfaceHolder));
}
......@@ -88,12 +100,16 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
@Override
public void surfaceDestroyed(@NonNull SurfaceHolder surfaceHolder) {
mQuit = true;
mHandler.sendMessage(mHandler.obtainMessage(MSG_SURFACE_DESTROY));
try {
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
if (!mQuit) {
mQuit = true;
mHandler.sendMessage(mHandler.obtainMessage(MSG_SURFACE_DESTROY));
try {
mThread.join();
mThread = null;
} catch (Exception exp) {
}
}
}
......@@ -101,8 +117,12 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
/**
 * Render-driver loop: posts MSG_SURFACE_RENDER roughly every 10 ms until
 * mQuit is set, capping the redraw rate at ~100 posts per second.
 */
public void run() {
// onDrawFrame
while (!mQuit) {
Log.i(TAG, "run: " + mQuit);
mHandler.sendMessage(mHandler.obtainMessage(MSG_SURFACE_RENDER));
try {
// Frame pacing; an interrupt merely shortens this frame's wait.
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
......@@ -116,17 +136,21 @@ public class TriangleRender2 implements SurfaceHolder.Callback, Runnable {
}
public void handleSurfaceChange(Bounds bounds) {
if (mProgram != null) {
mProgram.setScreenBounds(0, 0, bounds.width, bounds.height);
if (mProgram == null || mWindowSurface == null || mEGLCore == null) {
return;
}
mProgram.setScreenBounds(0, 0, bounds.width, bounds.height);
Log.i(TAG, "handleSurfaceChange " + bounds);
}
public void handleSurfaceRende() {
mWindowSurface.makeCurrent();
if (mProgram != null) {
mProgram.draw();
if (mEGLCore == null || mWindowSurface == null || mProgram == null) {
return;
}
mWindowSurface.makeCurrent();
mProgram.draw();
mWindowSurface.swapBuffers();
}
......
......@@ -26,87 +26,10 @@ import com.ly.avfoundation.avfoundation.common.AVCodecDefine;
import com.ly.avfoundation.avfoundation.render.CameraGLSurfaceView;
import com.ly.avfoundation.avfoundation.render.CameraSurfaceRenderer;
public class CaptureFragment extends Fragment implements View.OnClickListener, RadioGroup.OnCheckedChangeListener, CameraGLSurfaceView.OnSetSurfaceTextureListener, VideoCapture.OnFrameAvailableListener, VideoCapture.OnPreviewSizeChangeListener {
public class CaptureFragment extends Fragment {
private static final String TAG = "[CaptureFragment]";
private Activity mActivity;
private Button mCaptureStartBtn;
private Button mCaptureStopBtn;
private RadioGroup mCodecGroup;
private RadioGroup mInputGroup;
private EditText mWidthText;
private EditText mHeightText;
private EditText mFpsText;
private EditText mBitrateText;
private CameraGLSurfaceView mSurfaceView;
private SurfaceTexture mSurfaceTexture = null;
private VideoCapture mCapture;
private VideoCapture.Configuration mCameraConfiguration;
/**
 * Callback from the GL view once its SurfaceTexture exists: forwards it to
 * the camera (if already open) and caches it for later openCamera() calls.
 *
 * @param st SurfaceTexture the camera should render preview frames into
 */
@Override
public void onSetSurfaceTexture(SurfaceTexture st) {
Log.i(TAG, "onSetSurfaceTexture " + st);
if (mCapture != null) {
mCapture.setSurfaceTexture(st);
}
mSurfaceTexture = st;
}
/**
 * Camera produced a new preview frame: trigger a render pass on the GL
 * view (which is configured with RENDERMODE_WHEN_DIRTY in onCreateView()).
 *
 * @param st SurfaceTexture that received the new frame (unused here)
 */
@Override
public void onFrameAvailable(SurfaceTexture st) {
mSurfaceView.requestRender();
}
/**
 * Camera preview size changed: propagate the new size to the GL view and
 * resize the on-screen surface to match the video's aspect ratio.
 *
 * @param width  new preview width in pixels
 * @param height new preview height in pixels
 */
@Override
public void onPreviewSizeChange(int width, int height) {
Log.i(TAG, "onPreviewSizeChange (" + width + "x" + height + ");");
if (mSurfaceView != null) {
mSurfaceView.setCameraPreviewSize(width, height);
}
this.onChangeVideoSize(width, height);
}
/**
 * Camera-selection constants for the input radio group.
 * NOTE(review): inferred from the names — PREPOSE presumably means the
 * front-facing camera and SUFFIX the rear-facing one; confirm against
 * VideoCapture.Configuration.
 */
public static class InputType {
public static final int SUFFIX = 0;
public static final int PREPOSE = 1;
}
private int mInputType = InputType.PREPOSE;
private int mCodecID;
/**
 * TextWatcher that validates one numeric EditText: an empty field draws a
 * red border and disables the start button; non-empty text restores the
 * teal border and re-enables it.
 * <p>
 * NOTE(review): only the empty string is rejected; non-numeric text still
 * re-enables the button and would make Integer.parseInt throw when capture
 * starts — confirm whether that is acceptable.
 */
private class EditorWatch implements TextWatcher {
// The EditText whose background border this watcher updates.
private EditText mOpaque;
public EditorWatch(EditText opaque) {
mOpaque = opaque;
}
@Override
public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
}
@Override
public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
String text = charSequence.toString();
// If the watched field is empty (""), capture/encode must not be started.
if (text.equals("")) {
GradientDrawable drawable = (GradientDrawable)mOpaque.getBackground();
drawable.setStroke(2, 0xFFFF0000); // red border = invalid input
mCaptureStartBtn.setEnabled(false);
} else {
GradientDrawable drawable = (GradientDrawable)mOpaque.getBackground();
drawable.setStroke(2, 0xFF39C5BB); // teal border = valid input
mCaptureStartBtn.setEnabled(true);
}
}
@Override
public void afterTextChanged(Editable editable) {
}
}
private Context mActivity = null;
public static CaptureFragment newInstance(Bundle saveInstanceState) {
Bundle bundle = new Bundle();
......@@ -124,141 +47,6 @@ public class CaptureFragment extends Fragment implements View.OnClickListener, R
/**
 * Inflates the capture UI, wires listeners for the buttons, radio groups
 * and numeric fields, and configures the GL preview view.
 *
 * @return the inflated fragment root view
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup group, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.capture_fragment, group, false);
// Look up the UI controls used to configure capture and encoding.
mCaptureStartBtn = view.findViewById(R.id.capture_start_btn);
mCaptureStartBtn.setOnClickListener(this);
mCaptureStopBtn = view.findViewById(R.id.capture_stop_btn);
mCaptureStopBtn.setOnClickListener(this);
mCodecGroup = view.findViewById(R.id.code_group);
mCodecGroup.setOnCheckedChangeListener(this);
mInputGroup = view.findViewById(R.id.input_group);
mInputGroup.setOnCheckedChangeListener(this);
// Each numeric field gets a watcher that blocks start on empty input.
mWidthText = view.findViewById(R.id.width_edit_view);
mWidthText.addTextChangedListener(new EditorWatch(mWidthText));
mHeightText = view.findViewById(R.id.height_edit_view);
mHeightText.addTextChangedListener(new EditorWatch(mHeightText));
mFpsText = view.findViewById(R.id.fps_edit_view);
mFpsText.addTextChangedListener(new EditorWatch(mFpsText));
mBitrateText = view.findViewById(R.id.bit_edit_view);
mBitrateText.addTextChangedListener(new EditorWatch(mBitrateText));
// GL preview: ES 2.0, the view renders itself, redraw only on demand.
mSurfaceView = view.findViewById(R.id.surfaceView);
mSurfaceView.setEGLContextClientVersion(2);
mSurfaceView.setSurfaceTextureListener(this);
mSurfaceView.setRenderer(mSurfaceView);
mSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
return view;
}
/** Fragment lifecycle: resumed. */
@Override
public void onResume() {
super.onResume();
// TODO
// NOTE(review): GLSurfaceView.onResume()/onPause() are not forwarded here;
// confirm whether the GL view needs explicit pause/resume handling.
}
/**
 * Dispatches clicks from the start/stop capture buttons; any other view id
 * is ignored.
 *
 * @param view the clicked view
 */
@Override
public void onClick(View view) {
final int viewId = view.getId();
if (viewId == R.id.capture_start_btn) {
this.onStartCaptureAndEncoder();
} else if (viewId == R.id.capture_stop_btn) {
this.onStopCaptureAndEncoder();
}
}
/**
 * Routes RadioGroup changes to the codec or input-type handler.
 * <p>
 * Bug fix: the original switched on {@code checkid}, which is the id of
 * the newly checked RadioButton, and compared it against the ids of the
 * RadioGroup containers (R.id.code_group / R.id.input_group) — those can
 * never match, so neither handler ever ran. Dispatch on the group's own
 * id instead.
 *
 * @param group   the RadioGroup whose checked button changed
 * @param checkid id of the newly checked RadioButton
 */
@Override
public void onCheckedChanged(RadioGroup group, int checkid) {
switch (group.getId()) {
case R.id.code_group:
this.onCodecCheckedChanged(group);
break;
case R.id.input_group:
this.onInputCheckedChanged(group);
break;
}
}
/**
 * Reads the UI configuration (size, fps, bitrate, camera selection) and
 * starts camera capture.
 * <p>
 * Improvement: the already-running guard now executes before the text
 * fields are parsed, so a second click neither re-parses the fields nor
 * risks a NumberFormatException while a session is already active.
 */
private void onStartCaptureAndEncoder() {
// Already capturing — nothing to do.
if (mCapture != null) {
return;
}
final int width = Integer.parseInt(mWidthText.getText().toString());
final int height = Integer.parseInt(mHeightText.getText().toString());
final int fps = Integer.parseInt(mFpsText.getText().toString());
final int bitrate = Integer.parseInt(mBitrateText.getText().toString());
// Refresh mInputType from the radio group before building the config.
this.onInputCheckedChanged(mInputGroup);
mCameraConfiguration = new VideoCapture.Configuration(mInputType, width, height, 90);
mCapture = new VideoCapture();
mCapture.setAvailableListener(this);
mCapture.setPreviewSizeChangeListener(this);
mCapture.openCamera(mCameraConfiguration);
mCapture.setSurfaceTexture(mSurfaceTexture);
Log.i(TAG, "width:" + width + " height:" + height + " fps:" + fps + " bitrate:" + bitrate + " inputType:" + mInputType + " surfaceTexture:" + mSurfaceTexture);
}
/**
 * Stops capture, releases the camera, and clears the cached configuration.
 * Safe to call when no capture session is active.
 */
private void onStopCaptureAndEncoder() {
final VideoCapture capture = mCapture;
if (capture != null) {
capture.releaseCamera();
mCapture = null;
}
mCameraConfiguration = null;
}
/**
 * Reads the input-type radio group and updates mInputType accordingly;
 * an unrecognized (or absent) selection leaves mInputType unchanged.
 *
 * @param group the camera-selection RadioGroup
 */
private void onInputCheckedChanged(RadioGroup group) {
final int checkedId = group.getCheckedRadioButtonId();
if (checkedId == R.id.prepose_btn) {
mInputType = InputType.PREPOSE;
} else if (checkedId == R.id.suffix_btn) {
mInputType = InputType.SUFFIX;
}
Log.i(TAG, "onInputCheckedChanged " + mInputType);
}
/**
 * Updates mCodecID from the codec radio group.
 * <p>
 * NOTE(review): both the hardware and software branches assign
 * AVCodecDefine.CodecID.AVC — the software branch presumably should select
 * a different codec id; confirm intent.
 *
 * @param group the codec-selection RadioGroup
 */
private void onCodecCheckedChanged(RadioGroup group) {
switch (group.getCheckedRadioButtonId()) {
case R.id.avc_hw_radio_btn:
mCodecID = AVCodecDefine.CodecID.AVC;
break;
case R.id.avc_sw_radio_btn:
mCodecID = AVCodecDefine.CodecID.AVC;
break;
}
}
/**
 * Resizes the centered preview surface so the video keeps its aspect
 * ratio while fitting inside the current surface bounds.
 * <p>
 * NOTE(review): this compares Configuration.orientation against
 * ActivityInfo.SCREEN_ORIENTATION_PORTRAIT, which belongs to a different
 * constant family (the matching one is Configuration.ORIENTATION_PORTRAIT);
 * the two appear to share the value 1, so the check works by coincidence —
 * confirm and switch to the correct constant.
 *
 * @param width  incoming video width in pixels
 * @param height incoming video height in pixels
 */
private void onChangeVideoSize(int width, int height) {
int surfaceWidth = mSurfaceView.getWidth();
int surfaceHeight = mSurfaceView.getHeight();
float max;
// Pick the larger video-to-surface ratio so the scaled video fits inside.
if (getResources().getConfiguration().orientation == ActivityInfo.SCREEN_ORIENTATION_PORTRAIT) {
max = Math.max((float)width / (float)surfaceWidth, (float)height / (float)surfaceHeight);
} else {
max = Math.max((float)width / (float)surfaceHeight, (float)height / (float)surfaceWidth);
}
// Divide width/height by the max ratio to get the fitted video size.
width = (int) Math.ceil((float) width / max);
height = (int) Math.ceil((float) height / max);
RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(width, height);
layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
mSurfaceView.setLayoutParams(layoutParams);
}
}
package com.ly.avfoundation.fragment;
import android.graphics.Color;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.SurfaceView;
......
......@@ -7,7 +7,7 @@
android:layout_width="match_parent"
android:layout_height="400dp">
<com.ly.avfoundation.avfoundation.render.CameraGLSurfaceView
<SurfaceView
android:id="@+id/surfaceView"
android:layout_width="match_parent"
android:layout_height="match_parent"
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册