Commit 9740af73 authored by guoshuyu

Add support for a custom render (2017-09-26)

Parent 8a66072d
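
Usage sketch for the API introduced by this commit: the render type has to be the GL surface, and the custom render is attached to the player view before playback. The player field, URL and title below are illustrative assumptions; setCustomGLRenderer and GSYVideoGLViewCustomRender come from the diff, GSYVideoType.setRenderType is assumed to be the setter counterpart of the getRenderType() call in the diff, and setUp/startPlayLogic follow the demo's usual call pattern.

    // Sketch, assuming an Activity that holds a detailPlayer view.
    GSYVideoType.setRenderType(GSYVideoType.GLSURFACE);                  // custom renders only apply to the GL view
    detailPlayer.setCustomGLRenderer(new GSYVideoGLViewCustomRender());  // API added in this commit
    detailPlayer.setUp(url, true, "Title");
    detailPlayer.startPlayLogic();
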
......@@ -10,7 +10,7 @@
**Supported**|**Demo of retrieving the first video frame, etc.**
**Supported**|**Simple filters (20+ built in: black and white, color filter, Gaussian blur, blur, etc.).**
**Supported**|**GL effect animations (rotation around the x/y/z axes, zoom).**
**Supported**|**Video frame screenshot**
**Supported**|**Video frame screenshot feature**
**Supported**|**List playback; continuous list playback; gravity-based and manual rotation; the video's own rotation attribute.**
**Supported**|**Animated fullscreen switching; draggable small-window playback.**
**Supported**|**Fast and slow playback; network video loading speed.**
......
......@@ -30,8 +30,11 @@ import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by guoshuyu on 2017/6/18.
 * sampleVideo supports switching clarity between fullscreen and non-fullscreen, rotation, mirroring, and other features.
 * An Activity can extend GSYBaseActivityDetail to implement a detail-style page,
 * or refer to DetailPlayer and DetailListPlayer for an implementation.
*
* Created by guoshuyu on 2017/6/18.
*/
public class DetailControlActivity extends GSYBaseActivityDetail {
......
......@@ -13,6 +13,7 @@ import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.RequestOptions;
import com.example.gsyvideoplayer.effect.GSYVideoGLViewCustomRender;
import com.example.gsyvideoplayer.effect.PixelationEffect;
import com.example.gsyvideoplayer.utils.CommonUtil;
import com.example.gsyvideoplayer.utils.JumpUtils;
......@@ -63,6 +64,8 @@ import butterknife.ButterKnife;
/**
 * Filter demo
 * An Activity can extend GSYBaseActivityDetail to implement a detail-style page,
 * or refer to DetailPlayer and DetailListPlayer for an implementation.
* Created by guoshuyu on 2017/6/18.
*/
......@@ -128,6 +131,9 @@ public class DetailFilterActivity extends GSYBaseActivityDetail {
}
});
//A custom render must be set on the player
//detailPlayer.setCustomGLRenderer(new GSYVideoGLViewCustomRender());
changeFilter.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
......
......@@ -21,6 +21,8 @@ import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * An Activity can extend GSYBaseActivityDetail to implement a detail-style page,
 * or refer to DetailPlayer and DetailListPlayer for an implementation.
* Created by shuyu on 2016/12/20.
*/
......
package com.example.gsyvideoplayer.effect;
import android.opengl.GLSurfaceView;
import com.shuyu.gsyvideoplayer.GSYVideoGLView.ShaderInterface;
/**
 * Blends two rendered images into a single frame
*/
public class BitmapEffect implements ShaderInterface {
public BitmapEffect() {
}
@Override
public String getShader(GLSurfaceView mGlSurfaceView) {
String shader =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "uniform sampler2D sTexture2;\n"
+ "void main() {\n"
+ " vec4 c2 = texture2D(sTexture, vTextureCoord);\n"
+ " vec2 coord = vTextureCoord - vec2(0.5, 0.5);\n"
+ " vec4 c1 = texture2D(sTexture2, vTextureCoord);\n"
+ " gl_FragColor = vec4(mix(c2.rgb, c1.rgb, c1.a * 0.5), c2.a);\n"
+ "}\n";
return shader;
}
}
\ No newline at end of file
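
For reference, the fragment shader above samples the video frame from sTexture and the overlay from sTexture2, then blends them with mix() at half the overlay's alpha (the coord variable it computes is unused). The same per-channel math, written out as a plain Java helper purely for illustration (not part of the commit):

    // mix(video, overlay, t) = video * (1 - t) + overlay * t, with t = overlayAlpha * 0.5
    static float blendChannel(float video, float overlay, float overlayAlpha) {
        float t = overlayAlpha * 0.5f;
        return video * (1f - t) + overlay * t;
    }
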
package com.example.gsyvideoplayer.effect;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import com.shuyu.gsyvideoplayer.R;
import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewSimpleRender;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
 * Custom render: overlays a bitmap (watermark) on the video frame
*/
@SuppressLint("ViewConstructor")
public class GSYVideoGLViewCustomRender extends GSYVideoGLViewSimpleRender {
private Bitmap mBitmap;
private int mTexturesBitmap[] = new int[1];
//Watermark bitmap
private BitmapEffect bitmapEffect = new BitmapEffect();
public GSYVideoGLViewCustomRender() {
super();
}
@Override
protected void bindDrawFrameTexture() {
super.bindDrawFrameTexture();
int mFilterInputTextureUniform2 = GLES20.glGetUniformLocation(mProgram, "sTexture2");
GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexturesBitmap[0]);
if (mBitmap != null && !mBitmap.isRecycled()) {
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, mBitmap);
}
GLES20.glUniform1i(mFilterInputTextureUniform2, 3); //pass the texture unit index (GL_TEXTURE3), not the texture object name
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
super.onSurfaceCreated(glUnused, config);
mBitmap = BitmapFactory.decodeResource(mSurfaceView.getResources(), R.drawable.unlock);
GLES20.glGenTextures(1, mTexturesBitmap, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexturesBitmap[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0);
}
@Override
protected String getVertexShader() {
return super.getVertexShader();
}
@Override
protected String getFragmentShader() {
return bitmapEffect.getShader(mSurfaceView);
}
@Override
public void releaseAll() {
super.releaseAll();
if (mBitmap != null && !mBitmap.isRecycled()) {
mBitmap.recycle();
mBitmap = null;
}
}
}
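
GSYVideoGLViewCustomRender uploads the watermark once in onSurfaceCreated and refreshes it with texSubImage2D on every frame. If the overlay ever needs to change at runtime, the swap has to run on the GL thread; a hedged sketch of a helper that could be added to the class above (setWaterMarkBitmap is hypothetical, not part of this commit):

    // Hypothetical helper for GSYVideoGLViewCustomRender: replace the overlay bitmap on the GL thread.
    // Relies on the mSurfaceView, mBitmap and mTexturesBitmap fields shown above.
    public void setWaterMarkBitmap(final Bitmap bitmap) {
        if (mSurfaceView == null || bitmap == null) {
            return;
        }
        mSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                if (mBitmap != null && !mBitmap.isRecycled()) {
                    mBitmap.recycle();
                }
                mBitmap = bitmap;
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTexturesBitmap[0]);
                // Re-allocate the texture storage so the new bitmap's size is honoured.
                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, mBitmap, 0);
            }
        });
    }
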
......@@ -13,6 +13,7 @@ import android.widget.RelativeLayout;
import com.shuyu.gsyvideoplayer.listener.GSYVideoShotListener;
import com.shuyu.gsyvideoplayer.listener.GSYVideoShotSaveListener;
import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewBaseRender;
import com.shuyu.gsyvideoplayer.utils.FileUtils;
import com.shuyu.gsyvideoplayer.utils.GSYVideoType;
......@@ -187,6 +188,14 @@ public class GSYRenderView {
}
}
public void releaseAll() {
if (mShowView instanceof GSYVideoGLView) {
GSYVideoGLView gsyVideoGLView = (GSYVideoGLView) mShowView;
gsyVideoGLView.requestLayout();
gsyVideoGLView.releaseAll();
}
}
/**
* 添加播放的view
*/
......@@ -243,15 +252,21 @@ public class GSYRenderView {
/**
* 添加播放的view
*/
public void addGLView(Context context, ViewGroup textureViewContainer, int rotate, GSYVideoGLView.onGSYSurfaceListener gsySurfaceListener, GSYVideoGLView.ShaderInterface effect, float[] transform) {
public void addGLView(Context context, ViewGroup textureViewContainer, int rotate,
GSYVideoGLView.onGSYSurfaceListener gsySurfaceListener,
GSYVideoGLView.ShaderInterface effect, float[] transform,
GSYVideoGLViewBaseRender customRender) {
if (textureViewContainer.getChildCount() > 0) {
textureViewContainer.removeAllViews();
}
GSYVideoGLView gsyVideoGLView = new GSYVideoGLView(context);
if (customRender != null) {
gsyVideoGLView.setCustomRenderer(customRender);
}
gsyVideoGLView.setEffect(effect);
gsyVideoGLView.setGSYSurfaceListener(gsySurfaceListener);
gsyVideoGLView.setRotation(rotate);
gsyVideoGLView.initRender();
mShowView = gsyVideoGLView;
if (transform != null && transform.length == 16) {
......
......@@ -2,20 +2,21 @@ package com.shuyu.gsyvideoplayer;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Paint;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.Surface;
import com.shuyu.gsyvideoplayer.listener.GSYVideoShotListener;
import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewBaseRender;
import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewSimpleRender;
import com.shuyu.gsyvideoplayer.utils.MeasureHelper;
import com.shuyu.gsyvideoplayer.effect.NoEffect;
import java.io.File;
/**
 * Adapted from the videffects library
*
* <p>
 * Original @author sheraz.khilji
*/
@SuppressLint("ViewConstructor")
......@@ -23,12 +24,14 @@ public class GSYVideoGLView extends GLSurfaceView {
private static final String TAG = GSYVideoGLView.class.getName();
private GSYVideoGLViewSimpleRender mRenderer;
private GSYVideoGLViewBaseRender mRenderer;
private Context mContext;
private ShaderInterface mEffect = new NoEffect();
private float[] mMVPMatrix;
private MeasureHelper measureHelper;
private onGSYSurfaceListener mGSYSurfaceListener;
......@@ -56,7 +59,21 @@ public class GSYVideoGLView extends GLSurfaceView {
setEGLContextClientVersion(2);
mRenderer = new GSYVideoGLViewSimpleRender();
measureHelper = new MeasureHelper(this);
mRenderer.setSurfaceView(GSYVideoGLView.this);
}
public void initRender() {
setRenderer(mRenderer);
}
/**
 * Sets a custom render; other custom settings are cleared and must be applied again.
 * Only takes effect when called before initRender().
*
* @param CustomRender
*/
public void setCustomRenderer(GSYVideoGLViewBaseRender CustomRender) {
this.mRenderer = CustomRender;
mRenderer.setSurfaceView(GSYVideoGLView.this);
}
......@@ -74,6 +91,7 @@ public class GSYVideoGLView extends GLSurfaceView {
public void setMVPMatrix(float[] MVPMatrix) {
if (MVPMatrix != null) {
mMVPMatrix = MVPMatrix;
mRenderer.setMVPMatrix(MVPMatrix);
}
}
......@@ -82,6 +100,7 @@ public class GSYVideoGLView extends GLSurfaceView {
mRenderer.takeShotPic();
}
public void setGSYVideoShotListener(GSYVideoShotListener listener, boolean high) {
this.mRenderer.setGSYVideoShotListener(listener, high);
}
......@@ -117,4 +136,9 @@ public class GSYVideoGLView extends GLSurfaceView {
return measureHelper.getMeasuredWidth();
}
public void releaseAll() {
if (mRenderer != null) {
mRenderer.releaseAll();
}
}
}
package com.shuyu.gsyvideoplayer.render;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLException;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import com.shuyu.gsyvideoplayer.GSYVideoGLView;
import com.shuyu.gsyvideoplayer.R;
import com.shuyu.gsyvideoplayer.effect.NoEffect;
import com.shuyu.gsyvideoplayer.listener.GSYVideoShotListener;
import com.shuyu.gsyvideoplayer.utils.Debuger;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
 * Base render for GSYVideoGLView; custom renders extend this class.
*/
@SuppressLint("ViewConstructor")
public abstract class GSYVideoGLViewBaseRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
//Whether a high-quality screenshot is required
protected boolean mHighShot = false;
protected GSYVideoGLView.onGSYSurfaceListener mGSYSurfaceListener;
protected GLSurfaceView mSurfaceView;
public abstract void releaseAll();
public void setSurfaceView(GLSurfaceView surfaceView) {
this.mSurfaceView = surfaceView;
}
public void sendSurfaceForPlayer(Surface surface) {
if (mGSYSurfaceListener != null) {
mGSYSurfaceListener.onSurfaceAvailable(surface);
}
}
protected int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS,
compiled, 0);
if (compiled[0] == 0) {
Debuger.printfError("Could not compile shader " + shaderType + ":");
Debuger.printfError(GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
protected int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS,
linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Debuger.printfError("Could not link program: ");
Debuger.printfError(GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
protected void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Debuger.printfError(op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
/**
 * Creates a bitmap screenshot from the GL surface
*/
protected Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl) {
int bitmapBuffer[] = new int[w * h];
int bitmapSource[] = new int[w * h];
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
intBuffer.position(0);
try {
gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.
GL_UNSIGNED_BYTE,
intBuffer);
int offset1, offset2;
for (int i = 0; i < h; i++) {
offset1 = i * w;
offset2 = (h - i - 1) * w;
for (int j = 0; j < w; j++) {
int texturePixel = bitmapBuffer[offset1 + j];
int blue = (texturePixel >> 16) & 0xff;
int red = (texturePixel << 16) & 0x00ff0000;
int pixel = (texturePixel & 0xff00ff00) | red | blue;
bitmapSource[offset2 + j] = pixel;
}
}
} catch (GLException e) {
return null;
}
if (mHighShot) {
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
} else {
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.RGB_565);
}
}
public void setGSYSurfaceListener(GSYVideoGLView.onGSYSurfaceListener onSurfaceListener) {
this.mGSYSurfaceListener = onSurfaceListener;
}
/**
 * Transform animation (MVP matrix)
*/
public void setMVPMatrix(float[] MVPMatrix) {
}
/**
 * Triggers a screenshot
*/
public void takeShotPic() {
}
/**
 * Screenshot listener
*/
public void setGSYVideoShotListener(GSYVideoShotListener listener, boolean high) {
}
/**
 * Sets the filter effect
* @param shaderEffect
*/
public void setEffect(GSYVideoGLView.ShaderInterface shaderEffect) {
}
}
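
GSYVideoGLViewBaseRender carries the shared shader and program plumbing (loadShader, createProgram, screenshot helpers), so in practice the lightest custom render extends GSYVideoGLViewSimpleRender, as GSYVideoGLViewCustomRender above does, and overrides only getFragmentShader(). A minimal sketch; the class name and package are illustrative:

    package com.example.gsyvideoplayer.effect;

    import com.shuyu.gsyvideoplayer.GSYVideoGLView;
    import com.shuyu.gsyvideoplayer.effect.NoEffect;
    import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewSimpleRender;

    // Minimal custom render sketch: reuse the simple render's drawing pipeline and
    // only swap the fragment shader. Replace NoEffect with any ShaderInterface.
    public class MyCustomRender extends GSYVideoGLViewSimpleRender {

        private final GSYVideoGLView.ShaderInterface mCustomEffect = new NoEffect();

        @Override
        protected String getFragmentShader() {
            // mSurfaceView comes from GSYVideoGLViewBaseRender.
            return mCustomEffect.getShader(mSurfaceView);
        }
    }
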
package com.shuyu.gsyvideoplayer;
package com.shuyu.gsyvideoplayer.render;
import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLException;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import com.shuyu.gsyvideoplayer.GSYVideoGLView;
import com.shuyu.gsyvideoplayer.effect.NoEffect;
import com.shuyu.gsyvideoplayer.listener.GSYVideoShotListener;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
......@@ -29,14 +25,16 @@ import javax.microedition.khronos.opengles.GL10;
 * Original @author sheraz.khilji
*/
@SuppressLint("ViewConstructor")
public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private static String TAG = GSYVideoGLViewSimpleRender.class.getName();
public class GSYVideoGLViewSimpleRender extends GSYVideoGLViewBaseRender {
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private final float[] mTriangleVerticesData = {
......@@ -44,7 +42,6 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
-1.0f, -1.0f, 0, 0.f, 0.f, 1.0f, -1.0f, 0, 1.f, 0.f, -1.0f,
1.0f, 0, 0.f, 1.f, 1.0f, 1.0f, 0, 1.f, 1.f,};
private final String mVertexShader = "uniform mat4 uMVPMatrix;\n"
+ "uniform mat4 uSTMatrix;\n"
+ "attribute vec4 aPosition;\n"
......@@ -59,29 +56,30 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
private float[] mSTMatrix = new float[16];
private int mProgram;
protected int mProgram;
private int mTextureID[] = new int[2];
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private boolean mUpdateSurface = false;
private boolean mTakeShotPic = false;
private boolean mHighShot = false;
private boolean mTakeShotPic = false;
private FloatBuffer mTriangleVertices;
private SurfaceTexture mSurface;
private GSYVideoGLView.onGSYSurfaceListener mGSYSurfaceListener;
private GSYVideoShotListener mGSYVideoShotListener;
private GSYVideoGLView.ShaderInterface mEffect = new NoEffect();
private GLSurfaceView mSurfaceView;
public GSYVideoGLViewSimpleRender() {
mTriangleVertices = ByteBuffer
.allocateDirect(
......@@ -93,18 +91,6 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
Matrix.setIdentityM(mMVPMatrix, 0);
}
public void setGSYSurfaceListener(GSYVideoGLView.onGSYSurfaceListener onSurfaceListener) {
this.mGSYSurfaceListener = onSurfaceListener;
}
public void setSurfaceView(GLSurfaceView surfaceView) {
this.mSurfaceView = surfaceView;
}
public void setMVPMatrix(float[] MVPMatrix) {
this.mMVPMatrix = MVPMatrix;
}
@Override
public void onDrawFrame(GL10 glUnused) {
synchronized (this) {
......@@ -114,47 +100,13 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
mUpdateSurface = false;
}
}
mProgram = createProgram(mVertexShader, mEffect.getShader(mSurfaceView));
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT
| GLES20.GL_COLOR_BUFFER_BIT);
initDrawFrame();
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
bindDrawFrameTexture();
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID[0]);
initPointerAndDraw();
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT,
false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT,
false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix,
0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
if (mTakeShotPic) {
mTakeShotPic = false;
if (mGSYVideoShotListener != null) {
Bitmap bitmap = createBitmapFromGLSurface(0, 0, mSurfaceView.getWidth(), mSurfaceView.getHeight(), glUnused);
mGSYVideoShotListener.getBitmap(bitmap);
}
}
takeBitmap(glUnused);
GLES20.glFinish();
......@@ -168,7 +120,7 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mEffect.getShader(mSurfaceView));
mProgram = createProgram(getVertexShader(), getFragmentShader());
if (mProgram == 0) {
return;
}
......@@ -221,9 +173,8 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
if (mGSYSurfaceListener != null) {
mGSYSurfaceListener.onSurfaceAvailable(surface);
}
sendSurfaceForPlayer(surface);
}
@Override
......@@ -231,110 +182,104 @@ public class GSYVideoGLViewSimpleRender implements GLSurfaceView.Renderer, Surfa
mUpdateSurface = true;
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS,
compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
@Override
public void releaseAll() {
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER,
fragmentSource);
if (pixelShader == 0) {
return 0;
}
protected void initDrawFrame() {
mProgram = createProgram(getVertexShader(), getFragmentShader());
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT
| GLES20.GL_COLOR_BUFFER_BIT);
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS,
linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
protected void bindDrawFrameTexture() {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID[0]);
}
private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl) {
int bitmapBuffer[] = new int[w * h];
int bitmapSource[] = new int[w * h];
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
intBuffer.position(0);
try {
gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.
GL_UNSIGNED_BYTE,
intBuffer);
int offset1, offset2;
for (int i = 0; i < h; i++) {
offset1 = i * w;
offset2 = (h - i - 1) * w;
for (int j = 0; j < w; j++) {
int texturePixel = bitmapBuffer[offset1 + j];
int blue = (texturePixel >> 16) & 0xff;
int red = (texturePixel << 16) & 0x00ff0000;
int pixel = (texturePixel & 0xff00ff00) | red | blue;
bitmapSource[offset2 + j] = pixel;
}
protected void takeBitmap(GL10 glUnused) {
if (mTakeShotPic) {
mTakeShotPic = false;
if (mGSYVideoShotListener != null) {
Bitmap bitmap = createBitmapFromGLSurface(0, 0, mSurfaceView.getWidth(), mSurfaceView.getHeight(), glUnused);
mGSYVideoShotListener.getBitmap(bitmap);
}
} catch (GLException e) {
return null;
}
if (mHighShot) {
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
} else {
return Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.RGB_565);
}
}
public void takeShotPic() {
mTakeShotPic = true;
protected void initPointerAndDraw() {
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT,
false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT,
false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix,
0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
}
protected String getVertexShader() {
return mVertexShader;
}
protected String getFragmentShader() {
return mEffect.getShader(mSurfaceView);
}
public void setEffect(GSYVideoGLView.ShaderInterface shaderEffect) {
if (shaderEffect != null)
mEffect = shaderEffect;
/**
 * Transform animation (MVP matrix)
*/
public void setMVPMatrix(float[] MVPMatrix) {
this.mMVPMatrix = MVPMatrix;
}
/**
 * Triggers a screenshot
*/
public void takeShotPic() {
mTakeShotPic = true;
}
/**
 * Screenshot listener
*/
public void setGSYVideoShotListener(GSYVideoShotListener listener, boolean high) {
this.mGSYVideoShotListener = listener;
this.mHighShot = high;
}
/**
 * Sets the filter effect
* @param shaderEffect
*/
public void setEffect(GSYVideoGLView.ShaderInterface shaderEffect) {
if (shaderEffect != null)
mEffect = shaderEffect;
}
}
......@@ -209,6 +209,7 @@ public abstract class GSYBaseVideoPlayer extends GSYVideoControlView {
to.mNetSate = from.mNetSate;
to.mRotateWithSystem = from.mRotateWithSystem;
to.mBackUpPlayingBufferState = from.mBackUpPlayingBufferState;
to.mRenderer = from.mRenderer;
to.setUp(from.mOriginUrl, from.mCache, from.mCachePath, from.mMapHeadData, from.mTitle);
to.setStateAndUi(from.mCurrentState);
}
......
......@@ -18,6 +18,7 @@ import com.shuyu.gsyvideoplayer.GSYRenderView;
import com.shuyu.gsyvideoplayer.GSYVideoGLView;
import com.shuyu.gsyvideoplayer.GSYVideoManager;
import com.shuyu.gsyvideoplayer.effect.NoEffect;
import com.shuyu.gsyvideoplayer.render.GSYVideoGLViewBaseRender;
import com.shuyu.gsyvideoplayer.utils.GSYVideoType;
/**
......@@ -47,6 +48,9 @@ public abstract class GSYTextureRenderView extends FrameLayout implements Textur
//Rotation angle of the frame
protected int mRotate;
//Custom render
protected GSYVideoGLViewBaseRender mRenderer;
public GSYTextureRenderView(@NonNull Context context) {
super(context);
}
......@@ -140,7 +144,7 @@ public abstract class GSYTextureRenderView extends FrameLayout implements Textur
mTextureView.addSurfaceView(getContext(), mTextureViewContainer, mRotate, this);
return;
} else if (GSYVideoType.getRenderType() == GSYVideoType.GLSURFACE) {
mTextureView.addGLView(getContext(), mTextureViewContainer, mRotate, this, mEffectFilter, mMatrixGL);
mTextureView.addGLView(getContext(), mTextureViewContainer, mRotate, this, mEffectFilter, mMatrixGL, mRenderer);
return;
}
mTextureView.addTextureView(getContext(), mTextureViewContainer, mRotate, this);
......@@ -227,6 +231,7 @@ public abstract class GSYTextureRenderView extends FrameLayout implements Textur
/**
 * Matrix effect applied to the frame in GL mode
 *
 * @param matrixGL must have a length of 16
*/
public void setMatrixGL(float[] matrixGL) {
......@@ -238,4 +243,17 @@ public abstract class GSYTextureRenderView extends FrameLayout implements Textur
gsyVideoGLView.setMVPMatrix(mMatrixGL);
}
}
/**
 * Custom GL render
*/
public void setCustomGLRenderer(GSYVideoGLViewBaseRender renderer) {
this.mRenderer = renderer;
if (mTextureView != null && mRenderer != null &&
mTextureView.getShowView() instanceof GSYVideoGLView) {
GSYVideoGLView gsyVideoGLView =
(GSYVideoGLView) mTextureView.getShowView();
gsyVideoGLView.setCustomRenderer(mRenderer);
}
}
}