Commit fb2ecda5 authored by 杨时权

[Requirement] Android: support front/rear camera capture and preview, with rendering implemented via SurfaceView.

Parent 76c5c707
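For orientation, the classes added in this commit are wired together the way CaptureFragment does at the bottom of this diff: a SurfaceView hands its Surface to CameraSurfaceRenderer, the renderer reports back an input SurfaceTexture, and that texture becomes the camera's preview target. A minimal sketch follows; the layout id, the 1280x720 preview size, and the Java 8 lambdas are illustrative assumptions, not part of the commit.

    // Assumes a SurfaceView with id R.id.surfaceView in an already-inflated layout.
    SurfaceView surfaceView = view.findViewById(R.id.surfaceView);
    CameraSurfaceRenderer renderer = new CameraSurfaceRenderer();
    surfaceView.getHolder().addCallback(renderer);   // renderer implements SurfaceHolder.Callback

    VideoCapture capture = new VideoCapture();
    VideoCapture.Configuration config = new VideoCapture.Configuration(0, 0, 0, 90);
    config.facingType = VideoCapture.FactingType.FRONT;
    config.width = 1280;    // assumed preview size
    config.height = 720;
    capture.setPreviewSizeChangeListener((w, h) -> renderer.setVideoResolution(w, h));

    // The renderer delivers its input SurfaceTexture once the output Surface is ready;
    // hand that texture to the camera as the preview target.
    renderer.setSurfaceRenderListener(st -> {
        capture.openCamera(config);
        capture.setSurfaceTexture(st);
    });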
......@@ -81,4 +81,9 @@ public class AspectFrameLayout extends FrameLayout {
// "] height=[" + View.MeasureSpec.toString(heightMeasureSpec) + "]");
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
super.onSizeChanged(w, h, oldW, oldH);
}
}
......@@ -6,29 +6,22 @@ import android.util.Log;
import java.io.IOException;
public class VideoCapture implements SurfaceTexture.OnFrameAvailableListener {
public class VideoCapture {
private static final String TAG = "[VideoCapture]";
private Camera mCamera;
private Configuration mConfiguration;
private OnFrameAvailableListener mAvailableListener = null;
private OnPreviewSizeChangeListener mPreviewSizeChangeListener = null;
public interface OnFrameAvailableListener {
void onFrameAvailable(SurfaceTexture st);
public static class FactingType {
public final static int REAR = 0; // rear-facing camera
public final static int FRONT = 1; // front-facing camera
}
public interface OnPreviewSizeChangeListener {
void onPreviewSizeChange(int width, int height);
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
if (mAvailableListener != null) {
mAvailableListener.onFrameAvailable(surfaceTexture);
}
}
public static class Configuration {
public int facingType;
public int width;
......@@ -48,10 +41,6 @@ public class VideoCapture implements SurfaceTexture.OnFrameAvailableListener {
mConfiguration = null;
}
public void setAvailableListener(OnFrameAvailableListener listener) {
mAvailableListener = listener;
}
public void setPreviewSizeChangeListener(OnPreviewSizeChangeListener listener) {
mPreviewSizeChangeListener = listener;
}
......@@ -92,11 +81,22 @@ public class VideoCapture implements SurfaceTexture.OnFrameAvailableListener {
// impact on frame rate.
parameters.setRecordingHint(true);
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
// leave the frame rate set to default
mCamera.setParameters(parameters);
// set display orientation.
mCamera.setDisplayOrientation(mConfiguration.angle);
mCamera.setDisplayOrientation(0);
mCamera.cancelAutoFocus();
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
}
});
if (mPreviewSizeChangeListener != null) {
mPreviewSizeChangeListener.onPreviewSizeChange(parameters.getPreviewSize().width, parameters.getPreviewSize().height);
......@@ -105,7 +105,6 @@ public class VideoCapture implements SurfaceTexture.OnFrameAvailableListener {
}
public void setSurfaceTexture(SurfaceTexture st) {
st.setOnFrameAvailableListener(this);
try {
mCamera.setPreviewTexture(st);
} catch (IOException e) {
......
......@@ -33,4 +33,60 @@ public class AVCodecDefine {
public final static int SURFACE = 22;
public final static int TEXTURE = 23;
}
/**
* lyav_media Video Profile
*/
public static class Profile {
public final static int BASELINE = 1;
public final static int MAIN = 2;
public final static int HIGH10 = 3;
public final static int HIGH422 = 4;
public final static int HIGH444 = 5;
public final static int HIGH = 6;
}
/**
* lyav_media Video Level
*/
public static class Level {
public final static int L10 = 10;
public final static int L11 = 11;
public final static int L12 = 12;
public final static int L20 = 20;
public final static int L21 = 21;
public final static int L22 = 22;
public final static int L30 = 30;
public final static int L32 = 32;
public final static int L40 = 40;
public final static int L41 = 41;
}
/**
* lyav_media Video Encoder BitrateMode
*/
public static class BitrateMode {
public final static int UNKNOWN = 0xFF;
public final static int BTM_QP = 1;
public final static int BTM_VBR = 2;
public final static int BTM_CBR = 3;
}
/**
* lyav_media Video Codec Mime
*/
public static class Mime {
public final static String CODEC_UNKNOWN_MIME = "video/unknown";
public final static String CODEC_AVC_MIME = "video/avc";
public final static String CODEC_HEVC_MIME = "video/hevc";
}
/**
* lyav_media Codec Status
*/
public static class Status {
public final static int SUCCESS = 0;
public final static int ENC_ERR_CONFIG = -1000;
}
}
package com.ly.avfoundation.avfoundation.common;
import android.media.MediaCodecInfo;
public class AVCodecParameters {
public static final int PADDING_MAX_LENGTH = 64;
......@@ -24,11 +26,17 @@ public class AVCodecParameters {
/**
* -video: the pixel format.
* -audio: the sample format.
* {@link AVCodecDefine.Format}
*/
public int format;
public int mFormat;
// the average bitrate of the encoded data (in bits per second).
public long mBitRate;
public int mBitRate;
/**
* {@link AVCodecDefine.BitrateMode}
*/
public int mBitrateMode;
/**
* The number of bits per sample in the coded words.
......@@ -42,6 +50,8 @@ public class AVCodecParameters {
/**
* Codec-specific bit stream restrictions that the stream conforms to.
* {@link AVCodecDefine.Profile}
* {@link AVCodecDefine.Level}
*/
public int mProfile;
public int mLevel;
......@@ -63,8 +73,9 @@ public class AVCodecParameters {
parameters.mCodecID = AVCodecDefine.CodecID.UNKNOWN;
parameters.mExtraData = null;
parameters.mExtraDataSize = 0;
parameters.format = 0;
parameters.mFormat = 0;
parameters.mBitRate = 0;
parameters.mBitrateMode = 0xFF;
parameters.mBitsPerCodedSample = 0;
parameters.mBitsPerRawSample = 0;
parameters.mProfile = 0;
......@@ -84,4 +95,84 @@ public class AVCodecParameters {
System.arraycopy(extraData, 0, mExtraData, 0, extraDataSize);
mExtraDataSize = extraDataSize;
}
public String mime() {
switch (mCodecID) {
case AVCodecDefine.CodecID.AVC:
return AVCodecDefine.Mime.CODEC_AVC_MIME;
case AVCodecDefine.CodecID.HEVC:
return AVCodecDefine.Mime.CODEC_HEVC_MIME;
default:
return AVCodecDefine.Mime.CODEC_UNKNOWN_MIME;
}
}
public int getColorFormat() {
switch (mFormat) {
case AVCodecDefine.Format.I420:
return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
case AVCodecDefine.Format.NV12:
return MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
case AVCodecDefine.Format.SURFACE:
case AVCodecDefine.Format.TEXTURE:
default:
return MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
}
}
public int getBitrateMode() {
switch (mBitrateMode) {
case AVCodecDefine.BitrateMode.BTM_QP:
return MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ;
case AVCodecDefine.BitrateMode.BTM_CBR:
return MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR;
case AVCodecDefine.BitrateMode.BTM_VBR:
default:
return MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR;
}
}
public int getProfile() {
switch (mProfile) {
case AVCodecDefine.Profile.BASELINE:
return MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline;
case AVCodecDefine.Profile.MAIN:
return MediaCodecInfo.CodecProfileLevel.AVCProfileMain;
case AVCodecDefine.Profile.HIGH10:
return MediaCodecInfo.CodecProfileLevel.AVCProfileHigh10;
case AVCodecDefine.Profile.HIGH422:
return MediaCodecInfo.CodecProfileLevel.AVCProfileHigh422;
case AVCodecDefine.Profile.HIGH444:
return MediaCodecInfo.CodecProfileLevel.AVCProfileHigh444;
case AVCodecDefine.Profile.HIGH:
default:
return MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;
}
}
public int getLevel() {
switch (mLevel) {
case AVCodecDefine.Level.L10:
return MediaCodecInfo.CodecProfileLevel.AVCLevel1;
case AVCodecDefine.Level.L11:
return MediaCodecInfo.CodecProfileLevel.AVCLevel11;
case AVCodecDefine.Level.L12:
return MediaCodecInfo.CodecProfileLevel.AVCLevel12;
case AVCodecDefine.Level.L20:
return MediaCodecInfo.CodecProfileLevel.AVCLevel2;
case AVCodecDefine.Level.L21:
return MediaCodecInfo.CodecProfileLevel.AVCLevel21;
case AVCodecDefine.Level.L22:
return MediaCodecInfo.CodecProfileLevel.AVCLevel22;
case AVCodecDefine.Level.L30:
return MediaCodecInfo.CodecProfileLevel.AVCLevel3;
case AVCodecDefine.Level.L32:
return MediaCodecInfo.CodecProfileLevel.AVCLevel32;
case AVCodecDefine.Level.L40:
return MediaCodecInfo.CodecProfileLevel.AVCLevel4;
case AVCodecDefine.Level.L41:
default:
return MediaCodecInfo.CodecProfileLevel.AVCLevel41;
}
}
}
package com.ly.avfoundation.avfoundation.encoder;
import android.view.Surface;
import com.ly.avfoundation.avfoundation.common.AVCodecParameters;
import com.ly.avfoundation.avfoundation.common.AVFrame;
......@@ -14,6 +16,15 @@ public interface IEncoder {
void onFrame(AVFrame frame);
}
/**
* Callback for the encoder's input Surface; valid only when the input format is set to {@link com.ly.avfoundation.avfoundation.common.AVCodecDefine.Format#SURFACE}
* or {@link com.ly.avfoundation.avfoundation.common.AVCodecDefine.Format#TEXTURE}.
*/
interface OnInputSurfaceListener {
void onInputSurfaceCreate(Surface surface);
void onInputSurfaceDestroy(Surface surface);
}
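// Usage sketch (not part of this commit): a client that uses Surface input could register this
// listener and keep the codec's input Surface as a render target. The setter name
// setInputSurfaceListener is an assumption; only the listener interface itself is defined here.
//
//   encoder.setInputSurfaceListener(new OnInputSurfaceListener() {
//       @Override public void onInputSurfaceCreate(Surface surface)  { /* start drawing frames into surface */ }
//       @Override public void onInputSurfaceDestroy(Surface surface) { /* stop using surface */ }
//   });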
/**
* Configures the encoder.
* @param parameters the parameters to apply; the values actually used are written back into parameters
......@@ -21,6 +32,8 @@ public interface IEncoder {
*/
int configure(AVCodecParameters parameters);
Surface getInputSurface();
/**
* Resets the encoder.
* @return a status code, {@link com.ly.avfoundation.avfoundation.common.Define.Status}
......
package com.ly.avfoundation.avfoundation.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Range;
import android.view.Surface;
import androidx.annotation.NonNull;
import com.ly.avfoundation.avfoundation.common.AVCodecDefine;
import com.ly.avfoundation.avfoundation.common.AVCodecParameters;
public class MediaCodecEncoder implements IEncoder {
private final static String TAG = "MediaCodecEncoder";
private MediaCodec mEncoder = null;
private Surface mInputSurface = null;
private AVCodecParameters mParameters = null;
private String mMime = "";
private String mCodecName = "";
private MediaFormat mInputFormat = null;
private MediaCodec.BufferInfo mBufferInfo = null;
private Boolean mAsyncMode = Boolean.FALSE;
private Boolean mQuit = Boolean.TRUE;
private Handler mHandler = null;
private OnInputSurfaceListener mInputSurfaceListener = null;
private OnFrameListener mFrameListener = null;
public MediaCodecEncoder() {
HandlerThread thread = new HandlerThread("MediaCodecEncoder");
thread.start(); // the thread must be started before its Looper can back a Handler
mHandler = new Handler(thread.getLooper());
}
@Override
public int configure(AVCodecParameters parameters) {
synchronized (this) {
try {
reset();
mParameters = parameters;
mMime = mParameters.mime();
mEncoder = MediaCodec.createEncoderByType(mMime);
mCodecName = mEncoder.getName();
createInputFormat();
setAsyncModeCallback();
mEncoder.configure(mInputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (mParameters.getColorFormat() == MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface) {
mInputSurface = mEncoder.createInputSurface();
if (mInputSurfaceListener != null) {
mInputSurfaceListener.onInputSurfaceCreate(mInputSurface);
}
}
mBufferInfo = new MediaCodec.BufferInfo();
mEncoder.start();
postDrainEncoder();
return AVCodecDefine.Status.SUCCESS;
} catch (Throwable ex) {
Log.e(TAG, "configure error mime=" + mMime + " exception=" + Log.getStackTraceString(ex));
try {
if (mEncoder != null) {
mEncoder.release();
}
} catch (Exception exp) {
Log.i(TAG, "configure error release crash with trace:" + Log.getStackTraceString(exp));
}
mEncoder = null;
return AVCodecDefine.Status.ENC_ERR_CONFIG;
}
}
}
@Override
public Surface getInputSurface() {
return mInputSurface;
}
@Override
public int reset() {
releaseMediaCodec();
return AVCodecDefine.Status.SUCCESS;
}
@Override
public int destroy() {
Log.i(TAG, "destroy.");
mInputFormat = null;
mQuit = Boolean.TRUE;
mInputSurfaceListener = null;
if (mHandler != null) {
mHandler.post(new Runnable() {
@Override
public void run() {
// TODO
}
});
mHandler.getLooper().quitSafely();
}
return AVCodecDefine.Status.SUCCESS;
}
@Override
public String version() {
return "@Version-1.0.0";
}
private void createInputFormat() {
mInputFormat = MediaFormat.createVideoFormat(mParameters.mime(), getLowerWidth(mParameters.mWidth), getLowerHeight(mParameters.mHeight));
mInputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mParameters.getColorFormat());
mInputFormat.setInteger(MediaFormat.KEY_BIT_RATE, mParameters.mBitRate * 1024);
mInputFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, mParameters.getBitrateMode());
mInputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mParameters.mFramePerSecond);
mInputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
setProfileLevel();
}
private void setAsyncModeCallback() {
if (Build.VERSION.SDK_INT >= 23) {
MediaCodec.Callback callback = new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(@NonNull MediaCodec mediaCodec, int i) {
// nothing
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec, int i, @NonNull MediaCodec.BufferInfo bufferInfo) {
// deal with output
}
@Override
public void onError(@NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
// deal with error
}
@Override
public void onOutputFormatChanged(@NonNull MediaCodec mediaCodec, @NonNull MediaFormat mediaFormat) {
}
};
if (Build.VERSION.SDK_INT >= 23) {
mEncoder.setCallback(callback, mHandler);
} else {
mEncoder.setCallback(callback);
}
mAsyncMode = true;
} else {
mAsyncMode = false;
}
Log.i(TAG, "setAsyncModeCallback result:" + mAsyncMode);
}
private void postDrainEncoder() {
if (mAsyncMode) {
Log.i(TAG, "postDrainEncoder cancel for asyncModel=" + mAsyncMode);
return;
}
if (mHandler == null) {
Log.e(TAG, "postDrainEncoder handler=" + mHandler);
return;
}
mHandler.post(new Runnable() {
@Override
public void run() {
// TODO
}
});
}
private void releaseMediaCodec() {
}
private void setProfileLevel() {
if (mEncoder == null) {
return;
}
int profile = mParameters.getProfile();
int level = mParameters.getLevel();
if (profile > 0) {
level = level > MediaCodecInfo.CodecProfileLevel.AVCLevel42 ? MediaCodecInfo.CodecProfileLevel.AVCLevel42 : level;
mInputFormat.setInteger("profile", profile);
mInputFormat.setInteger("level", level);
}
}
private int getLowerWidth(int width) {
if (Build.VERSION.SDK_INT >= 21) {
Range<Integer> widthRange = getWidthRange();
if (widthRange != null && width < widthRange.getLower()) {
Log.i(TAG, "getLowerWidth inW=" + width + " lowerW=" + widthRange.getLower());
return widthRange.getLower();
}
}
return width;
}
private int getLowerHeight(int height) {
if (Build.VERSION.SDK_INT >= 21) {
Range<Integer> heightRange = getHeightRange();
if (heightRange != null && height < heightRange.getLower()) {
Log.i(TAG, "getLowerHeight inH=" + height + " lowerH=" + heightRange.getLower());
return heightRange.getLower();
}
}
return height;
}
private Range<Integer> getWidthRange() {
if (mEncoder == null) {
return null;
}
MediaCodecInfo codecInfo = mEncoder.getCodecInfo();
if (!codecInfo.isEncoder()) {
return null;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(types[j]);
if (Build.VERSION.SDK_INT >= 21) {
MediaCodecInfo.VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
if (videoCapabilities != null) {
Range<Integer> widthRange = videoCapabilities.getSupportedWidths();
Log.i(TAG, "getWidthRange max=" + widthRange.getUpper() + " min=" + widthRange.getLower());
return widthRange;
}
}
}
return null;
}
private Range<Integer> getHeightRange() {
if (mEncoder == null) {
return null;
}
MediaCodecInfo codecInfo = mEncoder.getCodecInfo();
if (!codecInfo.isEncoder()) {
return null;
}
String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(types[j]);
if (Build.VERSION.SDK_INT >= 21) {
MediaCodecInfo.VideoCapabilities videoCapabilities = capabilities.getVideoCapabilities();
if (videoCapabilities != null) {
Range<Integer> heightRange = videoCapabilities.getSupportedHeights();
Log.i(TAG, "getHeightRange max=" + heightRange.getUpper() + " min=" + heightRange.getLower());
return heightRange;
}
}
}
return null;
}
}
......@@ -344,6 +344,11 @@ public final class EglCore {
return mGlVersion;
}
/**
* Return the GLES context.
*/
public EGLContext getContext() { return mEGLContext; }
/**
* Writes the current display, context, and surface to the log.
*/
......
package com.ly.avfoundation.avfoundation.gles;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Tweaked version of Drawable2d that rescales the texture coordinates to provide a
* "zoom" effect.
*/
public class ScaleDrawable2d extends Drawable2d {
private static final String TAG = "ScaleDrawable2d";
private static final int SIZEOF_FLOAT = 4;
private FloatBuffer mTweakedTexCoordArray;
private float mScale = 1.0f;
private boolean mRecalculate;
/**
* Prepares a drawable from a "pre-fabricated" shape definition.
* <p>
* Does no EGL/GL operations, so this can be done at any time.
*
* @param shape
*/
public ScaleDrawable2d(Prefab shape) {
super(shape);
mRecalculate = true;
}
/**
* Set the scale factor.
*/
public void setScale(float scale) {
if (scale < 0.0f || scale > 1.0f) {
throw new RuntimeException("invalid scale " + scale);
}
mScale = scale;
mRecalculate = true;
}
/**
* Returns the array of texture coordinates. The first time this is called, we generate
* a modified version of the array from the parent class.
* <p>
* To avoid allocations, this returns internal state. The caller must not modify it.
*/
@Override
public FloatBuffer getTexCoordArray() {
if (mRecalculate) {
//Log.v(TAG, "Scaling to " + mScale);
FloatBuffer parentBuf = super.getTexCoordArray();
int count = parentBuf.capacity();
if (mTweakedTexCoordArray == null) {
ByteBuffer bb = ByteBuffer.allocateDirect(count * SIZEOF_FLOAT);
bb.order(ByteOrder.nativeOrder());
mTweakedTexCoordArray = bb.asFloatBuffer();
}
// Texture coordinates range from 0.0 to 1.0, inclusive. We do a simple scale
// here, but we could get much fancier if we wanted to (say) zoom in and pan
// around.
FloatBuffer fb = mTweakedTexCoordArray;
float scale = mScale;
for (int i = 0; i < count; i++) {
float fl = parentBuf.get(i);
fl = ((fl - 0.5f) * scale) + 0.5f;
fb.put(i, fl);
}
mRecalculate = false;
}
return mTweakedTexCoordArray;
}
}
......@@ -32,18 +32,6 @@ public class Texture2dProgram {
TEXTURE_2D, TEXTURE_EXT, TEXTURE_I420,
}
// Fragment shader with a convolution filter. The upper-left half will be drawn normally,
// the lower-right half will have the filter applied, and a thin red line will be drawn
// at the border.
//
// This is not optimized for performance. Some things that might make this faster:
// - Remove the conditionals. They're used to present a half & half view with a red
// stripe across the middle, but that's only useful for a demo.
// - Unroll the loop. Ideally the compiler does this for you when it's beneficial.
// - Bake the filter kernel into the shader, instead of passing it through a uniform
// array. That, combined with loop unrolling, should reduce memory accesses.
public static final int KERNEL_SIZE = 9;
public static final String kVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uTexMatrix;\n" +
......@@ -56,236 +44,20 @@ public class Texture2dProgram {
"}\n";
public static final String kFragmentShader2D =
"precision highp float;\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform sampler2D sTexture;\n" +
"uniform int enableHDR;\n" +
"uniform vec3 dst_luma;\n" +
"uniform mat3 cms_matrix;\n" +
"uniform int hdrType;\n" +
"const int maxVecSize=64;\n" +
"uniform int uMosaicEnabled;\n" +
"uniform int uMosaicNum;\n" +
"uniform vec4 vMosaicArea[maxVecSize];\n" +
"const vec2 TexSize = vec2(400.0, 400.0);\n" +
"const vec2 MosaicSize = vec2(16.0, 16.0);\n" +
"vec3 my_mix(vec3 x, vec3 y, bvec3 a) {\n" +
" vec3 aa = vec3(float(a.x), float(a.y), float(a.z));\n" +
" return mix(x, y, aa);\n" +
"}\n" +
"vec4 mpvToneMapping(vec4 color) {\n" +
" // color mapping\n" +
" // linearize\n" +
" color.rgb = clamp(color.rgb, 0.0, 1.0);\n" +
" color.rgb = pow(color.rgb, vec3(1.0/78.843750));\n" +
" color.rgb = max(color.rgb - vec3(0.835938), vec3(0.0)) \n" +
" / (vec3(18.851562) - vec3(18.687500) * color.rgb);\n" +
" color.rgb = pow(color.rgb, vec3(6.277395));\n" +
" color.rgb *= vec3(49.261084);\n" +
" color.rgb *= vec3(1.0/49.261086);\n" +
" color.rgb *= vec3(49.261086);\n" +
" // HDR tone mapping\n" +
" int sig_idx = 0;\n" +
" if (color[1] > color[sig_idx]) sig_idx = 1;\n" +
" if (color[2] > color[sig_idx]) sig_idx = 2;\n" +
" float sig_max = color[sig_idx];\n" +
" float sig_peak = 49.261086;\n" +
" float sig_avg = 0.250000;\n" +
" vec3 sig = min(color.rgb, sig_peak);\n" +
" float sig_orig = sig[sig_idx];\n" +
" float slope = min(1.000000, 0.250000 / sig_avg);\n" +
" sig *= slope;\n" +
" sig_peak *= slope;\n" +
" vec4 sig_pq = vec4(sig.rgb, sig_peak);\n" +
" sig_pq *= vec4(1.0/49.261084);\n" +
" sig_pq = pow(sig_pq, vec4(0.159302));\n" +
" sig_pq = (vec4(0.835938) + vec4(18.851562) * sig_pq)\n" +
" / (vec4(1.0) + vec4(18.687500) * sig_pq);\n" +
" sig_pq = pow(sig_pq, vec4(78.843750));\n" +
" float scale = 1.0 / sig_pq.a;\n" +
" sig_pq.rgb *= vec3(scale);\n" +
" float maxLum = 0.580690 * scale;\n" +
" float ks = 1.5 * maxLum - 0.5;\n" +
" vec3 tb = (sig_pq.rgb - vec3(ks)) / vec3(1.0 - ks);\n" +
" vec3 tb2 = tb * tb;\n" +
" vec3 tb3 = tb2 * tb;\n" +
" vec3 pb = (2.0 * tb3 - 3.0 * tb2 + vec3(1.0)) * vec3(ks) + \n" +
" (tb3 - 2.0 * tb2 + tb) * vec3(1.0 - ks) + \n" +
" (-2.0 * tb3 + 3.0 * tb2) * vec3(maxLum);\n" +
" sig = my_mix(pb, sig_pq.rgb, bvec3(lessThan(sig_pq.rgb, vec3(ks))));\n" +
" sig *= vec3(sig_pq.a);\n" +
" sig = pow(sig, vec3(1.0/78.843750));\n" +
" sig = max(sig - vec3(0.835938), 0.0) /\n" +
" (vec3(18.851562) - vec3(18.687500) * sig);\n" +
" sig = pow(sig, vec3(1.0/0.159302));\n" +
" sig *= vec3(49.261084);\n" +
" vec3 sig_lin = color.rgb * (sig[sig_idx] / sig_orig);\n" +
" float coeff = max(sig[sig_idx] - 0.180000, 1e-6) / max(sig[sig_idx], 1.0);\n" +
" coeff = 0.750000 * pow(coeff, 1.500000);\n" +
" color.rgb = mix(sig_lin, 1.000000 * sig, coeff);\n" +
" color.rgb = cms_matrix * color.rgb;\n" +
" float cmin = min(min(color.r, color.g), color.b);\n" +
" if (cmin < 0.0) { \n" +
" float luma = dot(dst_luma, color.rgb);\n" +
" float coeff = cmin / (cmin - luma);\n" +
" color.rgb = mix(color.rgb, vec3(luma), coeff);\n" +
" }\n" +
" float cmax = max(max(color.r, color.g), color.b);\n" +
" if (cmax > 1.0) color.rgb /= cmax;\n" +
" color.rgb *= vec3(1.000000);\n" +
" // delinearize\n" +
" color.rgb = clamp(color.rgb, 0.0, 1.0);\n" +
" color.rgb *= vec3(1.000000);\n" +
" color.rgb = pow(color.rgb, vec3(1.0/2.2));\n" +
" return color;\n" +
"}\n" +
"void main() {\n" +
" if (uMosaicEnabled == 1 ) \n" +
" {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
" if (enableHDR > 0) {\n" +
" if (hdrType == 0) {\n" +
" gl_FragColor = mpvToneMapping(gl_FragColor);\n" +
" }\n" +
" }\n" +
" for (int i = 0;i < uMosaicNum; i ++)\n" +
" {\n" +
" if (vTextureCoord.x < vMosaicArea[i].z && vTextureCoord.x > vMosaicArea[i].x && vTextureCoord.y < vMosaicArea[i].w && vTextureCoord.y > vMosaicArea[i].y) \n" +
" {\n" +
" vec2 intXY = vec2(vTextureCoord.x * TexSize.x, vTextureCoord.y * TexSize.y);\n" +
" vec2 XYMosaic = vec2(floor(intXY.x/MosaicSize.x)*MosaicSize.x, floor(intXY.y/MosaicSize.y)*MosaicSize.y);\n" +
" vec2 UVMosaic = vec2(XYMosaic.x/TexSize.x, XYMosaic.y/TexSize.y);\n" +
" vec4 color = texture2D(sTexture, UVMosaic);\n" +
" gl_FragColor = color;\n" +
" }\n" +
" }\n" +
" }\n" +
" else \n" +
" {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
" if (enableHDR > 0) {\n" +
" if (hdrType == 0) {\n" +
" gl_FragColor = mpvToneMapping(gl_FragColor);\n" +
" }\n" +
" }\n" +
" }\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private static final String kFragmentShaderExt =
"#extension GL_OES_EGL_image_external : require\n" +
"precision highp float;\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"uniform int enableHDR;\n" +
"uniform vec3 dst_luma;\n" +
"uniform mat3 cms_matrix;\n" +
"uniform int hdrType;\n" +
"const int maxVecSize=64;\n" +
"uniform int uMosaicEnabled;\n" +
"uniform int uMosaicNum;\n" +
"uniform vec4 vMosaicArea[maxVecSize];\n" +
"const vec2 TexSize = vec2(400.0, 400.0);\n" +
"const vec2 MosaicSize = vec2(16.0, 16.0);\n" +
"vec3 my_mix(vec3 x, vec3 y, bvec3 a) {\n" +
" vec3 aa = vec3(float(a.x), float(a.y), float(a.z));\n" +
" return mix(x, y, aa);\n" +
"}\n" +
"vec4 mpvToneMapping(vec4 color) {\n" +
" // color mapping\n" +
" // linearize\n" +
" color.rgb = clamp(color.rgb, 0.0, 1.0);\n" +
" color.rgb = pow(color.rgb, vec3(1.0/78.843750));\n" +
" color.rgb = max(color.rgb - vec3(0.835938), vec3(0.0)) \n" +
" / (vec3(18.851562) - vec3(18.687500) * color.rgb);\n" +
" color.rgb = pow(color.rgb, vec3(6.277395));\n" +
" color.rgb *= vec3(49.261084);\n" +
" color.rgb *= vec3(1.0/49.261086);\n" +
" color.rgb *= vec3(49.261086);\n" +
" // HDR tone mapping\n" +
" int sig_idx = 0;\n" +
" if (color[1] > color[sig_idx]) sig_idx = 1;\n" +
" if (color[2] > color[sig_idx]) sig_idx = 2;\n" +
" float sig_max = color[sig_idx];\n" +
" float sig_peak = 49.261086;\n" +
" float sig_avg = 0.250000;\n" +
" vec3 sig = min(color.rgb, sig_peak);\n" +
" float sig_orig = sig[sig_idx];\n" +
" float slope = min(1.000000, 0.250000 / sig_avg);\n" +
" sig *= slope;\n" +
" sig_peak *= slope;\n" +
" vec4 sig_pq = vec4(sig.rgb, sig_peak);\n" +
" sig_pq *= vec4(1.0/49.261084);\n" +
" sig_pq = pow(sig_pq, vec4(0.159302));\n" +
" sig_pq = (vec4(0.835938) + vec4(18.851562) * sig_pq)\n" +
" / (vec4(1.0) + vec4(18.687500) * sig_pq);\n" +
" sig_pq = pow(sig_pq, vec4(78.843750));\n" +
" float scale = 1.0 / sig_pq.a;\n" +
" sig_pq.rgb *= vec3(scale);\n" +
" float maxLum = 0.580690 * scale;\n" +
" float ks = 1.5 * maxLum - 0.5;\n" +
" vec3 tb = (sig_pq.rgb - vec3(ks)) / vec3(1.0 - ks);\n" +
" vec3 tb2 = tb * tb;\n" +
" vec3 tb3 = tb2 * tb;\n" +
" vec3 pb = (2.0 * tb3 - 3.0 * tb2 + vec3(1.0)) * vec3(ks) + \n" +
" (tb3 - 2.0 * tb2 + tb) * vec3(1.0 - ks) + \n" +
" (-2.0 * tb3 + 3.0 * tb2) * vec3(maxLum);\n" +
" sig = my_mix(pb, sig_pq.rgb, bvec3(lessThan(sig_pq.rgb, vec3(ks))));\n" +
" sig *= vec3(sig_pq.a);\n" +
" sig = pow(sig, vec3(1.0/78.843750));\n" +
" sig = max(sig - vec3(0.835938), 0.0) /\n" +
" (vec3(18.851562) - vec3(18.687500) * sig);\n" +
" sig = pow(sig, vec3(1.0/0.159302));\n" +
" sig *= vec3(49.261084);\n" +
" vec3 sig_lin = color.rgb * (sig[sig_idx] / sig_orig);\n" +
" float coeff = max(sig[sig_idx] - 0.180000, 1e-6) / max(sig[sig_idx], 1.0);\n" +
" coeff = 0.750000 * pow(coeff, 1.500000);\n" +
" color.rgb = mix(sig_lin, 1.000000 * sig, coeff);\n" +
" color.rgb = cms_matrix * color.rgb;\n" +
" float cmin = min(min(color.r, color.g), color.b);\n" +
" if (cmin < 0.0) { \n" +
" float luma = dot(dst_luma, color.rgb);\n" +
" float coeff = cmin / (cmin - luma);\n" +
" color.rgb = mix(color.rgb, vec3(luma), coeff);\n" +
" }\n" +
" float cmax = max(max(color.r, color.g), color.b);\n" +
" if (cmax > 1.0) color.rgb /= cmax;\n" +
" color.rgb *= vec3(1.000000);\n" +
" // delinearize\n" +
" color.rgb = clamp(color.rgb, 0.0, 1.0);\n" +
" color.rgb *= vec3(1.000000);\n" +
" color.rgb = pow(color.rgb, vec3(1.0/2.2));\n" +
" return color;\n" +
"}\n" +
"void main() {\n" +
" if (uMosaicEnabled == 1) \n" +
" {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
" if (enableHDR > 0) {\n" +
" if (hdrType == 0) {\n" +
" gl_FragColor = mpvToneMapping(gl_FragColor);\n" +
" }\n" +
" }\n" +
" for (int i = 0;i < uMosaicNum; i ++)\n" +
" {\n" +
" if (vTextureCoord.x < vMosaicArea[i].z && vTextureCoord.x > vMosaicArea[i].x && vTextureCoord.y < vMosaicArea[i].w && vTextureCoord.y > vMosaicArea[i].y) \n" +
" {\n" +
" vec2 intXY = vec2(vTextureCoord.x * TexSize.x, vTextureCoord.y * TexSize.y);\n" +
" vec2 XYMosaic = vec2(floor(intXY.x/MosaicSize.x)*MosaicSize.x, floor(intXY.y/MosaicSize.y)*MosaicSize.y);\n" +
" vec2 UVMosaic = vec2(XYMosaic.x/TexSize.x, XYMosaic.y/TexSize.y);\n" +
" vec4 color = texture2D(sTexture, UVMosaic);\n" +
" gl_FragColor = color;\n" +
" }\n" +
" }\n" +
" }\n" +
" else \n" +
" {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
" if (enableHDR > 0) {\n" +
" if (hdrType == 0) {\n" +
" gl_FragColor = mpvToneMapping(gl_FragColor);\n" +
" }\n" +
" }\n" +
" }\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private static final String kFragmentShaderI420 =
......
package com.ly.avfoundation.avfoundation.render;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import com.ly.avfoundation.avfoundation.capture.VideoCapture;
import com.ly.avfoundation.avfoundation.gles.FullFrameRect;
import com.ly.avfoundation.avfoundation.gles.Texture2dProgram;
import java.lang.ref.WeakReference;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
private final static String TAG = "[CameraGLSurfaceView]";
public CameraGLSurfaceView(Context context) {
super(context);
}
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
}
@Override
public void onSurfaceChanged(GL10 gl10, int i, int i1) {
}
@Override
public void onDrawFrame(GL10 gl10) {
}
}
package com.ly.avfoundation.avfoundation.render;
import android.graphics.SurfaceTexture;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import com.ly.avfoundation.avfoundation.gles.FullFrameRect;
import com.ly.avfoundation.avfoundation.gles.Texture2dProgram;
import androidx.annotation.NonNull;
import java.lang.ref.WeakReference;
import static android.os.Process.THREAD_PRIORITY_URGENT_AUDIO;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraSurfaceRenderer implements GLSurfaceView.Renderer {
public class CameraSurfaceRenderer implements SurfaceHolder.Callback {
public final static String TAG = "[CameraSurfaceRenderer]";
private final static int MSG_CREATE_SURFACE = 0;
private final static int MSG_ADD_OUTPUT_SURFACE = 1;
private final static int MSG_CHANGED_SURFACE = 2;
private final static int MSG_DESTROY_SURFACE = 4;
private final static int MSG_RELEASE_SURFACE = 5;
private final static int MSG_CHANGED_PREVIEW = 6;
private final static int MSG_CHANGED_ANGLE = 7;
private final static int MSG_CREATE_SURFACE_INNER = 10;
private HandlerThread mRenderThread;
private Handler mRenderHandler;
private SurfaceTexture mInputSurfaceTexture = null;
private Surface mInputSurface = null;
private IRenderConnect mRenderConnect = null;
private int mInputTextureId = -1;
private float[] mTransform = null;
private float[] mTmpTransform = null;
private float[] mSrcTransform = null;
private float[] mDefaultTransform = null;
private int mViewW = 0;
private int mViewH = 0;
private int mVideoW = 0;
private int mVideoH = 0;
public interface OnSurfaceRenderListener {
void onFrameAvaliable(SurfaceTexture st);
}
public OnSurfaceRenderListener mSurfaceRenderListener = null;
// Camera filters; must match up with cameraFilterNames in strings.xml
public static final int FILTER_NONE = 0;
public static final int FILTER_BLACK_WHITE = 1;
public static final int FILTER_BLUR = 2;
public static final int FILTER_SHARPEN = 3;
public static final int FILTER_EDGE_DETECT = 4;
public static final int FILTER_EMBOSS = 5;
private final float[] mSTMatrix = new float[16];
private OnSurfaceTextureListener mSurfaceTextureListener = null;
private FullFrameRect mFullScreen = null;
private int mTextureId;
private SurfaceTexture mSurfaceTexture = null;
private CameraHandler mCameraHandler;
// width/height of the incoming camera preview frames
private boolean mIncomingSizeUpdated;
private int mIncomingWidth;
private int mIncomingHeight;
private int mCurrentFilter;
private int mNewFilter;
public static class CameraHandler extends Handler {
public static final int MSG_SET_SURFACE_TEXTURE = 0;
public final static String TAG = "[CameraHandler]";
private WeakReference<CameraSurfaceRenderer> mWeakRenderer;
public CameraHandler(CameraSurfaceRenderer renderer) {
mWeakRenderer = new WeakReference<CameraSurfaceRenderer>(renderer);
}
public void invalidateHandler() {
mWeakRenderer.clear();
}
@Override
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
CameraSurfaceRenderer renderer = mWeakRenderer.get();
if (renderer == null) {
Log.w(TAG, "handleMessage render:" + renderer);
public CameraSurfaceRenderer() {
mRenderThread = new HandlerThread("CameraSurfaceRender", THREAD_PRIORITY_URGENT_AUDIO);
mRenderThread.start();
mRenderThread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread thread, Throwable ex) {
Log.e(TAG, "uncaughtException: " + ex);
}
});
mRenderHandler = new Handler(mRenderThread.getLooper()) {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_CREATE_SURFACE_INNER:
handleCreateSurfaceInner();
break;
case MSG_ADD_OUTPUT_SURFACE:
handleAddOutputSurface((Surface) msg.obj);
break;
case MSG_CHANGED_SURFACE:
handleChangedSurface(msg.arg1, msg.arg2);
break;
case MSG_CHANGED_PREVIEW:
handleChangedPreview(msg.arg1, msg.arg2);
break;
case MSG_CHANGED_ANGLE:
handleChangedAngle(msg.arg1);
break;
case MSG_RELEASE_SURFACE:
handleReleaseSurface();
break;
case MSG_DESTROY_SURFACE:
handleDestroySurface();
break;
}
return;
}
};
switch (what) {
case MSG_SET_SURFACE_TEXTURE:
renderer.handleSetSurfaceTexture((SurfaceTexture) inputMessage.obj);
break;
default:
throw new RuntimeException("unknown msg " + what);
}
}
mRenderConnect = new SurfaceViewConnect();
}
public interface OnSurfaceTextureListener {
void onSetSurfaceTexture(SurfaceTexture st);
@Override
protected void finalize() throws Throwable {
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_RELEASE_SURFACE));
super.finalize();
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
Log.d(TAG, "onSurfaceCreated");
public void setSurfaceRenderListener(OnSurfaceRenderListener listener) {
mSurfaceRenderListener = listener;
}
// Set up the texture blitter that will be used for on-screen display. This
// is *not* applied to the recording, because that uses a separate shader.
mFullScreen = new FullFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
mTextureId = mFullScreen.createTextureObject();
public void setRenderSurface(Surface surface) {
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_ADD_OUTPUT_SURFACE, surface));
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_CREATE_SURFACE_INNER));
}
// Create a SurfaceTexture, with an external texture, in this EGL context. We don't
// have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame
// available messages will arrive on the main thread.
mSurfaceTexture = new SurfaceTexture(mTextureId);
public void setVideoResolution(int width, int height) {
mVideoW = width;
mVideoH = height;
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_CHANGED_PREVIEW, width, height));
}
mCameraHandler.sendMessage(mCameraHandler.obtainMessage(CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
public void setAngle(int angle) {
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_CHANGED_ANGLE, angle, 0));
}
@Override
public void onSurfaceChanged(GL10 gl10, int i, int i1) {
Log.d(TAG, "onSurfaceChanged (" + i + "x" + i1 + ")");
public void surfaceCreated(@NonNull SurfaceHolder surfaceHolder) {
setRenderSurface(surfaceHolder.getSurface());
Log.i(TAG, "surfaceCreated: " + surfaceHolder.getSurface());
}
@Override
public void onDrawFrame(GL10 gl10) {
mSurfaceTexture.updateTexImage();
if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
// Texture size isn't set yet. This is only used for the filters, but to be
// safe we can just skip drawing while we wait for the various races to resolve.
// (This seems to happen if you toggle the screen off/on with power button.)
Log.i(TAG, "Drawing before incoming texture size set; skipping");
return;
}
// Update the filter, if necessary.
if (mCurrentFilter != mNewFilter) {
this.updateFilter();
}
// Draw the video frame.
mSurfaceTexture.getTransformMatrix(mSTMatrix);
mFullScreen.drawFrame(mTextureId, mSTMatrix);
// TODO
// Draw a flashing box if we're recording. This only appears on screen.
public void surfaceChanged(@NonNull SurfaceHolder surfaceHolder, int format, int width, int height) {
mViewW = width;
mViewH = height;
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_CHANGED_SURFACE, width, height));
Log.i(TAG, "surfaceChanged w=" + width + " h=" + height + " format=" + format);
}
public CameraSurfaceRenderer() {
mIncomingSizeUpdated = false;
mIncomingWidth = -1;
mIncomingHeight = -1;
mCurrentFilter = -1;
mNewFilter = CameraSurfaceRenderer.FILTER_NONE;
mTextureId = -1;
mCameraHandler = new CameraHandler(this);
@Override
public void surfaceDestroyed(@NonNull SurfaceHolder surfaceHolder) {
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_RELEASE_SURFACE));
mRenderHandler.sendMessage(mRenderHandler.obtainMessage(MSG_DESTROY_SURFACE));
}
/**
* Records the size of the incoming camera preview frames.
* <p>
* It's not clear whether this is guaranteed to execute before or after onSurfaceCreated(),
* so we assume it could go either way. (Fortunately they both run on the same thread,
* so we at least know that they won't execute concurrently.)
*/
public void setCameraPreviewSize(int width, int height) {
Log.d(TAG, "setCameraPreviewSize (" + width + "x" + height + ");");
mIncomingWidth = width;
mIncomingHeight = height;
mIncomingSizeUpdated = true;
private void handleCreateSurfaceInner() {
mInputTextureId = mRenderConnect.createTextureObject();
mInputSurfaceTexture = new SurfaceTexture(mInputTextureId);
mInputSurface = new Surface(mInputSurfaceTexture);
mInputSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
if (surfaceTexture == mInputSurfaceTexture) {
surfaceTexture.updateTexImage();
handleFrameAvailable(mInputSurfaceTexture.getTimestamp() / 1000000);
}
}
});
mSrcTransform = new float[] {
1.0f, 0, 0, 0,
0, 1.0f, 0, 0,
0, 0, 1.0f, 0,
0, 0, 0, 1.0f,
};
Matrix.rotateM(mSrcTransform, 0, 180, 1.0f, 0, 0);
Matrix.translateM(mSrcTransform, 0, 0, -1.0f, 0);
mTransform = new float[16];
mTmpTransform = new float[16];
mDefaultTransform = new float[16];
System.arraycopy(mSrcTransform, 0, mTransform, 0, 16);
System.arraycopy(mSrcTransform, 0, mDefaultTransform, 0, 16);
if (mSurfaceRenderListener != null) {
mSurfaceRenderListener.onFrameAvaliable(mInputSurfaceTexture);
}
}
public void setSurfaceTextureListener(OnSurfaceTextureListener listener) {
mSurfaceTextureListener = listener;
private void handleFrameAvailable(long timestamp) {
if (mRenderConnect != null) {
mRenderConnect.draw(mInputTextureId, mDefaultTransform);
}
}
public void changeFilterMode(int filter) {
mNewFilter = filter;
private void handleAddOutputSurface(Object surface) {
if (mRenderConnect == null)
mRenderConnect = new SurfaceViewConnect();
mRenderConnect.setOutputSurface(surface);
}
/**
* Notifies the renderer thread that the activity is pausing.
* <p>
* For best results, call this *after* disabling Camera preview.
*/
public void notifyPausing() {
if (mSurfaceTexture != null) {
Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
mSurfaceTexture.release();
mSurfaceTexture = null;
}
if (mFullScreen != null) {
mFullScreen.release(false); // assume the GLSurfaceView EGL context is about
mFullScreen = null; // to be destroyed
}
mIncomingWidth = mIncomingHeight = -1;
private void handleChangedSurface(int width, int height) {
if (mRenderConnect != null)
mRenderConnect.setWindowSurfaceRect(width, height);
}
public void updateFilter() {
Texture2dProgram.ProgramType programType;
float[] kernel = null;
float colorAdj = 0.0f;
Log.d(TAG, "updating filter to " + mNewFilter);
switch (mNewFilter) {
case CameraSurfaceRenderer.FILTER_NONE:
programType = Texture2dProgram.ProgramType.TEXTURE_EXT;
break;
default:
throw new RuntimeException("Unknown filter mode " + mNewFilter);
private void handleChangedPreview(int width, int height) {
if (mRenderConnect != null) {
mRenderConnect.setVideoRect(width, height);
}
}
// Do we need a whole new program? (We want to avoid doing this if we don't have
// too -- compiling a program could be expensive.)
if (programType != mFullScreen.getProgram().getProgramType()) {
mFullScreen.changeProgram(new Texture2dProgram(programType));
// If we created a new program, we need to initialize the texture width/height.
mIncomingSizeUpdated = true;
private void handleChangedAngle(int angle) {
if (mRenderConnect != null) {
mRenderConnect.setAngle(angle);
}
mCurrentFilter = mNewFilter;
}
public void handleSetSurfaceTexture(SurfaceTexture st) {
if (mSurfaceTextureListener != null) {
mSurfaceTextureListener.onSetSurfaceTexture(st);
private void handleDestroySurface() {
if (mRenderConnect != null) {
mRenderConnect.removeOutputSurface();
}
}
public void clear() {
private void handleReleaseSurface() {
if (mRenderConnect != null) {
mRenderConnect.release();
mRenderConnect = null;
}
}
}
package com.ly.avfoundation.avfoundation.render;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.util.Log;
import android.view.Surface;
import com.ly.avfoundation.avfoundation.gles.EglCore;
import com.ly.avfoundation.avfoundation.gles.WindowSurface;
public abstract class IRenderConnect {
static final String TAG = "IRenderConnect";
EglCore mEglCore;
Object mOutputSurface;
WindowSurface mWindowSurface;
int mWindowSurfaceWidth;
int mWindowSurfaceHeight;
int mVideoWidth;
int mVideoHeight;
int mAngle;
public static class AngleValue {
public final static int ANGLE_0 = 0;
public final static int ANGLE_90 = 90;
public final static int ANGLE_180 = 180;
public final static int ANGLE_270 = 270;
}
public IRenderConnect() {
this(null);
}
public IRenderConnect(EGLContext context) {
mEglCore = new EglCore(context, 0);
mAngle = 0;
mWindowSurfaceWidth = mWindowSurfaceHeight = 0;
mVideoWidth = mVideoHeight = 0;
}
public void setOutputSurface(Object surface) {
mOutputSurface = surface;
if (surface instanceof Surface) {
mWindowSurface = new WindowSurface(mEglCore, (Surface)surface, false);
} else {
mWindowSurface = new WindowSurface(mEglCore, (SurfaceTexture)surface);
}
Log.i(TAG, "setOutputSurface wins=" + mWindowSurface + " this:" + this);
}
public void setWindowSurfaceRect(int width, int height) {
mWindowSurfaceWidth = width;
mWindowSurfaceHeight = height;
}
public void setVideoRect(int width, int height) {
mVideoWidth = width;
mVideoHeight = height;
}
public void setAngle(int angle) {
mAngle = angle;
}
public void removeOutputSurface() {
mOutputSurface = null;
if (mWindowSurface != null) {
mWindowSurface.release();
mWindowSurface = null;
}
Log.i(TAG, "removeOutputSurface wins=" + mWindowSurface + " this:" + this);
}
public EGLContext getContext() {
if (mEglCore != null)
return mEglCore.getContext();
return null;
}
public void release() {
removeOutputSurface();
if (mEglCore != null) {
mEglCore.release();
mEglCore = null;
}
}
public abstract void draw(int textureid, float[] matrix);
public abstract int createTextureObject();
}
package com.ly.avfoundation.avfoundation.render;
public class OutputSurface {
private Object mSurface;
private int mWidth;
private int mHeight;
private int mParentWidth;
private int mParentHeight;
public Object getSurface() {
return mSurface;
}
public void setSurface(Object surface) {
mSurface = surface;
}
public int getWidth() {
return mWidth;
}
public void setWidth(int width) {
mWidth = width;
}
public int getHeight() {
return mHeight;
}
public void setHeight(int height) {
mHeight = height;
}
public int getParentWidth() {
return mParentWidth;
}
public void setParentWidth(int width) {
mParentWidth = width;
}
public int getParentHeight() {
return mParentHeight;
}
public void setParentHeight(int height) {
mParentHeight = height;
}
@Override
public String toString() {
return "[OutputSurface]Surface:" + mSurface +
", width:" + mWidth +
", height:" + mHeight +
", parentWidth:" + mParentWidth +
", parentHeight:" + mParentHeight;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
OutputSurface that = (OutputSurface)o;
if (mWidth != that.getWidth()) return false;
if (mHeight != that.getHeight()) return false;
if (mParentWidth != that.getParentWidth()) return false;
if (mParentHeight != that.getParentHeight()) return false;
if (mSurface != null) {
if (!mSurface.equals(that.getSurface()))
return false;
else
return true;
} else {
if (that.getSurface() != null)
return false;
else
return true;
}
}
}
package com.ly.avfoundation.avfoundation.render;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import com.ly.avfoundation.avfoundation.gles.Drawable2d;
import com.ly.avfoundation.avfoundation.gles.ScaleDrawable2d;
import com.ly.avfoundation.avfoundation.gles.Sprite2d;
import com.ly.avfoundation.avfoundation.gles.Texture2dProgram;
public class SurfaceViewConnect extends IRenderConnect {
private final ScaleDrawable2d mRectDrawable = new ScaleDrawable2d(Drawable2d.Prefab.RECTANGLE);
private Sprite2d mRect = new Sprite2d(mRectDrawable);
private Texture2dProgram mTexProgram = null;
private float[] mDisplayProjectionMatrix = new float[16];
public SurfaceViewConnect() {
this(null);
}
public SurfaceViewConnect(EGLContext context) {
super(context);
Log.i(TAG, "SurfaceViewConnect this=" + this);
}
public void setOutputSurface(Object surface) {
super.setOutputSurface(surface);
mWindowSurface.makeCurrent();
mTexProgram = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT);
}
@Override
public void setWindowSurfaceRect(int width, int height) {
super.setWindowSurfaceRect(width, height);
GLES20.glViewport(0, 0, width, height);
Matrix.orthoM(mDisplayProjectionMatrix, 0, 0, width, 0, height, -1, 1);
updateGeometry();
Log.i(TAG, "setWindowSurfaceRect w=" + width + " h=" + height);
}
@Override
public void setVideoRect(int width, int height) {
super.setVideoRect(width, height);
updateGeometry();
Log.i(TAG, "setVideoRect w=" + width + " h=" + height);
}
@Override
public void setAngle(int angle) {
super.setAngle(angle);
updateGeometry();
Log.i(TAG, "setAngle angle=" + angle);
}
@Override
public void draw(int textureid, float[] matrix) {
mWindowSurface.makeCurrent();
// TODO draw
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mRect.setTexture(textureid);
mRect.draw(mTexProgram, mDisplayProjectionMatrix);
mWindowSurface.swapBuffers();
}
@Override
public int createTextureObject() {
if (mTexProgram != null) {
return mTexProgram.createTextureObject();
}
return 0;
}
private void updateGeometry() {
int width = mWindowSurfaceWidth;
int height = mWindowSurfaceHeight;
int smallDim = Math.min(width, height);
float scaled = smallDim * 1.0f;
float cameraAspect = (float) mVideoWidth / mVideoHeight;
int newWidth = Math.round(scaled * cameraAspect);
int newHeight = Math.round(scaled);
mRect.setScale(newWidth, newHeight);
mRect.setPosition(width / 2, height / 2);
mRect.setRotation(mAngle);
mRectDrawable.setScale(1.0f);
}
}
package com.ly.avfoundation.avfoundation.render;
import android.view.View;
interface VideoView {
interface OnSurfaceListener {
void onSurfaceChanged(Object surface, int width, int height, int parentWidth, int parentHeight);
void onSurfaceDestroy(Object surface);
}
View getView();
void setSurfaceListener(OnSurfaceListener listener);
void setParentSize(int width, int height);
void setVideoOffset(int left, int right, int top, int bottom);
}
......@@ -2,34 +2,46 @@ package com.ly.avfoundation.fragment;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.PixelFormat;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.GradientDrawable;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RadioGroup;
import android.widget.RelativeLayout;
import androidx.fragment.app.Fragment;
import com.ly.avfoundation.R;
import com.ly.avfoundation.avfoundation.capture.VideoCapture;
import com.ly.avfoundation.avfoundation.common.AVCodecDefine;
import com.ly.avfoundation.avfoundation.render.CameraGLSurfaceView;
import com.ly.avfoundation.avfoundation.render.CameraSurfaceRenderer;
public class CaptureFragment extends Fragment {
public class CaptureFragment extends Fragment implements View.OnClickListener,
VideoCapture.OnPreviewSizeChangeListener, TextWatcher, CameraSurfaceRenderer.OnSurfaceRenderListener {
private static final String TAG = "[CaptureFragment]";
private Context mActivity = null;
private CameraSurfaceRenderer mRender = null;
private VideoCapture mCapture;
private VideoCapture.Configuration mConfiguration;
private SurfaceTexture mSurfaceTexture;
private Boolean mStartCapture;
private EditText mWidthText;
private EditText mHeightText;
private EditText mBitText;
private EditText mFpsText;
private Button mStartBtn;
private Button mStopBtn;
private int mSelectInputType;
public static CaptureFragment newInstance(Bundle saveInstanceState) {
Bundle bundle = new Bundle();
......@@ -41,12 +53,153 @@ public class CaptureFragment extends Fragment {
@Override
public void onAttach(Context context) {
super.onAttach(context);
mActivity = (Activity)context;
mActivity = context;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup group, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.capture_fragment, group, false);
SurfaceView surfaceView = view.findViewById(R.id.surfaceView);
mRender = new CameraSurfaceRenderer();
mRender.setSurfaceRenderListener(this);
surfaceView.getHolder().addCallback(mRender);
surfaceView.setZOrderOnTop(true);
surfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
mWidthText = view.findViewById(R.id.width_edit_view);
mWidthText.addTextChangedListener(this);
mHeightText = view.findViewById(R.id.height_edit_view);
mHeightText.addTextChangedListener(this);
mBitText = view.findViewById(R.id.bit_edit_view);
mBitText.addTextChangedListener(this);
mFpsText = view.findViewById(R.id.fps_edit_view);
mFpsText.addTextChangedListener(this);
mStartBtn = view.findViewById(R.id.capture_start_btn);
mStartBtn.setOnClickListener(this);
mStopBtn = view.findViewById(R.id.capture_stop_btn);
mStopBtn.setOnClickListener(this);
RadioGroup radioGroup = view.findViewById(R.id.input_group);
radioGroup.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(RadioGroup radioGroup, int selected) {
switch (selected) {
case R.id.prepose_btn:
mSelectInputType = VideoCapture.FactingType.FRONT;
break;
case R.id.suffix_btn:
mSelectInputType = VideoCapture.FactingType.REAR;
break;
}
}
});
mSelectInputType = VideoCapture.FactingType.FRONT;
mCapture = new VideoCapture();
mConfiguration = new VideoCapture.Configuration(0, 0, 0, 90);
mStartCapture = false;
return view;
}
@Override
public void onResume() {
super.onResume();
// to open capture
if (mStartCapture) {
}
}
@Override
public void onPause() {
super.onPause();
// to close capture
if (mStartCapture) {
}
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.capture_start_btn:
handleStartBtn();
break;
case R.id.capture_stop_btn:
handleStopBtn();
break;
}
}
@Override
public void onPreviewSizeChange(int width, int height) {
if (mRender != null) {
mRender.setVideoResolution(width, height);
}
}
@Override
public void beforeTextChanged(CharSequence charSequence, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence charSequence, int start, int before, int count) {
if (count == 0) {
mStartBtn.setEnabled(Boolean.FALSE);
} else {
mStartBtn.setEnabled(Boolean.TRUE);
}
}
@Override
public void afterTextChanged(Editable editable) {
}
@Override
public void onFrameAvaliable(SurfaceTexture st) {
mSurfaceTexture = st;
Log.i(TAG, "onFrameAvaliable st=" + st);
}
private void handleStartBtn() {
mStartCapture = Boolean.TRUE;
int width, height;
String widthStr = mWidthText.getText().toString();
String heightStr = mHeightText.getText().toString();
width = Integer.parseInt(widthStr);
height = Integer.parseInt(heightStr);
mConfiguration.width = width;
mConfiguration.height = height;
mConfiguration.angle = 0;
mConfiguration.facingType = mSelectInputType;
mCapture.setPreviewSizeChangeListener(this);
mCapture.openCamera(mConfiguration);
mCapture.setSurfaceTexture(mSurfaceTexture);
if (mSelectInputType == VideoCapture.FactingType.FRONT) {
mRender.setAngle(90);
} else {
mRender.setAngle(270);
}
}
private void handleStopBtn() {
mStartCapture = Boolean.FALSE;
mCapture.releaseCamera();
}
}