Feature: add CameraFilter with OpenGL

pull/221/head
xufuji456 2 years ago
parent f6e703035b
commit f3dd152a72
  1. CameraFilter/.gitignore (+2)
  2. CameraFilter/build.gradle (+26)
  3. CameraFilter/proguard-rules.pro (+21)
  4. CameraFilter/src/main/AndroidManifest.xml (+7)
  5. CameraFilter/src/main/java/com/frank/camerafilter/camera/CameraManager.java (+136)
  6. CameraFilter/src/main/java/com/frank/camerafilter/factory/BeautyFilterFactory.java (+29)
  7. CameraFilter/src/main/java/com/frank/camerafilter/factory/BeautyFilterType.java (+7)
  8. CameraFilter/src/main/java/com/frank/camerafilter/filter/BaseFilter.java (+173)
  9. CameraFilter/src/main/java/com/frank/camerafilter/filter/BeautyCameraFilter.java (+155)
  10. CameraFilter/src/main/java/com/frank/camerafilter/filter/advance/BeautyCrayonFilter.java (+42)
  11. CameraFilter/src/main/java/com/frank/camerafilter/filter/advance/BeautySketchFilter.java (+37)
  12. CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglCore.java (+195)
  13. CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglSurfaceBase.java (+106)
  14. CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java (+325)
  15. CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java (+121)
  16. CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/WindowEglSurface.java (+45)
  17. CameraFilter/src/main/java/com/frank/camerafilter/util/OpenGLUtil.java (+128)
  18. CameraFilter/src/main/java/com/frank/camerafilter/util/Rotation.java (+36)
  19. CameraFilter/src/main/java/com/frank/camerafilter/util/TextureRotateUtil.java (+82)
  20. CameraFilter/src/main/java/com/frank/camerafilter/widget/BeautyCameraView.java (+50)
  21. CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java (+246)
  22. CameraFilter/src/main/res/raw/crayon.glsl (+53)
  23. CameraFilter/src/main/res/raw/default_fragment.glsl (+13)
  24. CameraFilter/src/main/res/raw/default_vertex.glsl (+11)
  25. CameraFilter/src/main/res/raw/sketch.glsl (+46)
  26. app/build.gradle (+1)
  27. settings.gradle (+1)

@@ -0,0 +1,2 @@ CameraFilter/.gitignore
/build
/.cxx

@@ -0,0 +1,26 @@ CameraFilter/build.gradle
plugins {
id 'com.android.library'
}
android {
compileSdkVersion rootProject.ext.compileSdkVersion
defaultConfig {
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation "androidx.appcompat:appcompat:$rootProject.appcompatVersion"
}

@@ -0,0 +1,21 @@ CameraFilter/proguard-rules.pro
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

@@ -0,0 +1,7 @@ CameraFilter/src/main/AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.frank.camerafilter">
<uses-feature android:glEsVersion="0x00030000" android:required="true"/>
</manifest>

@@ -0,0 +1,136 @@ CameraFilter/src/main/java/com/frank/camerafilter/camera/CameraManager.java
package com.frank.camerafilter.camera;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import java.io.IOException;
import java.util.List;
/**
* @author xufulong
* @date 2022/6/17 5:14 PM
* @desc
*/
public class CameraManager {
private Camera mCamera;
private int mCameraId = 0;
private SurfaceTexture mSurfaceTexture;
public Camera getCamera() {
return mCamera;
}
public boolean openCamera() {
return openCamera(mCameraId);
}
public boolean openCamera(int cameraId) {
if (mCamera == null) {
try {
mCameraId = cameraId;
mCamera = Camera.open(cameraId);
setDefaultParams();
return true;
} catch (RuntimeException e) {
return false;
}
}
return false;
}
public void releaseCamera() {
if (mCamera == null)
return;
stopPreview();
mCamera.release();
mCamera = null;
}
public void switchCamera() {
if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
} else {
mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
}
releaseCamera();
openCamera(mCameraId);
startPreview(mSurfaceTexture);
}
private static Camera.Size getLargePictureSize(Camera camera) {
if (camera != null) {
List<Camera.Size> sizes = camera.getParameters().getSupportedPictureSizes();
Camera.Size temp = sizes.get(0);
for (int i = 1; i < sizes.size(); i++) {
float scale = (float) (sizes.get(i).height) / sizes.get(i).width;
if (temp.width < sizes.get(i).width && scale < 0.6f && scale > 0.5f)
temp = sizes.get(i);
}
return temp;
}
return null;
}
private static Camera.Size getLargePreviewSize(Camera camera) {
if (camera != null) {
List<Camera.Size> sizes = camera.getParameters().getSupportedPreviewSizes();
Camera.Size temp = sizes.get(0);
for (int i = 1; i < sizes.size(); i++) {
if (temp.width < sizes.get(i).width)
temp = sizes.get(i);
}
return temp;
}
return null;
}
public void setDefaultParams() {
Parameters parameters = mCamera.getParameters();
if (parameters.getSupportedFocusModes().contains(
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
Camera.Size previewSize = getLargePreviewSize(mCamera);
parameters.setPreviewSize(previewSize.width, previewSize.height);
Camera.Size pictureSize = getLargePictureSize(mCamera);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
parameters.setRotation(90);
mCamera.setParameters(parameters);
}
public void startPreview(SurfaceTexture surfaceTexture) {
if (mCamera == null)
return;
try {
mCamera.setPreviewTexture(surfaceTexture);
mSurfaceTexture = surfaceTexture;
mCamera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
public void stopPreview() {
if (mCamera == null)
return;
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
}
public int getOrientation() {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(mCameraId, cameraInfo);
return cameraInfo.orientation;
}
public Camera.Size getPreviewSize() {
return mCamera.getParameters().getPreviewSize();
}
public boolean isFront() {
return mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT;
}
}
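A minimal sketch of how this manager is driven (illustrative; the real call sites are in CameraRender further below, which supplies the SurfaceTexture):

    CameraManager manager = new CameraManager();
    if (manager.openCamera()) {
        manager.startPreview(surfaceTexture); // bind the preview to an OES SurfaceTexture
        manager.switchCamera();               // flips front/back and restarts the preview
        manager.releaseCamera();              // stops the preview and frees the camera
    }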

@@ -0,0 +1,29 @@ CameraFilter/src/main/java/com/frank/camerafilter/factory/BeautyFilterFactory.java
package com.frank.camerafilter.factory;
import android.content.Context;
import com.frank.camerafilter.filter.advance.BeautyCrayonFilter;
import com.frank.camerafilter.filter.advance.BeautySketchFilter;
import com.frank.camerafilter.filter.BaseFilter;
public class BeautyFilterFactory {
private static BeautyFilterType filterType = BeautyFilterType.NONE;
public static BaseFilter getFilter(BeautyFilterType type, Context context) {
filterType = type;
switch (type) {
case SKETCH:
return new BeautySketchFilter(context);
case CRAYON:
return new BeautyCrayonFilter(context);
default:
return null;
}
}
public static BeautyFilterType getFilterType() {
return filterType;
}
}
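Usage is one static call; note the factory returns null for BeautyFilterType.NONE, which callers below treat as "no extra filter". A sketch, assuming it runs on the GL thread:

    BaseFilter filter = BeautyFilterFactory.getFilter(BeautyFilterType.SKETCH, context);
    if (filter != null) {
        filter.init(); // compiles and links the shader program
    }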

@@ -0,0 +1,7 @@ CameraFilter/src/main/java/com/frank/camerafilter/factory/BeautyFilterType.java
package com.frank.camerafilter.factory;
public enum BeautyFilterType {
NONE,
CRAYON,
SKETCH
}

@@ -0,0 +1,173 @@ CameraFilter/src/main/java/com/frank/camerafilter/filter/BaseFilter.java
package com.frank.camerafilter.filter;
import android.opengl.GLES30;
import com.frank.camerafilter.util.OpenGLUtil;
import com.frank.camerafilter.util.Rotation;
import com.frank.camerafilter.util.TextureRotateUtil;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.LinkedList;
public class BaseFilter {
public final static String NORMAL_VERTEX_SHADER =
"attribute vec4 position;\n" +
"attribute vec4 inputTextureCoordinate;\n" +
"varying vec2 textureCoordinate;\n" +
"void main() {\n" +
" gl_Position = position;\n" +
" textureCoordinate = inputTextureCoordinate.xy;\n" +
"}";
private final String mVertexShader;
private final String mFragmentShader;
private final LinkedList<Runnable> mRunnableDraw;
protected int mProgramId;
protected int mInputWidth;
protected int mInputHeight;
protected int mOutputWidth;
protected int mOutputHeight;
protected int mUniformTexture;
protected int mAttributePosition;
protected int mAttributeTextureCoordinate;
protected boolean mHasInitialized;
protected FloatBuffer mVertexBuffer;
protected FloatBuffer mTextureBuffer;
public BaseFilter(String vertexShader, String fragmentShader) {
mRunnableDraw = new LinkedList<>();
mVertexShader = vertexShader;
mFragmentShader = fragmentShader;
mVertexBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.VERTEX.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mVertexBuffer.put(TextureRotateUtil.VERTEX).position(0);
mTextureBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.TEXTURE_ROTATE_0.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mTextureBuffer.put(TextureRotateUtil.getRotateTexture(Rotation.NORMAL, false, true))
.position(0);
}
protected void onInit() {
mProgramId = OpenGLUtil.loadProgram(mVertexShader, mFragmentShader);
mAttributePosition = GLES30.glGetAttribLocation(mProgramId, "position");
mUniformTexture = GLES30.glGetUniformLocation(mProgramId, "inputImageTexture");
mAttributeTextureCoordinate = GLES30.glGetAttribLocation(mProgramId, "inputTextureCoordinate");
}
protected void onInitialized() {
}
public void init() {
onInit();
mHasInitialized = true;
onInitialized();
}
protected void onDestroy() {
}
public void destroy() {
mHasInitialized = false;
GLES30.glDeleteProgram(mProgramId);
onDestroy();
}
public void onInputSizeChanged(final int width, final int height) {
mInputWidth = width;
mInputHeight = height;
}
protected void runPendingOnDrawTask() {
while (!mRunnableDraw.isEmpty()) {
mRunnableDraw.removeFirst().run();
}
}
protected void onDrawArrayBefore() {
}
protected void onDrawArrayAfter() {
}
public int onDrawFrame(final int textureId) {
return onDrawFrame(textureId, mVertexBuffer, mTextureBuffer);
}
public int onDrawFrame(final int textureId, FloatBuffer vertexBuffer, FloatBuffer textureBuffer) {
if (!mHasInitialized)
return OpenGLUtil.NOT_INIT;
GLES30.glUseProgram(mProgramId);
runPendingOnDrawTask();
vertexBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributePosition, 2, GLES30.GL_FLOAT, false, 0, vertexBuffer);
GLES30.glEnableVertexAttribArray(mAttributePosition);
textureBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributeTextureCoordinate, 2, GLES30.GL_FLOAT, false, 0, textureBuffer);
GLES30.glEnableVertexAttribArray(mAttributeTextureCoordinate);
if (textureId != OpenGLUtil.NO_TEXTURE) {
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textureId);
GLES30.glUniform1i(mUniformTexture, 0);
}
onDrawArrayBefore();
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);
GLES30.glDisableVertexAttribArray(mAttributePosition);
GLES30.glDisableVertexAttribArray(mAttributeTextureCoordinate);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
onDrawArrayAfter();
return OpenGLUtil.ON_DRAWN;
}
public boolean hasInitialized() {
return mHasInitialized;
}
public int getProgramId() {
return mProgramId;
}
protected void runOnDraw(final Runnable runnable) {
synchronized (mRunnableDraw) {
mRunnableDraw.addLast(runnable);
}
}
public void setFloat(final int location, final float floatVal) {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES30.glUniform1f(location, floatVal);
}
});
}
public void setFloatVec2(final int location, final float[] floatArray) {
runOnDraw(new Runnable() {
@Override
public void run() {
GLES30.glUniform2fv(location, 1, FloatBuffer.wrap(floatArray));
}
});
}
public void onOutputSizeChanged(final int width, final int height) {
mOutputWidth = width;
mOutputHeight = height;
}
}
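A new filter only needs a fragment shader plus optional uniform setup, as the advance filters below show. A minimal sketch (GRAY_FRAGMENT_SHADER stands in for a hypothetical GLSL source string):

    public class GrayFilter extends BaseFilter {
        public GrayFilter() {
            super(NORMAL_VERTEX_SHADER, GRAY_FRAGMENT_SHADER); // hypothetical fragment source
        }
        @Override
        protected void onInitialized() {
            super.onInitialized();
            // queue uniform writes via setFloat()/setFloatVec2(); they run on the GL thread
        }
    }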

@@ -0,0 +1,155 @@ CameraFilter/src/main/java/com/frank/camerafilter/filter/BeautyCameraFilter.java
package com.frank.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES11Ext;
import android.opengl.GLES30;
import com.frank.camerafilter.R;
import com.frank.camerafilter.util.OpenGLUtil;
import java.nio.FloatBuffer;
public class BeautyCameraFilter extends BaseFilter {
private int frameWidth = -1;
private int frameHeight = -1;
private int[] frameBuffer = null;
private int[] frameBufferTexture = null;
private int textureTransformLocation;
private float[] textureTransformMatrix;
public BeautyCameraFilter(Context context) {
super(OpenGLUtil.readShaderFromSource(context, R.raw.default_vertex),
OpenGLUtil.readShaderFromSource(context, R.raw.default_fragment));
}
protected void onInit() {
super.onInit();
textureTransformLocation = GLES30.glGetUniformLocation(getProgramId(), "textureTransform");
}
public void setTextureTransformMatrix(float[] matrix) {
textureTransformMatrix = matrix;
}
@Override
public int onDrawFrame(int textureId) {
return onDrawFrame(textureId, mVertexBuffer, mTextureBuffer);
}
@Override
public int onDrawFrame(int textureId, FloatBuffer vertexBuffer, FloatBuffer textureBuffer) {
if (!hasInitialized()) {
return OpenGLUtil.NOT_INIT;
}
GLES30.glUseProgram(getProgramId());
runPendingOnDrawTask();
vertexBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributePosition, 2, GLES30.GL_FLOAT, false, 0, vertexBuffer);
GLES30.glEnableVertexAttribArray(mAttributePosition);
textureBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributeTextureCoordinate, 2, GLES30.GL_FLOAT, false, 0, textureBuffer);
GLES30.glEnableVertexAttribArray(mAttributeTextureCoordinate);
GLES30.glUniformMatrix4fv(textureTransformLocation, 1, false, textureTransformMatrix, 0);
if (textureId != OpenGLUtil.NO_TEXTURE) {
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES30.glUniform1i(mUniformTexture, 0);
}
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);
GLES30.glDisableVertexAttribArray(mAttributePosition);
GLES30.glDisableVertexAttribArray(mAttributeTextureCoordinate);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
return OpenGLUtil.ON_DRAWN;
}
public int onDrawToTexture(int textureId) {
if (!hasInitialized()) {
return OpenGLUtil.NOT_INIT;
}
if (frameBuffer == null) {
return OpenGLUtil.NO_TEXTURE;
}
GLES30.glUseProgram(getProgramId());
runPendingOnDrawTask();
GLES30.glViewport(0, 0, frameWidth, frameHeight);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBuffer[0]);
mVertexBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributePosition, 2, GLES30.GL_FLOAT, false, 0, mVertexBuffer);
GLES30.glEnableVertexAttribArray(mAttributePosition);
mTextureBuffer.position(0);
GLES30.glVertexAttribPointer(mAttributeTextureCoordinate, 2, GLES30.GL_FLOAT, false, 0, mTextureBuffer);
GLES30.glEnableVertexAttribArray(mAttributeTextureCoordinate);
GLES30.glUniformMatrix4fv(textureTransformLocation, 1, false, textureTransformMatrix, 0);
if (textureId != OpenGLUtil.NO_TEXTURE) {
GLES30.glActiveTexture(GLES30.GL_TEXTURE0);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
GLES30.glUniform1i(mUniformTexture, 0);
}
GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, 4);
GLES30.glDisableVertexAttribArray(mAttributePosition);
GLES30.glDisableVertexAttribArray(mAttributeTextureCoordinate);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
GLES30.glViewport(0, 0, mOutputWidth, mOutputHeight);
return frameBufferTexture[0];
}
@Override
public void onInputSizeChanged(int width, int height) {
super.onInputSizeChanged(width, height);
}
@Override
protected void onDestroy() {
super.onDestroy();
destroyFrameBuffer();
}
public void initFrameBuffer(int width, int height) {
if (frameBuffer != null && (frameWidth != width || frameHeight != height))
destroyFrameBuffer();
if (frameBuffer == null) {
frameWidth = width;
frameHeight = height;
frameBuffer = new int[1];
frameBufferTexture = new int[1];
GLES30.glGenFramebuffers(1, frameBuffer, 0);
GLES30.glGenTextures(1, frameBufferTexture, 0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, frameBufferTexture[0]);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexParameterf(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexImage2D(GLES30.GL_TEXTURE_2D, 0, GLES30.GL_RGBA, width, height,
0, GLES30.GL_RGBA, GLES30.GL_UNSIGNED_BYTE, null);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, frameBuffer[0]);
GLES30.glFramebufferTexture2D(GLES30.GL_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0,
GLES30.GL_TEXTURE_2D, frameBufferTexture[0], 0);
GLES30.glBindFramebuffer(GLES30.GL_FRAMEBUFFER, 0);
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
}
}
public void destroyFrameBuffer() {
if (frameBufferTexture != null) {
GLES30.glDeleteTextures(1, frameBufferTexture, 0);
frameBufferTexture = null;
}
if (frameBuffer != null) {
GLES30.glDeleteFramebuffers(1, frameBuffer, 0);
frameBuffer = null;
}
frameWidth = -1;
frameHeight = -1;
}
}
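onDrawToTexture() is the off-screen half of a two-pass pipeline: the OES camera frame is rendered into the FBO, and the returned 2D texture id is then fed to an ordinary BaseFilter. A sketch of the per-frame sequence (this mirrors CameraRender.onDrawFrame below; variable names are illustrative):

    cameraFilter.initFrameBuffer(width, height);                 // once, after sizes are known
    int fboTexture = cameraFilter.onDrawToTexture(oesTextureId); // pass 1: OES -> FBO
    filter.onDrawFrame(fboTexture, vertexBuffer, textureBuffer); // pass 2: FBO -> screen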

@@ -0,0 +1,42 @@ CameraFilter/src/main/java/com/frank/camerafilter/filter/advance/BeautyCrayonFilter.java
package com.frank.camerafilter.filter.advance;
import android.content.Context;
import android.opengl.GLES20;
import com.frank.camerafilter.R;
import com.frank.camerafilter.filter.BaseFilter;
import com.frank.camerafilter.util.OpenGLUtil;
public class BeautyCrayonFilter extends BaseFilter {
// strength range: 1.0 to 5.0
private int mStrengthLocation;
private int mStepOffsetLocation;
public BeautyCrayonFilter(Context context) {
super(NORMAL_VERTEX_SHADER, OpenGLUtil.readShaderFromSource(context, R.raw.crayon));
}
protected void onInit() {
super.onInit();
mStrengthLocation = GLES20.glGetUniformLocation(getProgramId(), "strength");
mStepOffsetLocation = GLES20.glGetUniformLocation(getProgramId(), "singleStepOffset");
}
protected void onInitialized() {
super.onInitialized();
setFloat(mStrengthLocation, 2.0f);
}
@Override
public void onInputSizeChanged(int width, int height) {
super.onInputSizeChanged(width, height);
setFloatVec2(mStepOffsetLocation, new float[] {1.0f / width, 1.0f / height});
}
protected void onDestroy() {
super.onDestroy();
}
}

@@ -0,0 +1,37 @@ CameraFilter/src/main/java/com/frank/camerafilter/filter/advance/BeautySketchFilter.java
package com.frank.camerafilter.filter.advance;
import android.content.Context;
import android.opengl.GLES20;
import com.frank.camerafilter.R;
import com.frank.camerafilter.filter.BaseFilter;
import com.frank.camerafilter.util.OpenGLUtil;
public class BeautySketchFilter extends BaseFilter {
private int strengthLocation;
private int stepOffsetLocation;
public BeautySketchFilter(Context context) {
super(NORMAL_VERTEX_SHADER, OpenGLUtil.readShaderFromSource(context, R.raw.sketch));
}
protected void onInit() {
super.onInit();
strengthLocation = GLES20.glGetUniformLocation(getProgramId(), "strength");
stepOffsetLocation = GLES20.glGetUniformLocation(getProgramId(), "singleStepOffset");
}
@Override
protected void onInitialized() {
super.onInitialized();
setFloat(strengthLocation, 0.5f);
}
@Override
public void onInputSizeChanged(int width, int height) {
super.onInputSizeChanged(width, height);
setFloatVec2(stepOffsetLocation, new float[] {1.0f / width, 1.0f / height});
}
}

@@ -0,0 +1,195 @@ CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglCore.java
package com.frank.camerafilter.recorder.gles;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
* Core EGL state (display, context, config).
* <p>
* The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
*/
public final class EglCore {
private final static String TAG = EglCore.class.getSimpleName();
public final static int FLAG_RECORDABLE = 0x01;
public final static int FLAG_TRY_GLES3 = 0x02;
private final static int EGL_RECORDABLE_ANDROID = 0x3142;
private int mGlVersion = -1;
private EGLConfig mEGLConfig = null;
private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
public EglCore() {
this(null, 0);
}
public EglCore(EGLContext sharedContext, int flag) {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] version = new int[2];
if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
throw new RuntimeException("unable to init EGL14");
}
if ((flag & FLAG_TRY_GLES3) != 0) {
initEGLContext(sharedContext, flag, 3);
}
if (mEGLContext == EGL14.EGL_NO_CONTEXT) {
initEGLContext(sharedContext, flag, 2);
}
int[] value = new int[1];
EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, value, 0);
Log.i(TAG, "EGLContext client version=" + value[0]);
}
private void initEGLContext(EGLContext sharedContext, int flag, int version) {
EGLConfig config = getConfig(flag, version);
if (config == null) {
throw new RuntimeException("unable to find suitable EGLConfig");
}
int[] attributeList = {EGL14.EGL_CONTEXT_CLIENT_VERSION, version, EGL14.EGL_NONE};
EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext, attributeList, 0);
if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
mEGLConfig = config;
mEGLContext = context;
mGlVersion = version;
}
}
private EGLConfig getConfig(int flag, int version) {
int renderType = EGL14.EGL_OPENGL_ES2_BIT;
if (version >= 3) {
renderType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
}
int[] attributeList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
//EGL14.EGL_DEPTH_SIZE, 16,
//EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, renderType,
EGL14.EGL_NONE, 0,
EGL14.EGL_NONE
};
if ((flag & FLAG_RECORDABLE) != 0) {
attributeList[attributeList.length - 3] = EGL_RECORDABLE_ANDROID;
attributeList[attributeList.length - 2] = 1;
}
int[] numConfigs = new int[1];
EGLConfig[] configs = new EGLConfig[1];
if (!EGL14.eglChooseConfig(mEGLDisplay, attributeList, 0, configs,
0, configs.length, numConfigs, 0)) {
Log.e(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
return null;
}
return configs[0];
}
public void release() {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
}
mEGLConfig = null;
mEGLDisplay = EGL14.EGL_NO_DISPLAY;
mEGLContext = EGL14.EGL_NO_CONTEXT;
}
@Override
protected void finalize() throws Throwable {
try {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
release();
}
} finally {
super.finalize();
}
}
public void releaseSurface(EGLSurface eglSurface) {
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
}
}
public EGLSurface createWindowSurface(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new RuntimeException("invalid surface:" + surface);
}
int[] surfaceAttr = {EGL14.EGL_NONE};
EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface, surfaceAttr, 0);
if (eglSurface == null) {
throw new RuntimeException("window surface is null");
}
return eglSurface;
}
public EGLSurface createOffscreenSurface(int width, int height) {
int[] surfaceAttr = {EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE};
EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig, surfaceAttr, 0);
if (eglSurface == null) {
throw new RuntimeException("offscreen surface is null");
}
return eglSurface;
}
public void makeCurrent(EGLSurface eglSurface) {
if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed!");
}
}
public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed!");
}
}
public boolean swapBuffers(EGLSurface eglSurface) {
return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
}
public void setPresentationTime(EGLSurface eglSurface, long nsec) {
EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsec);
}
public boolean isCurrent(EGLSurface eglSurface) {
return mEGLContext.equals(EGL14.eglGetCurrentContext())
&& eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
}
public int querySurface(EGLSurface eglSurface, int what) {
int[] value = new int[1];
EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
return value[0];
}
public String queryString(int what) {
return EGL14.eglQueryString(mEGLDisplay, what);
}
public int getVersion() {
return mGlVersion;
}
}
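A typical lifecycle on the recorder path (a sketch; surface would be the MediaCodec input Surface created in VideoRecorderCore below, and the other names are illustrative):

    EglCore eglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
    EGLSurface eglSurface = eglCore.createWindowSurface(surface);
    eglCore.makeCurrent(eglSurface);
    // ... issue GLES draw calls ...
    eglCore.setPresentationTime(eglSurface, timestampNs);
    eglCore.swapBuffers(eglSurface);
    eglCore.releaseSurface(eglSurface);
    eglCore.release();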

@@ -0,0 +1,106 @@ CameraFilter/src/main/java/com/frank/camerafilter/recorder/gles/EglSurfaceBase.java
package com.frank.camerafilter.recorder.gles;
import android.graphics.Bitmap;
import android.opengl.EGL14;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.IntBuffer;
/**
* @author xufulong
* @date 2022/6/23 8:51 AM
* @desc
*/
public class EglSurfaceBase {
protected EglCore mEglCore;
protected int mWidth = -1;
protected int mHeight = -1;
private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
protected EglSurfaceBase(EglCore eglCore) {
mEglCore = eglCore;
}
public void createWindowSurface(Object surface) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("EGL surface has already been created");
}
mEGLSurface = mEglCore.createWindowSurface(surface);
}
public void createOffscreenSurface(int width, int height) {
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
throw new IllegalStateException("EGL surface has already been created");
}
mWidth = width;
mHeight = height;
mEGLSurface = mEglCore.createOffscreenSurface(width, height);
}
public int getWidth() {
if (mWidth <= 0) {
mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
}
return mWidth;
}
public int getHeight() {
if (mHeight <= 0) {
mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
}
return mHeight;
}
public void releaseEglSurface() {
mEglCore.releaseSurface(mEGLSurface);
mEGLSurface = EGL14.EGL_NO_SURFACE;
mWidth = -1;
mHeight = -1;
}
public void makeCurrent() {
mEglCore.makeCurrent(mEGLSurface);
}
public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
}
public boolean swapBuffers() {
return mEglCore.swapBuffers(mEGLSurface);
}
public void setPresentationTime(long nsec) {
mEglCore.setPresentationTime(mEGLSurface, nsec);
}
public void saveFrame(File file) throws IOException {
if (!mEglCore.isCurrent(mEGLSurface)) {
throw new RuntimeException("surface/context is not current");
}
String fileName = file.toString();
int width = getWidth();
int height = getHeight();
IntBuffer buffer = IntBuffer.allocate(width * height);
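// Note: glReadPixels returns rows bottom-up (GL's origin is the lower-left corner),
// so the saved bitmap is vertically flipped relative to the display.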
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
BufferedOutputStream outputStream = null;
try {
outputStream = new BufferedOutputStream(new FileOutputStream(fileName));
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
bitmap.recycle();
} finally {
if (outputStream != null)
outputStream.close();
}
}
}

@@ -0,0 +1,325 @@ CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/TextureVideoRecorder.java
package com.frank.camerafilter.recorder.video;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import androidx.annotation.NonNull;
import com.frank.camerafilter.filter.BeautyCameraFilter;
import com.frank.camerafilter.filter.BaseFilter;
import com.frank.camerafilter.factory.BeautyFilterFactory;
import com.frank.camerafilter.factory.BeautyFilterType;
import com.frank.camerafilter.recorder.gles.EglCore;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.FloatBuffer;
/**
* Encode a movie from frames rendered from an external texture image.
* <p>
* The object wraps an encoder running on a dedicated thread. The various control messages
* may be sent from arbitrary threads (typically the app UI thread). The encoder thread
* manages both sides of the encoder (feeding and draining); the only external input is
* the GL texture.
* <p>
* The design is complicated slightly by the need to create an EGL context that shares state
* with a view that gets restarted if (say) the device orientation changes. When the view
* in question is a GLSurfaceView, we don't have full control over the EGL context creation
* on that side, so we have to bend a bit backwards here.
* <p>
* To use:
* <ul>
* <li>create a TextureVideoRecorder object
* <li>create a RecorderConfig
* <li>call TextureVideoRecorder#startRecording() with the config
* <li>call TextureVideoRecorder#setTextureId() with the texture object that receives frames
* <li>for each frame, after latching it with SurfaceTexture#updateTexImage(),
* call TextureVideoRecorder#frameAvailable().
* </ul>
*/
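// A minimal usage sketch of the steps above (illustrative names):
//
//   TextureVideoRecorder recorder = new TextureVideoRecorder(context);
//   recorder.startRecording(new TextureVideoRecorder.RecorderConfig(
//           width, height, bitrate, outputFile, EGL14.eglGetCurrentContext()));
//   recorder.setTextureId(textureId);
//   surfaceTexture.updateTexImage();          // latch the frame first
//   recorder.frameAvailable(surfaceTexture);  // then hand it to the encoder thread
//   ...
//   recorder.stopRecording();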
public class TextureVideoRecorder implements Runnable {
private final static String TAG = TextureVideoRecorder.class.getSimpleName();
private final static int MSG_START_RECORDING = 0;
private final static int MSG_STOP_RECORDING = 1;
private final static int MSG_FRAME_AVAILABLE = 2;
private final static int MSG_SET_TEXTURE_ID = 3;
private final static int MSG_UPDATE_SHARED_CONTEXT = 4;
private final static int MSG_QUIT_RECORDING = 5;
private int mTextureId;
private EglCore mEglCore;
private BeautyCameraFilter mInput;
private WindowEglSurface mWindowSurface;
private VideoRecorderCore mVideoRecorder;
// access by multiple threads
private volatile RecorderHandler mHandler;
private boolean mReady;
private boolean mRunning;
private Context mContext;
private BaseFilter mFilter;
private FloatBuffer glVertexBuffer;
private FloatBuffer glTextureBuffer;
// guard ready/running
private final Object mReadyFence = new Object();
private int mPreviewWidth = -1;
private int mPreviewHeight = -1;
private int mVideoWidth = -1;
private int mVideoHeight = -1;
private BeautyFilterType type = BeautyFilterType.NONE;
public TextureVideoRecorder(Context context) {
mContext = context;
}
public static class RecorderConfig {
final int mWidth;
final int mHeight;
final int mBitrate;
final File mOutputFile;
final EGLContext mEglContext;
public RecorderConfig(int width, int height, int bitrate, File outputFile, EGLContext eglContext) {
this.mWidth = width;
this.mHeight = height;
this.mBitrate = bitrate;
this.mOutputFile = outputFile;
this.mEglContext = eglContext;
}
}
public void startRecording(RecorderConfig config) {
synchronized (mReadyFence) {
if (mRunning) {
return;
}
mRunning = true;
new Thread(this, TAG).start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
}
public void stopRecording() {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT_RECORDING));
}
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
public void updateSharedContext(EGLContext eglContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, eglContext));
}
public void frameAvailable(SurfaceTexture surfaceTexture) {
synchronized (mReadyFence) {
if (!mReady)
return;
}
float[] transform = new float[16];
surfaceTexture.getTransformMatrix(transform);
long timestamp = surfaceTexture.getTimestamp();
if (timestamp == 0) {
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
}
public void setTextureId(int id) {
synchronized (mReadyFence) {
if (!mReady)
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
}
@Override
public void run() {
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new RecorderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
synchronized (mReadyFence) {
mReady = false;
mRunning = false;
mHandler = null;
}
}
private static class RecorderHandler extends Handler {
private final WeakReference<TextureVideoRecorder> mWeakRecorder;
public RecorderHandler(TextureVideoRecorder recorder) {
mWeakRecorder = new WeakReference<>(recorder);
}
@Override
public void handleMessage(@NonNull Message msg) {
Object obj = msg.obj;
TextureVideoRecorder recorder = mWeakRecorder.get();
if (recorder == null) {
return;
}
switch (msg.what) {
case MSG_START_RECORDING:
recorder.handlerStartRecording((RecorderConfig)obj);
break;
case MSG_STOP_RECORDING:
recorder.handlerStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) msg.arg1) << 32) |
(((long) msg.arg2) & 0xffffffffL);
recorder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
recorder.handleSetTexture(msg.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
recorder.handleUpdateSharedContext((EGLContext)obj);
break;
case MSG_QUIT_RECORDING:
Looper.myLooper().quit();
break;
default:
break;
}
}
}
private void handlerStartRecording(RecorderConfig config) {
prepareRecorder(
config.mEglContext,
config.mWidth,
config.mHeight,
config.mBitrate,
config.mOutputFile);
}
private void handlerStopRecording() {
mVideoRecorder.drainEncoder(true);
releaseRecorder();
}
private void handleFrameAvailable(float[] transform, long timestamp) {
mVideoRecorder.drainEncoder(false);
mInput.setTextureTransformMatrix(transform);
if (mFilter == null) {
mInput.onDrawFrame(mTextureId, glVertexBuffer, glTextureBuffer);
} else {
mFilter.onDrawFrame(mTextureId, glVertexBuffer, glTextureBuffer);
}
mWindowSurface.setPresentationTime(timestamp);
mWindowSurface.swapBuffers();
}
private void handleSetTexture(int id) {
mTextureId = id;
}
private void handleUpdateSharedContext(EGLContext eglContext) {
mWindowSurface.releaseEglSurface();
mInput.destroy();
mEglCore.release();
mEglCore = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
mWindowSurface.recreate(mEglCore);
mWindowSurface.makeCurrent();
mInput = new BeautyCameraFilter(mContext);
mInput.init();
mFilter = BeautyFilterFactory.getFilter(type, mContext);
if (mFilter != null) {
mFilter.init();
mFilter.onOutputSizeChanged(mVideoWidth, mVideoHeight);
mFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
}
}
private void prepareRecorder(EGLContext eglContext, int width, int height, int bitrate, File file) {
try {
mVideoRecorder = new VideoRecorderCore(width, height, bitrate, file);
} catch (IOException e) {
throw new RuntimeException(e);
}
mVideoWidth = width;
mVideoHeight = height;
mEglCore = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
mWindowSurface = new WindowEglSurface(mEglCore, mVideoRecorder.getInputSurface(), true);
mWindowSurface.makeCurrent();
mInput = new BeautyCameraFilter(mContext);
mInput.init();
mFilter = BeautyFilterFactory.getFilter(type, mContext);
if (mFilter != null) {
mFilter.init();
mFilter.onOutputSizeChanged(mVideoWidth, mVideoHeight);
mFilter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
}
}
private void releaseRecorder() {
mVideoRecorder.release();
if (mWindowSurface != null) {
mWindowSurface.release();
mWindowSurface = null;
}
if (mInput != null) {
mInput.destroy();
mInput = null;
}
if (mFilter != null) {
mFilter.destroy();
mFilter = null;
type = BeautyFilterType.NONE;
}
if (mEglCore != null) {
mEglCore.release();
mEglCore = null;
}
}
public void setFilter(BeautyFilterType type) {
this.type = type;
}
public void setPreviewSize(int width, int height){
mPreviewWidth = width;
mPreviewHeight = height;
}
public void setTextureBuffer(FloatBuffer glTextureBuffer) {
this.glTextureBuffer = glTextureBuffer;
}
public void setCubeBuffer(FloatBuffer gLVertexBuffer) {
this.glVertexBuffer = gLVertexBuffer;
}
}

@@ -0,0 +1,121 @@ CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java
package com.frank.camerafilter.recorder.video;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* This class wraps up the core components used for surface-input video encoding.
* <p>
* Once created, frames are fed to the input surface. Remember to provide the presentation
* time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
* producer side doesn't get backed up.
* <p>
* This class is not thread-safe, with one exception: it is valid to use the input surface
* on one thread, and drain the output on a different thread.
*/
public class VideoRecorderCore {
private final static String TAG = VideoRecorderCore.class.getSimpleName();
private final static int FRAME_RATE = 30;
private final static int IFRAME_INTERVAL = 5;
private final static String MIME_TYPE = "video/avc";
private final static int TIMEOUT_USEC = 10000;
private int mTrackIndex;
private boolean mMuxerStarted;
private final Surface mInputSurface;
private MediaMuxer mMediaMuxer;
private MediaCodec mVideoEncoder;
private final MediaCodec.BufferInfo mBufferInfo;
public VideoRecorderCore(int width, int height, int bitrate, File outputFile) throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
mVideoEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mVideoEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mVideoEncoder.createInputSurface();
mVideoEncoder.start();
mMediaMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
public Surface getInputSurface() {
return mInputSurface;
}
public void drainEncoder(boolean endOfStream) {
if (endOfStream) {
mVideoEncoder.signalEndOfInputStream();
}
ByteBuffer[] outputBuffers = mVideoEncoder.getOutputBuffers();
while (true) {
int encodeStatus = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encodeStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (!endOfStream) {
break;
}
} else if (encodeStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mVideoEncoder.getOutputBuffers();
} else if (encodeStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (mMuxerStarted) {
throw new RuntimeException("format changed twice");
}
MediaFormat newFormat = mVideoEncoder.getOutputFormat();
mTrackIndex = mMediaMuxer.addTrack(newFormat);
mMediaMuxer.start();
mMuxerStarted = true;
} else if (encodeStatus < 0) {
Log.e(TAG, "error encodeStatus=" + encodeStatus);
} else {
ByteBuffer data = outputBuffers[encodeStatus];
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (!mMuxerStarted) {
throw new RuntimeException("muxer hasn't started");
}
data.position(mBufferInfo.offset);
data.limit(mBufferInfo.offset + mBufferInfo.size);
mMediaMuxer.writeSampleData(mTrackIndex, data, mBufferInfo);
}
mVideoEncoder.releaseOutputBuffer(encodeStatus, false);
// end of stream
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
}
}
public void release() {
if (mVideoEncoder != null) {
mVideoEncoder.stop();
mVideoEncoder.release();
mVideoEncoder = null;
}
if (mMediaMuxer != null) {
mMediaMuxer.stop();
mMediaMuxer.release();
mMediaMuxer = null;
}
}
}
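The producer loop that feeds this encoder (see TextureVideoRecorder.handleFrameAvailable above) follows the pattern the class comment prescribes: drain first so the codec never backs up, then render, stamp, and swap. A sketch with illustrative names:

    recorderCore.drainEncoder(false);            // pull pending output packets
    // ... render the frame into the encoder's input surface ...
    windowSurface.setPresentationTime(timestampNs);
    windowSurface.swapBuffers();                 // submits the frame to MediaCodec
    // at end of stream:
    recorderCore.drainEncoder(true);
    recorderCore.release();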

@@ -0,0 +1,45 @@ CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/WindowEglSurface.java
package com.frank.camerafilter.recorder.video;
import android.view.Surface;
import com.frank.camerafilter.recorder.gles.EglCore;
import com.frank.camerafilter.recorder.gles.EglSurfaceBase;
/**
* @author xufulong
* @date 2022/6/23 9:15 AM
* @desc
*/
public class WindowEglSurface extends EglSurfaceBase {
private Surface mSurface;
private boolean mReleaseSurface;
public WindowEglSurface(EglCore eglCore, Surface surface) {
this(eglCore, surface, false);
}
public WindowEglSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
super(eglCore);
createWindowSurface(surface);
mSurface = surface;
mReleaseSurface = releaseSurface;
}
public void release() {
releaseEglSurface();
if (mSurface != null && mReleaseSurface) {
mSurface.release();
}
mSurface = null;
}
public void recreate(EglCore newEglCore) {
if (mSurface == null) {
throw new RuntimeException("Surface is null");
}
mEglCore = newEglCore;
createWindowSurface(mSurface);
}
}

@@ -0,0 +1,128 @@ CameraFilter/src/main/java/com/frank/camerafilter/util/OpenGLUtil.java
package com.frank.camerafilter.util;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES30;
import android.opengl.GLUtils;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import javax.microedition.khronos.opengles.GL10;
public class OpenGLUtil {
public final static int ON_DRAWN = 1;
public static final int NOT_INIT = -1;
public static final int NO_SHADER = 0;
public static final int NO_TEXTURE = -1;
private static Bitmap getBitmapFromAssetFile(Context context, String name) {
try {
AssetManager assetManager = context.getResources().getAssets();
InputStream stream = assetManager.open(name);
Bitmap bitmap = BitmapFactory.decodeStream(stream);
stream.close();
return bitmap;
} catch (IOException e) {
return null;
}
}
public static int loadTexture(final Context context, final String name) {
if (context == null || name == null)
return NO_TEXTURE;
final int[] textures = new int[1];
GLES30.glGenTextures(1, textures, 0);
if (textures[0] == 0)
return NO_TEXTURE;
Bitmap bitmap = getBitmapFromAssetFile(context, name);
if (bitmap == null)
return NO_TEXTURE;
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, textures[0]);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MIN_FILTER, GLES30.GL_LINEAR);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_MAG_FILTER, GLES30.GL_LINEAR);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_S, GLES30.GL_CLAMP_TO_EDGE);
GLES30.glTexParameteri(GLES30.GL_TEXTURE_2D, GLES30.GL_TEXTURE_WRAP_T, GLES30.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES30.GL_TEXTURE_2D, 0, bitmap, 0);
bitmap.recycle();
return textures[0];
}
private static int loadShader(final String source, final int type) {
int shader = GLES30.glCreateShader(type);
GLES30.glShaderSource(shader, source);
GLES30.glCompileShader(shader);
int[] compile = new int[1];
GLES30.glGetShaderiv(shader, GLES30.GL_COMPILE_STATUS, compile, 0);
if (compile[0] <= 0) {
Log.e("OpenGlUtil", "Shader compile error=" + GLES30.glGetShaderInfoLog(shader));
GLES30.glDeleteShader(shader);
return NO_SHADER;
}
return shader;
}
public static int loadProgram(final String vertexSource, final String fragmentSource) {
int vertexShader = loadShader(vertexSource, GLES30.GL_VERTEX_SHADER);
int fragmentShader = loadShader(fragmentSource, GLES30.GL_FRAGMENT_SHADER);
if (vertexShader == NO_SHADER || fragmentShader == NO_SHADER) {
return 0;
}
int programId = GLES30.glCreateProgram();
GLES30.glAttachShader(programId, vertexShader);
GLES30.glAttachShader(programId, fragmentShader);
GLES30.glLinkProgram(programId);
int[] linked = new int[1];
GLES30.glGetProgramiv(programId, GLES30.GL_LINK_STATUS, linked, 0);
if (linked[0] <= 0) {
Log.e("OpenGlUtil", "program link error=" + GLES30.glGetProgramInfoLog(programId));
GLES30.glDeleteProgram(programId);
programId = 0;
}
GLES30.glDeleteShader(vertexShader);
GLES30.glDeleteShader(fragmentShader);
return programId;
}
public static int getExternalOESTextureId() {
int[] textures = new int[1];
GLES30.glGenTextures(1, textures, 0);
GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return textures[0];
}
public static String readShaderFromSource(Context context, final int resourceId) {
String line;
StringBuilder builder = new StringBuilder();
InputStream inputStream = context.getResources().openRawResource(resourceId);
InputStreamReader reader = new InputStreamReader(inputStream);
BufferedReader bufferedReader = new BufferedReader(reader);
try {
while ((line = bufferedReader.readLine()) != null) {
builder.append(line).append("\n");
}
} catch (IOException e) {
return null;
} finally {
try {
inputStream.close();
reader.close();
bufferedReader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
return builder.toString();
}
}

@@ -0,0 +1,36 @@ CameraFilter/src/main/java/com/frank/camerafilter/util/Rotation.java
package com.frank.camerafilter.util;
public enum Rotation {
NORMAL, ROTATION_90, ROTATION_180, ROTATION_270;
public int toInt() {
switch (this) {
case NORMAL:
return 0;
case ROTATION_90:
return 90;
case ROTATION_180:
return 180;
case ROTATION_270:
return 270;
default:
throw new IllegalStateException("unknown rotation value...");
}
}
public static Rotation fromInt(int rotation) {
switch (rotation) {
case 0:
return NORMAL;
case 90:
return ROTATION_90;
case 180:
return ROTATION_180;
case 270:
return ROTATION_270;
default:
throw new IllegalStateException("unknown rotation=" + rotation);
}
}
}

@@ -0,0 +1,82 @@ CameraFilter/src/main/java/com/frank/camerafilter/util/TextureRotateUtil.java
package com.frank.camerafilter.util;
public class TextureRotateUtil {
public final static float[] TEXTURE_ROTATE_0 = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f
};
public final static float[] TEXTURE_ROTATE_90 = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
0.0f, 0.0f
};
public final static float[] TEXTURE_ROTATE_180 = {
1.0f, 0.0f,
0.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f
};
public final static float[] TEXTURE_ROTATE_270 = {
0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
1.0f, 1.0f
};
public final static float[] VERTEX = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f
};
private TextureRotateUtil() {}
private static float flip(float value) {
return value == 1.0f ? 0.0f : 1.0f;
}
public static float[] getRotateTexture(Rotation rotation, boolean horizontalFlip, boolean verticalFlip) {
float[] rotateTexture;
switch (rotation) {
case ROTATION_90:
rotateTexture = TEXTURE_ROTATE_90;
break;
case ROTATION_180:
rotateTexture = TEXTURE_ROTATE_180;
break;
case ROTATION_270:
rotateTexture = TEXTURE_ROTATE_270;
break;
case NORMAL:
default:
rotateTexture = TEXTURE_ROTATE_0;
break;
}
if (horizontalFlip) {
rotateTexture = new float[] {
flip(rotateTexture[0]), rotateTexture[1],
flip(rotateTexture[2]), rotateTexture[3],
flip(rotateTexture[4]), rotateTexture[5],
flip(rotateTexture[6]), rotateTexture[7]
};
}
if (verticalFlip) {
rotateTexture = new float[] {
rotateTexture[0], flip(rotateTexture[1]),
rotateTexture[2], flip(rotateTexture[3]),
rotateTexture[4], flip(rotateTexture[5]),
rotateTexture[6], flip(rotateTexture[7])
};
}
return rotateTexture;
}
}
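CameraRender below combines these tables via getRotateTexture(); for example, a camera mounted at 90 degrees, mirrored when the front camera is active, and flipped vertically for GL's coordinate system:

    float[] texCoords = TextureRotateUtil.getRotateTexture(
            Rotation.fromInt(cameraManager.getOrientation()), // typically 90 or 270
            cameraManager.isFront(),                          // mirror the front camera
            true);                                            // vertical flip for GL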

@@ -0,0 +1,50 @@ CameraFilter/src/main/java/com/frank/camerafilter/widget/BeautyCameraView.java
package com.frank.camerafilter.widget;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import com.frank.camerafilter.factory.BeautyFilterType;
public class BeautyCameraView extends GLSurfaceView {
private final CameraRender mCameraRender;
public BeautyCameraView(Context context) {
this(context, null);
}
public BeautyCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
getHolder().addCallback(this);
mCameraRender = new CameraRender(this);
setEGLContextClientVersion(3);
setRenderer(mCameraRender);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
super.surfaceDestroyed(holder);
if (mCameraRender != null) {
mCameraRender.releaseCamera();
}
}
public void switchCamera() {
if (mCameraRender != null) {
mCameraRender.switchCamera();
}
}
public void setFilter(BeautyFilterType type) {
mCameraRender.setFilter(type);
}
public void setRecording(boolean isRecording) {
mCameraRender.setRecording(isRecording);
}
}
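From an Activity the view is driven with three calls (a sketch; R.id.camera_view is a hypothetical layout id):

    BeautyCameraView cameraView = findViewById(R.id.camera_view); // hypothetical id
    cameraView.setFilter(BeautyFilterType.CRAYON);
    cameraView.setRecording(true);   // encodes to camera_record.mp4 (see CameraRender)
    cameraView.switchCamera();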

@@ -0,0 +1,246 @@ CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java
package com.frank.camerafilter.widget;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.EGL14;
import android.opengl.GLES30;
import android.opengl.GLSurfaceView;
import android.os.Environment;
import com.frank.camerafilter.camera.CameraManager;
import com.frank.camerafilter.filter.BeautyCameraFilter;
import com.frank.camerafilter.filter.BaseFilter;
import com.frank.camerafilter.factory.BeautyFilterFactory;
import com.frank.camerafilter.factory.BeautyFilterType;
import com.frank.camerafilter.recorder.video.TextureVideoRecorder;
import com.frank.camerafilter.util.OpenGLUtil;
import com.frank.camerafilter.util.Rotation;
import com.frank.camerafilter.util.TextureRotateUtil;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
protected BaseFilter mFilter;
private SurfaceTexture surfaceTexture;
private BeautyCameraFilter cameraFilter;
private final CameraManager cameraManager;
protected int mTextureId = OpenGLUtil.NO_TEXTURE;
protected FloatBuffer mVertexBuffer;
protected FloatBuffer mTextureBuffer;
protected int mImageWidth, mImageHeight;
protected int mSurfaceWidth, mSurfaceHeight;
private final float[] mMatrix = new float[16];
private final BeautyCameraView mCameraView;
private final File outputFile;
private int recordStatus;
protected boolean recordEnable;
private final TextureVideoRecorder videoRecorder;
private final static int RECORDING_OFF = 0;
private final static int RECORDING_ON = 1;
private final static int RECORDING_RESUME = 2;
private static final int videoBitrate = 6 * 1024 * 1024;
private static final String videoName = "camera_record.mp4";
private static final String videoPath = Environment.getExternalStorageDirectory().getPath();
public CameraRender(BeautyCameraView cameraView) {
mCameraView = cameraView;
cameraManager = new CameraManager();
mVertexBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.VERTEX.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mVertexBuffer.put(TextureRotateUtil.VERTEX).position(0);
mTextureBuffer = ByteBuffer.allocateDirect(TextureRotateUtil.TEXTURE_ROTATE_0.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mTextureBuffer.put(TextureRotateUtil.TEXTURE_ROTATE_0).position(0);
recordEnable = false;
recordStatus = RECORDING_OFF;
videoRecorder = new TextureVideoRecorder(mCameraView.getContext());
outputFile = new File(videoPath, videoName);
}
private void openCamera() {
if (cameraManager.getCamera() == null)
cameraManager.openCamera();
Camera.Size size = cameraManager.getPreviewSize();
if (cameraManager.getOrientation() == 90 || cameraManager.getOrientation() == 270) {
mImageWidth = size.height;
mImageHeight = size.width;
} else {
mImageWidth = size.width;
mImageHeight = size.height;
}
cameraFilter.onInputSizeChanged(mImageWidth, mImageHeight);
adjustSize(cameraManager.getOrientation(), cameraManager.isFront(), true);
}
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
GLES30.glDisable(GL10.GL_DITHER);
GLES30.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES30.glEnable(GL10.GL_CULL_FACE);
GLES30.glEnable(GL10.GL_DEPTH_TEST);
cameraFilter = new BeautyCameraFilter(mCameraView.getContext());
cameraFilter.init();
mTextureId = OpenGLUtil.getExternalOESTextureId();
if (mTextureId != OpenGLUtil.NO_TEXTURE) {
surfaceTexture = new SurfaceTexture(mTextureId);
surfaceTexture.setOnFrameAvailableListener(this);
}
openCamera();
}
@Override
public void onSurfaceChanged(GL10 gl10, int width, int height) {
GLES30.glViewport(0, 0, width, height);
mSurfaceWidth = width;
mSurfaceHeight = height;
cameraManager.startPreview(surfaceTexture);
onFilterChanged();
}
@Override
public void onDrawFrame(GL10 gl10) {
GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT | GLES30.GL_DEPTH_BUFFER_BIT);
GLES30.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
surfaceTexture.updateTexImage();
surfaceTexture.getTransformMatrix(mMatrix);
cameraFilter.setTextureTransformMatrix(mMatrix);
int id = mTextureId;
if (mFilter == null) {
cameraFilter.onDrawFrame(mTextureId, mVertexBuffer, mTextureBuffer);
} else {
id = cameraFilter.onDrawToTexture(mTextureId);
mFilter.onDrawFrame(id, mVertexBuffer, mTextureBuffer);
}
onRecordVideo(id);
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
mCameraView.requestRender();
}
public void adjustSize(int rotation, boolean horizontalFlip, boolean verticalFlip) {
float[] vertexData = TextureRotateUtil.VERTEX;
float[] textureData = TextureRotateUtil.getRotateTexture(Rotation.fromInt(rotation),
horizontalFlip, verticalFlip);
mVertexBuffer.clear();
mVertexBuffer.put(vertexData).position(0);
mTextureBuffer.clear();
mTextureBuffer.put(textureData).position(0);
}
public void switchCamera() {
if (cameraManager != null) {
cameraManager.switchCamera();
}
}
public void releaseCamera() {
if (cameraManager != null) {
cameraManager.releaseCamera();
}
}
private void onRecordVideo(int textureId) {
if (recordEnable) {
switch (recordStatus) {
case RECORDING_OFF:
videoRecorder.setPreviewSize(mImageWidth, mImageHeight);
videoRecorder.setTextureBuffer(mTextureBuffer);
videoRecorder.setCubeBuffer(mVertexBuffer);
videoRecorder.startRecording(new TextureVideoRecorder.RecorderConfig(
mImageWidth,
mImageHeight,
videoBitrate,
outputFile,
EGL14.eglGetCurrentContext()));
recordStatus = RECORDING_ON;
break;
case RECORDING_RESUME:
videoRecorder.updateSharedContext(EGL14.eglGetCurrentContext());
recordStatus = RECORDING_ON;
break;
case RECORDING_ON:
break;
default:
throw new RuntimeException("unknown status " + recordStatus);
}
} else {
switch (recordStatus) {
case RECORDING_ON:
case RECORDING_RESUME:
videoRecorder.stopRecording();
recordStatus = RECORDING_OFF;
break;
case RECORDING_OFF:
break;
default:
throw new RuntimeException("unknown status " + recordStatus);
}
}
videoRecorder.setTextureId(textureId);
videoRecorder.frameAvailable(surfaceTexture);
}
public void setRecording(boolean isRecording) {
recordEnable = isRecording;
}
public void setFilter(final BeautyFilterType type) {
mCameraView.queueEvent(new Runnable() {
@Override
public void run() {
if (mFilter != null)
mFilter.destroy();
mFilter = null;
mFilter = BeautyFilterFactory.getFilter(type, mCameraView.getContext());
if (mFilter != null)
mFilter.init();
onFilterChanged();
}
});
mCameraView.requestRender();
}
public void onFilterChanged() {
if (mFilter != null) {
mFilter.onInputSizeChanged(mImageWidth, mImageHeight);
mFilter.onOutputSizeChanged(mSurfaceWidth, mSurfaceHeight);
}
cameraFilter.onOutputSizeChanged(mSurfaceWidth, mSurfaceHeight);
if (mFilter != null)
cameraFilter.initFrameBuffer(mImageWidth, mImageHeight);
else
cameraFilter.destroyFrameBuffer();
}
}

@@ -0,0 +1,53 @@ CameraFilter/src/main/res/raw/crayon.glsl
varying highp vec2 textureCoordinate;
precision mediump float;
uniform sampler2D inputImageTexture;
uniform vec2 singleStepOffset;
uniform float strength;
const highp vec3 W = vec3(0.299,0.587,0.114);
const mat3 rgb2yiqMatrix = mat3(
0.299, 0.587, 0.114,
0.596,-0.275,-0.321,
0.212,-0.523, 0.311);
const mat3 yiq2rgbMatrix = mat3(
1.0, 0.956, 0.621,
1.0,-0.272,-1.703,
1.0,-1.106, 0.0);
void main()
{
vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);
vec3 maxValue = vec3(0.,0.,0.);
for(int i = -2; i<=2; i++)
{
for(int j = -2; j<=2; j++)
{
vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
maxValue.r = max(maxValue.r,tempColor.r);
maxValue.g = max(maxValue.g,tempColor.g);
maxValue.b = max(maxValue.b,tempColor.b);
}
}
vec3 textureColor = oralColor.rgb / maxValue;
float gray = dot(textureColor, W);
float k = 0.223529;
float alpha = min(gray,k)/k;
textureColor = textureColor * alpha + (1.-alpha)*oralColor.rgb;
vec3 yiqColor = textureColor * rgb2yiqMatrix;
yiqColor.r = max(0.0,min(1.0,pow(gray,strength)));
textureColor = yiqColor * yiq2rgbMatrix;
gl_FragColor = vec4(textureColor, oralColor.w);
}
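For reference, W holds the Rec.601 luma weights, so dot(color, W) computes Y = 0.299*R + 0.587*G + 0.114*B; rgb2yiqMatrix and yiq2rgbMatrix are the standard RGB-to-YIQ conversion pair, and the filter rewrites only the luma channel with pow(gray, strength) before converting back to RGB.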

@@ -0,0 +1,13 @@ CameraFilter/src/main/res/raw/default_fragment.glsl
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying mediump vec2 textureCoordinate;
uniform samplerExternalOES inputImageTexture;
void main(){
vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
gl_FragColor = vec4(centralColor.rgb, 1.0);
}

@@ -0,0 +1,11 @@ CameraFilter/src/main/res/raw/default_vertex.glsl
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
uniform mat4 textureTransform;
varying vec2 textureCoordinate;
void main()
{
textureCoordinate = (textureTransform * inputTextureCoordinate).xy;
gl_Position = position;
}

@@ -0,0 +1,46 @@ CameraFilter/src/main/res/raw/sketch.glsl
varying highp vec2 textureCoordinate;
precision mediump float;
uniform sampler2D inputImageTexture;
uniform vec2 singleStepOffset;
uniform float strength;
const highp vec3 W = vec3(0.299,0.587,0.114);
void main()
{
float threshold = 0.0;
//pic1
vec4 oralColor = texture2D(inputImageTexture, textureCoordinate);
//pic2
vec3 maxValue = vec3(0.,0.,0.);
for(int i = -2; i<=2; i++)
{
for(int j = -2; j<=2; j++)
{
vec4 tempColor = texture2D(inputImageTexture, textureCoordinate+singleStepOffset*vec2(i,j));
maxValue.r = max(maxValue.r,tempColor.r);
maxValue.g = max(maxValue.g,tempColor.g);
maxValue.b = max(maxValue.b,tempColor.b);
threshold += dot(tempColor.rgb, W);
}
}
//pic3
float gray1 = dot(oralColor.rgb, W);
//pic4
float gray2 = dot(maxValue, W);
//pic5
float contour = gray1 / gray2;
threshold = threshold / 25.;
float alpha = max(1.0,gray1>threshold?1.0:(gray1/threshold));
float result = contour * alpha + (1.0-alpha)*gray1;
gl_FragColor = vec4(vec3(result,result,result), oralColor.w);
}

@@ -66,4 +66,5 @@ app/build.gradle: dependencies {
implementation project(':AndroidMedia')
//implementation "libmp3" if you need mp3-lite module
// implementation project(':libmp3')
implementation project(':CameraFilter')
}

@@ -2,3 +2,4 @@ settings.gradle: include ':AndroidMedia'
include ':app'
include ':Live'
include ':libmp3'
include ':CameraFilter'
