Feature: adjust recorder params

Branch: pull/221/head
Author: xufuji456, 2 years ago
Parent: 9dd6b64327
Commit: be37dabaca
4 changed files:
  1. CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/CameraVideoRecorder.java (155 lines changed)
  2. CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java (4 lines changed)
  3. CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java (12 lines changed)
  4. CameraFilter/src/main/res/raw/default_fragment.glsl (2 lines changed)

CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/CameraVideoRecorder.java
@@ -43,9 +43,9 @@ import java.nio.FloatBuffer;
 * call TextureMovieEncoder#frameAvailable().
 * </ul>
 */
-public class TextureVideoRecorder implements Runnable {
+public class CameraVideoRecorder implements Runnable {

-    private final static String TAG = TextureVideoRecorder.class.getSimpleName();
+    private final static String TAG = CameraVideoRecorder.class.getSimpleName();

    private final static int MSG_START_RECORDING = 0;
    private final static int MSG_STOP_RECORDING = 1;
@@ -80,7 +80,7 @@ public class TextureVideoRecorder implements Runnable {
    private BeautyFilterType type = BeautyFilterType.NONE;

-    public TextureVideoRecorder(Context context) {
+    public CameraVideoRecorder(Context context) {
        mContext = context;
    }
@@ -101,88 +101,17 @@ public class TextureVideoRecorder implements Runnable {
    }

-    public void startRecording(RecorderConfig config) {
-        synchronized (mReadyFence) {
-            if (mRunning) {
-                return;
-            }
-            mRunning = true;
-            new Thread(this, TAG).start();
-            while (!mReady) {
-                try {
-                    mReadyFence.wait();
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
-    }
-
-    public void stopRecording() {
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT_RECORDING));
-    }
-
-    public boolean isRecording() {
-        synchronized (mReadyFence) {
-            return mRunning;
-        }
-    }
-
-    public void updateSharedContext(EGLContext eglContext) {
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, eglContext));
-    }
-
-    public void frameAvailable(SurfaceTexture surfaceTexture) {
-        synchronized (mReadyFence) {
-            if (!mReady)
-                return;
-        }
-        float[] transform = new float[16];
-        surfaceTexture.getTransformMatrix(transform);
-        long timestamp = surfaceTexture.getTimestamp();
-        if (timestamp == 0) {
-            return;
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
-    }
-
-    public void setTextureId(int id) {
-        synchronized (mReadyFence) {
-            if (!mReady)
-                return;
-        }
-        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
-    }
-
-    @Override
-    public void run() {
-        Looper.prepare();
-        synchronized (mReadyFence) {
-            mHandler = new RecorderHandler(this);
-            mReady = true;
-            mReadyFence.notify();
-        }
-        Looper.loop();
-        synchronized (mReadyFence) {
-            mReady = false;
-            mRunning = false;
-            mHandler = null;
-        }
-    }

    private static class RecorderHandler extends Handler {

-        private final WeakReference<TextureVideoRecorder> mWeakRecorder;
+        private final WeakReference<CameraVideoRecorder> mWeakRecorder;

-        public RecorderHandler(TextureVideoRecorder recorder) {
+        public RecorderHandler(CameraVideoRecorder recorder) {
            mWeakRecorder = new WeakReference<>(recorder);
        }

        @Override
        public void handleMessage(@NonNull Message msg) {
            Object obj = msg.obj;
-            TextureVideoRecorder recorder = mWeakRecorder.get();
+            CameraVideoRecorder recorder = mWeakRecorder.get();
            if (recorder == null) {
                return;
            }
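The hunk ends right after the null check, so the rest of handleMessage() is not visible in this diff. For orientation, here is a minimal sketch of how a Grafika-style recorder handler typically continues; the handleXxx helper names are illustrative assumptions, not code from this commit:

    // Continuation of handleMessage(), sketched under the assumption that it
    // follows the usual Grafika TextureMovieEncoder pattern. The handleXxx
    // helpers are hypothetical names, not the project's actual methods.
    switch (msg.what) {
        case MSG_START_RECORDING:
            recorder.handleStartRecording((RecorderConfig) obj);
            break;
        case MSG_FRAME_AVAILABLE:
            // frameAvailable() packed the 64-bit SurfaceTexture timestamp into
            // arg1 (high 32 bits) and arg2 (low 32 bits); reassemble it here.
            long timestamp = (((long) msg.arg1) << 32)
                    | (((long) msg.arg2) & 0xFFFFFFFFL);
            recorder.handleFrameAvailable((float[]) obj, timestamp);
            break;
        case MSG_QUIT_RECORDING:
            // Ends Looper.loop() in run(), letting the recorder thread exit.
            Looper.myLooper().quit();
            break;
        // MSG_STOP_RECORDING, MSG_SET_TEXTURE_ID and MSG_UPDATE_SHARED_CONTEXT
        // dispatch the same way.
        default:
            break;
    }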
@@ -305,6 +234,78 @@ public class TextureVideoRecorder implements Runnable {
            mEglCore = null;
        }
    }

+    public void startRecording(RecorderConfig config) {
+        synchronized (mReadyFence) {
+            if (mRunning) {
+                return;
+            }
+            mRunning = true;
+            new Thread(this, TAG).start();
+            while (!mReady) {
+                try {
+                    mReadyFence.wait();
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
+    }
+
+    public void stopRecording() {
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT_RECORDING));
+    }
+
+    public boolean isRecording() {
+        synchronized (mReadyFence) {
+            return mRunning;
+        }
+    }
+
+    public void updateSharedContext(EGLContext eglContext) {
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, eglContext));
+    }
+
+    public void frameAvailable(SurfaceTexture surfaceTexture) {
+        synchronized (mReadyFence) {
+            if (!mReady)
+                return;
+        }
+        float[] transform = new float[16];
+        surfaceTexture.getTransformMatrix(transform);
+        long timestamp = surfaceTexture.getTimestamp();
+        if (timestamp == 0) {
+            return;
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE, (int) (timestamp >> 32), (int) timestamp, transform));
+    }
+
+    public void setTextureId(int id) {
+        synchronized (mReadyFence) {
+            if (!mReady)
+                return;
+        }
+        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
+    }
+
+    @Override
+    public void run() {
+        Looper.prepare();
+        synchronized (mReadyFence) {
+            mHandler = new RecorderHandler(this);
+            mReady = true;
+            mReadyFence.notify();
+        }
+        Looper.loop();
+        synchronized (mReadyFence) {
+            mReady = false;
+            mRunning = false;
+            mHandler = null;
+        }
+    }

    public void setFilter(BeautyFilterType type) {
        this.type = type;
    }
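Apart from the class rename, this block is a pure move: the thread-management methods now sit after the EGL teardown code. The startRecording()/run() pair implements a hand-rolled handshake: startRecording() spawns the recorder thread, then waits on mReadyFence until run() has installed mHandler and signalled readiness, which guarantees every later sendMessage() call has a live handler. A hedged caller-side sketch (config, textureId and surfaceTexture are assumed to come from the host renderer; RecorderConfig's argument list is truncated in this diff, so it is not spelled out here):

    CameraVideoRecorder recorder = new CameraVideoRecorder(context);
    recorder.startRecording(config);         // blocks until the recorder thread's Looper is up

    // Per frame, on the GL thread:
    recorder.setTextureId(textureId);        // silently ignored until mReady is true
    recorder.frameAvailable(surfaceTexture); // frames with timestamp 0 are dropped

    // Teardown: stop encoding, then quit the recorder thread's Looper.
    recorder.stopRecording();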

CameraFilter/src/main/java/com/frank/camerafilter/recorder/video/VideoRecorderCore.java
@@ -26,9 +26,9 @@ public class VideoRecorderCore {
    private final static String TAG = VideoRecorderCore.class.getSimpleName();

    private final static int FRAME_RATE = 30;
-    private final static int IFRAME_INTERVAL = 5;
+    private final static int IFRAME_INTERVAL = 30;
    private final static String MIME_TYPE = "video/avc";
-    private final static int TIMEOUT_USEC = 10000;
+    private final static int TIMEOUT_USEC = 20000;

    private int mTrackIndex;
    private boolean mMuxerStarted;
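Two encoder parameters change here. KEY_I_FRAME_INTERVAL is specified in seconds, so raising IFRAME_INTERVAL from 5 to 30 (at FRAME_RATE 30) means one keyframe roughly every 30 seconds: smaller files at the cost of coarser seeking. TIMEOUT_USEC is in microseconds, so the per-poll wait when draining the codec doubles from 10 ms to 20 ms. A minimal sketch of where these constants typically plug into MediaCodec; the project's actual VideoRecorderCore setup is not shown in this diff:

    // Standard MediaCodec H.264 encoder configuration using the constants above.
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);            // 30 fps
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); // one I-frame per 30 s

    MediaCodec encoder = MediaCodec.createEncoderByType(MIME_TYPE);       // throws IOException
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

    // When draining output: wait at most TIMEOUT_USEC (now 20 ms) per poll.
    int outputIndex = encoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);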

CameraFilter/src/main/java/com/frank/camerafilter/widget/CameraRender.java
@@ -8,11 +8,11 @@ import android.opengl.GLSurfaceView;
import android.os.Environment;
import com.frank.camerafilter.camera.CameraManager;
-import com.frank.camerafilter.filter.BeautyCameraFilter;
-import com.frank.camerafilter.filter.BaseFilter;
import com.frank.camerafilter.factory.BeautyFilterFactory;
import com.frank.camerafilter.factory.BeautyFilterType;
-import com.frank.camerafilter.recorder.video.TextureVideoRecorder;
+import com.frank.camerafilter.filter.BeautyCameraFilter;
+import com.frank.camerafilter.filter.BaseFilter;
+import com.frank.camerafilter.recorder.video.CameraVideoRecorder;
import com.frank.camerafilter.util.OpenGLUtil;
import com.frank.camerafilter.util.Rotation;
import com.frank.camerafilter.util.TextureRotateUtil;
@@ -50,7 +50,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener
    private final File outputFile;
    private int recordStatus;
    protected boolean recordEnable;
-    private final TextureVideoRecorder videoRecorder;
+    private final CameraVideoRecorder videoRecorder;

    private final static int RECORDING_OFF = 0;
    private final static int RECORDING_ON = 1;
@@ -75,7 +75,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener
        recordEnable = false;
        recordStatus = RECORDING_OFF;
-        videoRecorder = new TextureVideoRecorder(mCameraView.getContext());
+        videoRecorder = new CameraVideoRecorder(mCameraView.getContext());
        outputFile = new File(videoPath, videoName);
    }
@@ -178,7 +178,7 @@ public class CameraRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener
        videoRecorder.setPreviewSize(mImageWidth, mImageHeight);
        videoRecorder.setTextureBuffer(mTextureBuffer);
        videoRecorder.setCubeBuffer(mVertexBuffer);
-        videoRecorder.startRecording(new TextureVideoRecorder.RecorderConfig(
+        videoRecorder.startRecording(new CameraVideoRecorder.RecorderConfig(
                mImageWidth,
                mImageHeight,
                videoBitrate,

CameraFilter/src/main/res/raw/default_fragment.glsl
@@ -9,5 +9,5 @@ uniform samplerExternalOES inputImageTexture;
void main(){
    vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
-    gl_FragColor = vec4(centralColor.rgb,1.0);;
+    gl_FragColor = vec4(centralColor.rgb,1.0);
}
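The fix drops a stray second semicolon after the gl_FragColor assignment. One non-obvious detail of this shader: samplerExternalOES means the camera frame is not an ordinary 2D texture, so on the Java side the texture must be created and bound with the GL_TEXTURE_EXTERNAL_OES target. A hedged sketch of that standard setup; the project's OpenGLUtil presumably wraps something equivalent:

    // Create the external OES texture that backs samplerExternalOES.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    // Camera frames arrive via a SurfaceTexture bound to this texture id;
    // the fragment shader above then samples them through inputImageTexture.
    SurfaceTexture cameraTexture = new SurfaceTexture(textures[0]);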