Delete old media encoder

v2
Mattia Iavarone 6 years ago
parent 5b303f0522
commit a706b97673
  1. −366 cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoder.java
  2. −211 cameraview/src/main/gles/com/otaliastudios/cameraview/OldMediaEncoderCore.java
  3. −37 cameraview/src/main/java/com/otaliastudios/cameraview/SnapshotVideoRecorder.java

@@ -1,366 +0,0 @@
/*
 * Copyright 2013 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.otaliastudios.cameraview;

import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.support.annotation.RequiresApi;
import android.util.Log;

import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
/**
 * -- from grafika --
 *
 * Encode a movie from frames rendered from an external texture image.
 * <p>
 * The object wraps an encoder running on a dedicated thread. The various control messages
 * may be sent from arbitrary threads (typically the app UI thread). The encoder thread
 * manages both sides of the encoder (feeding and draining); the only external input is
 * the GL texture.
 * <p>
 * The design is complicated slightly by the need to create an EGL context that shares state
 * with a view that gets restarted if (say) the device orientation changes. When the view
 * in question is a GLSurfaceView, we don't have full control over the EGL context creation
 * on that side, so we have to bend over backwards a bit here.
 * <p>
 * To use:
 * <ul>
 * <li>create a TextureMovieEncoder object
 * <li>create a Config
 * <li>call TextureMovieEncoder#startRecording() with the config
 * <li>call TextureMovieEncoder#setTextureId() with the texture object that receives frames
 * <li>for each frame, after latching it with SurfaceTexture#updateTexImage(),
 *     call TextureMovieEncoder#frameAvailable()
 * </ul>
 * A usage sketch is shown after this file's diff.
 *
 * TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class OldMediaEncoder implements Runnable {
    private static final String TAG = OldMediaEncoder.class.getSimpleName();

    private static final int MSG_START_RECORDING = 0;
    private static final int MSG_STOP_RECORDING = 1;
    private static final int MSG_FRAME_AVAILABLE = 2;
    private static final int MSG_SET_TEXTURE_ID = 3;
    private static final int MSG_QUIT = 4;

    // ----- accessed exclusively by encoder thread -----
    private EglWindowSurface mInputWindowSurface;
    private EglCore mEglCore;
    private EglViewport mFullScreen;
    private int mTextureId;
    private int mFrameNum = -1; // Important
    private OldMediaEncoderCore mVideoEncoder;
    private float mTransformationScaleX = 1F;
    private float mTransformationScaleY = 1F;
    private int mTransformationRotation = 0;

    // ----- accessed by multiple threads -----
    private volatile EncoderHandler mHandler;
    private final Object mLooperReadyLock = new Object(); // guards ready/running
    private boolean mLooperReady;
    private boolean mRunning;

    /**
     * Encoder configuration.
     * <p>
     * Object is immutable, which means we can safely pass it between threads without
     * explicit synchronization (and don't need to worry about it getting tweaked out from
     * under us).
     */
    static class Config {
        final File mOutputFile;
        final int mWidth;
        final int mHeight;
        final int mBitRate;
        final int mFrameRate;
        final int mRotation;
        final float mScaleX;
        final float mScaleY;
        final EGLContext mEglContext;
        final String mMimeType;

        Config(File outputFile, int width, int height,
               int bitRate, int frameRate,
               int rotation,
               float scaleX, float scaleY,
               String mimeType,
               EGLContext sharedEglContext) {
            mOutputFile = outputFile;
            mWidth = width;
            mHeight = height;
            mBitRate = bitRate;
            mFrameRate = frameRate;
            mEglContext = sharedEglContext;
            mScaleX = scaleX;
            mScaleY = scaleY;
            mRotation = rotation;
            mMimeType = mimeType;
        }

        @Override
        public String toString() {
            return "Config: " + mWidth + "x" + mHeight + " @" + mBitRate +
                    " to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
        }
    }
    private void prepareEncoder(Config config) {
        OldMediaEncoderCore.VideoConfig videoConfig = new OldMediaEncoderCore.VideoConfig(
                config.mWidth, config.mHeight, config.mBitRate, config.mFrameRate,
                0, // The video encoder rotation does not work, so we apply it here using Matrix.rotateM().
                config.mMimeType);
        try {
            mVideoEncoder = new OldMediaEncoderCore(videoConfig, config.mOutputFile);
        } catch (IOException ioe) {
            throw new RuntimeException(ioe);
        }
        mEglCore = new EglCore(config.mEglContext, EglCore.FLAG_RECORDABLE);
        mInputWindowSurface = new EglWindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
        mInputWindowSurface.makeCurrent(); // drawing will happen on the InputWindowSurface, which
                                           // is backed by mVideoEncoder.getInputSurface()
        mFullScreen = new EglViewport();
        mTransformationScaleX = config.mScaleX;
        mTransformationScaleY = config.mScaleY;
        mTransformationRotation = config.mRotation;
    }

    private void releaseEncoder() {
        mVideoEncoder.release();
        if (mInputWindowSurface != null) {
            mInputWindowSurface.release();
            mInputWindowSurface = null;
        }
        if (mFullScreen != null) {
            mFullScreen.release(true);
            mFullScreen = null;
        }
        if (mEglCore != null) {
            mEglCore.release();
            mEglCore = null;
        }
    }
    /**
     * Tells the video recorder to start recording. (Call from non-encoder thread.)
     * <p>
     * Creates a new thread, which will create an encoder using the provided configuration.
     * <p>
     * Returns after the recorder thread has started and is ready to accept Messages. The
     * encoder may not yet be fully configured.
     */
    public void startRecording(Config config) {
        Log.d(TAG, "Encoder: startRecording()");
        synchronized (mLooperReadyLock) {
            if (mRunning) {
                Log.w(TAG, "Encoder thread already running");
                return;
            }
            mRunning = true;
            new Thread(this, "TextureMovieEncoder").start();
            while (!mLooperReady) {
                try {
                    mLooperReadyLock.wait();
                } catch (InterruptedException ie) {
                    // ignore
                }
            }
        }
        mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
    }

    /**
     * Tells the video recorder to stop recording. (Call from non-encoder thread.)
     * <p>
     * Returns immediately; the encoder/muxer may not yet be finished creating the movie.
     */
    public void stopRecording(Runnable onStop) {
        mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING, onStop));
        mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
        // We don't know when these will actually finish (or even start). We don't want to
        // delay the UI thread though, so we return immediately.
    }

    /**
     * Returns true if recording has been started.
     */
    public boolean isRecording() {
        synchronized (mLooperReadyLock) {
            return mRunning;
        }
    }

    /**
     * Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
     * <p>
     * This function sends a message and returns immediately. This isn't sufficient -- we
     * don't want the caller to latch a new frame until we're done with this one -- but we
     * can get away with it so long as the input frame rate is reasonable and the encoder
     * thread doesn't stall.
     * <p>
     * TODO: either block here until the texture has been rendered onto the encoder surface,
     * or have a separate "block if still busy" method that the caller can execute immediately
     * before it calls updateTexImage(). The latter is preferred because we don't want to
     * stall the caller while this thread does work.
     */
    public void frameAvailable(SurfaceTexture st) {
        synchronized (mLooperReadyLock) {
            if (!mLooperReady) {
                return;
            }
        }
        float[] transform = new float[16]; // TODO - avoid alloc every frame. Not easy, need a pool
        st.getTransformMatrix(transform);
        long timestamp = st.getTimestamp();
        if (timestamp == 0) {
            // Seeing this after device is toggled off/on with power button. The
            // first frame back has a zero timestamp.
            // MPEG4Writer thinks this is cause to abort() in native code, so it's very
            // important that we just ignore the frame.
            Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
            return;
        }
        mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
                (int) (timestamp >> 32), (int) timestamp, transform));
    }
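
A side note on the two int arguments above: a Message can only carry 32-bit arg1/arg2 values, so the 64-bit nanosecond timestamp is split here and reassembled in EncoderHandler.handleMessage() further down. A minimal sketch of the round trip, with hypothetical local names:

    long timestampNs = 123456789012345L;  // from SurfaceTexture.getTimestamp()
    int hi = (int) (timestampNs >> 32);   // becomes Message.arg1
    int lo = (int) timestampNs;           // becomes Message.arg2
    // The 0xffffffffL mask stops the low word from sign-extending on reassembly.
    long restored = (((long) hi) << 32) | (((long) lo) & 0xffffffffL);
    assert restored == timestampNs;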
    /**
     * Tells the video recorder what texture name to use. This is the external texture that
     * we're receiving camera previews in. (Call from non-encoder thread.)
     * <p>
     * TODO: do something less clumsy
     */
    public void setTextureId(int id) {
        synchronized (mLooperReadyLock) {
            if (!mLooperReady) return;
        }
        mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
    }

    /**
     * Encoder thread entry point. Establishes Looper/Handler and waits for messages.
     *
     * @see java.lang.Thread#run()
     */
    @Override
    public void run() {
        // Establish a Looper for this thread, and define a Handler for it.
        Looper.prepare();
        synchronized (mLooperReadyLock) {
            mHandler = new EncoderHandler(this);
            mLooperReady = true;
            mLooperReadyLock.notify();
        }
        Looper.loop();

        Log.d(TAG, "Encoder thread exiting");
        synchronized (mLooperReadyLock) {
            mLooperReady = mRunning = false;
            mHandler = null;
        }
    }
    /**
     * Handles encoder state change requests. The handler is created on the encoder thread.
     */
    private static class EncoderHandler extends Handler {
        private WeakReference<OldMediaEncoder> mWeakEncoder;

        public EncoderHandler(OldMediaEncoder encoder) {
            mWeakEncoder = new WeakReference<>(encoder);
        }

        @Override // runs on encoder thread
        public void handleMessage(Message inputMessage) {
            int what = inputMessage.what;
            Object obj = inputMessage.obj;
            OldMediaEncoder encoder = mWeakEncoder.get();
            if (encoder == null) {
                Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
                return;
            }
            switch (what) {
                case MSG_START_RECORDING:
                    encoder.mFrameNum = 0;
                    Config config = (Config) obj;
                    encoder.prepareEncoder(config);
                    break;

                case MSG_STOP_RECORDING:
                    encoder.mFrameNum = -1;
                    encoder.mVideoEncoder.drainEncoder(true);
                    encoder.releaseEncoder();
                    ((Runnable) obj).run();
                    break;

                case MSG_FRAME_AVAILABLE:
                    if (encoder.mFrameNum < 0) break;
                    encoder.mFrameNum++;
                    long timestamp = (((long) inputMessage.arg1) << 32) |
                            (((long) inputMessage.arg2) & 0xffffffffL);
                    float[] transform = (float[]) obj;
                    // We must scale this matrix like GlCameraPreview does, because it might have some cropping.
                    // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a translation to compensate.
                    float scaleX = encoder.mTransformationScaleX;
                    float scaleY = encoder.mTransformationScaleY;
                    float scaleTranslX = (1F - scaleX) / 2F;
                    float scaleTranslY = (1F - scaleY) / 2F;
                    Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
                    Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
                    // We must also rotate this matrix. In GlCameraPreview rotation is not needed because
                    // it is a live stream, but the output video must be correctly rotated based on the
                    // device rotation at the moment of recording. Rotation takes place around the Z axis
                    // at the origin, so we translate to the origin, rotate, then translate back.
                    Matrix.translateM(transform, 0, 0.5F, 0.5F, 0);
                    Matrix.rotateM(transform, 0, encoder.mTransformationRotation, 0, 0, 1);
                    Matrix.translateM(transform, 0, -0.5F, -0.5F, 0);
                    encoder.mVideoEncoder.drainEncoder(false);
                    encoder.mFullScreen.drawFrame(encoder.mTextureId, transform);
                    encoder.mInputWindowSurface.setPresentationTime(timestamp);
                    encoder.mInputWindowSurface.swapBuffers();
                    break;

                case MSG_SET_TEXTURE_ID:
                    encoder.mTextureId = inputMessage.arg1;
                    break;

                case MSG_QUIT:
                    Looper.myLooper().quit();
                    break;

                default:
                    throw new RuntimeException("Unhandled msg what=" + what);
            }
        }
    }
}
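
For context, this is the call sequence the class javadoc describes, reconstructed as a sketch from the branch deleted from SnapshotVideoRecorder below. outputFile, width, height, rotation, scaleX, scaleY, textureId and surfaceTexture are placeholders; the bit rate (1000000) and frame rate (30) are the values that branch actually used:

    OldMediaEncoder encoder = new OldMediaEncoder();
    encoder.startRecording(new OldMediaEncoder.Config(
            outputFile,                     // target .mp4 file
            width, height,                  // output size
            1000000, 30,                    // bit rate, frame rate
            rotation,                       // applied via Matrix.rotateM(), not by the codec
            scaleX, scaleY,                 // preview crop scale
            "video/avc",                    // MIME type
            EGL14.eglGetCurrentContext())); // share the preview's EGL context
    encoder.setTextureId(textureId);
    // Per frame, after SurfaceTexture.updateTexImage():
    encoder.frameAvailable(surfaceTexture);
    // To finish; the callback runs on the encoder thread:
    encoder.stopRecording(new Runnable() {
        @Override
        public void run() { /* dispatch the result */ }
    });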

@@ -1,211 +0,0 @@
/*
 * Copyright 2014 Google Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.otaliastudios.cameraview;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.Surface;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * -- From grafika VideoEncoderCore.java --
 *
 * This class wraps up the core components used for surface-input video encoding.
 * <p>
 * Once created, frames are fed to the input surface. Remember to provide the presentation
 * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
 * producer side doesn't get backed up.
 * <p>
 * This class is not thread-safe, with one exception: it is valid to use the input surface
 * on one thread, and drain the output on a different thread.
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
class OldMediaEncoderCore {
    private MediaMuxer mMuxer;
    private boolean mMuxerStarted;
    private MediaCodec mVideoEncoder;
    private Surface mVideoInputSurface;
    private MediaCodec.BufferInfo mBufferInfo;
    private int mTrackIndex;

    static class VideoConfig {
        int width;
        int height;
        int bitRate;
        int frameRate;
        int rotation;
        String mimeType;

        VideoConfig(int width, int height, int bitRate, int frameRate, int rotation, String mimeType) {
            this.width = width;
            this.height = height;
            this.bitRate = bitRate;
            this.frameRate = frameRate;
            this.rotation = rotation;
            this.mimeType = mimeType;
        }
    }

    /**
     * Configures encoder and muxer state, and prepares the input Surface.
     */
    OldMediaEncoderCore(VideoConfig videoConfig, File outputFile) throws IOException {
        mBufferInfo = new MediaCodec.BufferInfo();
        MediaFormat format = MediaFormat.createVideoFormat(videoConfig.mimeType, videoConfig.width, videoConfig.height);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, videoConfig.bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, videoConfig.frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        format.setInteger("rotation-degrees", videoConfig.rotation);

        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        mVideoEncoder = MediaCodec.createEncoderByType(videoConfig.mimeType);
        mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mVideoInputSurface = mVideoEncoder.createInputSurface();
        mVideoEncoder.start();

        // Create a MediaMuxer. We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies. These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio. We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mTrackIndex = -1;
        mMuxerStarted = false;
    }
    /**
     * Returns the encoder's input surface.
     */
    public Surface getInputSurface() {
        return mVideoInputSurface;
    }

    /**
     * Releases encoder resources.
     */
    public void release() {
        if (mVideoEncoder != null) {
            mVideoEncoder.stop();
            mVideoEncoder.release();
            mVideoEncoder = null;
        }
        if (mMuxer != null) {
            // TODO: stop() throws an exception if you haven't fed it any data. Keep track
            // of frames submitted, and don't call stop() if we haven't written anything.
            mMuxer.stop();
            mMuxer.release();
            mMuxer = null;
        }
    }
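
The TODO in release() points at a real sharp edge: MediaMuxer.stop() throws IllegalStateException when start() was never called or no samples were written. A hedged sketch of the guard the TODO suggests, where mFramesWritten is a hypothetical counter incremented after each writeSampleData() call in drainEncoder():

    if (mMuxer != null) {
        if (mMuxerStarted && mFramesWritten > 0) {
            mMuxer.stop();  // safe: the muxer has started and has received data
        }
        mMuxer.release();
        mMuxer = null;
    }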
    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     * <p>
     * If endOfStream is not set, this returns when there is no more data to drain. If it
     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
     * Calling this with endOfStream set should be done once, right before stopping the muxer.
     * <p>
     * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
     * not recording audio.
     */
    public void drainEncoder(boolean endOfStream) {
        final int TIMEOUT_USEC = 10000;
        if (endOfStream) {
            mVideoEncoder.signalEndOfInputStream();
        }
        ByteBuffer[] encoderOutputBuffers = mVideoEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mVideoEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break; // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mVideoEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mVideoEncoder.getOutputFormat();
                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.w("OldMediaEncoderCore", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    mBufferInfo.size = 0;
                }
                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }
                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                }
                mVideoEncoder.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w("OldMediaEncoderCore", "reached end of stream unexpectedly");
                    }
                    break; // out of while
                }
            }
        }
    }
}
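
The drain-before-swap rule from the class javadoc is exactly what OldMediaEncoder's MSG_FRAME_AVAILABLE handler implements. Condensed, one frame on the encoder thread looks like this (the object names stand in for the fields used in the deleted code above):

    core.drainEncoder(false);                  // move pending output into the muxer first
    viewport.drawFrame(textureId, transform);  // render onto the codec's input surface
    inputWindowSurface.setPresentationTime(timestampNs);
    inputWindowSurface.swapBuffers();          // hands the frame to MediaCodec
    // At stop time, exactly once, right before stopping the muxer:
    core.drainEncoder(true);                   // signals EOS, drains until EOS comes back
    core.release();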

@@ -18,12 +18,10 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
     private static final String TAG = SnapshotVideoRecorder.class.getSimpleName();
     private static final CameraLogger LOG = CameraLogger.create(TAG);
-    private static final boolean USE_OLD_ENCODER = false;
     private static final int STATE_RECORDING = 0;
     private static final int STATE_NOT_RECORDING = 1;
-    private OldMediaEncoder mEncoder;
     private MediaEncoderEngine mEncoderEngine;
     private GlCameraPreview mPreview;
@@ -33,9 +31,6 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
     SnapshotVideoRecorder(VideoResult stub, VideoResultListener listener, GlCameraPreview preview) {
         super(stub, listener);
-        if (USE_OLD_ENCODER) {
-            mEncoder = new OldMediaEncoder();
-        }
         mPreview = preview;
         mPreview.addRendererFrameCallback(this);
     }
@@ -82,22 +77,6 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
                 case H_264: type = "video/avc"; break; // MediaFormat.MIMETYPE_VIDEO_AVC:
                 case DEVICE_DEFAULT: type = "video/avc"; break;
             }
-            if (USE_OLD_ENCODER) {
-                OldMediaEncoder.Config configuration = new OldMediaEncoder.Config(
-                        mResult.getFile(),
-                        width,
-                        height,
-                        1000000,
-                        30,
-                        mResult.getRotation(),
-                        scaleX,
-                        scaleY,
-                        type,
-                        EGL14.eglGetCurrentContext()
-                );
-                mEncoder.startRecording(configuration);
-                mEncoder.setTextureId(mTextureId);
-            } else {
                 TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(
                         width, height,
                         1000000,
@@ -110,34 +89,19 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
                 TextureMediaEncoder videoEncoder = new TextureMediaEncoder(config);
                 mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, null);
                 mEncoderEngine.start();
-            }
             mResult.rotation = 0; // We will rotate the result instead.
             mCurrentState = STATE_RECORDING;
         }
         if (mCurrentState == STATE_RECORDING) {
-            if (USE_OLD_ENCODER) {
-                mEncoder.frameAvailable(surfaceTexture);
-            } else {
                 TextureMediaEncoder.Frame frame = new TextureMediaEncoder.Frame();
                 frame.timestamp = surfaceTexture.getTimestamp();
                 frame.transform = new float[16];
                 surfaceTexture.getTransformMatrix(frame.transform);
                 mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
             }
-        }
         if (mCurrentState == STATE_RECORDING && mDesiredState == STATE_NOT_RECORDING) {
-            if (USE_OLD_ENCODER) {
-                mEncoder.stopRecording(new Runnable() {
-                    @Override
-                    public void run() {
-                        // We are in the encoder thread.
-                        dispatchResult();
-                    }
-                });
-                mEncoder = null;
-            } else {
                 mEncoderEngine.stop(new Runnable() {
                     @Override
                     public void run() {
@@ -146,7 +110,6 @@ class SnapshotVideoRecorder extends VideoRecorder implements GlCameraPreview.Ren
                     }
                 });
                 mEncoderEngine = null;
-            }
             mCurrentState = STATE_NOT_RECORDING;
             mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
             mPreview = null;
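
With the old branch removed, the recorder drives only the new pipeline; the surviving context lines above spell out its whole flow. Condensed for reference (the remaining TextureMediaEncoder.Config arguments are cut off in this diff, so they stay a placeholder):

    // Start:
    TextureMediaEncoder.Config config = new TextureMediaEncoder.Config(
            width, height, 1000000, /* remaining arguments elided in the diff */);
    TextureMediaEncoder videoEncoder = new TextureMediaEncoder(config);
    mEncoderEngine = new MediaEncoderEngine(mResult.file, videoEncoder, null);
    mEncoderEngine.start();

    // Per frame: wrap the SurfaceTexture state in a Frame event.
    TextureMediaEncoder.Frame frame = new TextureMediaEncoder.Frame();
    frame.timestamp = surfaceTexture.getTimestamp();
    frame.transform = new float[16];
    surfaceTexture.getTransformMatrix(frame.transform);
    mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);

    // Stop (asynchronous, like the old stopRecording(Runnable)):
    mEncoderEngine.stop(new Runnable() {
        @Override
        public void run() { /* dispatch the result */ }
    });
    mEncoderEngine = null;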
