Preview interface changes (#816)

* FilterCameraPreview as interface, better CameraPreview APIs

* Create abstract RendererCameraPreview

* Add missing file

* New rotation approach
Mattia Iavarone committed via GitHub (5 years ago). Commit edbade24a9, parent 5530c011fa.
Changed files (14):

  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/preview/MockCameraPreview.java (2 lines changed)
  2. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (19 lines changed)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (31 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java (6 lines changed)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (63 lines changed)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/preview/CameraPreview.java (32 lines changed)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/preview/FilterCameraPreview.java (11 lines changed)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/preview/GlCameraPreview.java (19 lines changed)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererCameraPreview.java (23 lines changed)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/preview/RendererFrameCallback.java (6 lines changed)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/preview/SurfaceCameraPreview.java (2 lines changed)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/preview/TextureCameraPreview.java (2 lines changed)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (21 lines changed)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (4 lines changed)

@@ -10,7 +10,7 @@ import android.view.ViewGroup;
import com.otaliastudios.cameraview.filter.Filter;
import com.otaliastudios.cameraview.preview.CameraPreview;
public class MockCameraPreview extends FilterCameraPreview<View, Void> {
public class MockCameraPreview extends CameraPreview<View, Void> implements FilterCameraPreview {
public MockCameraPreview(Context context, ViewGroup parent) {
super(context, parent);

@@ -43,7 +43,7 @@ import com.otaliastudios.cameraview.metering.MeteringTransform;
import com.otaliastudios.cameraview.picture.Full1PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot1PictureRecorder;
import com.otaliastudios.cameraview.picture.SnapshotGlPictureRecorder;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.video.Full1VideoRecorder;
@@ -209,6 +209,7 @@ public class Camera1Engine extends CameraBaseEngine implements
throw new IllegalStateException("previewStreamSize should not be null at this point.");
}
mPreview.setStreamSize(previewSize.getWidth(), previewSize.getHeight());
mPreview.setDrawRotation(0);
Camera.Parameters params = mCamera.getParameters();
// NV21 should be the default, but let's make sure, since YuvImage will only support this
@@ -357,11 +358,12 @@ public class Camera1Engine extends CameraBaseEngine implements
LOG.i("onTakePictureSnapshot:", "executing.");
// Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0.
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
if (mPreview instanceof GlCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this, (GlCameraPreview) mPreview, outputRatio);
if (mPreview instanceof RendererCameraPreview && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mPictureRecorder = new SnapshotGlPictureRecorder(stub, this,
(RendererCameraPreview) mPreview, outputRatio, getOverlay());
} else {
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR);
mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
}
mPictureRecorder.take();
@@ -397,13 +399,13 @@ public class Camera1Engine extends CameraBaseEngine implements
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
if (!(mPreview instanceof RendererCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
throw new IllegalStateException("Video snapshots are only supported on API 18+.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
RendererCameraPreview glPreview = (RendererCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
@@ -425,8 +427,7 @@ public class Camera1Engine extends CameraBaseEngine implements
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview,
getOverlay(), stub.rotation);
mVideoRecorder = new SnapshotVideoRecorder(Camera1Engine.this, glPreview, getOverlay());
mVideoRecorder.start(stub);
}

@@ -64,7 +64,7 @@ import com.otaliastudios.cameraview.internal.CropHelper;
import com.otaliastudios.cameraview.metering.MeteringRegions;
import com.otaliastudios.cameraview.picture.Full2PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot2PictureRecorder;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.video.Full2VideoRecorder;
@@ -785,17 +785,16 @@ public class Camera2Engine extends CameraBaseEngine implements
action.start(this);
} else {
LOG.i("onTakePictureSnapshot:", "doMetering is false. Performing.");
if (!(mPreview instanceof GlCameraPreview)) {
if (!(mPreview instanceof RendererCameraPreview)) {
throw new RuntimeException("takePictureSnapshot with Camera2 is only " +
"supported with Preview.GL_SURFACE");
}
// stub.size is not the real size: it will be cropped to the given ratio
// stub.rotation will be set to 0 - we rotate the texture instead.
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mPictureRecorder = new Snapshot2PictureRecorder(stub, this,
(GlCameraPreview) mPreview, outputRatio);
(RendererCameraPreview) mPreview, outputRatio);
mPictureRecorder.take();
}
}
@@ -910,10 +909,10 @@ public class Camera2Engine extends CameraBaseEngine implements
@Override
protected void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio) {
if (!(mPreview instanceof GlCameraPreview)) {
if (!(mPreview instanceof RendererCameraPreview)) {
throw new IllegalStateException("Video snapshots are only supported with GL_SURFACE.");
}
GlCameraPreview glPreview = (GlCameraPreview) mPreview;
RendererCameraPreview glPreview = (RendererCameraPreview) mPreview;
Size outputSize = getUncroppedSnapshotSize(Reference.OUTPUT);
if (outputSize == null) {
throw new IllegalStateException("outputSize should not be null.");
@@ -921,24 +920,10 @@ public class Camera2Engine extends CameraBaseEngine implements
Rect outputCrop = CropHelper.computeCrop(outputSize, outputRatio);
outputSize = new Size(outputCrop.width(), outputCrop.height());
stub.size = outputSize;
// Vertical: 0 (270-0-0)
// Left (unlocked): 270 (270-90-270)
// Right (unlocked): 90 (270-270-90)
// Upside down (unlocked): 180 (270-180-180)
// Left (locked): 270 (270-0-270)
// Right (locked): 90 (270-0-90)
// Upside down (locked): 180 (270-0-180)
// Unlike Camera1, the correct formula seems to be deviceOrientation,
// which means offset(Reference.BASE, Reference.OUTPUT, Axis.ABSOLUTE).
stub.rotation = getAngles().offset(Reference.BASE, Reference.OUTPUT, Axis.ABSOLUTE);
stub.rotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
stub.videoFrameRate = Math.round(mPreviewFrameRate);
LOG.i("onTakeVideoSnapshot", "rotation:", stub.rotation, "size:", stub.size);
// Start.
// The overlay rotation should always be VIEW-OUTPUT, just like Camera1Engine.
int overlayRotation = getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay(),
overlayRotation);
mVideoRecorder = new SnapshotVideoRecorder(this, glPreview, getOverlay());
mVideoRecorder.start(stub);
}
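
Note on the new rotation approach: the snapshot rotation is now expressed as the VIEW to OUTPUT offset (instead of SENSOR to OUTPUT), because the GL pipeline already draws the frame in VIEW coordinates and bakes the remaining rotation into the texture transform (see SnapshotGlPictureRecorder below, which finally resets the result rotation to 0). As a reading aid, an offset between two reference systems boils down to a modular angle difference; the helper below is a hypothetical sketch, not the library's Angles implementation:

    // Hypothetical sketch, for illustration only (not the actual Angles class).
    // CameraView angles are multiples of 90, normalized to the [0, 360) range.
    final class OffsetSketch {
        static int offset(int fromReferenceAngle, int toReferenceAngle) {
            return ((toReferenceAngle - fromReferenceAngle) % 360 + 360) % 360;
        }
        // Example: with hypothetical angles VIEW = 0 and OUTPUT = 90,
        // stub.rotation above would be offset(0, 90) == 90.
    }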

@@ -18,7 +18,7 @@ import com.otaliastudios.cameraview.engine.action.Actions;
import com.otaliastudios.cameraview.engine.action.BaseAction;
import com.otaliastudios.cameraview.engine.action.CompletionCallback;
import com.otaliastudios.cameraview.engine.lock.LockAction;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
/**
@@ -107,9 +107,9 @@ public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
public Snapshot2PictureRecorder(@NonNull PictureResult.Stub stub,
@NonNull Camera2Engine engine,
@NonNull GlCameraPreview preview,
@NonNull RendererCameraPreview preview,
@NonNull AspectRatio outputRatio) {
super(stub, engine, preview, outputRatio);
super(stub, engine, preview, outputRatio, engine.getOverlay());
mHolder = engine;
mAction = Actions.sequence(

@@ -12,14 +12,11 @@ import android.os.Build;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.internal.GlTextureDrawer;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.CropHelper;
import com.otaliastudios.cameraview.internal.WorkerHandler;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
import com.otaliastudios.cameraview.filter.Filter;
@@ -30,13 +27,14 @@ import com.otaliastudios.opengl.surface.EglSurface;
import com.otaliastudios.opengl.surface.EglWindowSurface;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.WorkerThread;
import android.view.Surface;
/**
* API 19.
* Records a picture snapshot from the {@link GlCameraPreview}. It works as follows:
* Records a picture snapshot from the {@link RendererCameraPreview}. It works as follows:
*
* - We register a one time {@link RendererFrameCallback} on the preview
* - We get the textureId and the frame callback on the {@link RendererThread}
@@ -55,8 +53,7 @@ import android.view.Surface;
*/
public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
private CameraEngine mEngine;
private GlCameraPreview mPreview;
private RendererCameraPreview mPreview;
private AspectRatio mOutputRatio;
private Overlay mOverlay;
@@ -66,14 +63,14 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
public SnapshotGlPictureRecorder(
@NonNull PictureResult.Stub stub,
@NonNull CameraEngine engine,
@NonNull GlCameraPreview preview,
@NonNull AspectRatio outputRatio) {
super(stub, engine);
mEngine = engine;
@Nullable PictureResultListener listener,
@NonNull RendererCameraPreview preview,
@NonNull AspectRatio outputRatio,
@Nullable Overlay overlay) {
super(stub, listener);
mPreview = preview;
mOutputRatio = outputRatio;
mOverlay = engine.getOverlay();
mOverlay = overlay;
mHasOverlay = mOverlay != null && mOverlay.drawsOn(Overlay.Target.PICTURE_SNAPSHOT);
}
@@ -96,10 +93,10 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
final float scaleX,
final float scaleY) {
int rotation, float scaleX, float scaleY) {
mPreview.removeRendererFrameCallback(this);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture,
rotation, scaleX, scaleY);
}
});
@@ -129,18 +126,17 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@RendererThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void onRendererFrame(@SuppressWarnings("unused") @NonNull final SurfaceTexture surfaceTexture,
final int rotation,
final float scaleX,
final float scaleY) {
// Get egl context from the RendererThread, which is the one in which we have created
// the textureId and the overlayTextureId, managed by the GlSurfaceView.
// Next operations can then be performed on different threads using this handle.
final EGLContext eglContext = EGL14.eglGetCurrentContext();
// Calling this invalidates the rotation/scale logic below:
// surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
WorkerHandler.execute(new Runnable() {
@Override
public void run() {
takeFrame(surfaceTexture, scaleX, scaleY, eglContext);
takeFrame(surfaceTexture, rotation, scaleX, scaleY, eglContext);
}
});
@@ -173,6 +169,7 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@WorkerThread
@TargetApi(Build.VERSION_CODES.KITKAT)
protected void takeFrame(@NonNull SurfaceTexture surfaceTexture,
int rotation,
float scaleX,
float scaleY,
@NonNull EGLContext eglContext) {
@@ -189,22 +186,18 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
eglSurface.makeCurrent();
final float[] transform = mTextureDrawer.getTextureTransform();
// 2. Apply scale and crop
boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
float realScaleX = flip ? scaleY : scaleX;
float realScaleY = flip ? scaleX : scaleY;
float scaleTranslX = (1F - realScaleX) / 2F;
float scaleTranslY = (1F - realScaleY) / 2F;
// 2. Apply preview transformations
surfaceTexture.getTransformMatrix(transform);
float scaleTranslX = (1F - scaleX) / 2F;
float scaleTranslY = (1F - scaleY) / 2F;
Matrix.translateM(transform, 0, scaleTranslX, scaleTranslY, 0);
Matrix.scaleM(transform, 0, realScaleX, realScaleY, 1);
Matrix.scaleM(transform, 0, scaleX, scaleY, 1);
// 3. Apply rotation and flip
// If this doesn't work, rotate "rotation" before scaling, like GlCameraPreview does.
Matrix.translateM(transform, 0, 0.5F, 0.5F, 0); // Go back to 0,0
Matrix.rotateM(transform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus)
mResult.rotation = 0;
if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera
Matrix.scaleM(transform, 0, -1, 1, 1);
}
Matrix.rotateM(transform, 0, rotation + mResult.rotation, 0, 0, 1); // Rotate to OUTPUT
Matrix.scaleM(transform, 0, 1, -1, 1); // Vertical flip because we'll use glReadPixels
Matrix.translateM(transform, 0, -0.5F, -0.5F, 0); // Go back to old position
// 4. Do pretty much the same for overlays
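
The matrix block above follows the standard center-pivot pattern for a 4x4 texture transform: translate the pivot to the texture center (0.5, 0.5), rotate and flip, then translate back. A self-contained sketch of that pattern with android.opengl.Matrix, using placeholder values rather than the recorder's actual rotation:

    import android.opengl.Matrix;

    // Illustrative helper, not part of this commit.
    final class TransformSketch {
        // Mirrors steps 2-3 above: rotate around the texture center and flip
        // vertically because the pixels will be read back with glReadPixels.
        static void rotateAroundCenter(float[] transform, int degrees) {
            Matrix.translateM(transform, 0, 0.5f, 0.5f, 0f);   // move pivot to center
            Matrix.rotateM(transform, 0, degrees, 0f, 0f, 1f); // rotate around the Z axis
            Matrix.scaleM(transform, 0, 1f, -1f, 1f);          // vertical flip
            Matrix.translateM(transform, 0, -0.5f, -0.5f, 0f); // move pivot back
        }

        static float[] identity() {
            float[] matrix = new float[16];
            Matrix.setIdentityM(matrix, 0);
            return matrix;
        }
    }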
@@ -213,13 +206,12 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
// 2. Then we can apply the transformations
int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1);
// No need to flip the x axis for front camera, but need to flip the y axis always.
Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1);
Matrix.rotateM(mOverlayDrawer.getTransform(), 0, mResult.rotation, 0, 0, 1);
Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1); // Vertical flip because we'll use glReadPixels
Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
}
mResult.rotation = 0;
// 5. Draw and save
long timestampUs = surfaceTexture.getTimestamp() / 1000L;
@@ -239,7 +231,6 @@ public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
@Override
protected void dispatchResult() {
mEngine = null;
mOutputRatio = null;
super.dispatchResult();
}

@@ -18,6 +18,7 @@ import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.size.Size;
/**
@ -62,18 +63,24 @@ public abstract class CameraPreview<T extends View, Output> {
@VisibleForTesting CropCallback mCropCallback;
private SurfaceCallback mSurfaceCallback;
private T mView;
boolean mCropping;
@SuppressWarnings("WeakerAccess")
protected boolean mCropping;
// These are the surface dimensions in REF_VIEW.
int mOutputSurfaceWidth;
int mOutputSurfaceHeight;
@SuppressWarnings("WeakerAccess")
protected int mOutputSurfaceWidth;
@SuppressWarnings("WeakerAccess")
protected int mOutputSurfaceHeight;
// These are the preview stream dimensions, in REF_VIEW.
int mInputStreamWidth;
int mInputStreamHeight;
@SuppressWarnings("WeakerAccess")
protected int mInputStreamWidth;
@SuppressWarnings("WeakerAccess")
protected int mInputStreamHeight;
// The rotation, if any, to be applied when drawing.
int mDrawRotation;
@SuppressWarnings("WeakerAccess")
protected int mDrawRotation;
/**
* Creates a new preview.
@@ -88,7 +95,7 @@ public abstract class CameraPreview<T extends View, Output> {
* Sets a callback to be notified of surface events (creation, change, destruction)
* @param callback a callback
*/
public final void setSurfaceCallback(@Nullable SurfaceCallback callback) {
public void setSurfaceCallback(@Nullable SurfaceCallback callback) {
if (hasSurface() && mSurfaceCallback != null) {
mSurfaceCallback.onSurfaceDestroyed();
}
@@ -124,9 +131,8 @@ public abstract class CameraPreview<T extends View, Output> {
* @return the root view
*/
@SuppressWarnings("unused")
@VisibleForTesting
@NonNull
abstract View getRootView();
public abstract View getRootView();
/**
* Returns the output surface object (for example a SurfaceHolder
@@ -162,7 +168,7 @@ public abstract class CameraPreview<T extends View, Output> {
* Returns the current input stream size, in view coordinates.
* @return the current input stream size
*/
@SuppressWarnings("unused")
@VisibleForTesting
@NonNull
final Size getStreamSize() {
return new Size(mInputStreamWidth, mInputStreamHeight);
@@ -326,11 +332,15 @@ public abstract class CameraPreview<T extends View, Output> {
* Sometimes we don't need this:
* - In Camera1, the buffer producer sets our Surface size and rotates it based on the value
* that we pass to {@link android.hardware.Camera.Parameters#setDisplayOrientation(int)},
* so the stream that comes in is already rotated.
* so the stream that comes in is already rotated (if we apply SurfaceTexture transform).
* - In Camera2, for {@link android.view.SurfaceView} based previews, apparently it just works
* out of the box. The producer might be doing something similar.
*
* But in all the other Camera2 cases, we need to apply this rotation when drawing the surface.
* Seems that Camera1 can correctly rotate the stream/transform to {@link Reference#VIEW},
* while Camera2, that does not have any rotation API, will only rotate to {@link Reference#BASE}.
* That's why in Camera2 this angle is set as the offset between BASE and VIEW.
*
* @param drawRotation the rotation in degrees
*/
public void setDrawRotation(int drawRotation) {
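
In practice this means Camera1Engine can pass 0 (as done earlier in this commit), while a Camera2-based engine passes the BASE to VIEW offset so the renderer rotates the frames itself. A hedged sketch of that call site; the class and parameter names around setDrawRotation() are illustrative, only the offset() call pattern comes from this codebase:

    import com.otaliastudios.cameraview.engine.offset.Angles;
    import com.otaliastudios.cameraview.engine.offset.Axis;
    import com.otaliastudios.cameraview.engine.offset.Reference;
    import com.otaliastudios.cameraview.preview.CameraPreview;

    // Illustrative sketch, not code from this commit.
    final class DrawRotationSketch {
        static void applyDrawRotation(CameraPreview<?, ?> preview, Angles angles, boolean isCamera2) {
            if (isCamera2) {
                // Camera2 has no display-orientation API, so the preview rotates
                // frames itself by the BASE -> VIEW offset (see the javadoc above).
                preview.setDrawRotation(angles.offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
            } else {
                // Camera1 already rotates the stream via setDisplayOrientation(),
                // so no extra draw rotation is needed.
                preview.setDrawRotation(0);
            }
        }
    }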

@@ -16,18 +16,13 @@ import com.otaliastudios.cameraview.filter.Filter;
* The preview has the responsibility of calling {@link Filter#setSize(int, int)}
* whenever the preview size changes and as soon as the filter is applied.
*/
public abstract class FilterCameraPreview<T extends View, Output> extends CameraPreview<T, Output> {
@SuppressWarnings("WeakerAccess")
public FilterCameraPreview(@NonNull Context context, @NonNull ViewGroup parent) {
super(context, parent);
}
public interface FilterCameraPreview {
/**
* Sets a new filter.
* @param filter new filter
*/
public abstract void setFilter(@NonNull Filter filter);
void setFilter(@NonNull Filter filter);
/**
* Returns the currently used filter.
@@ -35,5 +30,5 @@ public abstract class FilterCameraPreview<T extends View, Output> extends Camera
*/
@SuppressWarnings("unused")
@NonNull
public abstract Filter getCurrentFilter();
Filter getCurrentFilter();
}
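
With FilterCameraPreview reduced to an interface, filter support becomes an optional capability that a preview declares alongside CameraPreview (as MockCameraPreview and GlCameraPreview do in this commit), and callers can feature-test for it. A minimal hypothetical caller-side sketch:

    import com.otaliastudios.cameraview.filter.Filter;
    import com.otaliastudios.cameraview.preview.CameraPreview;
    import com.otaliastudios.cameraview.preview.FilterCameraPreview;

    // Illustrative only: applying a filter when the preview supports it.
    final class FilterSupportSketch {
        static boolean trySetFilter(CameraPreview<?, ?> preview, Filter filter) {
            if (preview instanceof FilterCameraPreview) {
                ((FilterCameraPreview) preview).setFilter(filter);
                return true;
            }
            // Non-filtering previews (e.g. the Surface/Texture based ones) are skipped.
            return false;
        }
    }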

@@ -60,7 +60,8 @@ import javax.microedition.khronos.opengles.GL10;
* which means that we can fetch the GL context that was created and is managed
* by the {@link GLSurfaceView}.
*/
public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceTexture> {
public class GlCameraPreview extends CameraPreview<GLSurfaceView, SurfaceTexture>
implements FilterCameraPreview, RendererCameraPreview {
private boolean mDispatched;
private SurfaceTexture mInputSurfaceTexture;
@@ -110,7 +111,7 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
@NonNull
@Override
View getRootView() {
public View getRootView() {
return mRootView;
}
@@ -232,7 +233,7 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
mOutputTextureDrawer.draw(mInputSurfaceTexture.getTimestamp() / 1000L);
for (RendererFrameCallback callback : mRendererFrameCallbacks) {
callback.onRendererFrame(mInputSurfaceTexture, mCropScaleX, mCropScaleY);
callback.onRendererFrame(mInputSurfaceTexture, mDrawRotation, mCropScaleX, mCropScaleY);
}
}
}
@@ -289,11 +290,7 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
if (callback != null) callback.onCrop();
}
/**
* Method specific to the GL preview. Adds a {@link RendererFrameCallback}
* to receive renderer frame events.
* @param callback a callback
*/
@Override
public void addRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
getView().queueEvent(new Runnable() {
@Override
@@ -308,11 +305,7 @@ public class GlCameraPreview extends FilterCameraPreview<GLSurfaceView, SurfaceT
});
}
/**
* Method specific to the GL preview. Removes a {@link RendererFrameCallback}
* that was previously added to receive renderer frame events.
* @param callback a callback
*/
@Override
public void removeRendererFrameCallback(@NonNull final RendererFrameCallback callback) {
mRendererFrameCallbacks.remove(callback);
}

@@ -0,0 +1,23 @@
package com.otaliastudios.cameraview.preview;
import androidx.annotation.NonNull;
/**
* Base interface for previews that support renderer frame callbacks,
* see {@link RendererFrameCallback}.
*/
public interface RendererCameraPreview {
/**
* Adds a {@link RendererFrameCallback} to receive renderer frame events.
* @param callback a callback
*/
void addRendererFrameCallback(@NonNull final RendererFrameCallback callback);
/**
* Removes a {@link RendererFrameCallback} that was previously added to receive renderer
* frame events.
* @param callback a callback
*/
void removeRendererFrameCallback(@NonNull final RendererFrameCallback callback);
}
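
Consumers such as SnapshotGlPictureRecorder and SnapshotVideoRecorder use this interface with a register / grab-one-frame / unregister pattern on the renderer thread. A hedged sketch of that pattern; onRendererTextureCreated and onRendererFilterChanged are the other callback methods of RendererFrameCallback and are assumed here, since this diff does not show them:

    import android.graphics.SurfaceTexture;
    import androidx.annotation.NonNull;
    import com.otaliastudios.cameraview.filter.Filter;
    import com.otaliastudios.cameraview.preview.RendererCameraPreview;
    import com.otaliastudios.cameraview.preview.RendererFrameCallback;
    import com.otaliastudios.cameraview.preview.RendererThread;

    // Illustrative one-shot callback, mirroring the recorders above.
    final class OneShotFrameSketch {
        static void captureNextFrame(@NonNull final RendererCameraPreview preview) {
            preview.addRendererFrameCallback(new RendererFrameCallback() {
                @RendererThread
                @Override
                public void onRendererTextureCreated(int textureId) {
                    // Assumed callback: receives the GL texture id once.
                }

                @RendererThread
                @Override
                public void onRendererFilterChanged(@NonNull Filter filter) {
                    // Assumed callback: called at least once before the first frame.
                }

                @RendererThread
                @Override
                public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
                                            int rotation, float scaleX, float scaleY) {
                    // Use the texture, rotation and crop scales here, then stop
                    // listening, exactly like SnapshotGlPictureRecorder does above.
                    preview.removeRendererFrameCallback(this);
                }
            });
        }
    }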

@@ -24,17 +24,17 @@ public interface RendererFrameCallback {
* Called on the renderer thread after each frame was drawn.
* You are not supposed to hold for too long onto this thread, because
* well, it is the rendering thread.
*
* @param surfaceTexture the texture to get transformation
* @param rotation the rotation (to reach REF_VIEW)
* @param scaleX the scaleX (in REF_VIEW) value
* @param scaleY the scaleY (in REF_VIEW) value
*/
@RendererThread
void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, float scaleX, float scaleY);
void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, int rotation, float scaleX, float scaleY);
/**
* Called when the renderer filter changes. This is guaranteed to be called at least once
* before the first {@link #onRendererFrame(SurfaceTexture, float, float)}.
* before the first {@link #onRendererFrame(SurfaceTexture, int, float, float)}.
*
* @param filter the new filter
*/

@@ -75,7 +75,7 @@ public class SurfaceCameraPreview extends CameraPreview<SurfaceView, SurfaceHold
@NonNull
@Override
View getRootView() {
public View getRootView() {
return mRootView;
}

@@ -68,7 +68,7 @@ public class TextureCameraPreview extends CameraPreview<TextureView, SurfaceText
@NonNull
@Override
View getRootView() {
public View getRootView() {
return mRootView;
}

@@ -11,7 +11,7 @@ import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.preview.RendererCameraPreview;
import com.otaliastudios.cameraview.preview.RendererFrameCallback;
import com.otaliastudios.cameraview.preview.RendererThread;
import com.otaliastudios.cameraview.filter.Filter;
@@ -52,7 +52,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
private MediaEncoderEngine mEncoderEngine;
private final Object mEncoderEngineLock = new Object();
private GlCameraPreview mPreview;
private RendererCameraPreview mPreview;
private int mCurrentState = STATE_NOT_RECORDING;
private int mDesiredState = STATE_NOT_RECORDING;
@@ -61,19 +61,16 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
private Overlay mOverlay;
private OverlayDrawer mOverlayDrawer;
private boolean mHasOverlay;
private int mOverlayRotation;
private Filter mCurrentFilter;
public SnapshotVideoRecorder(@NonNull CameraEngine engine,
@NonNull GlCameraPreview preview,
@Nullable Overlay overlay,
int overlayRotation) {
@NonNull RendererCameraPreview preview,
@Nullable Overlay overlay) {
super(engine);
mPreview = preview;
mOverlay = overlay;
mHasOverlay = overlay != null && overlay.drawsOn(Overlay.Target.VIDEO_SNAPSHOT);
mOverlayRotation = overlayRotation;
}
@Override
@@ -126,9 +123,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
@RendererThread
@Override
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture,
float scaleX,
float scaleY) {
public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, int rotation,
float scaleX, float scaleY) {
if (mCurrentState == STATE_NOT_RECORDING && mDesiredState == STATE_RECORDING) {
LOG.i("Starting the encoder engine.");
@@ -219,7 +215,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
videoConfig.height = mResult.size.getHeight();
videoConfig.bitRate = mResult.videoBitRate;
videoConfig.frameRate = mResult.videoFrameRate;
videoConfig.rotation = mResult.rotation;
videoConfig.rotation = rotation + mResult.rotation;
videoConfig.mimeType = videoType;
videoConfig.encoder = deviceEncoders.getVideoEncoder();
videoConfig.textureId = mTextureId;
@@ -232,7 +228,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
if (mHasOverlay) {
videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
videoConfig.overlayDrawer = mOverlayDrawer;
videoConfig.overlayRotation = mOverlayRotation;
videoConfig.overlayRotation = mResult.rotation;
// ^ no "rotation" here! Overlays are already in VIEW ref.
}
TextureMediaEncoder videoEncoder = new TextureMediaEncoder(videoConfig);
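
To summarize the rotation bookkeeping in this hunk: the camera frames need the renderer draw rotation on top of the OUTPUT rotation, while overlays are drawn in VIEW coordinates and only need mResult.rotation. A tiny sketch with hypothetical numbers:

    // Hypothetical values, only to illustrate the two assignments above.
    final class VideoRotationSketch {
        static void example() {
            int drawRotation = 90;     // "rotation" delivered by onRendererFrame
            int resultRotation = 180;  // mResult.rotation, the VIEW -> OUTPUT offset

            int videoConfigRotation = drawRotation + resultRotation; // 270, for the camera texture
            int overlayRotation = resultRotation;                    // 180, overlays are already in VIEW
        }
    }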

@@ -218,8 +218,8 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mConfig.overlayDrawer.draw(mConfig.overlayTarget);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, 0.5F, 0.5F, 0);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation,
0, 0, 1);
Matrix.rotateM(mConfig.overlayDrawer.getTransform(),
0, mConfig.overlayRotation, 0, 0, 1);
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, -0.5F, -0.5F, 0);
}
