Fix issue 514 (#528)

* Simplify SnapshotGlPictureRecorder

* Add size

* Remove setDefaultBufferSize

* Call onStart at each frame

* Add hasOverlay boolean

* Use workaround in SnapshotVideoRecorder

* Add issue description

* Small changes

* Move workaround.start()

* use glBindTexture instead of workaround

* Remove bindTexture from EglViewport

* Simplify workaround

* Reuse fallback WorkerHandler

* Draw overlays on the encoder thread

* Use lock instead of afterOverlayGlDrawn

* Improve comments and readability

* Move blending code to OverlayDrawer

* Add OverlayDrawer tests

* Improve WorkerHandler tests
Mattia Iavarone committed 5 years ago (via GitHub)
parent 3e0ae65dad
commit 42de6e30a4
12 changed files:

  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java (4 lines changed)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java (14 lines changed)
  3. cameraview/src/androidTest/java/com/otaliastudios/cameraview/overlay/OverlayDrawerTest.java (120 lines changed)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/internal/Issue514Workaround.java (119 lines changed)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/internal/egl/EglViewport.java (18 lines changed)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java (52 lines changed)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/overlay/OverlayDrawer.java (131 lines changed)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java (91 lines changed)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java (46 lines changed)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureConfig.java (16 lines changed)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java (16 lines changed)
  12. demo/src/main/res/layout/activity_camera.xml (2 lines changed)

@@ -86,7 +86,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
     @Before
     public void setUp() {
         LOG.e("Test started. Setting up camera.");
-        WorkerHandler.destroy();
+        WorkerHandler.destroyAll();
         uiSync(new Runnable() {
             @Override
@@ -126,7 +126,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
     public void tearDown() {
         LOG.e("Test ended. Tearing down camera.");
         camera.destroy();
-        WorkerHandler.destroy();
+        WorkerHandler.destroyAll();
     }

     private void waitForUiException() throws Throwable {

@@ -13,6 +13,7 @@ import org.junit.Test;
 import org.junit.runner.RunWith;

 import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executor;
@@ -218,7 +219,7 @@ public class WorkerHandlerTest extends BaseTest {
     public void testDestroy() {
         final WorkerHandler handler = WorkerHandler.get("handler");
         assertTrue(handler.getThread().isAlive());
-        WorkerHandler.destroy();
+        handler.destroy();
         // Wait for the thread to die.
         try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
         assertFalse(handler.getThread().isAlive());
@@ -226,4 +227,15 @@ public class WorkerHandlerTest extends BaseTest {
         assertNotSame(handler, newHandler);
         assertTrue(newHandler.getThread().isAlive());
     }
+
+    @Test
+    public void testDestroyAll() {
+        final WorkerHandler handler1 = WorkerHandler.get("handler1");
+        final WorkerHandler handler2 = WorkerHandler.get("handler2");
+        WorkerHandler.destroyAll();
+        WorkerHandler newHandler1 = WorkerHandler.get("handler1");
+        WorkerHandler newHandler2 = WorkerHandler.get("handler2");
+        assertNotSame(handler1, newHandler1);
+        assertNotSame(handler2, newHandler2);
+    }
 }

@@ -0,0 +1,120 @@
package com.otaliastudios.cameraview.overlay;
import android.content.res.XmlResourceParser;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.util.Xml;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.test.annotation.UiThreadTest;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
import com.otaliastudios.cameraview.internal.egl.EglCore;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.size.Size;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyFloat;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class OverlayDrawerTest extends BaseTest {
    private final static int WIDTH = 100;
    private final static int HEIGHT = 100;

    private EglCore eglCore;
    private EglBaseSurface eglSurface;

    @Before
    public void setUp() {
        eglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
        eglSurface = new EglBaseSurface(eglCore);
        eglSurface.createOffscreenSurface(WIDTH, HEIGHT);
        eglSurface.makeCurrent();
    }

    @After
    public void tearDown() {
        eglSurface.releaseEglSurface();
        eglSurface = null;
        eglCore.release();
        eglCore = null;
    }

    @Test
    public void testDraw() {
        Overlay overlay = mock(Overlay.class);
        OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(WIDTH, HEIGHT));
        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
        verify(overlay, times(1)).drawOn(
                eq(Overlay.Target.PICTURE_SNAPSHOT),
                any(Canvas.class));
    }

    @Test
    public void testGetTransform() {
        // We'll check that the transform is not all zeros, which is highly unlikely
        // (the default transform should be the identity matrix)
        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
        assertThat(drawer.getTransform(), new BaseMatcher<float[]>() {
            public void describeTo(Description description) { }
            public boolean matches(Object item) {
                float[] array = (float[]) item;
                for (float value : array) {
                    if (value != 0.0F) return true;
                }
                return false;
            }
        });
    }

    @Test
    public void testRender() {
        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
        drawer.mViewport = spy(drawer.mViewport);
        drawer.draw(Overlay.Target.PICTURE_SNAPSHOT);
        drawer.render();
        verify(drawer.mViewport, times(1)).drawFrame(
                drawer.mTextureId,
                drawer.getTransform()
        );
    }

    @Test
    public void testRelease() {
        OverlayDrawer drawer = new OverlayDrawer(mock(Overlay.class), new Size(WIDTH, HEIGHT));
        EglViewport viewport = spy(drawer.mViewport);
        drawer.mViewport = viewport;
        drawer.release();
        verify(viewport, times(1)).release();
    }
}

@@ -0,0 +1,119 @@
package com.otaliastudios.cameraview.internal;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.preview.RendererThread;
/**
* Fixes an issue for some devices with snapshot picture and video recording.
* This is so unclear that I wanted to have a separate class holding code and comments.
*
* WHEN TO USE THIS CLASS
* There is actually no need of this class in some cases:
* - when we don't have overlays, everything works
* - on the majority of devices, everything works
* But some devices will show the issue #514 and so they need this class to fix it.
* We will use this always since it should have close to none performance impact.
*
* SNAPSHOT PROCEDURE
* The issue is about picture and video snapshots with overlays. In both cases, we:
* 1. Take textureId from the camera preview
* 2. Take EGLContext from the camera preview thread ({@link RendererThread})
* 3. Create an overlayTextureId
* 4. Create an overlaySurfaceTexture
* 5. Create an overlaySurface
* 6. Move to another thread
* 7. Create a new EGLContext using the old context as a shared context so we have texture data
* 8. Create a new EGLWindow using some surface as output
* 9. For each frame:
* 9A. Draw overlays on the overlaySurface.lockCanvas() / unlockCanvasAndPost()
* 9B. Publish overlays to GL texture using overlaySurfaceTexture.updateTexImage()
* 9C. GLES - draw textureId
* 9D. GLES - draw overlayTextureId
* Both textures are drawn on the same EGLWindow and we manage to overlay them with {@link GLES20#GL_BLEND}.
* This is the whole procedure and it works for the majority of devices and situations.
*
* ISSUE DESCRIPTION
* The #514 issue can be described as follows:
* - Overlays have no transparency: background is {@link Color#BLACK} and covers the video
* - Overlays have distorted colors: {@link Color#RED} becomes greenish,
* {@link Color#GREEN} becomes blueish,
* {@link Color#BLUE} becomes reddish
*
* ISSUE INSIGHTS
* After painful debugging, we have reached these conclusions:
* 1. Overlays are drawn on {@link Canvas} with the correct format
* This can be checked for example by applying alpha to one overlay. The final color will
* be faded out, although on a black background. So the {@link Canvas} drawing step works well.
* 2. The GLES shader will always receive pixels in RGBA
* This seems to be a constant in Android - someone does the conversion for us at a lower level.
* This was confirmed for example by forcing A=0.5 and seeing the video frames behind the overlay
* black background, or by forcing to 0.0 some of the channels and seeing the output.
* 3. The {@link Canvas} / {@link Surface} pixels are wrongly treated as YUV!
* On problematic devices, some component down there thinks that our overlays RGBA are in YUV,
* and will CONVERT THEM TO RGBA. This means:
* 3A. Original alpha is dropped. The algorithm thinks we have passed YUV.
* 3B. Original colors are messed up. For example, (255,0,0,255,RGBA) is treated as (255,0,0,YUV)
* and converted back to rgb becoming greenish (74,255,27,255,RGBA).
* Doing the same conversion for {@link Color#GREEN} and {@link Color#BLUE} confirms what we
* were seeing in the issue screenshots.
*
* So a pixel format conversion takes place, when it shouldn't happen. We can't solve this:
* - It is done at a lower level, there's no real way for us to specify the surface format, but
* it seems that these devices will prefer a YUV format and misunderstand our {@link Canvas} pixels.
* - There is also no way to identify which devices will present this issue, it's a bug somewhere
* and it is implementation specific.
*
* THE MAGIC
* Hard to say why, but using this class fixes the described issue.
* It seems that when the {@link SurfaceTexture#updateTexImage()} method for the overlay surface
* is called - the one that updates the overlayTextureId - we must ensure that the CURRENTLY
* BOUND TEXTURE ID IS NOT 0. The id we choose to apply might be cameraTextureId, or overlayTextureId,
* or probably whatever other valid id, and should be passed to {@link #Issue514Workaround(int)}.
* [Tested with cameraTextureId and overlayTextureId: both do work.]
* [Tested with invalid id like 9999. This won't work.]
*
* This makes no sense, since overlaySurfaceTexture.updateTexImage() is setting it to overlayTextureId
* anyway, but it fixes the issue. Specifically, after any draw operation with {@link EglViewport},
* the bound texture is reset to 0 so this must be undone here. We offer:
*
* - {@link #beforeOverlayUpdateTexImage()} to be called before the {@link SurfaceTexture#updateTexImage()} call
* - {@link #end()} to release and bring things back to normal state
*
* Since updating and rendering can happen on different threads with a shared EGL context,
* in case they do, the {@link #beforeOverlayUpdateTexImage()}, the actual updateTexImage() and
* finally the {@link EglViewport} drawing operations should be synchronized with a lock.
*
* REFERENCES
* https://github.com/natario1/CameraView/issues/514
* https://android.googlesource.com/platform/frameworks/native/+/5c1139f/libs/gui/SurfaceTexture.cpp
* I can see here that SurfaceTexture does indeed call glBindTexture with the same parameters whenever
* updateTexImage is called, but it also does other gl stuff first. This other gl stuff might be
* breaking when we don't have a bound texture on some specific hardware implementation.
*/
public class Issue514Workaround {
    private final int textureId;

    public Issue514Workaround(int textureId) {
        this.textureId = textureId;
    }

    public void beforeOverlayUpdateTexImage() {
        bindTexture(textureId);
    }

    public void end() {
        bindTexture(0);
    }

    private void bindTexture(int textureId) {
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
    }
}
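
To make the call pattern concrete for reviewers, here is a small illustrative sketch (not part of the diff) that mirrors how OverlayDrawer, added later in this PR, wraps the workaround. The method and local variable names are invented for the example, and it assumes an EGL context is already current on the calling thread:

import android.graphics.SurfaceTexture;

import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.internal.egl.EglViewport;

// Illustrative only: mirrors the OverlayDrawer usage of Issue514Workaround.
class Issue514UsageSketch {
    void drawOneOverlayFrame() {
        EglViewport viewport = new EglViewport();
        int overlayTextureId = viewport.createTexture();
        SurfaceTexture overlayTexture = new SurfaceTexture(overlayTextureId);
        float[] transform = new float[16];
        Issue514Workaround workaround = new Issue514Workaround(overlayTextureId);
        final Object lock = new Object();

        // ... the overlay Canvas is drawn into a Surface backed by overlayTexture here ...

        // Update step: publish the Canvas pixels to the GL texture.
        synchronized (lock) {
            workaround.beforeOverlayUpdateTexImage(); // ensure the bound texture id is not 0
            overlayTexture.updateTexImage();
        }
        overlayTexture.getTransformMatrix(transform);

        // Render step (possibly on another thread sharing the EGL context):
        synchronized (lock) {
            viewport.drawFrame(overlayTextureId, transform); // EglViewport resets the binding to 0
        }

        workaround.end(); // back to a clean state: bind texture 0
        viewport.release();
        overlayTexture.release();
    }
}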

@@ -64,6 +64,8 @@ public class EglViewport extends EglElement {
     // Stuff from Texture2dProgram
     private int mProgramHandle;
     private int mTextureTarget;
+    private int mTextureUnit;

     // Program attributes
     private int muMVPMatrixLocation;
     private int muTexMatrixLocation;
@@ -75,7 +77,12 @@
     // private int muColorAdjustLoc; // Used for filtering

     public EglViewport() {
-        mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
+        this(GLES20.GL_TEXTURE0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    }
+
+    private EglViewport(int textureUnit, int textureTarget) {
+        mTextureUnit = textureUnit;
+        mTextureTarget = textureTarget;
         mProgramHandle = createProgram(SIMPLE_VERTEX_SHADER, SIMPLE_FRAGMENT_SHADER);
         maPositionLocation = GLES20.glGetAttribLocation(mProgramHandle, "aPosition");
         checkLocation(maPositionLocation, "aPosition");
@@ -105,6 +112,7 @@
         check("glGenTextures");
         int texId = textures[0];

+        GLES20.glActiveTexture(mTextureUnit);
         GLES20.glBindTexture(mTextureTarget, texId);
         check("glBindTexture " + texId);
@@ -145,14 +153,8 @@
         GLES20.glUseProgram(mProgramHandle);
         check("glUseProgram");

-        // enable blending, from: http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
-        GLES20.glDisable(GLES20.GL_CULL_FACE);
-        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
-        GLES20.glEnable(GLES20.GL_BLEND);
-        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
-
         // Set the texture.
-        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+        GLES20.glActiveTexture(mTextureUnit);
         GLES20.glBindTexture(mTextureTarget, textureId);

         // Copy the model / view / projection matrix over.

@@ -25,6 +25,13 @@ public class WorkerHandler {
     private final static CameraLogger LOG = CameraLogger.create(WorkerHandler.class.getSimpleName());
     private final static ConcurrentHashMap<String, WeakReference<WorkerHandler>> sCache = new ConcurrentHashMap<>(4);

+    private final static String FALLBACK_NAME = "FallbackCameraThread";
+
+    // Store a hard reference to the fallback handler. We never use this, only update it
+    // anytime get() is called. This should ensure that this instance is not collected.
+    @SuppressWarnings("FieldCanBeLocal")
+    private static WorkerHandler sFallbackHandler;
+
     /**
      * Gets a possibly cached handler with the given name.
      * @param name the handler name
@@ -36,14 +43,19 @@
             //noinspection ConstantConditions
             WorkerHandler cached = sCache.get(name).get();
             if (cached != null) {
-                HandlerThread thread = cached.mThread;
-                if (thread.isAlive() && !thread.isInterrupted()) {
+                if (cached.getThread().isAlive() && !cached.getThread().isInterrupted()) {
                     LOG.w("get:", "Reusing cached worker handler.", name);
                     return cached;
+                } else {
+                    // Cleanup the old thread before creating a new one
+                    cached.destroy();
+                    LOG.w("get:", "Thread reference found, but not alive or interrupted. Removing.", name);
+                    sCache.remove(name);
                 }
+            } else {
+                LOG.w("get:", "Thread reference died. Removing.", name);
+                sCache.remove(name);
             }
-            LOG.w("get:", "Thread reference died, removing.", name);
-            sCache.remove(name);
         }
         LOG.i("get:", "Creating new handler.", name);
@@ -58,7 +70,8 @@
      */
     @NonNull
     public static WorkerHandler get() {
-        return get("FallbackCameraThread");
+        sFallbackHandler = get(FALLBACK_NAME);
+        return sFallbackHandler;
     }

     /**
@@ -183,6 +196,7 @@
      * Returns the android backing {@link Looper}.
      * @return the looper
      */
+    @SuppressWarnings("WeakerAccess")
     @NonNull
     public Looper getLooper() {
         return mThread.getLooper();
@@ -197,21 +211,35 @@
         return mExecutor;
     }

+    /**
+     * Destroys this handler and its thread. After this method returns, the handler
+     * should be considered unusable.
+     *
+     * Internal note: this does not remove the thread from our cache, but it does
+     * interrupt it, so the next {@link #get(String)} call will remove it.
+     * In any case, we only store weak references.
+     */
+    @SuppressWarnings("WeakerAccess")
+    public void destroy() {
+        HandlerThread thread = getThread();
+        if (thread.isAlive()) {
+            thread.interrupt();
+            thread.quit();
+            // after quit(), the thread will die at some point in the future. Might take some ms.
+            // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
+        }
+    }
+
     /**
      * Destroys all handlers, interrupting their work and
      * removing them from our cache.
      */
-    public static void destroy() {
+    public static void destroyAll() {
         for (String key : sCache.keySet()) {
             WeakReference<WorkerHandler> ref = sCache.get(key);
             //noinspection ConstantConditions
             WorkerHandler handler = ref.get();
-            if (handler != null && handler.getThread().isAlive()) {
-                handler.getThread().interrupt();
-                handler.getThread().quit();
-                // after quit(), the thread will die at some point in the future. Might take some ms.
-                // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
-            }
+            if (handler != null) handler.destroy();
             ref.clear();
         }
         sCache.clear();
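
For reviewers, a short illustrative sketch (not part of the diff) of the caching and teardown semantics that the renamed methods and the new tests rely on; the handler name "encoder" and the wrapper class are made up for the example:

import com.otaliastudios.cameraview.internal.utils.WorkerHandler;

// Illustrative only: destroy() vs destroyAll() as introduced above.
class WorkerHandlerUsageSketch {
    void example() {
        // Named handlers are cached by name behind weak references.
        WorkerHandler handler = WorkerHandler.get("encoder");

        // The static execute() posts to the fallback handler ("FallbackCameraThread"),
        // which is now kept alive through the hard sFallbackHandler reference.
        WorkerHandler.execute(new Runnable() {
            @Override
            public void run() {
                // background work
            }
        });

        // destroy() quits this handler's thread; the cache entry is cleaned up lazily,
        // so the next get("encoder") sees a dead thread and creates a fresh handler.
        handler.destroy();
        WorkerHandler fresh = WorkerHandler.get("encoder"); // not the same instance

        // destroyAll() (formerly destroy()) tears down every cached handler,
        // as CameraIntegrationTest now does in setUp() and tearDown().
        WorkerHandler.destroyAll();
    }
}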

@@ -0,0 +1,131 @@
package com.otaliastudios.cameraview.overlay;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.view.Surface;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.Issue514Workaround;
import com.otaliastudios.cameraview.internal.egl.EglViewport;
import com.otaliastudios.cameraview.size.Size;
import java.nio.Buffer;
/**
* Draws overlays through {@link Overlay}.
*
* - Provides a {@link Canvas} to be passed to the Overlay
* - Lets the overlay draw there: {@link #draw(Overlay.Target)}
* - Renders this into the current EGL window: {@link #render()}
* - Applies the {@link Issue514Workaround} the correct way
*
* In the future we might want to use a different approach than {@link EglViewport},
* {@link SurfaceTexture} and {@link GLES11Ext#GL_TEXTURE_EXTERNAL_OES},
* for example by using a regular {@link GLES20#GL_TEXTURE_2D} that might
* be filled through {@link GLES20#glTexImage2D(int, int, int, int, int, int, int, int, Buffer)}.
*
* The current approach has some issues, for example see {@link Issue514Workaround}.
*/
public class OverlayDrawer {
    private static final String TAG = OverlayDrawer.class.getSimpleName();
    private static final CameraLogger LOG = CameraLogger.create(TAG);

    private Overlay mOverlay;
    @VisibleForTesting int mTextureId;
    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;
    private float[] mTransform = new float[16];
    @VisibleForTesting EglViewport mViewport;
    private Issue514Workaround mIssue514Workaround;
    private final Object mIssue514WorkaroundLock = new Object();

    public OverlayDrawer(@NonNull Overlay overlay, @NonNull Size size) {
        mOverlay = overlay;
        mViewport = new EglViewport();
        mTextureId = mViewport.createTexture();
        mSurfaceTexture = new SurfaceTexture(mTextureId);
        mSurfaceTexture.setDefaultBufferSize(size.getWidth(), size.getHeight());
        mSurface = new Surface(mSurfaceTexture);
        mIssue514Workaround = new Issue514Workaround(mTextureId);
    }

    /**
     * Should be called to draw the {@link Overlay} on the given {@link Overlay.Target}.
     * This will provide a working {@link Canvas} to the overlay and also update the
     * drawn contents to a GLES texture.
     * @param target the target
     */
    public void draw(@NonNull Overlay.Target target) {
        try {
            final Canvas surfaceCanvas = mSurface.lockCanvas(null);
            surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
            mOverlay.drawOn(target, surfaceCanvas);
            mSurface.unlockCanvasAndPost(surfaceCanvas);
        } catch (Surface.OutOfResourcesException e) {
            LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
        }
        synchronized (mIssue514WorkaroundLock) {
            mIssue514Workaround.beforeOverlayUpdateTexImage();
            mSurfaceTexture.updateTexImage();
        }
        mSurfaceTexture.getTransformMatrix(mTransform);
    }

    /**
     * Returns the transform that should be used to render the drawn content.
     * This should be called after {@link #draw(Overlay.Target)} and can be modified.
     * @return the transform matrix
     */
    public float[] getTransform() {
        return mTransform;
    }

    /**
     * Renders the drawn content in the current EGL surface, assuming there is one.
     * Should be called after {@link #draw(Overlay.Target)} and any {@link #getTransform()}
     * modification.
     */
    public void render() {
        // Enable blending
        // Reference http://www.learnopengles.com/android-lesson-five-an-introduction-to-blending/
        GLES20.glDisable(GLES20.GL_CULL_FACE);
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);

        synchronized (mIssue514WorkaroundLock) {
            mViewport.drawFrame(mTextureId, mTransform);
        }
    }

    /**
     * Releases resources.
     */
    public void release() {
        if (mIssue514Workaround != null) {
            mIssue514Workaround.end();
            mIssue514Workaround = null;
        }
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
        if (mViewport != null) {
            mViewport.release();
            mViewport = null;
        }
    }
}
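
As a usage summary for reviewers, here is a minimal illustrative sketch (not part of the diff) of the draw / transform / render / release flow, matching how SnapshotGlPictureRecorder and TextureMediaEncoder use OverlayDrawer later in this diff. The sketch's class and method names, size and rotation values are invented, and an EGL surface must be current on the calling thread:

import android.opengl.Matrix;

import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.size.Size;

// Illustrative only: one overlay frame on top of an already drawn camera frame.
class OverlayDrawerUsageSketch {
    void drawOneFrame(Overlay overlay, int overlayRotation) {
        OverlayDrawer drawer = new OverlayDrawer(overlay, new Size(1080, 1920));

        // 1. Let the Overlay draw on a Canvas and publish it to the GL texture.
        drawer.draw(Overlay.Target.VIDEO_SNAPSHOT);

        // 2. Optionally adjust the texture transform before rendering.
        Matrix.translateM(drawer.getTransform(), 0, 0.5F, 0.5F, 0);
        Matrix.rotateM(drawer.getTransform(), 0, overlayRotation, 0, 0, 1);
        Matrix.translateM(drawer.getTransform(), 0, -0.5F, -0.5F, 0);

        // 3. Blend the overlay into the current EGL surface (GL_BLEND is enabled inside render()).
        drawer.render();

        // 4. Release Surface and GL resources when done.
        drawer.release();
    }
}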

@@ -14,6 +14,7 @@ import android.os.Build;
 import com.otaliastudios.cameraview.CameraLogger;
 import com.otaliastudios.cameraview.PictureResult;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.controls.Facing;
@@ -25,6 +26,7 @@ import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
 import com.otaliastudios.cameraview.internal.utils.CropHelper;
 import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
 import com.otaliastudios.cameraview.preview.RendererThread;
@@ -64,15 +66,11 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     private Overlay mOverlay;
     private boolean mHasOverlay;
+    private OverlayDrawer mOverlayDrawer;

     private int mTextureId;
-    private SurfaceTexture mSurfaceTexture;
     private float[] mTransform;

-    private int mOverlayTextureId = 0;
-    private SurfaceTexture mOverlaySurfaceTexture;
-    private Surface mOverlaySurface;
-    private float[] mOverlayTransform;
-
     private EglViewport mViewport;
@@ -104,7 +102,7 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
             @Override
             public void onRendererFrame(@NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
                 mPreview.removeRendererFrameCallback(this);
-                SnapshotGlPictureRecorder.this.onRendererFrame(scaleX, scaleY);
+                SnapshotGlPictureRecorder.this.onRendererFrame(surfaceTexture, scaleX, scaleY);
             }
         });
     }
@@ -114,19 +112,14 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
     private void onRendererTextureCreated(int textureId) {
         mTextureId = textureId;
         mViewport = new EglViewport();
-        mSurfaceTexture = new SurfaceTexture(mTextureId, true);
         // Need to crop the size.
         Rect crop = CropHelper.computeCrop(mResult.size, mOutputRatio);
         mResult.size = new Size(crop.width(), crop.height());
-        mSurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
         mTransform = new float[16];
+        Matrix.setIdentityM(mTransform, 0);
         if (mHasOverlay) {
-            mOverlayTextureId = mViewport.createTexture();
-            mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId, true);
-            mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
-            mOverlaySurface = new Surface(mOverlaySurfaceTexture);
-            mOverlayTransform = new float[16];
+            mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
         }
     }
@@ -155,23 +148,27 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
      */
     @RendererThread
     @TargetApi(Build.VERSION_CODES.KITKAT)
-    private void onRendererFrame(final float scaleX, final float scaleY) {
+    private void onRendererFrame(final @NonNull SurfaceTexture surfaceTexture, final float scaleX, final float scaleY) {
         // Get egl context from the RendererThread, which is the one in which we have created
         // the textureId and the overlayTextureId, managed by the GlSurfaceView.
         // Next operations can then be performed on different threads using this handle.
         final EGLContext eglContext = EGL14.eglGetCurrentContext();
-        final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
+        // Calling this invalidates the rotation/scale logic below:
+        // surfaceTexture.getTransformMatrix(mTransform); // TODO activate and fix the logic.
         WorkerHandler.execute(new Runnable() {
             @Override
             public void run() {
-                // 0. Create an EGL surface
-                EglBaseSurface eglSurface = new EglWindowSurface(core, mSurfaceTexture);
+                // 0. EGL window will need an output.
+                // We create a fake one as explained in javadocs.
+                final int fakeOutputTextureId = 9999;
+                SurfaceTexture fakeOutputSurface = new SurfaceTexture(fakeOutputTextureId);
+                fakeOutputSurface.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
+
+                // 1. Create an EGL surface
+                final EglCore core = new EglCore(eglContext, EglCore.FLAG_RECORDABLE);
+                final EglBaseSurface eglSurface = new EglWindowSurface(core, fakeOutputSurface);
                 eglSurface.makeCurrent();
-                // 1. Get latest texture
-                mSurfaceTexture.updateTexImage();
-                mSurfaceTexture.getTransformMatrix(mTransform);

                 // 2. Apply scale and crop
                 boolean flip = mEngine.getAngles().flip(Reference.VIEW, Reference.SENSOR);
                 float realScaleX = flip ? scaleY : scaleX;
@@ -181,60 +178,40 @@ public class SnapshotGlPictureRecorder extends PictureRecorder {
                 Matrix.translateM(mTransform, 0, scaleTranslX, scaleTranslY, 0);
                 Matrix.scaleM(mTransform, 0, realScaleX, realScaleY, 1);

-                // 3. Go back to 0,0 so that rotate and flip work well
-                Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0);
-
-                // 4. Apply rotation (not sure why we need the minus here)
-                Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1);
+                // 3. Apply rotation and flip
+                Matrix.translateM(mTransform, 0, 0.5F, 0.5F, 0); // Go back to 0,0
+                Matrix.rotateM(mTransform, 0, -mResult.rotation, 0, 0, 1); // Rotate (not sure why we need the minus)
                 mResult.rotation = 0;
-
-                // 5. Flip horizontally for front camera
-                if (mResult.facing == Facing.FRONT) {
+                if (mResult.facing == Facing.FRONT) { // 5. Flip horizontally for front camera
                     Matrix.scaleM(mTransform, 0, -1, 1, 1);
                 }
+                Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0); // Go back to old position

-                // 6. Go back to old position
-                Matrix.translateM(mTransform, 0, -0.5F, -0.5F, 0);
-
-                // 7. Do pretty much the same for overlays
+                // 4. Do pretty much the same for overlays
                 if (mHasOverlay) {
                     // 1. First we must draw on the texture and get latest image
-                    try {
-                        final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
-                        surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
-                        mOverlay.drawOn(Overlay.Target.PICTURE_SNAPSHOT, surfaceCanvas);
-                        mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
-                    } catch (Surface.OutOfResourcesException e) {
-                        LOG.w("Got Surface.OutOfResourcesException while drawing picture overlays", e);
-                    }
-                    mOverlaySurfaceTexture.updateTexImage();
-                    mOverlaySurfaceTexture.getTransformMatrix(mOverlayTransform);
+                    mOverlayDrawer.draw(Overlay.Target.PICTURE_SNAPSHOT);

                     // 2. Then we can apply the transformations
                     int rotation = mEngine.getAngles().offset(Reference.VIEW, Reference.OUTPUT, Axis.ABSOLUTE);
-                    Matrix.translateM(mOverlayTransform, 0, 0.5F, 0.5F, 0);
-                    Matrix.rotateM(mOverlayTransform, 0, rotation, 0, 0, 1);
+                    Matrix.translateM(mOverlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+                    Matrix.rotateM(mOverlayDrawer.getTransform(), 0, rotation, 0, 0, 1);
                     // No need to flip the x axis for front camera, but need to flip the y axis always.
-                    Matrix.scaleM(mOverlayTransform, 0, 1, -1, 1);
-                    Matrix.translateM(mOverlayTransform, 0, -0.5F, -0.5F, 0);
+                    Matrix.scaleM(mOverlayDrawer.getTransform(), 0, 1, -1, 1);
+                    Matrix.translateM(mOverlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
                 }

-                // 8. Draw and save
+                // 5. Draw and save
                 mViewport.drawFrame(mTextureId, mTransform);
-                if (mHasOverlay) mViewport.drawFrame(mOverlayTextureId, mOverlayTransform);
+                if (mHasOverlay) mOverlayDrawer.render();
                 mResult.format = PictureResult.FORMAT_JPEG;
                 mResult.data = eglSurface.saveFrameTo(Bitmap.CompressFormat.JPEG);

-                // 9. Cleanup
-                mSurfaceTexture.releaseTexImage();
+                // 6. Cleanup
                 eglSurface.releaseEglSurface();
                 mViewport.release();
-                mSurfaceTexture.release();
-                if (mHasOverlay) {
-                    mOverlaySurfaceTexture.releaseTexImage();
-                    mOverlaySurface.release();
-                    mOverlaySurfaceTexture.release();
-                }
+                fakeOutputSurface.release();
+                if (mHasOverlay) mOverlayDrawer.release();
                 core.release();
                 dispatchResult();
             }

@@ -9,11 +9,13 @@ import android.os.Build;
 import android.view.Surface;

 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.overlay.Overlay;
 import com.otaliastudios.cameraview.VideoResult;
 import com.otaliastudios.cameraview.controls.Audio;
 import com.otaliastudios.cameraview.engine.CameraEngine;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
 import com.otaliastudios.cameraview.preview.GlCameraPreview;
 import com.otaliastudios.cameraview.preview.RendererFrameCallback;
 import com.otaliastudios.cameraview.preview.RendererThread;
@@ -59,10 +61,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback
     private int mDesiredState = STATE_NOT_RECORDING;
     private int mTextureId = 0;

-    private int mOverlayTextureId = 0;
-    private SurfaceTexture mOverlaySurfaceTexture;
-    private Surface mOverlaySurface;
     private Overlay mOverlay;
+    private OverlayDrawer mOverlayDrawer;
     private boolean mHasOverlay;
     private int mOverlayRotation;
@@ -93,12 +93,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback
     public void onRendererTextureCreated(int textureId) {
         mTextureId = textureId;
         if (mHasOverlay) {
-            EglViewport temp = new EglViewport();
-            mOverlayTextureId = temp.createTexture();
-            mOverlaySurfaceTexture = new SurfaceTexture(mOverlayTextureId);
-            mOverlaySurfaceTexture.setDefaultBufferSize(mResult.size.getWidth(), mResult.size.getHeight());
-            mOverlaySurface = new Surface(mOverlaySurfaceTexture);
-            temp.release(true);
+            mOverlayDrawer = new OverlayDrawer(mOverlay, mResult.size);
         }
     }
@@ -141,7 +136,8 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback
             // Next operations can then be performed on different threads using this handle.
             videoConfig.eglContext = EGL14.eglGetCurrentContext();
             if (mHasOverlay) {
-                videoConfig.overlayTextureId = mOverlayTextureId;
+                videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
+                videoConfig.overlayDrawer = mOverlayDrawer;
                 videoConfig.overlayRotation = mOverlayRotation;
             }
             TextureMediaEncoder videoEncoder = new TextureMediaEncoder(videoConfig);
@@ -171,25 +167,7 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback
             frame.timestamp = surfaceTexture.getTimestamp();
             frame.timestampMillis = System.currentTimeMillis(); // NOTE: this is an approximation but it seems to work.
             surfaceTexture.getTransformMatrix(frame.transform);
-
-            // get overlay
-            if (mHasOverlay) {
-                try {
-                    final Canvas surfaceCanvas = mOverlaySurface.lockCanvas(null);
-                    surfaceCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
-                    mOverlay.drawOn(Overlay.Target.VIDEO_SNAPSHOT, surfaceCanvas);
-                    mOverlaySurface.unlockCanvasAndPost(surfaceCanvas);
-                } catch (Surface.OutOfResourcesException e) {
-                    LOG.w("Got Surface.OutOfResourcesException while drawing video overlays", e);
-                }
-                mOverlaySurfaceTexture.updateTexImage();
-                mOverlaySurfaceTexture.getTransformMatrix(frame.overlayTransform);
-            }
-
-            if (mEncoderEngine != null) {
-                // Can happen on teardown. At least it used to.
-                // NOTE: If this still happens, I would say we can still crash on mOverlaySurface
-                // calls above. We might have to add some synchronization.
+            if (mEncoderEngine != null) { // Can happen on teardown. At least it used to.
                 mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
             }
         }
@@ -237,13 +215,9 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFrameCallback
             mDesiredState = STATE_NOT_RECORDING;
             mPreview.removeRendererFrameCallback(SnapshotVideoRecorder.this);
             mPreview = null;
-            if (mOverlaySurfaceTexture != null) {
-                mOverlaySurfaceTexture.release();
-                mOverlaySurfaceTexture = null;
-            }
-            if (mOverlaySurface != null) {
-                mOverlaySurface.release();
-                mOverlaySurface = null;
+            if (mOverlayDrawer != null) {
+                mOverlayDrawer.release();
+                mOverlayDrawer = null;
             }
             mEncoderEngine = null;
             dispatchResult();

@@ -4,16 +4,19 @@ import android.opengl.EGLContext;

 import androidx.annotation.NonNull;

+import com.otaliastudios.cameraview.internal.Issue514Workaround;
+import com.otaliastudios.cameraview.overlay.Overlay;
+import com.otaliastudios.cameraview.overlay.OverlayDrawer;
+
 /**
  * Video configuration to be passed as input to the constructor
  * of a {@link TextureMediaEncoder}.
  */
 public class TextureConfig extends VideoConfig {

-    private final static int NO_TEXTURE = Integer.MIN_VALUE;
-
-    public int textureId = NO_TEXTURE;
-    public int overlayTextureId = NO_TEXTURE;
+    public int textureId;
+    public Overlay.Target overlayTarget;
+    public OverlayDrawer overlayDrawer;
     public int overlayRotation;
     public float scaleX;
     public float scaleY;
@@ -24,7 +27,8 @@ public class TextureConfig extends VideoConfig {
         TextureConfig copy = new TextureConfig();
         copy(copy);
         copy.textureId = this.textureId;
-        copy.overlayTextureId = this.overlayTextureId;
+        copy.overlayDrawer = this.overlayDrawer;
+        copy.overlayTarget = this.overlayTarget;
         copy.overlayRotation = this.overlayRotation;
         copy.scaleX = this.scaleX;
         copy.scaleY = this.scaleY;
@@ -33,6 +37,6 @@ public class TextureConfig extends VideoConfig {
     }

     boolean hasOverlay() {
-        return overlayTextureId != NO_TEXTURE;
+        return overlayDrawer != null;
     }
 }
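
A short illustrative sketch (not part of the diff) of how the new fields are filled, mirroring the SnapshotVideoRecorder change earlier in this diff; the wrapper class and method names are invented, and other VideoConfig fields (size, bitrate, scale, ...) are omitted for brevity:

import android.opengl.EGL14;

import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.overlay.OverlayDrawer;
import com.otaliastudios.cameraview.video.encoding.TextureConfig;
import com.otaliastudios.cameraview.video.encoding.TextureMediaEncoder;

// Illustrative only: hasOverlay() is now simply overlayDrawer != null.
class TextureConfigUsageSketch {
    TextureMediaEncoder createVideoEncoder(int cameraTextureId, OverlayDrawer overlayDrawer, int overlayRotation) {
        TextureConfig videoConfig = new TextureConfig();
        videoConfig.textureId = cameraTextureId;
        videoConfig.eglContext = EGL14.eglGetCurrentContext(); // must run on the renderer thread
        if (overlayDrawer != null) {
            videoConfig.overlayTarget = Overlay.Target.VIDEO_SNAPSHOT;
            videoConfig.overlayDrawer = overlayDrawer;
            videoConfig.overlayRotation = overlayRotation;
        }
        return new TextureMediaEncoder(videoConfig);
    }
}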

@@ -6,6 +6,7 @@ import android.opengl.Matrix;
 import android.os.Build;

 import com.otaliastudios.cameraview.CameraLogger;
+import com.otaliastudios.cameraview.internal.Issue514Workaround;
 import com.otaliastudios.cameraview.internal.egl.EglCore;
 import com.otaliastudios.cameraview.internal.egl.EglViewport;
 import com.otaliastudios.cameraview.internal.egl.EglWindowSurface;
@@ -64,11 +65,6 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
          * The transformation matrix for the base texture.
          */
         public float[] transform = new float[16];
-
-        /**
-         * The transformation matrix for the overlay texture, if any.
-         */
-        public float[] overlayTransform = new float[16];
     }

     /**
@@ -130,7 +126,6 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
         // 1. We must scale this matrix like GlCameraPreview does, because it might have some cropping.
         // Scaling takes place with respect to the (0, 0, 0) point, so we must apply a Translation to compensate.
         float[] transform = frame.transform;
-        float[] overlayTransform = frame.overlayTransform;
         float scaleX = mConfig.scaleX;
         float scaleY = mConfig.scaleY;
         float scaleTranslX = (1F - scaleX) / 2F;
@@ -148,13 +143,14 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {

         // 3. Do the same for overlays with their own rotation.
         if (mConfig.hasOverlay()) {
-            Matrix.translateM(overlayTransform, 0, 0.5F, 0.5F, 0);
-            Matrix.rotateM(overlayTransform, 0, mConfig.overlayRotation, 0, 0, 1);
-            Matrix.translateM(overlayTransform, 0, -0.5F, -0.5F, 0);
+            mConfig.overlayDrawer.draw(mConfig.overlayTarget);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, 0.5F, 0.5F, 0);
+            Matrix.rotateM(mConfig.overlayDrawer.getTransform(), 0, mConfig.overlayRotation, 0, 0, 1);
+            Matrix.translateM(mConfig.overlayDrawer.getTransform(), 0, -0.5F, -0.5F, 0);
         }
         mViewport.drawFrame(mConfig.textureId, transform);
         if (mConfig.hasOverlay()) {
-            mViewport.drawFrame(mConfig.overlayTextureId, overlayTransform);
+            mConfig.overlayDrawer.render();
         }
         mWindow.setPresentationTime(frame.timestamp);
         mWindow.swapBuffers();

@@ -21,7 +21,7 @@
         app:cameraPlaySounds="true"
         app:cameraGrid="off"
         app:cameraFlash="off"
-        app:cameraAudio="on"
+        app:cameraAudio="off"
         app:cameraFacing="back"
         app:cameraGestureTap="autoFocus"
         app:cameraGestureLongTap="none"
