Testing and stability improvements (#696)

pull/708/head
Mattia Iavarone 5 years ago committed by GitHub
parent 3db6fd3fc0
commit 4a6b9be905
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 43
      .github/workflows/build.yml
  2. 14
      .github/workflows/emulator_script.sh
  3. 25
      cameraview/build.gradle
  4. 14
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java
  5. 3
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
  6. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java
  7. 26
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java
  8. 428
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
  9. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
  10. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java
  11. 6
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java
  12. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerLayoutTest.java
  13. 12
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Emulator.java
  14. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Retry.java
  15. 52
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/RetryRule.java
  16. 2
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExclude.java
  17. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExcludeFilter.java
  18. 20
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkInclude.java
  19. 47
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkIncludeFilter.java
  20. 7
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java
  21. 44
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
  22. 77
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
  23. 168
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
  24. 161
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
  25. 55
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/orchestrator/CameraOrchestrator.java
  26. 30
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/orchestrator/CameraStateOrchestrator.java
  27. 68
      cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java
  28. 16
      cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
  29. 27
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full1PictureRecorder.java
  30. 11
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java
  31. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/FullPictureRecorder.java
  32. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java
  33. 3
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java
  34. 6
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
  35. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotPictureRecorder.java
  36. 5
      cameraview/src/main/java/com/otaliastudios/cameraview/video/Full1VideoRecorder.java
  37. 8
      cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java
  38. 96
      cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
  39. 64
      cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
  40. 11
      cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
  41. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
  42. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
  43. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
  44. 25
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
  45. 8
      codecov.yml

@ -1,4 +1,5 @@
# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions # https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
# Renaming ? Change the README badge.
name: Build name: Build
on: on:
push: push:
@ -37,28 +38,46 @@ jobs:
name: Emulator Tests name: Emulator Tests
runs-on: macOS-latest runs-on: macOS-latest
strategy: strategy:
fail-fast: false
matrix: matrix:
# TODO 29 fails due to Mockito issues, probably reproducible locally. # TODO 29 fails due to Mockito issues, probably reproducible locally
# 22, 23, 24, 25, 26, 27, 28 work - some of them, with SdkExclude restrictions. # 22-28 work (some of them, with SdkExclude restrictions)
EMULATOR_API: [22, 23, 24, 25, 26, 27, 28] EMULATOR_API: [22, 23, 24, 25, 26, 27, 28]
EMULATOR_ARCH: [x86_64] include:
- EMULATOR_API: 28
EMULATOR_ARCH: x86_64
- EMULATOR_API: 27
EMULATOR_ARCH: x86_64
- EMULATOR_API: 26
EMULATOR_ARCH: x86_64
- EMULATOR_API: 25
EMULATOR_ARCH: x86
- EMULATOR_API: 24
EMULATOR_ARCH: x86
- EMULATOR_API: 23
EMULATOR_ARCH: x86
- EMULATOR_API: 22
EMULATOR_ARCH: x86
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- uses: actions/setup-java@v1 - uses: actions/setup-java@v1
with: with:
java-version: 1.8 java-version: 1.8
- name: Execute emulator tests - name: Execute emulator tests
uses: reactivecircus/android-emulator-runner@v2 timeout-minutes: 20
uses: reactivecircus/android-emulator-runner@v2.2.0
with: with:
api-level: ${{ matrix.EMULATOR_API }} api-level: ${{ matrix.EMULATOR_API }}
arch: ${{ matrix.EMULATOR_ARCH }} arch: ${{ matrix.EMULATOR_ARCH }}
disable-animations: true disable-animations: true
emulator-options: -no-snapshot -no-window -no-boot-anim -camera-back emulated -camera-front emulated -memory 2048 profile: Nexus 5X
script: ./gradlew cameraview:connectedCheck emulator-options: -no-snapshot -no-window -no-boot-anim -camera-back emulated -camera-front emulated -gpu swiftshader_indirect
emulator-build: 6031357
script: ./.github/workflows/emulator_script.sh
- name: Upload emulator tests artifact - name: Upload emulator tests artifact
uses: actions/upload-artifact@v1 uses: actions/upload-artifact@v1
with: with:
name: emulator_tests name: emulator_tests_${{ matrix.EMULATOR_API }}
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected
CODE_COVERAGE: CODE_COVERAGE:
name: Code Coverage Report name: Code Coverage Report
@ -72,13 +91,15 @@ jobs:
- name: Download unit tests artifact - name: Download unit tests artifact
uses: actions/download-artifact@v1 uses: actions/download-artifact@v1
with: with:
name: unit_tests name: unit_tests
path: ./cameraview/build/jacoco/ path: ./cameraview/build/jacoco/
- name: Download emulator tests artifact - name: Download emulator tests artifact
uses: actions/download-artifact@v1 uses: actions/download-artifact@v1
with: with:
name: emulator_tests # 27 is the EMULATOR_API with less SdkExclude annotations, and should have
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected # the best possible coverage.
name: emulator_tests_27
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected
- name: Create merged coverage report - name: Create merged coverage report
run: ./gradlew cameraview:mergeCoverageReports run: ./gradlew cameraview:mergeCoverageReports
- name: Upload merged coverage report (GitHub) - name: Upload merged coverage report (GitHub)

@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Core
ADB_TAGS="CameraView:I CameraCallbacks:I CameraOrchestrator:I CameraEngine:I"
ADB_TAGS="$ADB_TAGS CameraUtils:I WorkerHandler:I"
# Recorders
ADB_TAGS="$ADB_TAGS VideoRecorder:I FullVideoRecorder:I SnapshotVideoRecorder:I"
ADB_TAGS="$ADB_TAGS FullPictureRecorder:I SnapshotPictureRecorder:I DeviceEncoders:I"
# Video encoders
ADB_TAGS="$ADB_TAGS MediaEncoderEngine:I MediaEncoder:I AudioMediaEncoder:I VideoMediaEncoder:I TextureMediaEncoder:I"
# Debugging
ADB_TAGS="$ADB_TAGS CameraIntegrationTest:I MessageQueue:W MPEG4Writer:I"
adb logcat -c
adb logcat $ADB_TAGS *:E -v color &
./gradlew cameraview:connectedCheck

@ -17,7 +17,9 @@ android {
versionCode 1 versionCode 1
versionName project.version versionName project.version
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
testInstrumentationRunnerArgument "filter", "com.otaliastudios.cameraview.tools.SdkExcludeFilter" testInstrumentationRunnerArgument "filter", "" +
"com.otaliastudios.cameraview.tools.SdkExcludeFilter," +
"com.otaliastudios.cameraview.tools.SdkIncludeFilter"
} }
buildTypes { buildTypes {
@ -241,17 +243,16 @@ task mergeCoverageReports(type: JacocoReport) {
if (isCI) { if (isCI) {
// All these classes are tested by the integration tests that we are not able to // All these classes are tested by the integration tests that we are not able to
// run on the CI emulator. // run on the CI emulator.
classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*') // classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*')
classFilter.add('**/com/otaliastudios/cameraview/picture/**.*') // classFilter.add('**/com/otaliastudios/cameraview/picture/**.*')
classFilter.add('**/com/otaliastudios/cameraview/video/**.*') // classFilter.add('**/com/otaliastudios/cameraview/video/**.*')
// TODO these below could be easily testable ALSO outside of the integration tests // classFilter.add('**/com/otaliastudios/cameraview/orchestrator/**.*')
classFilter.add('**/com/otaliastudios/cameraview/orchestrator/**.*') // classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*')
classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*')
} }
// We don't test OpenGL filters. // We don't test OpenGL filters.
classFilter.add('**/com/otaliastudios/cameraview/filters/**.*') classFilter.add('**/com/otaliastudios/cameraview/filters/**.*')

@ -10,6 +10,7 @@ import android.os.PowerManager;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.test.platform.app.InstrumentationRegistry; import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.GrantPermissionRule;
import com.otaliastudios.cameraview.tools.Op; import com.otaliastudios.cameraview.tools.Op;
@ -89,19 +90,6 @@ public class BaseTest {
InstrumentationRegistry.getInstrumentation().waitForIdleSync(); InstrumentationRegistry.getInstrumentation().waitForIdleSync();
} }
protected static void grantAllPermissions() {
grantPermission("android.permission.CAMERA");
grantPermission("android.permission.RECORD_AUDIO");
grantPermission("android.permission.WRITE_EXTERNAL_STORAGE");
}
@SuppressWarnings("WeakerAccess")
protected static void grantPermission(@NonNull String permission) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) return;
String command = "pm grant " + getContext().getPackageName() + " " + permission;
InstrumentationRegistry.getInstrumentation().getUiAutomation().executeShellCommand(command);
}
@NonNull @NonNull
protected static Stubber doCountDown(@NonNull final CountDownLatch latch) { protected static Stubber doCountDown(@NonNull final CountDownLatch latch) {
return doAnswer(new Answer<Object>() { return doAnswer(new Answer<Object>() {

@ -6,6 +6,7 @@ import android.content.res.TypedArray;
import android.graphics.PointF; import android.graphics.PointF;
import android.location.Location; import android.location.Location;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.test.annotation.UiThreadTest;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest; import androidx.test.filters.MediumTest;
@ -135,7 +136,7 @@ public class CameraViewTest extends BaseTest {
public void testDestroy() { public void testDestroy() {
cameraView.destroy(); cameraView.destroy();
verify(mockPreview, times(1)).onDestroy(); verify(mockPreview, times(1)).onDestroy();
verify(mockController, times(1)).destroy(); verify(mockController, times(1)).destroy(true);
} }
//region testDefaults //region testDefaults

@ -18,8 +18,8 @@ import androidx.test.filters.RequiresDevice;
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@LargeTest @LargeTest
@RequiresDevice // @RequiresDevice
public class Camera1IntegrationTest extends CameraIntegrationTest { public class Camera1IntegrationTest extends CameraIntegrationTest<Camera1Engine> {
@NonNull @NonNull
@Override @Override

@ -1,7 +1,9 @@
package com.otaliastudios.cameraview.engine; package com.otaliastudios.cameraview.engine;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult; import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import com.otaliastudios.cameraview.controls.Engine; import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.action.ActionHolder; import com.otaliastudios.cameraview.engine.action.ActionHolder;
@ -24,8 +26,8 @@ import java.util.concurrent.CountDownLatch;
*/ */
@RunWith(AndroidJUnit4.class) @RunWith(AndroidJUnit4.class)
@LargeTest @LargeTest
@RequiresDevice // @RequiresDevice
public class Camera2IntegrationTest extends CameraIntegrationTest { public class Camera2IntegrationTest extends CameraIntegrationTest<Camera2Engine> {
@NonNull @NonNull
@Override @Override
@ -39,7 +41,6 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
// Extra wait for the first frame to be dispatched. // Extra wait for the first frame to be dispatched.
// This is because various classes require getLastResult to be non-null // This is because various classes require getLastResult to be non-null
// and that's typically the case in a real app. // and that's typically the case in a real app.
Camera2Engine engine = (Camera2Engine) controller;
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);
new BaseAction() { new BaseAction() {
@Override @Override
@ -50,7 +51,7 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
latch.countDown(); latch.countDown();
setState(STATE_COMPLETED); setState(STATE_COMPLETED);
} }
}.start(engine); }.start(controller);
try { latch.await(); } catch (InterruptedException ignore) {} try { latch.await(); } catch (InterruptedException ignore) {}
} }
@ -58,4 +59,21 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
protected long getMeteringTimeoutMillis() { protected long getMeteringTimeoutMillis() {
return Camera2Engine.METER_TIMEOUT; return Camera2Engine.METER_TIMEOUT;
} }
/**
* setMaxDuration can crash on legacy devices (most emulator are), and I don't see
* any way to fix this in code. They shouldn't use Camera2 at all.
* @return true if possible.
*/
@Override
protected boolean canSetVideoMaxDuration() {
if (!super.canSetVideoMaxDuration()) return false;
boolean shouldOpen = !camera.isOpened();
if (shouldOpen) openSync(true);
boolean result = controller.readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
if (shouldOpen) closeSync(true);
return result;
}
} }

@ -28,14 +28,20 @@ import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState; import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.Frame; import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor; import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.size.SizeSelectors;
import com.otaliastudios.cameraview.tools.Emulator;
import com.otaliastudios.cameraview.tools.Op; import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.overlay.Overlay; import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.size.Size; import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.tools.Retry;
import com.otaliastudios.cameraview.tools.RetryRule;
import com.otaliastudios.cameraview.tools.SdkExclude;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.test.rule.ActivityTestRule; import androidx.test.rule.ActivityTestRule;
import androidx.test.rule.GrantPermissionRule;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@ -64,89 +70,102 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
public abstract class CameraIntegrationTest extends BaseTest { public abstract class CameraIntegrationTest<E extends CameraEngine> extends BaseTest {
private final static CameraLogger LOG = CameraLogger.create(CameraIntegrationTest.class.getSimpleName()); private final static CameraLogger LOG = CameraLogger.create(CameraIntegrationTest.class.getSimpleName());
private final static long DELAY = 8000; private final static long DELAY = 8000;
@Rule @Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class); public ActivityTestRule<TestActivity> activityRule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera; @Rule
protected CameraEngine controller; public RetryRule retryRule = new RetryRule(3);
private CameraListener listener;
private Op<Throwable> uiExceptionOp;
@BeforeClass @Rule
public static void grant() { public GrantPermissionRule permissionRule = GrantPermissionRule.grant(
grantAllPermissions(); "android.permission.CAMERA",
} "android.permission.RECORD_AUDIO",
"android.permission.WRITE_EXTERNAL_STORAGE"
);
protected CameraView camera;
protected E controller;
private CameraListener listener;
private Op<Throwable> error;
@NonNull @NonNull
protected abstract Engine getEngine(); protected abstract Engine getEngine();
@Before @Before
public void setUp() { public void setUp() {
LOG.e("Test started. Setting up camera."); LOG.w("[TEST STARTED]", "Setting up camera.");
WorkerHandler.destroyAll(); WorkerHandler.destroyAll();
uiSync(new Runnable() { uiSync(new Runnable() {
@Override @Override
public void run() { public void run() {
camera = new CameraView(rule.getActivity()) { camera = new CameraView(activityRule.getActivity()) {
@NonNull @NonNull
@Override @Override
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) { protected CameraEngine instantiateCameraEngine(
controller = super.instantiateCameraEngine(getEngine(), callback); @NonNull Engine engine,
@NonNull CameraEngine.Callback callback) {
//noinspection unchecked
controller = (E) super.instantiateCameraEngine(getEngine(), callback);
return controller; return controller;
} }
}; };
listener = mock(CameraListener.class);
camera.setExperimental(true); camera.setExperimental(true);
camera.setEngine(getEngine()); camera.setEngine(getEngine());
camera.addCameraListener(listener); activityRule.getActivity().inflate(camera);
rule.getActivity().inflate(camera); }
});
// Ensure that controller exceptions are thrown on this thread (not on the UI thread).
// TODO this makes debugging for wrong tests very hard, as we don't get the exception listener = mock(CameraListener.class);
// unless waitForUiException() is called. camera.addCameraListener(listener);
uiExceptionOp = new Op<>();
WorkerHandler crashThread = WorkerHandler.get("CrashThread"); error = new Op<>();
crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { WorkerHandler crashThread = WorkerHandler.get("CrashThread");
@Override crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread t, Throwable e) { @Override
uiExceptionOp.controller().end(e); public void uncaughtException(@NonNull Thread thread, @NonNull Throwable exception) {
} error.controller().end(exception);
}); }
controller.mCrashHandler = crashThread.getHandler(); });
controller.mCrashHandler = crashThread.getHandler();
camera.addCameraListener(new CameraListener() {
@Override
public void onCameraError(@NonNull CameraException exception) {
super.onCameraError(exception);
if (exception.isUnrecoverable()) {
LOG.e("[UNRECOVERABLE CAMERAEXCEPTION]",
"Got unrecoverable exception, not clear what to do in a test.");
}
} }
}); });
} }
@After @After
public void tearDown() { public void tearDown() {
LOG.e("Test ended. Tearing down camera."); LOG.w("[TEST ENDED]", "Tearing down camera.");
camera.destroy(); camera.destroy();
WorkerHandler.destroyAll(); WorkerHandler.destroyAll();
LOG.w("[TEST ENDED]", "Torn down camera.");
} }
private void waitForUiException() throws Throwable { protected final CameraOptions openSync(boolean expectSuccess) {
Throwable throwable = uiExceptionOp.await(DELAY);
if (throwable != null) {
throw throwable;
}
}
private CameraOptions openSync(boolean expectSuccess) {
camera.open();
final Op<CameraOptions> open = new Op<>(); final Op<CameraOptions> open = new Op<>();
doEndOp(open, 0).when(listener).onCameraOpened(any(CameraOptions.class)); doEndOp(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
camera.open();
CameraOptions result = open.await(DELAY); CameraOptions result = open.await(DELAY);
if (expectSuccess) { if (expectSuccess) {
LOG.i("[OPEN SYNC]", "Expecting success.");
assertNotNull("Can open", result); assertNotNull("Can open", result);
onOpenSync(); onOpenSync();
} else { } else {
LOG.i("[OPEN SYNC]", "Expecting failure.");
assertNull("Should not open", result); assertNull("Should not open", result);
} }
return result; return result;
@ -160,14 +179,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
while (controller.getState() != CameraState.PREVIEW) {} while (controller.getState() != CameraState.PREVIEW) {}
} }
private void closeSync(boolean expectSuccess) { protected final void closeSync(boolean expectSuccess) {
camera.close();
final Op<Boolean> close = new Op<>(); final Op<Boolean> close = new Op<>();
doEndOp(close, true).when(listener).onCameraClosed(); doEndOp(close, true).when(listener).onCameraClosed();
camera.close();
Boolean result = close.await(DELAY); Boolean result = close.await(DELAY);
if (expectSuccess) { if (expectSuccess) {
LOG.i("[CLOSE SYNC]", "Expecting success.");
assertNotNull("Can close", result); assertNotNull("Can close", result);
} else { } else {
LOG.i("[CLOSE SYNC]", "Expecting failure.");
assertNull("Should not close", result); assertNull("Should not close", result);
} }
} }
@ -175,45 +196,64 @@ public abstract class CameraIntegrationTest extends BaseTest {
@SuppressWarnings("UnusedReturnValue") @SuppressWarnings("UnusedReturnValue")
@Nullable @Nullable
private VideoResult waitForVideoResult(boolean expectSuccess) { private VideoResult waitForVideoResult(boolean expectSuccess) {
// CountDownLatch for onVideoRecordingEnd. Op<Boolean> wait1 = new Op<>();
CountDownLatch onVideoRecordingEnd = new CountDownLatch(1); Op<VideoResult> wait2 = new Op<>();
doCountDown(onVideoRecordingEnd).when(listener).onVideoRecordingEnd(); doEndOp(wait1, true).when(listener).onVideoRecordingEnd();
doEndOp(wait1, false).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
// Op for onVideoTaken.
final Op<VideoResult> video = new Op<>();
doEndOp(video, 0).when(listener).onVideoTaken(any(VideoResult.class));
doEndOp(video, null).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@Override @Override
public boolean matches(CameraException argument) { public boolean matches(CameraException argument) {
return argument.getReason() == CameraException.REASON_VIDEO_FAILED; return argument.getReason() == CameraException.REASON_VIDEO_FAILED;
} }
})); }));
doEndOp(wait2, 0).when(listener).onVideoTaken(any(VideoResult.class));
doEndOp(wait2, null).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@Override
public boolean matches(CameraException argument) {
return argument.getReason() == CameraException.REASON_VIDEO_FAILED;
}
}));
int maxLoops = 10;
int loops = 0;
// Wait for onVideoTaken and check. // First wait for onVideoRecordingEnd().
VideoResult result = video.await(DELAY);
// It seems that when running all the tests together, the device can go in some // It seems that when running all the tests together, the device can go in some
// power saving mode which makes the CPU extremely slow. This is especially problematic // power saving mode which makes the CPU extremely slow. This is especially problematic
// with video snapshots where we do lots of processing. The videoEnd callback can return // with video snapshots where we do lots of processing. The videoEnd callback can return
// long after the actual stop() call, so if we're still processing, let's wait more. // long after the actual stop() call, so if we're still processing, let's wait more.
if (expectSuccess && camera.isTakingVideo()) { LOG.i("[WAIT VIDEO]", "Waiting for onVideoRecordingEnd()...");
while (camera.isTakingVideo()) { Boolean wait1Result = wait1.await(DELAY);
video.listen(); if (expectSuccess) {
result = video.await(DELAY); while (wait1Result == null && loops <= maxLoops) {
LOG.w("[WAIT VIDEO]", "Waiting extra", DELAY, "milliseconds...");
wait1.listen();
wait1Result = wait1.await(DELAY);
loops++;
}
}
// Now wait for onVideoResult(). One cycle should be enough.
LOG.i("[WAIT VIDEO]", "Waiting for onVideoTaken()...");
VideoResult wait2Result = wait2.await(DELAY);
if (expectSuccess) {
while (wait2Result == null && loops <= maxLoops) {
LOG.w("[WAIT VIDEO]", "Waiting extra", DELAY, "milliseconds...");
wait2.listen();
wait2Result = wait2.await(DELAY);
loops++;
} }
// Sleep another 1000, because camera.isTakingVideo() might return false even
// if the result still has to be dispatched. Rare but could happen.
try { Thread.sleep(1000); } catch (InterruptedException ignore) {}
} }
// Now we should be OK. // Assert.
if (expectSuccess) { if (expectSuccess) {
assertEquals("Should call onVideoRecordingEnd", 0, onVideoRecordingEnd.getCount()); assertNotNull("Should call onVideoRecordingEnd", wait1Result);
assertNotNull("Should end video", result); assertTrue("Should call onVideoRecordingEnd", wait1Result);
assertNotNull("Should call onVideoTaken", wait2Result);
} else { } else {
assertNull("Should not end video", result); assertTrue("Should not call onVideoRecordingEnd",
wait1Result == null || !wait1Result);
assertNull("Should not call onVideoTaken", wait2Result);
} }
return result; return wait2Result;
} }
@Nullable @Nullable
@ -228,18 +268,29 @@ public abstract class CameraIntegrationTest extends BaseTest {
})); }));
PictureResult result = pic.await(DELAY); PictureResult result = pic.await(DELAY);
if (expectSuccess) { if (expectSuccess) {
LOG.i("[WAIT PICTURE]", "Expecting success.");
assertNotNull("Can take picture", result); assertNotNull("Can take picture", result);
} else { } else {
LOG.i("[WAIT PICTURE]", "Expecting failure.");
assertNull("Should not take picture", result); assertNull("Should not take picture", result);
} }
return result; return result;
} }
private void takeVideoSync(boolean expectSuccess) { /**
* Emulators do not respond well to setVideoMaxDuration() with full videos.
* The mediaRecorder.stop() call can hang forever.
* @return true if possible
*/
protected boolean canSetVideoMaxDuration() {
return !Emulator.isEmulator();
}
protected void takeVideoSync(boolean expectSuccess) {
takeVideoSync(expectSuccess,0); takeVideoSync(expectSuccess,0);
} }
private void takeVideoSync(boolean expectSuccess, int duration) { protected void takeVideoSync(boolean expectSuccess, int duration) {
final Op<Boolean> op = new Op<>(); final Op<Boolean> op = new Op<>();
doEndOp(op, true).when(listener).onVideoRecordingStart(); doEndOp(op, true).when(listener).onVideoRecordingStart();
doEndOp(op, false).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() { doEndOp(op, false).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@ -250,15 +301,33 @@ public abstract class CameraIntegrationTest extends BaseTest {
})); }));
File file = new File(getContext().getFilesDir(), "video.mp4"); File file = new File(getContext().getFilesDir(), "video.mp4");
if (duration > 0) { if (duration > 0) {
camera.takeVideo(file, duration); if (canSetVideoMaxDuration()) {
camera.takeVideo(file, duration);
} else {
final int delay = Math.round(duration * 1.2F); // Compensate for thread jumps, ...
uiSync(new Runnable() {
@Override
public void run() {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
camera.stopVideo();
}
}, delay);
}
});
camera.takeVideo(file);
}
} else { } else {
camera.takeVideo(file); camera.takeVideo(file);
} }
Boolean result = op.await(DELAY); Boolean result = op.await(DELAY);
if (expectSuccess) { if (expectSuccess) {
LOG.i("[WAIT VIDEO START]", "Expecting success.");
assertNotNull("should start video recording or get CameraError", result); assertNotNull("should start video recording or get CameraError", result);
assertTrue("should start video recording successfully", result); assertTrue("should start video recording successfully", result);
} else { } else {
LOG.i("[WAIT VIDEO START]", "Expecting failure.");
assertTrue("should not start video recording", result == null || !result); assertTrue("should not start video recording", result == null || !result);
} }
} }
@ -285,36 +354,53 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
Boolean result = op.await(DELAY); Boolean result = op.await(DELAY);
if (expectSuccess) { if (expectSuccess) {
LOG.i("[WAIT VIDEO SNAP START]", "Expecting success.");
assertNotNull("should start video recording or get CameraError", result); assertNotNull("should start video recording or get CameraError", result);
assertTrue("should start video recording successfully", result); assertTrue("should start video recording successfully", result);
} else { } else {
LOG.i("[WAIT VIDEO SNAP START]", "Expecting failure.");
assertTrue("should not start video recording", result == null || !result); assertTrue("should not start video recording", result == null || !result);
} }
} }
private void waitForError() throws Throwable {
Throwable throwable = error.await(DELAY);
if (throwable != null) {
throw throwable;
}
}
//region test open/close //region test open/close
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOpenClose() { public void testOpenClose() {
assertEquals(controller.getState(), CameraState.OFF); assertEquals(CameraState.OFF, controller.getState());
openSync(true); openSync(true);
assertTrue(controller.getState().isAtLeast(CameraState.ENGINE)); assertTrue(controller.getState().isAtLeast(CameraState.ENGINE));
closeSync(true); closeSync(true);
assertEquals(controller.getState(), CameraState.OFF); assertEquals(CameraState.OFF, controller.getState());
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOpenTwice() { public void testOpenTwice() {
openSync(true); openSync(true);
openSync(false); openSync(false);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCloseTwice() { public void testCloseTwice() {
closeSync(false); closeSync(false);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
// This works great on the device but crashes often on the emulator. // This works great on the device but crashes often on the emulator.
// There must be something wrong with the emulated camera... // There must be something wrong with the emulated camera...
// Like stopPreview() and release() are not really sync calls? // Like stopPreview() and release() are not really sync calls?
@ -333,6 +419,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartInitializesOptions() { public void testStartInitializesOptions() {
assertNull(camera.getCameraOptions()); assertNull(camera.getCameraOptions());
openSync(true); openSync(true);
@ -345,6 +433,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
// Test things that should reset the camera. // Test things that should reset the camera.
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetFacing() throws Exception { public void testSetFacing() throws Exception {
CameraOptions o = openSync(true); CameraOptions o = openSync(true);
int size = o.getSupportedFacing().size(); int size = o.getSupportedFacing().size();
@ -362,6 +452,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetMode() throws Exception { public void testSetMode() throws Exception {
camera.setMode(Mode.PICTURE); camera.setMode(Mode.PICTURE);
openSync(true); openSync(true);
@ -384,6 +476,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
// When camera is open, parameters will be set only if supported. // When camera is open, parameters will be set only if supported.
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetZoom() { public void testSetZoom() {
CameraOptions options = openSync(true); CameraOptions options = openSync(true);
float oldValue = camera.getZoom(); float oldValue = camera.getZoom();
@ -400,6 +494,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetExposureCorrection() { public void testSetExposureCorrection() {
CameraOptions options = openSync(true); CameraOptions options = openSync(true);
float oldValue = camera.getExposureCorrection(); float oldValue = camera.getExposureCorrection();
@ -416,6 +512,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetFlash() { public void testSetFlash() {
CameraOptions options = openSync(true); CameraOptions options = openSync(true);
Flash[] values = Flash.values(); Flash[] values = Flash.values();
@ -435,6 +533,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetWhiteBalance() { public void testSetWhiteBalance() {
CameraOptions options = openSync(true); CameraOptions options = openSync(true);
WhiteBalance[] values = WhiteBalance.values(); WhiteBalance[] values = WhiteBalance.values();
@ -453,6 +553,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetHdr() { public void testSetHdr() {
CameraOptions options = openSync(true); CameraOptions options = openSync(true);
Hdr[] values = Hdr.values(); Hdr[] values = Hdr.values();
@ -471,6 +573,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetAudio() { public void testSetAudio() {
openSync(true); openSync(true);
Audio[] values = Audio.values(); Audio[] values = Audio.values();
@ -481,6 +585,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetLocation() { public void testSetLocation() {
openSync(true); openSync(true);
camera.setLocation(10d, 2d); camera.setLocation(10d, 2d);
@ -493,15 +599,22 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetPreviewFrameRate() { public void testSetPreviewFrameRate() {
openSync(true); CameraOptions options = openSync(true);
camera.setPreviewFrameRate(30); camera.setPreviewFrameRate(30);
Op<Void> op = new Op<>(controller.mPreviewFrameRateTask); Op<Void> op = new Op<>(controller.mPreviewFrameRateTask);
op.await(300); op.await(300);
assertEquals(camera.getPreviewFrameRate(), 30, 0); assertEquals(camera.getPreviewFrameRate(),
Math.min(options.getPreviewFrameRateMaxValue(),
Math.max(options.getPreviewFrameRateMinValue(), 30)),
0);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetPlaySounds() { public void testSetPlaySounds() {
boolean oldValue = camera.getPlaySounds(); boolean oldValue = camera.getPlaySounds();
boolean newValue = !oldValue; boolean newValue = !oldValue;
@ -530,17 +643,18 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region test takeVideo //region test takeVideo
@Test(expected = RuntimeException.class) @Test(expected = RuntimeException.class)
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartVideo_whileInPictureMode() throws Throwable { public void testStartVideo_whileInPictureMode() throws Throwable {
// Fails on Travis. Some emulators can't deal with MediaRecorder
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
camera.setMode(Mode.PICTURE); camera.setMode(Mode.PICTURE);
openSync(true); openSync(true);
takeVideoSync(false); takeVideoSync(false);
waitForUiException(); waitForError();
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testStartEndVideo() { public void testStartEndVideo() {
camera.setMode(Mode.VIDEO); camera.setMode(Mode.VIDEO);
openSync(true); openSync(true);
@ -549,6 +663,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartEndVideoSnapshot() { public void testStartEndVideoSnapshot() {
// TODO should check api level for snapshot? // TODO should check api level for snapshot?
openSync(true); openSync(true);
@ -557,6 +673,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testStartEndVideo_withManualStop() { public void testStartEndVideo_withManualStop() {
camera.setMode(Mode.VIDEO); camera.setMode(Mode.VIDEO);
openSync(true); openSync(true);
@ -576,6 +694,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartEndVideoSnapshot_withManualStop() { public void testStartEndVideoSnapshot_withManualStop() {
openSync(true); openSync(true);
takeVideoSnapshotSync(true); takeVideoSnapshotSync(true);
@ -594,6 +714,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideo_withoutStarting() { public void testEndVideo_withoutStarting() {
camera.setMode(Mode.VIDEO); camera.setMode(Mode.VIDEO);
openSync(true); openSync(true);
@ -602,32 +724,54 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testEndVideo_withMaxSize() { public void testEndVideo_withMaxSize() {
camera.setMode(Mode.VIDEO); camera.setMode(Mode.VIDEO);
camera.setVideoMaxSize(3000*1000); // Less is risky camera.setVideoSize(SizeSelectors.maxArea(480 * 360));
openSync(true); openSync(true);
// Assuming video frame rate is 20...
//noinspection ConstantConditions
camera.setVideoBitRate((int) estimateVideoBitRate(camera.getVideoSize(), 20));
camera.setVideoMaxSize(estimateVideoBytes(camera.getVideoBitRate(), 5000));
takeVideoSync(true); takeVideoSync(true);
waitForVideoResult(true); waitForVideoResult(true);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideoSnapshot_withMaxSize() { public void testEndVideoSnapshot_withMaxSize() {
camera.setVideoMaxSize(3000*1000);
openSync(true); openSync(true);
camera.setSnapshotMaxWidth(480);
camera.setSnapshotMaxHeight(480);
// We don't want a very low FPS or the video frames are too sparse and recording
// can fail (e.g. audio reaching completion while video still struggling to start)
camera.setPreviewFrameRate(30F);
//noinspection ConstantConditions
camera.setVideoBitRate((int) estimateVideoBitRate(camera.getSnapshotSize(),
(int) camera.getPreviewFrameRate()));
camera.setVideoMaxSize(estimateVideoBytes(camera.getVideoBitRate(), 5000));
takeVideoSnapshotSync(true); takeVideoSnapshotSync(true);
waitForVideoResult(true); waitForVideoResult(true);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testEndVideo_withMaxDuration() { public void testEndVideo_withMaxDuration() {
camera.setMode(Mode.VIDEO); if (canSetVideoMaxDuration()) {
camera.setVideoMaxDuration(4000); camera.setMode(Mode.VIDEO);
openSync(true); camera.setVideoMaxDuration(4000);
takeVideoSync(true); openSync(true);
waitForVideoResult(true); takeVideoSync(true);
waitForVideoResult(true);
}
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideoSnapshot_withMaxDuration() { public void testEndVideoSnapshot_withMaxDuration() {
camera.setVideoMaxDuration(4000); camera.setVideoMaxDuration(4000);
openSync(true); openSync(true);
@ -640,6 +784,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region startAutoFocus //region startAutoFocus
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartAutoFocus() { public void testStartAutoFocus() {
CameraOptions o = openSync(true); CameraOptions o = openSync(true);
@ -657,6 +803,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStopAutoFocus() { public void testStopAutoFocus() {
CameraOptions o = openSync(true); CameraOptions o = openSync(true);
@ -681,12 +829,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region capture //region capture
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_beforeStarted() { public void testCapturePicture_beforeStarted() {
camera.takePicture(); camera.takePicture();
waitForPictureResult(false); waitForPictureResult(false);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_concurrentCalls() throws Exception { public void testCapturePicture_concurrentCalls() throws Exception {
// Second take should fail. // Second take should fail.
openSync(true); openSync(true);
@ -702,34 +854,57 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_size() { public void testCapturePicture_size() {
// Decoding can fail for large bitmaps. set a small size.
// camera.setPictureSize(SizeSelectors.smallest());
openSync(true); openSync(true);
Size size = camera.getPictureSize(); Size size = camera.getPictureSize();
assertNotNull(size); assertNotNull(size);
camera.takePicture(); camera.takePicture();
PictureResult result = waitForPictureResult(true); PictureResult result = waitForPictureResult(true);
assertNotNull(result); assertNotNull(result);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
assertNotNull(bitmap);
assertEquals(result.getSize(), size);
assertEquals(bitmap.getWidth(), size.getWidth());
assertEquals(bitmap.getHeight(), size.getHeight());
assertNotNull(result.getData()); assertNotNull(result.getData());
assertNull(result.getLocation()); assertNull(result.getLocation());
assertFalse(result.isSnapshot()); assertFalse(result.isSnapshot());
assertEquals(result.getSize(), size);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(),
Integer.MAX_VALUE, Integer.MAX_VALUE);
if (bitmap != null) {
assertNotNull(bitmap);
String message = LOG.i("[PICTURE SIZE]", "Desired:", size, "Bitmap:",
new Size(bitmap.getWidth(), bitmap.getHeight()));
if (!Emulator.isEmulator()) {
assertEquals(message, bitmap.getWidth(), size.getWidth());
assertEquals(message, bitmap.getHeight(), size.getHeight());
} else {
// Emulator implementation sometimes does not rotate the image correctly.
assertTrue(message, bitmap.getWidth() == size.getWidth()
|| bitmap.getWidth() == size.getHeight());
assertTrue(message, bitmap.getHeight() == size.getWidth()
|| bitmap.getHeight() == size.getHeight());
assertEquals(message, size.getWidth() * size.getHeight(),
bitmap.getWidth() * bitmap.getHeight());
}
}
} }
@Test(expected = RuntimeException.class) @Test(expected = RuntimeException.class)
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_whileInVideoMode() throws Throwable { public void testCapturePicture_whileInVideoMode() throws Throwable {
camera.setMode(Mode.VIDEO); camera.setMode(Mode.VIDEO);
openSync(true); openSync(true);
camera.takePicture(); camera.takePicture();
waitForUiException(); waitForError();
camera.takePicture(); camera.takePicture();
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_withMetering() { public void testCapturePicture_withMetering() {
openSync(true); openSync(true);
camera.setPictureMetering(true); camera.setPictureMetering(true);
@ -738,6 +913,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_withoutMetering() { public void testCapturePicture_withoutMetering() {
openSync(true); openSync(true);
camera.setPictureMetering(false); camera.setPictureMetering(false);
@ -746,12 +923,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_beforeStarted() { public void testCaptureSnapshot_beforeStarted() {
camera.takePictureSnapshot(); camera.takePictureSnapshot();
waitForPictureResult(false); waitForPictureResult(false);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_concurrentCalls() throws Exception { public void testCaptureSnapshot_concurrentCalls() throws Exception {
// Second take should fail. // Second take should fail.
openSync(true); openSync(true);
@ -767,6 +948,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_size() { public void testCaptureSnapshot_size() {
openSync(true); openSync(true);
Size size = camera.getSnapshotSize(); Size size = camera.getSnapshotSize();
@ -775,17 +958,24 @@ public abstract class CameraIntegrationTest extends BaseTest {
PictureResult result = waitForPictureResult(true); PictureResult result = waitForPictureResult(true);
assertNotNull(result); assertNotNull(result);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
assertNotNull(bitmap);
assertEquals(result.getSize(), size);
assertEquals(bitmap.getWidth(), size.getWidth());
assertEquals(bitmap.getHeight(), size.getHeight());
assertNotNull(result.getData()); assertNotNull(result.getData());
assertNull(result.getLocation()); assertNull(result.getLocation());
assertTrue(result.isSnapshot()); assertTrue(result.isSnapshot());
assertEquals(result.getSize(), size);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(),
Integer.MAX_VALUE, Integer.MAX_VALUE);
if (bitmap != null) {
String message = LOG.i("[PICTURE SIZE]", "Desired:", size, "Bitmap:",
new Size(bitmap.getWidth(), bitmap.getHeight()));
assertNotNull(bitmap);
assertEquals(message, bitmap.getWidth(), size.getWidth());
assertEquals(message, bitmap.getHeight(), size.getHeight());
}
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_withMetering() { public void testCaptureSnapshot_withMetering() {
openSync(true); openSync(true);
camera.setPictureSnapshotMetering(true); camera.setPictureSnapshotMetering(true);
@ -794,6 +984,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_withoutMetering() { public void testCaptureSnapshot_withoutMetering() {
openSync(true); openSync(true);
camera.setPictureSnapshotMetering(false); camera.setPictureSnapshotMetering(false);
@ -807,6 +999,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
@SuppressWarnings("ConstantConditions") @SuppressWarnings("ConstantConditions")
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testPictureFormat_DNG() { public void testPictureFormat_DNG() {
openSync(true); openSync(true);
if (camera.getCameraOptions().supports(PictureFormat.DNG)) { if (camera.getCameraOptions().supports(PictureFormat.DNG)) {
@ -830,24 +1024,28 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region Frame Processing //region Frame Processing
private void assert30Frames(FrameProcessor mock) throws Exception { private void assert15Frames(@NonNull FrameProcessor mock) throws Exception {
// Expect 30 frames // Expect 15 frames. Time is very high because currently Camera2 keeps a very low FPS.
CountDownLatch latch = new CountDownLatch(30); CountDownLatch latch = new CountDownLatch(15);
doCountDown(latch).when(mock).process(any(Frame.class)); doCountDown(latch).when(mock).process(any(Frame.class));
boolean did = latch.await(15, TimeUnit.SECONDS); boolean did = latch.await(30, TimeUnit.SECONDS);
assertTrue("Latch count should be 0: " + latch.getCount(), did); assertTrue("Latch count should be 0: " + latch.getCount(), did);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_simple() throws Exception { public void testFrameProcessing_simple() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class); FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor); camera.addFrameProcessor(processor);
openSync(true); openSync(true);
assert30Frames(processor); assert15Frames(processor);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_afterSnapshot() throws Exception { public void testFrameProcessing_afterSnapshot() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class); FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor); camera.addFrameProcessor(processor);
@ -858,10 +1056,12 @@ public abstract class CameraIntegrationTest extends BaseTest {
camera.takePictureSnapshot(); camera.takePictureSnapshot();
waitForPictureResult(true); waitForPictureResult(true);
assert30Frames(processor); assert15Frames(processor);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_afterRestart() throws Exception { public void testFrameProcessing_afterRestart() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class); FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor); camera.addFrameProcessor(processor);
@ -869,11 +1069,13 @@ public abstract class CameraIntegrationTest extends BaseTest {
closeSync(true); closeSync(true);
openSync(true); openSync(true);
assert30Frames(processor); assert15Frames(processor);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testFrameProcessing_afterVideo() throws Exception { public void testFrameProcessing_afterVideo() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class); FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor); camera.addFrameProcessor(processor);
@ -882,10 +1084,12 @@ public abstract class CameraIntegrationTest extends BaseTest {
takeVideoSync(true,4000); takeVideoSync(true,4000);
waitForVideoResult(true); waitForVideoResult(true);
assert30Frames(processor); assert15Frames(processor);
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_freezeRelease() throws Exception { public void testFrameProcessing_freezeRelease() throws Exception {
// Ensure that freeze/release cycles do not cause OOMs. // Ensure that freeze/release cycles do not cause OOMs.
// There was a bug doing this and it might resurface for any improper // There was a bug doing this and it might resurface for any improper
@ -895,7 +1099,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
camera.addFrameProcessor(processor); camera.addFrameProcessor(processor);
openSync(true); openSync(true);
assert30Frames(processor); assert15Frames(processor);
} }
public class FreezeReleaseFrameProcessor implements FrameProcessor { public class FreezeReleaseFrameProcessor implements FrameProcessor {
@ -910,6 +1114,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region Overlays //region Overlays
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOverlay_forPictureSnapshot() { public void testOverlay_forPictureSnapshot() {
Overlay overlay = mock(Overlay.class); Overlay overlay = mock(Overlay.class);
when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true); when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
@ -922,6 +1128,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
@Test @Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOverlay_forVideoSnapshot() { public void testOverlay_forVideoSnapshot() {
Overlay overlay = mock(Overlay.class); Overlay overlay = mock(Overlay.class);
when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true); when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
@ -934,4 +1142,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
} }
//endregion //endregion
@SuppressWarnings("SameParameterValue")
private static long estimateVideoBitRate(@NonNull Size size, int frameRate) {
// Nasty estimate for a LQ video
return Math.round(0.05D * size.getWidth() * size.getHeight() * frameRate);
}
@SuppressWarnings("SameParameterValue")
private static long estimateVideoBytes(long videoBitRate, long millis) {
// 1.3F accounts for audio.
return Math.round((videoBitRate * 1.3F) * (millis / 1000D) / 8D);
}
} }

@ -40,8 +40,8 @@ public class MockCameraEngine extends CameraEngine {
@NonNull @NonNull
@Override @Override
protected Task<Void> onStartEngine() { protected Task<CameraOptions> onStartEngine() {
return Tasks.forResult(null); return Tasks.forResult(mCameraOptions);
} }
@NonNull @NonNull

@ -5,6 +5,8 @@ import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity; import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid; import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.tools.Op; import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.tools.Retry;
import com.otaliastudios.cameraview.tools.RetryRule;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -26,6 +28,9 @@ public class GridLinesLayoutTest extends BaseTest {
@Rule @Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class); public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
@Rule
public RetryRule retryRule = new RetryRule(3);
private GridLinesLayout layout; private GridLinesLayout layout;
@NonNull @NonNull
@ -59,24 +64,28 @@ public class GridLinesLayoutTest extends BaseTest {
return result; return result;
} }
@Retry
@Test @Test
public void testOff() { public void testOff() {
int linesDrawn = setGridAndWait(Grid.OFF); int linesDrawn = setGridAndWait(Grid.OFF);
assertEquals(0, linesDrawn); assertEquals(0, linesDrawn);
} }
@Retry
@Test @Test
public void test3x3() { public void test3x3() {
int linesDrawn = setGridAndWait(Grid.DRAW_3X3); int linesDrawn = setGridAndWait(Grid.DRAW_3X3);
assertEquals(2, linesDrawn); assertEquals(2, linesDrawn);
} }
@Retry
@Test @Test
public void testPhi() { public void testPhi() {
int linesDrawn = setGridAndWait(Grid.DRAW_PHI); int linesDrawn = setGridAndWait(Grid.DRAW_PHI);
assertEquals(2, linesDrawn); assertEquals(2, linesDrawn);
} }
@Retry
@Test @Test
public void test4x4() { public void test4x4() {
int linesDrawn = setGridAndWait(Grid.DRAW_4X4); int linesDrawn = setGridAndWait(Grid.DRAW_4X4);

@ -220,12 +220,12 @@ public class WorkerHandlerTest extends BaseTest {
final WorkerHandler handler = WorkerHandler.get("handler"); final WorkerHandler handler = WorkerHandler.get("handler");
assertTrue(handler.getThread().isAlive()); assertTrue(handler.getThread().isAlive());
handler.destroy(); handler.destroy();
// Wait for the thread to die.
try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
assertFalse(handler.getThread().isAlive());
WorkerHandler newHandler = WorkerHandler.get("handler"); WorkerHandler newHandler = WorkerHandler.get("handler");
assertNotSame(handler, newHandler); assertNotSame(handler, newHandler);
assertTrue(newHandler.getThread().isAlive()); assertTrue(newHandler.getThread().isAlive());
// Ensure old thread dies at some point.
try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
assertFalse(handler.getThread().isAlive());
} }
@Test @Test

@ -27,10 +27,10 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
/** /**
* Not clear why, but for some reason on API 28 the UiThreadTests here crash for an internal NPE * Not clear why, but for some reason on API 28+ the UiThreadTests here crash for an internal NPE
* in FrameLayout.onMeasure. * in FrameLayout.onMeasure.
*/ */
@SdkExclude(minSdkVersion = 28, maxSdkVersion = 28) @SdkExclude(minSdkVersion = 28, maxSdkVersion = 29)
@TargetApi(17) @TargetApi(17)
public class MarkerLayoutTest extends BaseTest { public class MarkerLayoutTest extends BaseTest {

@ -0,0 +1,12 @@
package com.otaliastudios.cameraview.tools;
import android.os.Build;
public class Emulator {
public static boolean isEmulator() {
// From Android's RequiresDeviceFilter
return Build.HARDWARE.equals("goldfish")
|| Build.HARDWARE.equals("ranchu")
|| Build.HARDWARE.equals("gce_x86");
}
}

@ -0,0 +1,9 @@
package com.otaliastudios.cameraview.tools;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@Retention(RetentionPolicy.RUNTIME)
public @interface Retry {
boolean emulatorOnly() default false;
}

@ -0,0 +1,52 @@
package com.otaliastudios.cameraview.tools;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import java.util.concurrent.atomic.AtomicInteger;
public class RetryRule implements TestRule {
private final static String TAG = RetryRule.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
private AtomicInteger retries;
public RetryRule(int retries) {
this.retries = new AtomicInteger(retries);
}
@Override
public Statement apply(final Statement base, final Description description) {
return new Statement() {
@Override
public void evaluate() throws Throwable {
Retry retry = description.getAnnotation(Retry.class);
if (retry == null || retry.emulatorOnly() && !Emulator.isEmulator()) {
base.evaluate();
} else {
Throwable caught = null;
while (retries.getAndDecrement() > 0) {
try {
base.evaluate();
return;
} catch (Throwable throwable) {
LOG.e("[RETRY] Test failed.", retries.get(),
"retries available...");
LOG.e("*******************************************************");
caught = throwable;
}
}
if (caught != null) {
throw caught;
}
}
}
};
}
}

@ -15,4 +15,6 @@ public @interface SdkExclude {
int minSdkVersion() default 1; int minSdkVersion() default 1;
/** The maximum API level to drop (inclusive) */ /** The maximum API level to drop (inclusive) */
int maxSdkVersion() default Integer.MAX_VALUE; int maxSdkVersion() default Integer.MAX_VALUE;
/** Whether this filter only applies to emulators */
boolean emulatorOnly() default false;
} }

@ -15,10 +15,11 @@ import org.junit.runner.Description;
public class SdkExcludeFilter extends ParentFilter { public class SdkExcludeFilter extends ParentFilter {
protected boolean evaluateTest(Description description) { protected boolean evaluateTest(Description description) {
final SdkExclude sdkSuppress = getAnnotationForTest(description); SdkExclude annotation = getAnnotationForTest(description);
if (sdkSuppress != null) { if (annotation != null) {
if (Build.VERSION.SDK_INT >= sdkSuppress.minSdkVersion() if ((!annotation.emulatorOnly() || Emulator.isEmulator())
&& Build.VERSION.SDK_INT <= sdkSuppress.maxSdkVersion()) { && Build.VERSION.SDK_INT >= annotation.minSdkVersion()
&& Build.VERSION.SDK_INT <= annotation.maxSdkVersion()) {
return false; // exclude the test return false; // exclude the test
} }
return true; // run the test return true; // run the test

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.tools;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Like {@link androidx.test.filters.SdkSuppress}, but with emulatorOnly().
 * Tests (or classes) carrying this annotation are run only when the device API
 * level is within [minSdkVersion, maxSdkVersion], and — when emulatorOnly is
 * set — only on emulators. The filtering is applied by {@link SdkIncludeFilter}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface SdkInclude {
    /** The minimum API level to run (inclusive) */
    int minSdkVersion() default 1;
    /** The maximum API level to run (inclusive) */
    int maxSdkVersion() default Integer.MAX_VALUE;
    /** Whether this filter only applies to emulators */
    boolean emulatorOnly() default false;
}

@ -0,0 +1,47 @@
package com.otaliastudios.cameraview.tools;
import android.os.Build;
import androidx.annotation.Nullable;
import androidx.test.internal.runner.filters.ParentFilter;
import org.junit.runner.Description;
/**
 * Filter for {@link SdkInclude}, based on
 * {@link androidx.test.internal.runner.TestRequestBuilder}'s SdkSuppressFilter.
 */
public class SdkIncludeFilter extends ParentFilter {

    /**
     * Returns true if the test should run. Annotated tests run only when the
     * emulator constraint (if any) is satisfied and the device API level falls
     * inside [minSdkVersion, maxSdkVersion]; unannotated tests always run.
     */
    @Override
    protected boolean evaluateTest(Description description) {
        SdkInclude annotation = getAnnotationForTest(description);
        if (annotation != null) {
            if ((!annotation.emulatorOnly() || Emulator.isEmulator())
                    && Build.VERSION.SDK_INT >= annotation.minSdkVersion()
                    && Build.VERSION.SDK_INT <= annotation.maxSdkVersion()) {
                return true; // run the test
            }
            return false; // drop the test
        }
        return true; // no annotation, run the test
    }

    /**
     * Returns the method-level annotation if present, falling back to the
     * class-level annotation, or null when neither exists.
     */
    @Nullable
    private SdkInclude getAnnotationForTest(Description description) {
        final SdkInclude s = description.getAnnotation(SdkInclude.class);
        if (s != null) {
            return s;
        }
        final Class<?> testClass = description.getTestClass();
        if (testClass != null) {
            return testClass.getAnnotation(SdkInclude.class);
        }
        return null;
    }

    @Override
    public String describe() {
        // NOTE: the previous message ("Skip tests annotated with SdkInclude")
        // was copied from SdkExcludeFilter and described the opposite behavior:
        // this filter RUNS annotated tests and drops the ones whose bounds
        // do not match the current device.
        return "Skip tests whose SdkInclude conditions are not met";
    }
}

@ -33,6 +33,8 @@ import java.io.OutputStream;
@SuppressWarnings("unused") @SuppressWarnings("unused")
public class CameraUtils { public class CameraUtils {
private final static String TAG = CameraUtils.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
/** /**
* Determines whether the device has valid camera sensors, so the library * Determines whether the device has valid camera sensors, so the library
@ -289,9 +291,9 @@ public class CameraUtils {
exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL || exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE || exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE ||
exifOrientation == ExifInterface.ORIENTATION_TRANSVERSE; exifOrientation == ExifInterface.ORIENTATION_TRANSVERSE;
LOG.i("decodeBitmap:", "got orientation from EXIF.", orientation);
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); LOG.e("decodeBitmap:", "could not get orientation from EXIF.", e);
orientation = 0; orientation = 0;
flip = false; flip = false;
} finally { } finally {
@ -304,6 +306,7 @@ public class CameraUtils {
} else { } else {
orientation = rotation; orientation = rotation;
flip = false; flip = false;
LOG.i("decodeBitmap:", "got orientation from constructor.", orientation);
} }
Bitmap bitmap; Bitmap bitmap;

@ -125,6 +125,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private CameraPreview mCameraPreview; private CameraPreview mCameraPreview;
private OrientationHelper mOrientationHelper; private OrientationHelper mOrientationHelper;
private CameraEngine mCameraEngine; private CameraEngine mCameraEngine;
private Size mLastPreviewStreamSize;
private MediaActionSound mSound; private MediaActionSound mSound;
private AutoFocusMarker mAutoFocusMarker; private AutoFocusMarker mAutoFocusMarker;
@VisibleForTesting List<CameraListener> mListeners = new CopyOnWriteArrayList<>(); @VisibleForTesting List<CameraListener> mListeners = new CopyOnWriteArrayList<>();
@ -367,6 +368,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override @Override
protected void onDetachedFromWindow() { protected void onDetachedFromWindow() {
if (!mInEditor) mOrientationHelper.disable(); if (!mInEditor) mOrientationHelper.disable();
mLastPreviewStreamSize = null;
super.onDetachedFromWindow(); super.onDetachedFromWindow();
} }
@ -409,8 +411,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return; return;
} }
Size previewSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW); mLastPreviewStreamSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) { if (mLastPreviewStreamSize == null) {
LOG.w("onMeasure:", "surface is not ready. Calling default behavior."); LOG.w("onMeasure:", "surface is not ready. Calling default behavior.");
super.onMeasure(widthMeasureSpec, heightMeasureSpec); super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return; return;
@ -421,8 +423,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
int heightMode = MeasureSpec.getMode(heightMeasureSpec); int heightMode = MeasureSpec.getMode(heightMeasureSpec);
final int widthValue = MeasureSpec.getSize(widthMeasureSpec); final int widthValue = MeasureSpec.getSize(widthMeasureSpec);
final int heightValue = MeasureSpec.getSize(heightMeasureSpec); final int heightValue = MeasureSpec.getSize(heightMeasureSpec);
final float previewWidth = previewSize.getWidth(); final float previewWidth = mLastPreviewStreamSize.getWidth();
final float previewHeight = previewSize.getHeight(); final float previewHeight = mLastPreviewStreamSize.getHeight();
// Pre-process specs // Pre-process specs
final ViewGroup.LayoutParams lp = getLayoutParams(); final ViewGroup.LayoutParams lp = getLayoutParams();
@ -437,10 +439,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (heightMode == AT_MOST && lp.height == MATCH_PARENT) heightMode = EXACTLY; if (heightMode == AT_MOST && lp.height == MATCH_PARENT) heightMode = EXACTLY;
} }
LOG.i("onMeasure:", "requested dimensions are (" + widthValue + "[" + ms(widthMode) LOG.i("onMeasure:", "requested dimensions are ("
+ "]x" + heightValue + "[" + ms(heightMode) + "])"); + widthValue + "[" + ms(widthMode) + "]x"
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x" + heightValue + "[" + ms(heightMode) + "])");
+ previewHeight + ")"); LOG.i("onMeasure:", "previewSize is", "("
+ previewWidth + "x" + previewHeight + ")");
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we // (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we
// should do, other than respect it. The preview will eventually be cropped at the sides // should do, other than respect it. The preview will eventually be cropped at the sides
@ -808,7 +811,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (mInEditor) return; if (mInEditor) return;
clearCameraListeners(); clearCameraListeners();
clearFrameProcessors(); clearFrameProcessors();
mCameraEngine.destroy(); mCameraEngine.destroy(true);
if (mCameraPreview != null) mCameraPreview.onDestroy(); if (mCameraPreview != null) mCameraPreview.onDestroy();
} }
@ -2036,17 +2039,26 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override @Override
public void onCameraPreviewStreamSizeChanged() { public void onCameraPreviewStreamSizeChanged() {
mLogger.i("onCameraPreviewStreamSizeChanged");
// Camera preview size has changed. // Camera preview size has changed.
// Request a layout pass for onMeasure() to do its stuff. // Request a layout pass for onMeasure() to do its stuff.
// Potentially this will change CameraView size, which changes Surface size, // Potentially this will change CameraView size, which changes Surface size,
// which triggers a new Preview size. But hopefully it will converge. // which triggers a new Preview size. But hopefully it will converge.
mUiHandler.post(new Runnable() { Size previewSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
@Override if (previewSize == null) {
public void run() { throw new RuntimeException("Preview stream size should not be null here.");
requestLayout(); } else if (previewSize.equals(mLastPreviewStreamSize)) {
} mLogger.i("onCameraPreviewStreamSizeChanged:",
}); "swallowing because the preview size has not changed.", previewSize);
} else {
mLogger.i("onCameraPreviewStreamSizeChanged: posting a requestLayout call.",
"Preview stream size:", previewSize);
mUiHandler.post(new Runnable() {
@Override
public void run() {
requestLayout();
}
});
}
} }
@Override @Override

@ -19,6 +19,7 @@ import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks; import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.PictureFormat; import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper; import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.engine.offset.Axis; import com.otaliastudios.cameraview.engine.offset.Axis;
@ -54,10 +55,6 @@ public class Camera1Engine extends CameraEngine implements
Camera.PreviewCallback, Camera.PreviewCallback,
Camera.ErrorCallback, Camera.ErrorCallback,
FrameManager.BufferCallback { FrameManager.BufferCallback {
private static final String TAG = Camera1Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final String JOB_FOCUS_RESET = "focus reset"; private static final String JOB_FOCUS_RESET = "focus reset";
private static final String JOB_FOCUS_END = "focus end"; private static final String JOB_FOCUS_END = "focus end";
@ -76,20 +73,15 @@ public class Camera1Engine extends CameraEngine implements
@Override @Override
public void onError(int error, Camera camera) { public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.",
"CAMERA_ERROR_SERVER_DIED");
restart();
return;
}
String message = LOG.e("Internal Camera1 error.", error); String message = LOG.e("Internal Camera1 error.", error);
Exception runtime = new RuntimeException(message); Exception runtime = new RuntimeException(message);
int reason; int reason;
switch (error) { switch (error) {
case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break; case Camera.CAMERA_ERROR_SERVER_DIED:
case Camera.CAMERA_ERROR_UNKNOWN: reason = CameraException.REASON_UNKNOWN; break; case Camera.CAMERA_ERROR_EVICTED:
reason = CameraException.REASON_DISCONNECTED; break;
case Camera.CAMERA_ERROR_UNKNOWN: // Pass DISCONNECTED which is considered unrecoverable
reason = CameraException.REASON_DISCONNECTED; break;
default: reason = CameraException.REASON_UNKNOWN; default: reason = CameraException.REASON_UNKNOWN;
} }
throw new CameraException(runtime, reason); throw new CameraException(runtime, reason);
@ -146,7 +138,7 @@ public class Camera1Engine extends CameraEngine implements
@NonNull @NonNull
@EngineThread @EngineThread
@Override @Override
protected Task<Void> onStartEngine() { protected Task<CameraOptions> onStartEngine() {
try { try {
mCamera = Camera.open(mCameraId); mCamera = Camera.open(mCameraId);
} catch (Exception e) { } catch (Exception e) {
@ -165,7 +157,7 @@ public class Camera1Engine extends CameraEngine implements
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW, mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW,
Axis.ABSOLUTE)); // <- not allowed during preview Axis.ABSOLUTE)); // <- not allowed during preview
LOG.i("onStartEngine:", "Ended"); LOG.i("onStartEngine:", "Ended");
return Tasks.forResult(null); return Tasks.forResult(mCameraOptions);
} }
@EngineThread @EngineThread
@ -248,15 +240,19 @@ public class Camera1Engine extends CameraEngine implements
@NonNull @NonNull
@Override @Override
protected Task<Void> onStopPreview() { protected Task<Void> onStopPreview() {
LOG.i("onStopPreview:", "Started.");
if (mVideoRecorder != null) { if (mVideoRecorder != null) {
mVideoRecorder.stop(true); mVideoRecorder.stop(true);
mVideoRecorder = null; mVideoRecorder = null;
} }
mPictureRecorder = null; mPictureRecorder = null;
getFrameManager().release(); getFrameManager().release();
LOG.i("onStopPreview:", "Releasing preview buffers.");
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try { try {
LOG.i("onStopPreview:", "Stopping preview.");
mCamera.stopPreview(); mCamera.stopPreview();
LOG.i("onStopPreview:", "Stopped preview.");
} catch (Exception e) { } catch (Exception e) {
LOG.e("stopPreview", "Could not stop preview", e); LOG.e("stopPreview", "Could not stop preview", e);
} }
@ -278,7 +274,9 @@ public class Camera1Engine extends CameraEngine implements
throw new RuntimeException("Unknown CameraPreview output class."); throw new RuntimeException("Unknown CameraPreview output class.");
} }
} catch (IOException e) { } catch (IOException e) {
LOG.e("unbindFromSurface", "Could not release surface", e); // NOTE: when this happens, the next onStopEngine() call hangs on camera.release(),
// Not sure for how long. This causes the destroy() flow to fail the timeout.
LOG.e("onStopBind", "Could not release surface", e);
} }
return Tasks.forResult(null); return Tasks.forResult(null);
} }
@ -293,6 +291,20 @@ public class Camera1Engine extends CameraEngine implements
if (mCamera != null) { if (mCamera != null) {
try { try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera."); LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
// Just like Camera2Engine, this call can hang (at least on emulators) and if
// we don't find a way around the lock, it leaves the camera in a bad state.
// This is anticipated by the exception in onStopBind() (see above).
//
// 12:29:32.163 E Camera3-Device: Camera 0: clearStreamingRequest: Device has encountered a serious error
// 12:29:32.163 E Camera2-StreamingProcessor: stopStream: Camera 0: Can't clear stream request: Function not implemented (-38)
// 12:29:32.163 E Camera2Client: stopPreviewL: Camera 0: Can't stop streaming: Function not implemented (-38)
// 12:29:32.273 E Camera2-StreamingProcessor: deletePreviewStream: Unable to delete old preview stream: Device or resource busy (-16)
// 12:29:32.274 E Camera2-CallbackProcessor: deleteStream: Unable to delete callback stream: Device or resource busy (-16)
// 12:29:32.274 E Camera3-Device: Camera 0: disconnect: Shutting down in an error state
//
// I believe there is a thread deadlock due to this call internally waiting to
// dispatch some callback to us (pending captures, ...), but the callback thread
// is blocked here. We try to workaround this in CameraEngine.destroy().
mCamera.release(); mCamera.release();
LOG.i("onStopEngine:", "Clean up.", "Released camera."); LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) { } catch (Exception e) {
@ -315,11 +327,13 @@ public class Camera1Engine extends CameraEngine implements
@EngineThread @EngineThread
@Override @Override
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) { protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
LOG.i("onTakePicture:", "executing.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT, stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR); Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT); stub.size = getPictureSize(Reference.OUTPUT);
mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera); mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take(); mPictureRecorder.take();
LOG.i("onTakePicture:", "executed.");
} }
@EngineThread @EngineThread
@ -327,6 +341,7 @@ public class Camera1Engine extends CameraEngine implements
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub, protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio, @NonNull AspectRatio outputRatio,
boolean doMetering) { boolean doMetering) {
LOG.i("onTakePictureSnapshot:", "executing.");
// Not the real size: it will be cropped to match the view ratio // Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT); stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0. // Actually it will be rotated and set to 0.
@ -337,6 +352,7 @@ public class Camera1Engine extends CameraEngine implements
mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio); mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
} }
mPictureRecorder.take(); mPictureRecorder.take();
LOG.i("onTakePictureSnapshot:", "executed.");
} }
//endregion //endregion
@ -457,7 +473,7 @@ public class Camera1Engine extends CameraEngine implements
public void setFlash(@NonNull Flash flash) { public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash; final Flash old = mFlash;
mFlash = flash; mFlash = flash;
mFlashTask = mOrchestrator.scheduleStateful("flash", mFlashTask = mOrchestrator.scheduleStateful("flash (" + flash + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -508,7 +524,8 @@ public class Camera1Engine extends CameraEngine implements
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) { public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance; final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance; mWhiteBalance = whiteBalance;
mWhiteBalanceTask = mOrchestrator.scheduleStateful("white balance", mWhiteBalanceTask = mOrchestrator.scheduleStateful(
"white balance (" + whiteBalance + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -522,7 +539,11 @@ public class Camera1Engine extends CameraEngine implements
private boolean applyWhiteBalance(@NonNull Camera.Parameters params, private boolean applyWhiteBalance(@NonNull Camera.Parameters params,
@NonNull WhiteBalance oldWhiteBalance) { @NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) { if (mCameraOptions.supports(mWhiteBalance)) {
// If this lock key is present, the engine can throw when applying the
// parameters, not sure why. Since we never lock it, this should be
// harmless for the rest of the engine.
params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance)); params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
params.remove("auto-whitebalance-lock");
return true; return true;
} }
mWhiteBalance = oldWhiteBalance; mWhiteBalance = oldWhiteBalance;
@ -533,7 +554,7 @@ public class Camera1Engine extends CameraEngine implements
public void setHdr(@NonNull Hdr hdr) { public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr; final Hdr old = mHdr;
mHdr = hdr; mHdr = hdr;
mHdrTask = mOrchestrator.scheduleStateful("hdr", mHdrTask = mOrchestrator.scheduleStateful("hdr (" + hdr + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -557,7 +578,7 @@ public class Camera1Engine extends CameraEngine implements
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) { public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
final float old = mZoomValue; final float old = mZoomValue;
mZoomValue = zoom; mZoomValue = zoom;
mZoomTask = mOrchestrator.scheduleStateful("zoom", mZoomTask = mOrchestrator.scheduleStateful("zoom (" + zoom + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -589,7 +610,8 @@ public class Camera1Engine extends CameraEngine implements
@Nullable final PointF[] points, final boolean notify) { @Nullable final PointF[] points, final boolean notify) {
final float old = mExposureCorrectionValue; final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue; mExposureCorrectionValue = EVvalue;
mExposureCorrectionTask = mOrchestrator.scheduleStateful("exposure correction", mExposureCorrectionTask = mOrchestrator.scheduleStateful(
"exposure correction (" + EVvalue + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -629,7 +651,8 @@ public class Camera1Engine extends CameraEngine implements
public void setPlaySounds(boolean playSounds) { public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds; final boolean old = mPlaySounds;
mPlaySounds = playSounds; mPlaySounds = playSounds;
mPlaySoundsTask = mOrchestrator.scheduleStateful("play sounds", mPlaySoundsTask = mOrchestrator.scheduleStateful(
"play sounds (" + playSounds + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -665,7 +688,8 @@ public class Camera1Engine extends CameraEngine implements
public void setPreviewFrameRate(float previewFrameRate) { public void setPreviewFrameRate(float previewFrameRate) {
final float old = previewFrameRate; final float old = previewFrameRate;
mPreviewFrameRate = previewFrameRate; mPreviewFrameRate = previewFrameRate;
mPreviewFrameRateTask = mOrchestrator.scheduleStateful("preview fps", mPreviewFrameRateTask = mOrchestrator.scheduleStateful(
"preview fps (" + previewFrameRate + ")",
CameraState.ENGINE, CameraState.ENGINE,
new Runnable() { new Runnable() {
@Override @Override
@ -738,9 +762,8 @@ public class Camera1Engine extends CameraEngine implements
@Override @Override
public void onPreviewFrame(byte[] data, Camera camera) { public void onPreviewFrame(byte[] data, Camera camera) {
if (data == null) { if (data == null) {
// Let's test this with an exception. // Seen this happen in logs.
throw new RuntimeException("Camera1 returns null data from onPreviewFrame! " + return;
"This would make the frame processors crash later.");
} }
Frame frame = getFrameManager().getFrame(data, Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(), System.currentTimeMillis(),

@ -35,6 +35,7 @@ import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks; import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Facing; import com.otaliastudios.cameraview.controls.Facing;
@ -80,10 +81,6 @@ import java.util.concurrent.ExecutionException;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP) @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener, public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
ActionHolder { ActionHolder {
private static final String TAG = Camera2Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21; private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888; private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
@VisibleForTesting static final long METER_TIMEOUT = 2500; @VisibleForTesting static final long METER_TIMEOUT = 2500;
@ -129,16 +126,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
//region Utilities //region Utilities
@VisibleForTesting
@NonNull @NonNull
private <T> T readCharacteristic(@NonNull CameraCharacteristics.Key<T> key, <T> T readCharacteristic(@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) { @NonNull T fallback) {
return readCharacteristic(mCameraCharacteristics, key, fallback); return readCharacteristic(mCameraCharacteristics, key, fallback);
} }
@NonNull @NonNull
private <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics, private <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
@NonNull CameraCharacteristics.Key<T> key, @NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) { @NonNull T fallback) {
T value = characteristics.get(key); T value = characteristics.get(key);
return value == null ? fallback : value; return value == null ? fallback : value;
} }
@ -239,11 +237,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
* it should be set before calling this method, for example by calling * it should be set before calling this method, for example by calling
* {@link #createRepeatingRequestBuilder(int)}. * {@link #createRepeatingRequestBuilder(int)}.
*/ */
@EngineThread
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected void applyRepeatingRequestBuilder() { protected void applyRepeatingRequestBuilder() {
applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED); applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED);
} }
@EngineThread
private void applyRepeatingRequestBuilder(boolean checkStarted, int errorReason) { private void applyRepeatingRequestBuilder(boolean checkStarted, int errorReason) {
if ((getState() == CameraState.PREVIEW && !isChangingState()) || !checkStarted) { if ((getState() == CameraState.PREVIEW && !isChangingState()) || !checkStarted) {
try { try {
@ -329,6 +329,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@EngineThread @EngineThread
@Override @Override
protected void onPreviewStreamSizeChanged() { protected void onPreviewStreamSizeChanged() {
LOG.i("onPreviewStreamSizeChanged:", "Calling restartBind().");
restartBind(); restartBind();
} }
@ -374,8 +375,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@SuppressLint("MissingPermission") @SuppressLint("MissingPermission")
@NonNull @NonNull
@Override @Override
protected Task<Void> onStartEngine() { protected Task<CameraOptions> onStartEngine() {
final TaskCompletionSource<Void> task = new TaskCompletionSource<>(); final TaskCompletionSource<CameraOptions> task = new TaskCompletionSource<>();
try { try {
// We have a valid camera for this Facing. Go on. // We have a valid camera for this Facing. Go on.
mManager.openCamera(mCameraId, new CameraDevice.StateCallback() { mManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@ -385,7 +386,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// Set parameters that might have been set before the camera was opened. // Set parameters that might have been set before the camera was opened.
try { try {
LOG.i("createCamera:", "Applying default parameters."); LOG.i("onStartEngine:", "Opened camera device.");
mCameraCharacteristics = mManager.getCameraCharacteristics(mCameraId); mCameraCharacteristics = mManager.getCameraCharacteristics(mCameraId);
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW); boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
int format; int format;
@ -401,24 +402,34 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
task.trySetException(createCameraException(e)); task.trySetException(createCameraException(e));
return; return;
} }
task.trySetResult(null); task.trySetResult(mCameraOptions);
} }
@Override @Override
public void onDisconnected(@NonNull CameraDevice camera) { public void onDisconnected(@NonNull CameraDevice camera) {
// Not sure if this is called INSTEAD of onOpened() or can be called after // Not sure if this is called INSTEAD of onOpened() or can be called after
// as well. However, using trySetException should address this problem - // as well. Cover both cases with an unrecoverable exception so that the
// it will only trigger if the task has no result. // engine is properly destroyed.
// CameraException exception
// Docs say to release this camera instance, however, since we throw an = new CameraException(CameraException.REASON_DISCONNECTED);
// unrecoverable CameraException, this will trigger a stop() through the if (!task.getTask().isComplete()) {
// exception handler. task.trySetException(exception);
task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED)); } else {
LOG.i("CameraDevice.StateCallback reported disconnection.");
throw exception;
}
} }
@Override @Override
public void onError(@NonNull CameraDevice camera, int error) { public void onError(@NonNull CameraDevice camera, int error) {
task.trySetException(createCameraException(error)); if (!task.getTask().isComplete()) {
task.trySetException(createCameraException(error));
} else {
// This happened while the engine is running. Throw unrecoverable exception
// so that engine is properly destroyed.
LOG.e("CameraDevice.StateCallback reported an error:", error);
throw new CameraException(CameraException.REASON_DISCONNECTED);
}
} }
}, null); }, null);
} catch (CameraAccessException e) { } catch (CameraAccessException e) {
@ -555,6 +566,12 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
String message = LOG.e("onConfigureFailed! Session", session); String message = LOG.e("onConfigureFailed! Session", session);
throw new RuntimeException(message); throw new RuntimeException(message);
} }
@Override
public void onReady(@NonNull CameraCaptureSession session) {
super.onReady(session);
LOG.i("CameraCaptureSession.StateCallback reported onReady.");
}
}, null); }, null);
} catch (CameraAccessException e) { } catch (CameraAccessException e) {
throw createCameraException(e); throw createCameraException(e);
@ -566,7 +583,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull @NonNull
@Override @Override
protected Task<Void> onStartPreview() { protected Task<Void> onStartPreview() {
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged."); LOG.i("onStartPreview:", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged(); mCallback.onCameraPreviewStreamSizeChanged();
Size previewSizeForView = getPreviewStreamSize(Reference.VIEW); Size previewSizeForView = getPreviewStreamSize(Reference.VIEW);
@ -579,11 +596,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
getFrameManager().setUp(FRAME_PROCESSING_FORMAT, mFrameProcessingSize); getFrameManager().setUp(FRAME_PROCESSING_FORMAT, mFrameProcessingSize);
} }
LOG.i("onStartPreview", "Starting preview."); LOG.i("onStartPreview:", "Starting preview.");
addRepeatingRequestBuilderSurfaces(); addRepeatingRequestBuilderSurfaces();
applyRepeatingRequestBuilder(false, applyRepeatingRequestBuilder(false,
CameraException.REASON_FAILED_TO_START_PREVIEW); CameraException.REASON_FAILED_TO_START_PREVIEW);
LOG.i("onStartPreview", "Started preview."); LOG.i("onStartPreview:", "Started preview.");
// Start delayed video if needed. // Start delayed video if needed.
if (mFullVideoPendingStub != null) { if (mFullVideoPendingStub != null) {
@ -610,7 +627,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull @NonNull
@Override @Override
protected Task<Void> onStopPreview() { protected Task<Void> onStopPreview() {
LOG.i("onStopPreview:", "About to clean up."); LOG.i("onStopPreview:", "Started.");
if (mVideoRecorder != null) { if (mVideoRecorder != null) {
// This should synchronously call onVideoResult that will reset the repeating builder // This should synchronously call onVideoResult that will reset the repeating builder
// to the PREVIEW template. This is very important. // to the PREVIEW template. This is very important.
@ -621,15 +638,25 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (hasFrameProcessors()) { if (hasFrameProcessors()) {
getFrameManager().release(); getFrameManager().release();
} }
try { // Removing the part below for now. It hangs on emulators and can take a lot of time
// NOTE: should we wait for onReady() like docs say? // in real devices, for benefits that I'm not 100% sure about.
// Leaving this synchronous for now. if (false) {
mSession.stopRepeating(); try {
} catch (CameraAccessException e) { // Preferring abortCaptures() over stopRepeating(): it makes sure that all
// This tells us that we should stop everything. It's better to throw an unrecoverable // in-flight operations are discarded as fast as possible, which is what we want.
// exception rather than just swallow this, so everything gets stopped. // NOTE: this call is asynchronous. Should find a good way to wait for the outcome.
LOG.w("stopRepeating failed!", e); LOG.i("onStopPreview:", "calling abortCaptures().");
throw createCameraException(e); mSession.abortCaptures();
LOG.i("onStopPreview:", "called abortCaptures().");
} catch (CameraAccessException e) {
// This tells us that we should stop everything. It's better to throw an
// unrecoverable exception rather than just swallow, so everything gets stopped.
LOG.w("onStopPreview:", "abortCaptures failed!", e);
throw createCameraException(e);
} catch (IllegalStateException e) {
// This tells us that the session was already closed.
// Not sure if this can happen, but we can swallow it.
}
} }
removeRepeatingRequestBuilderSurfaces(); removeRepeatingRequestBuilderSurfaces();
mLastRepeatingResult = null; mLastRepeatingResult = null;
@ -672,6 +699,18 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected Task<Void> onStopEngine() { protected Task<Void> onStopEngine() {
try { try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera."); LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
// Just like Camera1Engine, this call can hang (at least on emulators) and if
// we don't find a way around the lock, it leaves the camera in a bad state.
//
// 12:33:28.152 2888 5470 I CameraEngine: onStopEngine: Clean up. Releasing camera.
// 12:33:29.476 1384 1555 E audio_hw_generic: pcm_write failed cannot write stream data: I/O error
// 12:33:33.206 1512 3616 E Camera3-Device: Camera 0: waitUntilDrainedLocked: Error waiting for HAL to drain: Connection timed out (-110)
// 12:33:33.242 1512 3616 E CameraDeviceClient: detachDevice: waitUntilDrained failed with code 0xffffff92
// 12:33:33.243 1512 3616 E Camera3-Device: Camera 0: disconnect: Shutting down in an error state
//
// I believe there is a thread deadlock due to this call internally waiting to
// dispatch some callback to us (pending captures, ...), but the callback thread
// is blocked here. We try to workaround this in CameraEngine.destroy().
mCamera.close(); mCamera.close();
LOG.i("onStopEngine:", "Clean up.", "Released camera."); LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) { } catch (Exception e) {
@ -790,7 +829,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
boolean unlock = (fullPicture && getPictureMetering()) boolean unlock = (fullPicture && getPictureMetering())
|| (!fullPicture && getPictureSnapshotMetering()); || (!fullPicture && getPictureSnapshotMetering());
if (unlock) { if (unlock) {
unlockAndResetMetering(); mOrchestrator.scheduleStateful("reset metering after picture",
CameraState.PREVIEW,
new Runnable() {
@Override
public void run() {
unlockAndResetMetering();
}
});
} }
} }
@ -880,12 +926,16 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void onVideoRecordingEnd() { public void onVideoRecordingEnd() {
super.onVideoRecordingEnd(); super.onVideoRecordingEnd();
// SnapshotRecorder will invoke this on its own thread which is risky, but if it was a // SnapshotRecorder will invoke this on its own thread which is risky, but if it was a
// snapshot, this function returns early so its safe. // snapshot, this function does nothing so it's safe.
boolean needsIssue549Workaround = (mVideoRecorder instanceof Full2VideoRecorder) && boolean needsIssue549Workaround = (mVideoRecorder instanceof Full2VideoRecorder) &&
(readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1) (readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
== CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY); == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
if (needsIssue549Workaround) { if (needsIssue549Workaround) {
LOG.w("Applying the Issue549 workaround.", Thread.currentThread());
maybeRestorePreviewTemplateAfterVideo(); maybeRestorePreviewTemplateAfterVideo();
LOG.w("Applied the Issue549 workaround. Sleeping...");
try { Thread.sleep(600); } catch (InterruptedException ignore) {}
LOG.w("Applied the Issue549 workaround. Slept!");
} }
} }
@ -1364,7 +1414,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
Image image = null; Image image = null;
try { try {
image = reader.acquireLatestImage(); image = reader.acquireLatestImage();
} catch (IllegalStateException ignore) { } } catch (Exception ignore) { }
if (image == null) { if (image == null) {
LOG.w("onImageAvailable", "we have a byte buffer but no Image!"); LOG.w("onImageAvailable", "we have a byte buffer but no Image!");
getFrameManager().onBufferUnused(data); getFrameManager().onBufferUnused(data);
@ -1447,7 +1497,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point); mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mOrchestrator.remove("reset metering"); mOrchestrator.remove("reset metering");
if (shouldResetAutoFocus()) { if (shouldResetAutoFocus()) {
mOrchestrator.scheduleDelayed("reset metering", mOrchestrator.scheduleStatefulDelayed("reset metering",
CameraState.PREVIEW,
getAutoFocusResetDelay(), getAutoFocusResetDelay(),
new Runnable() { new Runnable() {
@Override @Override
@ -1478,26 +1529,26 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return mMeterAction; return mMeterAction;
} }
@EngineThread
private void unlockAndResetMetering() { private void unlockAndResetMetering() {
if (getState() == CameraState.PREVIEW && !isChangingState()) { // Needs the PREVIEW state!
Actions.sequence( Actions.sequence(
new BaseAction() { new BaseAction() {
@Override @Override
protected void onStart(@NonNull ActionHolder holder) { protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder); super.onStart(holder);
applyDefaultFocus(holder.getBuilder(this)); applyDefaultFocus(holder.getBuilder(this));
holder.getBuilder(this) holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AE_LOCK, false); .set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this) holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AWB_LOCK, false); .set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.applyBuilder(this); holder.applyBuilder(this);
setState(STATE_COMPLETED); setState(STATE_COMPLETED);
// TODO should wait results? // TODO should wait results?
} }
}, },
new MeterResetAction() new MeterResetAction()
).start(Camera2Engine.this); ).start(Camera2Engine.this);
}
} }
//endregion //endregion
@ -1534,14 +1585,19 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return mRepeatingRequestBuilder; return mRepeatingRequestBuilder;
} }
@EngineThread
@Override @Override
public void applyBuilder(@NonNull Action source) { public void applyBuilder(@NonNull Action source) {
// NOTE: Should never be called on a non-engine thread!
// Non-engine threads are not protected by the uncaught exception handler
// and can make the process crash.
applyRepeatingRequestBuilder(); applyRepeatingRequestBuilder();
} }
@Override @Override
public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder) public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException { throws CameraAccessException {
// Risky - would be better to ensure that thread is the engine one.
if (getState() == CameraState.PREVIEW && !isChangingState()) { if (getState() == CameraState.PREVIEW && !isChangingState()) {
mSession.capture(builder.build(), mRepeatingRequestCallback, null); mSession.capture(builder.build(), mRepeatingRequestCallback, null);
} }

@ -8,8 +8,10 @@ import android.location.Location;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import com.google.android.gms.tasks.Continuation;
import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.SuccessContinuation;
import com.google.android.gms.tasks.Task; import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks; import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException; import com.otaliastudios.cameraview.CameraException;
@ -83,8 +85,9 @@ import java.util.concurrent.TimeUnit;
* S3 and S4 are also performed. * S3 and S4 are also performed.
* - {@link #stop(boolean)}: ASYNC - stops everything: undoes S4, then S3, then S2. * - {@link #stop(boolean)}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start. * - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: SYNC - performs a {@link #stop(boolean)} that will go on no matter what, * - {@link #destroy(boolean)}: SYNC - performs a {@link #stop(boolean)} that will go on no matter
* without throwing. Makes the engine unusable and clears resources. * what, without throwing. Makes the engine unusable and clears
* resources.
* *
* THREADING * THREADING
* Subclasses should always execute code on the thread given by {@link #mHandler}. * Subclasses should always execute code on the thread given by {@link #mHandler}.
@ -131,8 +134,11 @@ public abstract class CameraEngine implements
void dispatchOnVideoRecordingEnd(); void dispatchOnVideoRecordingEnd();
} }
private static final String TAG = CameraEngine.class.getSimpleName(); protected static final String TAG = CameraEngine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG); protected static final CameraLogger LOG = CameraLogger.create(TAG);
// If this is 2, this means we'll try to run destroy() twice.
private static final int DESTROY_RETRIES = 2;
// Need to be protected // Need to be protected
@SuppressWarnings("WeakerAccess") protected final Callback mCallback; @SuppressWarnings("WeakerAccess") protected final Callback mCallback;
@ -211,10 +217,9 @@ public abstract class CameraEngine implements
protected CameraEngine(@NonNull Callback callback) { protected CameraEngine(@NonNull Callback callback) {
mCallback = callback; mCallback = callback;
mCrashHandler = new Handler(Looper.getMainLooper()); mCrashHandler = new Handler(Looper.getMainLooper());
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
mFrameManager = instantiateFrameManager(); mFrameManager = instantiateFrameManager();
mAngles = new Angles(); mAngles = new Angles();
recreateHandler(false);
} }
public void setPreview(@NonNull CameraPreview cameraPreview) { public void setPreview(@NonNull CameraPreview cameraPreview) {
@ -248,7 +253,8 @@ public abstract class CameraEngine implements
private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler { private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler {
@Override @Override
public void uncaughtException(@NonNull Thread thread, @NonNull Throwable throwable) { public void uncaughtException(@NonNull Thread thread, @NonNull Throwable throwable) {
// No-op. LOG.w("EXCEPTION:", "In the NoOpExceptionHandler, probably while destroying.",
"Thread:", thread, "Error:", throwable);
} }
} }
@ -265,43 +271,57 @@ public abstract class CameraEngine implements
* @param throwable the throwable * @param throwable the throwable
* @param fromExceptionHandler true if coming from exception handler * @param fromExceptionHandler true if coming from exception handler
*/ */
private void handleException(@NonNull Thread thread, private void handleException(@NonNull final Thread thread,
final @NonNull Throwable throwable, final @NonNull Throwable throwable,
final boolean fromExceptionHandler) { final boolean fromExceptionHandler) {
if (!(throwable instanceof CameraException)) { // 1. If this comes from the exception handler, the thread has crashed. Replace it.
// This is unexpected, either a bug or something the developer should know. // Most actions are wrapped into Tasks so don't go here, but some callbacks do
// Release and crash the UI thread so we get bug reports. // (at least in Camera1, e.g. onError).
LOG.e("uncaughtException:", "Unexpected exception:", throwable); if (fromExceptionHandler) {
mCrashHandler.post(new Runnable() { LOG.e("EXCEPTION:", "Handler thread is gone. Replacing.");
@Override recreateHandler(false);
public void run() { }
destroy();
// Throws an unchecked exception without unnecessary wrapping. // 2. Depending on the exception, we must destroy(false|true) to release resources, and
// notify the outside, either with the callback or by crashing the app.
LOG.e("EXCEPTION:", "Scheduling on the crash handler...");
mCrashHandler.post(new Runnable() {
@Override
public void run() {
if (throwable instanceof CameraException) {
CameraException exception = (CameraException) throwable;
if (exception.isUnrecoverable()) {
LOG.e("EXCEPTION:", "Got CameraException. " +
"Since it is unrecoverable, executing destroy(false).");
destroy(false);
}
LOG.e("EXCEPTION:", "Got CameraException. Dispatching to callback.");
mCallback.dispatchError(exception);
} else {
LOG.e("EXCEPTION:", "Unexpected error! Executing destroy(true).");
destroy(true);
LOG.e("EXCEPTION:", "Unexpected error! Throwing.");
if (throwable instanceof RuntimeException) { if (throwable instanceof RuntimeException) {
throw (RuntimeException) throwable; throw (RuntimeException) throwable;
} else { } else {
throw new RuntimeException(throwable); throw new RuntimeException(throwable);
} }
} }
}); }
return; });
} }
final CameraException cameraException = (CameraException) throwable;
LOG.e("uncaughtException:", "Got CameraException:", cameraException,
"on state:", getState());
if (fromExceptionHandler) {
// Got to restart the handler.
thread.interrupt();
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
}
mCallback.dispatchError(cameraException); /**
if (cameraException.isUnrecoverable()) { * Recreates the handler, to ensure we use a fresh one from now on.
// Stop everything (if needed) without notifying teardown errors. * If we suspect that handler is currently stuck, the orchestrator should be reset
stop(true); * because it hosts a chain of tasks and the last one will never complete.
} * @param resetOrchestrator true to reset
*/
private void recreateHandler(boolean resetOrchestrator) {
if (mHandler != null) mHandler.destroy();
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
if (resetOrchestrator) mOrchestrator.reset();
} }
//endregion //endregion
@ -326,31 +346,53 @@ public abstract class CameraEngine implements
* Calls {@link #stop(boolean)} and waits for it. * Calls {@link #stop(boolean)} and waits for it.
* Not final due to mockito requirements. * Not final due to mockito requirements.
* *
* If unrecoverably is true, this also releases resources and the engine will not be in a
* functional state after. If forever is false, this really is just a synchronous stop.
*
* NOTE: Should not be called on the orchestrator thread! This would cause deadlocks due to us * NOTE: Should not be called on the orchestrator thread! This would cause deadlocks due to us
* awaiting for {@link #stop(boolean)} to return. * awaiting for {@link #stop(boolean)} to return.
*/ */
public void destroy() { public void destroy(boolean unrecoverably) {
LOG.i("DESTROY:", "state:", getState(), "thread:", Thread.currentThread()); destroy(unrecoverably, 0);
// Prevent CameraEngine leaks. Don't set to null, or exceptions }
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler()); private void destroy(boolean unrecoverably, int depth) {
// Stop if needed, synchronously and silently. LOG.i("DESTROY:", "state:", getState(),
"thread:", Thread.currentThread(),
"depth:", depth,
"unrecoverably:", unrecoverably);
if (unrecoverably) {
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
}
// Cannot use Tasks.await() because we might be on the UI thread. // Cannot use Tasks.await() because we might be on the UI thread.
final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch latch = new CountDownLatch(1);
stop(true).addOnCompleteListener( stop(true).addOnCompleteListener(
WorkerHandler.get().getExecutor(), mHandler.getExecutor(),
new OnCompleteListener<Void>() { new OnCompleteListener<Void>() {
@Override @Override
public void onComplete(@NonNull Task<Void> task) { public void onComplete(@NonNull Task<Void> task) {
latch.countDown(); latch.countDown();
} }
}); });
try { try {
boolean success = latch.await(3, TimeUnit.SECONDS); boolean success = latch.await(6, TimeUnit.SECONDS);
if (!success) { if (!success) {
LOG.e("Probably some deadlock in destroy.", // This thread is likely stuck. The reason might be deadlock issues in the internal
// camera implementation, at least in emulators: see Camera1Engine and Camera2Engine
// onStopEngine() implementation and comments.
LOG.e("DESTROY: Could not destroy synchronously after 6 seconds.",
"Current thread:", Thread.currentThread(), "Current thread:", Thread.currentThread(),
"Handler thread: ", mHandler.getThread()); "Handler thread:", mHandler.getThread());
depth++;
if (depth < DESTROY_RETRIES) {
recreateHandler(true);
LOG.e("DESTROY: Trying again on thread:", mHandler.getThread());
destroy(unrecoverably, depth);
} else {
LOG.w("DESTROY: Giving up because DESTROY_RETRIES was reached.");
}
} }
} catch (InterruptedException ignore) {} } catch (InterruptedException ignore) {}
} }
@ -406,19 +448,24 @@ public abstract class CameraEngine implements
private Task<Void> startEngine() { private Task<Void> startEngine() {
return mOrchestrator.scheduleStateChange(CameraState.OFF, CameraState.ENGINE, return mOrchestrator.scheduleStateChange(CameraState.OFF, CameraState.ENGINE,
true, true,
new Callable<Task<Void>>() { new Callable<Task<CameraOptions>>() {
@Override @Override
public Task<Void> call() { public Task<CameraOptions> call() {
if (!collectCameraInfo(mFacing)) { if (!collectCameraInfo(mFacing)) {
LOG.e("onStartEngine:", "No camera available for facing", mFacing); LOG.e("onStartEngine:", "No camera available for facing", mFacing);
throw new CameraException(CameraException.REASON_NO_CAMERA); throw new CameraException(CameraException.REASON_NO_CAMERA);
} }
return onStartEngine(); return onStartEngine();
} }
}).addOnSuccessListener(new OnSuccessListener<Void>() { }).onSuccessTask(new SuccessContinuation<CameraOptions, Void>() {
@NonNull
@Override @Override
public void onSuccess(Void aVoid) { public Task<Void> then(@Nullable CameraOptions cameraOptions) {
mCallback.dispatchOnCameraOpened(mCameraOptions); // Put this on the outer task so we're sure it's called after getState() is changed.
// This was breaking some tests on rare occasions.
if (cameraOptions == null) throw new RuntimeException("Null options!");
mCallback.dispatchOnCameraOpened(cameraOptions);
return Tasks.forResult(null);
} }
}); });
} }
@ -436,6 +483,8 @@ public abstract class CameraEngine implements
}).addOnSuccessListener(new OnSuccessListener<Void>() { }).addOnSuccessListener(new OnSuccessListener<Void>() {
@Override @Override
public void onSuccess(Void aVoid) { public void onSuccess(Void aVoid) {
// Put this on the outer task so we're sure it's called after getState() is OFF.
// This was breaking some tests on rare occasions.
mCallback.dispatchOnCameraClosed(); mCallback.dispatchOnCameraClosed();
} }
}); });
@ -447,7 +496,7 @@ public abstract class CameraEngine implements
*/ */
@NonNull @NonNull
@EngineThread @EngineThread
protected abstract Task<Void> onStartEngine(); protected abstract Task<CameraOptions> onStartEngine();
/** /**
* Stops the engine. * Stops the engine.

@ -11,7 +11,9 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler; import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import java.util.ArrayDeque; import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException; import java.util.concurrent.CancellationException;
@ -40,9 +42,9 @@ public class CameraOrchestrator {
protected static class Token { protected static class Token {
public final String name; public final String name;
public final Task<Void> task; public final Task<?> task;
private Token(@NonNull String name, @NonNull Task<Void> task) { private Token(@NonNull String name, @NonNull Task<?> task) {
this.name = name; this.name = name;
this.task = task; this.task = task;
} }
@ -76,39 +78,42 @@ public class CameraOrchestrator {
}); });
} }
@SuppressWarnings("unchecked")
@NonNull @NonNull
public Task<Void> schedule(@NonNull final String name, public <T> Task<T> schedule(@NonNull final String name,
final boolean dispatchExceptions, final boolean dispatchExceptions,
@NonNull final Callable<Task<Void>> job) { @NonNull final Callable<Task<T>> job) {
LOG.i(name.toUpperCase(), "- Scheduling."); LOG.i(name.toUpperCase(), "- Scheduling.");
final TaskCompletionSource<Void> source = new TaskCompletionSource<>(); final TaskCompletionSource<T> source = new TaskCompletionSource<>();
final WorkerHandler handler = mCallback.getJobWorker(name); final WorkerHandler handler = mCallback.getJobWorker(name);
synchronized (mLock) { synchronized (mLock) {
applyCompletionListener(mJobs.getLast().task, handler, applyCompletionListener(mJobs.getLast().task, handler,
new OnCompleteListener<Void>() { new OnCompleteListener() {
@Override @Override
public void onComplete(@NonNull Task<Void> task) { public void onComplete(@NonNull Task task) {
synchronized (mLock) { synchronized (mLock) {
mJobs.removeFirst(); mJobs.removeFirst();
ensureToken(); ensureToken();
} }
try { try {
LOG.i(name.toUpperCase(), "- Executing."); LOG.i(name.toUpperCase(), "- Executing.");
Task<Void> inner = job.call(); Task<T> inner = job.call();
applyCompletionListener(inner, handler, new OnCompleteListener<Void>() { applyCompletionListener(inner, handler, new OnCompleteListener<T>() {
@Override @Override
public void onComplete(@NonNull Task<Void> task) { public void onComplete(@NonNull Task<T> task) {
Exception e = task.getException(); Exception e = task.getException();
LOG.i(name.toUpperCase(), "- Finished.", e);
if (e != null) { if (e != null) {
LOG.w(name.toUpperCase(), "- Finished with ERROR.", e);
if (dispatchExceptions) { if (dispatchExceptions) {
mCallback.handleJobException(name, e); mCallback.handleJobException(name, e);
} }
source.trySetException(e); source.trySetException(e);
} else if (task.isCanceled()) { } else if (task.isCanceled()) {
LOG.i(name.toUpperCase(), "- Finished because ABORTED.");
source.trySetException(new CancellationException()); source.trySetException(new CancellationException());
} else { } else {
source.trySetResult(null); LOG.i(name.toUpperCase(), "- Finished.");
source.trySetResult(task.getResult());
} }
} }
}); });
@ -151,24 +156,38 @@ public class CameraOrchestrator {
mCallback.getJobWorker(name).remove(mDelayedJobs.get(name)); mCallback.getJobWorker(name).remove(mDelayedJobs.get(name));
mDelayedJobs.remove(name); mDelayedJobs.remove(name);
} }
Token token = new Token(name, Tasks.<Void>forResult(null)); Token token = new Token(name, Tasks.forResult(null));
//noinspection StatementWithEmptyBody //noinspection StatementWithEmptyBody
while (mJobs.remove(token)) { /* do nothing */ } while (mJobs.remove(token)) { /* do nothing */ }
ensureToken(); ensureToken();
} }
} }
public void reset() {
synchronized (mLock) {
List<String> all = new ArrayList<>();
//noinspection CollectionAddAllCanBeReplacedWithConstructor
all.addAll(mDelayedJobs.keySet());
for (Token token : mJobs) {
all.add(token.name);
}
for (String job : all) {
remove(job);
}
}
}
private void ensureToken() { private void ensureToken() {
synchronized (mLock) { synchronized (mLock) {
if (mJobs.isEmpty()) { if (mJobs.isEmpty()) {
mJobs.add(new Token("BASE", Tasks.<Void>forResult(null))); mJobs.add(new Token("BASE", Tasks.forResult(null)));
} }
} }
} }
private static void applyCompletionListener(@NonNull final Task<Void> task, private static <T> void applyCompletionListener(@NonNull final Task<T> task,
@NonNull WorkerHandler handler, @NonNull WorkerHandler handler,
@NonNull final OnCompleteListener<Void> listener) { @NonNull final OnCompleteListener<T> listener) {
if (task.isComplete()) { if (task.isComplete()) {
handler.run(new Runnable() { handler.run(new Runnable() {
@Override @Override

@ -37,7 +37,8 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
public boolean hasPendingStateChange() { public boolean hasPendingStateChange() {
synchronized (mLock) { synchronized (mLock) {
for (Token token : mJobs) { for (Token token : mJobs) {
if (token.name.contains(" > ") && !token.task.isComplete()) { if ((token.name.contains(" >> ") || token.name.contains(" << "))
&& !token.task.isComplete()) {
return true; return true;
} }
} }
@ -46,28 +47,29 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
} }
@NonNull @NonNull
public Task<Void> scheduleStateChange(@NonNull final CameraState fromState, public <T> Task<T> scheduleStateChange(@NonNull final CameraState fromState,
@NonNull final CameraState toState, @NonNull final CameraState toState,
boolean dispatchExceptions, boolean dispatchExceptions,
@NonNull final Callable<Task<Void>> stateChange) { @NonNull final Callable<Task<T>> stateChange) {
final int changeCount = ++mStateChangeCount; final int changeCount = ++mStateChangeCount;
mTargetState = toState; mTargetState = toState;
final boolean isTearDown = !toState.isAtLeast(fromState); final boolean isTearDown = !toState.isAtLeast(fromState);
final String changeName = fromState.name() + " > " + toState.name(); final String name = isTearDown ? fromState.name() + " << " + toState.name()
return schedule(changeName, dispatchExceptions, new Callable<Task<Void>>() { : fromState.name() + " >> " + toState.name();
return schedule(name, dispatchExceptions, new Callable<Task<T>>() {
@Override @Override
public Task<Void> call() throws Exception { public Task<T> call() throws Exception {
if (getCurrentState() != fromState) { if (getCurrentState() != fromState) {
LOG.w(changeName.toUpperCase(), "- State mismatch, aborting. current:", LOG.w(name.toUpperCase(), "- State mismatch, aborting. current:",
getCurrentState(), "from:", fromState, "to:", toState); getCurrentState(), "from:", fromState, "to:", toState);
return Tasks.forCanceled(); return Tasks.forCanceled();
} else { } else {
Executor executor = mCallback.getJobWorker(changeName).getExecutor(); Executor executor = mCallback.getJobWorker(name).getExecutor();
return stateChange.call().continueWithTask(executor, return stateChange.call().continueWithTask(executor,
new Continuation<Void, Task<Void>>() { new Continuation<T, Task<T>>() {
@Override @Override
public Task<Void> then(@NonNull Task<Void> task) { public Task<T> then(@NonNull Task<T> task) {
if (task.isSuccessful() || isTearDown) { if (task.isSuccessful() || isTearDown) {
mCurrentState = toState; mCurrentState = toState;
} }
@ -76,9 +78,9 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
}); });
} }
} }
}).addOnCompleteListener(new OnCompleteListener<Void>() { }).addOnCompleteListener(new OnCompleteListener<T>() {
@Override @Override
public void onComplete(@NonNull Task<Void> task) { public void onComplete(@NonNull Task<T> task) {
if (changeCount == mStateChangeCount) { if (changeCount == mStateChangeCount) {
mTargetState = mCurrentState; mTargetState = mCurrentState;
} }

@ -1,8 +1,11 @@
package com.otaliastudios.cameraview.internal; package com.otaliastudios.cameraview.internal;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo; import android.media.MediaCodecInfo;
import android.media.MediaCodecList; import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build; import android.os.Build;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
@ -49,8 +52,9 @@ import java.util.List;
* - MediaCodecList (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#322) * - MediaCodecList (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#322)
* *
* To be fair, what {@link android.media.MediaRecorder} does is actually choose the first one * To be fair, what {@link android.media.MediaRecorder} does is actually choose the first one
* that configures itself without errors. We currently do not offer this option here. * that configures itself without errors. We offer this option through
* TODO add a tryConfigure() step, that throws AudioException/VideoException ? * {@link #tryConfigureVideo(String, Size, int, int)} and
* {@link #tryConfigureAudio(String, int, int, int)}.
* *
* 2. {@link #MODE_PREFER_HARDWARE} * 2. {@link #MODE_PREFER_HARDWARE}
* *
@ -350,4 +354,64 @@ public class DeviceEncoders {
} }
} }
@SuppressLint("NewApi")
public void tryConfigureVideo(@NonNull String mimeType,
@NonNull Size size,
int frameRate,
int bitRate) {
if (mVideoEncoder != null) {
MediaCodec codec = null;
try {
MediaFormat format = MediaFormat.createVideoFormat(mimeType, size.getWidth(),
size.getHeight());
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
codec = MediaCodec.createByCodecName(mVideoEncoder.getName());
codec.configure(format, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
throw new VideoException("Failed to configure video codec: " + e.getMessage());
} finally {
if (codec != null) {
try {
codec.release();
} catch (Exception ignore) {}
}
}
}
}
@SuppressLint("NewApi")
public void tryConfigureAudio(@NonNull String mimeType,
int bitRate,
int sampleRate,
int channels) {
if (mAudioEncoder != null) {
MediaCodec codec = null;
try {
final MediaFormat format = MediaFormat.createAudioFormat(mimeType, sampleRate,
channels);
int channelMask = channels == 2 ? AudioFormat.CHANNEL_IN_STEREO
: AudioFormat.CHANNEL_IN_MONO;
format.setInteger(MediaFormat.KEY_CHANNEL_MASK, channelMask);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
codec = MediaCodec.createByCodecName(mAudioEncoder.getName());
codec.configure(format, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
throw new AudioException("Failed to configure video audio: " + e.getMessage());
} finally {
if (codec != null) {
try {
codec.release();
} catch (Exception ignore) {}
}
}
}
}
} }

@ -89,18 +89,26 @@ public class WorkerHandler {
get().post(action); get().post(action);
} }
private String mName;
private HandlerThread mThread; private HandlerThread mThread;
private Handler mHandler; private Handler mHandler;
private Executor mExecutor; private Executor mExecutor;
private WorkerHandler(@NonNull String name) { private WorkerHandler(@NonNull String name) {
mThread = new HandlerThread(name); mName = name;
mThread = new HandlerThread(name) {
@NonNull
@Override
public String toString() {
return super.toString() + "[" + getThreadId() + "]";
}
};
mThread.setDaemon(true); mThread.setDaemon(true);
mThread.start(); mThread.start();
mHandler = new Handler(mThread.getLooper()); mHandler = new Handler(mThread.getLooper());
mExecutor = new Executor() { mExecutor = new Executor() {
@Override @Override
public void execute(Runnable command) { public void execute(@NonNull Runnable command) {
WorkerHandler.this.run(command); WorkerHandler.this.run(command);
} }
}; };
@ -245,6 +253,10 @@ public class WorkerHandler {
// after quit(), the thread will die at some point in the future. Might take some ms. // after quit(), the thread will die at some point in the future. Might take some ms.
// try { handler.getThread().join(); } catch (InterruptedException ignore) {} // try { handler.getThread().join(); } catch (InterruptedException ignore) {}
} }
// This should not be needed, but just to be sure, let's remove it from cache.
// For example, interrupt() won't interrupt the thread if it's blocked - it will throw
// an exception instead.
sCache.remove(mName);
} }
/** /**

@ -2,8 +2,8 @@ package com.otaliastudios.cameraview.picture;
import android.hardware.Camera; import android.hardware.Camera;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.internal.utils.ExifHelper; import com.otaliastudios.cameraview.internal.utils.ExifHelper;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
@ -16,18 +16,16 @@ import java.io.IOException;
/** /**
* A {@link PictureResult} that uses standard APIs. * A {@link PictureResult} that uses standard APIs.
*/ */
public class Full1PictureRecorder extends PictureRecorder { public class Full1PictureRecorder extends FullPictureRecorder {
private static final String TAG = Full1PictureRecorder.class.getSimpleName(); private final Camera mCamera;
@SuppressWarnings("unused") private final Camera1Engine mEngine;
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera mCamera;
public Full1PictureRecorder(@NonNull PictureResult.Stub stub, public Full1PictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener, @NonNull Camera1Engine engine,
@NonNull Camera camera) { @NonNull Camera camera) {
super(stub, listener); super(stub, engine);
mEngine = engine;
mCamera = camera; mCamera = camera;
// We set the rotation to the camera parameters, but we don't know if the result will be // We set the rotation to the camera parameters, but we don't know if the result will be
@ -39,10 +37,15 @@ public class Full1PictureRecorder extends PictureRecorder {
@Override @Override
public void take() { public void take() {
LOG.i("take() called.");
// Stopping the preview callback is important on older APIs / emulators,
// or takePicture can hang and leave the camera in a bad state.
mCamera.setPreviewCallbackWithBuffer(null);
mCamera.takePicture( mCamera.takePicture(
new Camera.ShutterCallback() { new Camera.ShutterCallback() {
@Override @Override
public void onShutter() { public void onShutter() {
LOG.i("take(): got onShutter callback.");
dispatchOnShutter(true); dispatchOnShutter(true);
} }
}, },
@ -51,6 +54,7 @@ public class Full1PictureRecorder extends PictureRecorder {
new Camera.PictureCallback() { new Camera.PictureCallback() {
@Override @Override
public void onPictureTaken(byte[] data, final Camera camera) { public void onPictureTaken(byte[] data, final Camera camera) {
LOG.i("take(): got picture callback.");
int exifRotation; int exifRotation;
try { try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data)); ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
@ -63,16 +67,19 @@ public class Full1PictureRecorder extends PictureRecorder {
} }
mResult.data = data; mResult.data = data;
mResult.rotation = exifRotation; mResult.rotation = exifRotation;
LOG.i("take(): starting preview again. ", Thread.currentThread());
camera.setPreviewCallbackWithBuffer(mEngine);
camera.startPreview(); // This is needed, read somewhere in the docs. camera.startPreview(); // This is needed, read somewhere in the docs.
dispatchResult(); dispatchResult();
} }
} }
); );
LOG.i("take() returned.");
} }
@Override @Override
protected void dispatchResult() { protected void dispatchResult() {
mCamera = null; LOG.i("dispatching result. Thread:", Thread.currentThread());
super.dispatchResult(); super.dispatchResult();
} }
} }

@ -9,7 +9,6 @@ import android.media.Image;
import android.media.ImageReader; import android.media.ImageReader;
import android.os.Build; import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.PictureFormat; import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.Camera2Engine; import com.otaliastudios.cameraview.engine.Camera2Engine;
@ -33,12 +32,9 @@ import androidx.exifinterface.media.ExifInterface;
* A {@link PictureResult} that uses standard APIs. * A {@link PictureResult} that uses standard APIs.
*/ */
@RequiresApi(Build.VERSION_CODES.LOLLIPOP) @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2PictureRecorder extends PictureRecorder public class Full2PictureRecorder extends FullPictureRecorder
implements ImageReader.OnImageAvailableListener { implements ImageReader.OnImageAvailableListener {
private static final String TAG = Full2PictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private final ActionHolder mHolder; private final ActionHolder mHolder;
private final Action mAction; private final Action mAction;
private final ImageReader mPictureReader; private final ImageReader mPictureReader;
@ -71,6 +67,7 @@ public class Full2PictureRecorder extends PictureRecorder
mResult = null; mResult = null;
mError = e; mError = e;
dispatchResult(); dispatchResult();
setState(STATE_COMPLETED);
} }
} }
@ -148,7 +145,9 @@ public class Full2PictureRecorder extends PictureRecorder
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL); ExifInterface.ORIENTATION_NORMAL);
mResult.rotation = ExifHelper.getOrientation(exifOrientation); mResult.rotation = ExifHelper.getOrientation(exifOrientation);
} catch (IOException ignore) { } } catch (IOException ignore) {
// Should not happen
}
} }
private void readRawImage(@NonNull Image image) { private void readRawImage(@NonNull Image image) {

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.picture;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
/**
* Helps with logging.
*/
public abstract class FullPictureRecorder extends PictureRecorder {
private static final String TAG = FullPictureRecorder.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
public FullPictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
super(stub, listener);
}
}

@ -1,14 +1,11 @@
package com.otaliastudios.cameraview.picture; package com.otaliastudios.cameraview.picture;
import android.graphics.ImageFormat;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.YuvImage; import android.graphics.YuvImage;
import android.hardware.Camera; import android.hardware.Camera;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera1Engine; import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference; import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.utils.CropHelper; import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.RotationHelper; import com.otaliastudios.cameraview.internal.utils.RotationHelper;
@ -23,11 +20,7 @@ import java.io.ByteArrayOutputStream;
/** /**
* A {@link PictureRecorder} that uses standard APIs. * A {@link PictureRecorder} that uses standard APIs.
*/ */
public class Snapshot1PictureRecorder extends PictureRecorder { public class Snapshot1PictureRecorder extends SnapshotPictureRecorder {
private static final String TAG = Snapshot1PictureRecorder.class.getSimpleName();
@SuppressWarnings("unused")
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera1Engine mEngine1; private Camera1Engine mEngine1;
private Camera mCamera; private Camera mCamera;

@ -10,7 +10,6 @@ import android.os.Build;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi; import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera2Engine; import com.otaliastudios.cameraview.engine.Camera2Engine;
import com.otaliastudios.cameraview.engine.action.Action; import com.otaliastudios.cameraview.engine.action.Action;
@ -44,8 +43,6 @@ import com.otaliastudios.cameraview.size.AspectRatio;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP) @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder { public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
private final static String TAG = Snapshot2PictureRecorder.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
private final static long LOCK_TIMEOUT = 2500; private final static long LOCK_TIMEOUT = 2500;
private class FlashAction extends BaseAction { private class FlashAction extends BaseAction {

@ -9,7 +9,6 @@ import android.opengl.EGLContext;
import android.opengl.Matrix; import android.opengl.Matrix;
import android.os.Build; import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult; import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.internal.egl.EglBaseSurface; import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
import com.otaliastudios.cameraview.overlay.Overlay; import com.otaliastudios.cameraview.overlay.Overlay;
@ -54,10 +53,7 @@ import android.view.Surface;
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface, * 2. We have overlays to be drawn - we don't want to draw them on the preview surface,
* not even for a frame. * not even for a frame.
*/ */
public class SnapshotGlPictureRecorder extends PictureRecorder { public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
private static final String TAG = SnapshotGlPictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private CameraEngine mEngine; private CameraEngine mEngine;
private GlCameraPreview mPreview; private GlCameraPreview mPreview;

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.picture;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
/**
* Helps with logging.
*/
public abstract class SnapshotPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotPictureRecorder.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
public SnapshotPictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
super(stub, listener);
}
}

@ -3,9 +3,7 @@ package com.otaliastudios.cameraview.video;
import android.hardware.Camera; import android.hardware.Camera;
import android.media.CamcorderProfile; import android.media.CamcorderProfile;
import android.media.MediaRecorder; import android.media.MediaRecorder;
import android.util.Log;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.Camera1Engine; import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.internal.utils.CamcorderProfiles; import com.otaliastudios.cameraview.internal.utils.CamcorderProfiles;
@ -19,9 +17,6 @@ import androidx.annotation.NonNull;
*/ */
public class Full1VideoRecorder extends FullVideoRecorder { public class Full1VideoRecorder extends FullVideoRecorder {
private static final String TAG = Full1VideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private final Camera1Engine mEngine; private final Camera1Engine mEngine;
private final Camera mCamera; private final Camera mCamera;
private final int mCameraId; private final int mCameraId;

@ -1,19 +1,14 @@
package com.otaliastudios.cameraview.video; package com.otaliastudios.cameraview.video;
import android.annotation.SuppressLint;
import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.CamcorderProfile; import android.media.CamcorderProfile;
import android.media.MediaRecorder; import android.media.MediaRecorder;
import android.os.Build; import android.os.Build;
import android.view.Surface; import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.Camera2Engine; import com.otaliastudios.cameraview.engine.Camera2Engine;
import com.otaliastudios.cameraview.engine.action.Action; import com.otaliastudios.cameraview.engine.action.Action;
import com.otaliastudios.cameraview.engine.action.ActionCallback;
import com.otaliastudios.cameraview.engine.action.ActionHolder; import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.BaseAction; import com.otaliastudios.cameraview.engine.action.BaseAction;
import com.otaliastudios.cameraview.engine.action.CompletionCallback; import com.otaliastudios.cameraview.engine.action.CompletionCallback;
@ -31,9 +26,6 @@ import androidx.annotation.RequiresApi;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP) @RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2VideoRecorder extends FullVideoRecorder { public class Full2VideoRecorder extends FullVideoRecorder {
private static final String TAG = Full2VideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private ActionHolder mHolder; private ActionHolder mHolder;
private final String mCameraId; private final String mCameraId;
private Surface mInputSurface; private Surface mInputSurface;

@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.video;
import android.media.CamcorderProfile; import android.media.CamcorderProfile;
import android.media.MediaRecorder; import android.media.MediaRecorder;
import android.os.Handler;
import com.otaliastudios.cameraview.CameraLogger; import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult; import com.otaliastudios.cameraview.VideoResult;
@ -29,7 +30,7 @@ import androidx.annotation.Nullable;
public abstract class FullVideoRecorder extends VideoRecorder { public abstract class FullVideoRecorder extends VideoRecorder {
private static final String TAG = FullVideoRecorder.class.getSimpleName(); private static final String TAG = FullVideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG); protected static final CameraLogger LOG = CameraLogger.create(TAG);
@SuppressWarnings("WeakerAccess") protected MediaRecorder mMediaRecorder; @SuppressWarnings("WeakerAccess") protected MediaRecorder mMediaRecorder;
private CamcorderProfile mProfile; private CamcorderProfile mProfile;
@ -76,15 +77,22 @@ public abstract class FullVideoRecorder extends VideoRecorder {
@SuppressWarnings("SameParameterValue") @SuppressWarnings("SameParameterValue")
private boolean prepareMediaRecorder(@NonNull VideoResult.Stub stub, private boolean prepareMediaRecorder(@NonNull VideoResult.Stub stub,
boolean applyEncodersConstraints) { boolean applyEncodersConstraints) {
LOG.i("prepareMediaRecorder:", "Preparing on thread", Thread.currentThread());
// 1. Create reference and ask for the CamcorderProfile // 1. Create reference and ask for the CamcorderProfile
mMediaRecorder = new MediaRecorder(); mMediaRecorder = new MediaRecorder();
mProfile = getCamcorderProfile(stub); mProfile = getCamcorderProfile(stub);
// 2. Set the video and audio sources. // 2. Set the video and audio sources.
applyVideoSource(stub, mMediaRecorder); applyVideoSource(stub, mMediaRecorder);
boolean hasAudio = stub.audio == Audio.ON int audioChannels = 0;
|| stub.audio == Audio.MONO if (stub.audio == Audio.ON) {
|| stub.audio == Audio.STEREO; audioChannels = mProfile.audioChannels;
} else if (stub.audio == Audio.MONO) {
audioChannels = 1;
} else if (stub.audio == Audio.STEREO) {
audioChannels = 2;
}
boolean hasAudio = audioChannels > 0;
if (hasAudio) { if (hasAudio) {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT); mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
} }
@ -149,18 +157,35 @@ public abstract class FullVideoRecorder extends VideoRecorder {
LOG.i("prepareMediaRecorder:", "Checking DeviceEncoders...", LOG.i("prepareMediaRecorder:", "Checking DeviceEncoders...",
"videoOffset:", videoEncoderOffset, "videoOffset:", videoEncoderOffset,
"audioOffset:", audioEncoderOffset); "audioOffset:", audioEncoderOffset);
DeviceEncoders encoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER, DeviceEncoders encoders;
videoType, audioType, videoEncoderOffset, audioEncoderOffset); try {
encoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
} catch (RuntimeException e) {
LOG.w("prepareMediaRecorder:", "Could not respect encoders parameters.",
"Trying again without checking encoders.");
return prepareMediaRecorder(stub, false);
}
try { try {
newVideoSize = encoders.getSupportedVideoSize(stub.size); newVideoSize = encoders.getSupportedVideoSize(stub.size);
newVideoBitRate = encoders.getSupportedVideoBitRate(stub.videoBitRate); newVideoBitRate = encoders.getSupportedVideoBitRate(stub.videoBitRate);
newAudioBitRate = encoders.getSupportedAudioBitRate(stub.audioBitRate);
newVideoFrameRate = encoders.getSupportedVideoFrameRate(newVideoSize, newVideoFrameRate = encoders.getSupportedVideoFrameRate(newVideoSize,
stub.videoFrameRate); stub.videoFrameRate);
encoders.tryConfigureVideo(videoType, newVideoSize, newVideoFrameRate,
newVideoBitRate);
if (hasAudio) {
newAudioBitRate = encoders.getSupportedAudioBitRate(stub.audioBitRate);
encoders.tryConfigureAudio(audioType, newAudioBitRate,
mProfile.audioSampleRate, audioChannels);
}
encodersFound = true; encodersFound = true;
} catch (DeviceEncoders.VideoException videoException) { } catch (DeviceEncoders.VideoException videoException) {
LOG.i("prepareMediaRecorder:", "Got VideoException:",
videoException.getMessage());
videoEncoderOffset++; videoEncoderOffset++;
} catch (DeviceEncoders.AudioException audioException) { } catch (DeviceEncoders.AudioException audioException) {
LOG.i("prepareMediaRecorder:", "Got AudioException:",
audioException.getMessage());
audioEncoderOffset++; audioEncoderOffset++;
} }
} }
@ -183,13 +208,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
// 6B. Configure MediaRecorder from stub and from profile (audio) // 6B. Configure MediaRecorder from stub and from profile (audio)
if (hasAudio) { if (hasAudio) {
if (stub.audio == Audio.ON) { mMediaRecorder.setAudioChannels(audioChannels);
mMediaRecorder.setAudioChannels(mProfile.audioChannels);
} else if (stub.audio == Audio.MONO) {
mMediaRecorder.setAudioChannels(1);
} else if (stub.audio == Audio.STEREO) {
mMediaRecorder.setAudioChannels(2);
}
mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate); mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
mMediaRecorder.setAudioEncoder(mProfile.audioCodec); mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
mMediaRecorder.setAudioEncodingBitRate(stub.audioBitRate); mMediaRecorder.setAudioEncodingBitRate(stub.audioBitRate);
@ -203,21 +222,49 @@ public abstract class FullVideoRecorder extends VideoRecorder {
} }
mMediaRecorder.setOutputFile(stub.file.getAbsolutePath()); mMediaRecorder.setOutputFile(stub.file.getAbsolutePath());
mMediaRecorder.setOrientationHint(stub.rotation); mMediaRecorder.setOrientationHint(stub.rotation);
mMediaRecorder.setMaxFileSize(stub.maxSize); // When using MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, the recorder might have stopped
// before calling it. But this creates issues on Camera2 Legacy devices - they need a
// callback BEFORE the recorder stops (see Camera2Engine). For this reason, we increase
// the max size and use MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING instead.
// Would do this with max duration as well but there's no such callback.
mMediaRecorder.setMaxFileSize(stub.maxSize <= 0 ? stub.maxSize
: Math.round(stub.maxSize / 0.9D));
LOG.i("prepareMediaRecorder:", "Increased max size from", stub.maxSize, "to",
Math.round(stub.maxSize / 0.9D));
mMediaRecorder.setMaxDuration(stub.maxDuration); mMediaRecorder.setMaxDuration(stub.maxDuration);
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() { mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override @Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) { public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
LOG.i("OnInfoListener:", "Received info", what, extra,
"Thread: ", Thread.currentThread());
boolean shouldStop = false;
switch (what) { switch (what) {
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED: case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED; mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
stop(false); shouldStop = true;
break; break;
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING:
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED: case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
// On rare occasions APPROACHING is not called. Make sure we listen to
// REACHED as well.
mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED; mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
stop(false); shouldStop = true;
break; break;
} }
if (shouldStop) {
LOG.i("OnInfoListener:", "Stopping");
stop(false);
}
}
});
mMediaRecorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
@Override
public void onError(MediaRecorder mr, int what, int extra) {
LOG.e("OnErrorListener: got error", what, extra, ". Stopping.");
mResult = null;
mError = new RuntimeException("MediaRecorder error: " + what + " " + extra);
LOG.i("OnErrorListener:", "Stopping");
stop(false);
} }
}); });
@ -259,7 +306,10 @@ public abstract class FullVideoRecorder extends VideoRecorder {
if (mMediaRecorder != null) { if (mMediaRecorder != null) {
dispatchVideoRecordingEnd(); dispatchVideoRecordingEnd();
try { try {
LOG.i("stop:", "Stopping MediaRecorder...");
// TODO HANGS (rare, emulator only)
mMediaRecorder.stop(); mMediaRecorder.stop();
LOG.i("stop:", "Stopped MediaRecorder.");
} catch (Exception e) { } catch (Exception e) {
// This can happen if stopVideo() is called right after takeVideo() // This can happen if stopVideo() is called right after takeVideo()
// (in which case we don't care). Or when prepare()/start() have failed for // (in which case we don't care). Or when prepare()/start() have failed for
@ -271,7 +321,17 @@ public abstract class FullVideoRecorder extends VideoRecorder {
mError = e; mError = e;
} }
} }
mMediaRecorder.release(); try {
LOG.i("stop:", "Releasing MediaRecorder...");
mMediaRecorder.release();
LOG.i("stop:", "Released MediaRecorder.");
} catch (Exception e) {
mResult = null;
if (mError == null) {
LOG.w("stop:", "Error while releasing media recorder.", e);
mError = e;
}
}
} }
mProfile = null; mProfile = null;
mMediaRecorder = null; mMediaRecorder = null;

@ -146,6 +146,19 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
case DEVICE_DEFAULT: videoType = "video/avc"; break; case DEVICE_DEFAULT: videoType = "video/avc"; break;
} }
String audioType = "audio/mp4a-latm"; String audioType = "audio/mp4a-latm";
TextureConfig videoConfig = new TextureConfig();
AudioConfig audioConfig = new AudioConfig();
// See if we have audio
int audioChannels = 0;
if (mResult.audio == Audio.ON) {
audioChannels = audioConfig.channels;
} else if (mResult.audio == Audio.MONO) {
audioChannels = 1;
} else if (mResult.audio == Audio.STEREO) {
audioChannels = 2;
}
boolean hasAudio = audioChannels > 0;
// Check the availability of values // Check the availability of values
Size newVideoSize = null; Size newVideoSize = null;
@ -157,18 +170,42 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
boolean encodersFound = false; boolean encodersFound = false;
DeviceEncoders deviceEncoders = null; DeviceEncoders deviceEncoders = null;
while (!encodersFound) { while (!encodersFound) {
LOG.i("Checking DeviceEncoders...",
"videoOffset:", videoEncoderOffset,
"audioOffset:", audioEncoderOffset);
try {
deviceEncoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
} catch (RuntimeException e) {
LOG.w("Could not respect encoders parameters.",
"Going on again without checking encoders, possibly failing.");
newVideoSize = mResult.size;
newVideoBitRate = mResult.videoBitRate;
newVideoFrameRate = mResult.videoFrameRate;
newAudioBitRate = mResult.audioBitRate;
break;
}
deviceEncoders = new DeviceEncoders(DeviceEncoders.MODE_PREFER_HARDWARE, deviceEncoders = new DeviceEncoders(DeviceEncoders.MODE_PREFER_HARDWARE,
videoType, audioType, videoEncoderOffset, audioEncoderOffset); videoType, audioType, videoEncoderOffset, audioEncoderOffset);
try { try {
newVideoSize = deviceEncoders.getSupportedVideoSize(mResult.size); newVideoSize = deviceEncoders.getSupportedVideoSize(mResult.size);
newVideoBitRate = deviceEncoders.getSupportedVideoBitRate(mResult.videoBitRate); newVideoBitRate = deviceEncoders.getSupportedVideoBitRate(mResult.videoBitRate);
newAudioBitRate = deviceEncoders.getSupportedAudioBitRate(mResult.audioBitRate);
newVideoFrameRate = deviceEncoders.getSupportedVideoFrameRate(newVideoSize, newVideoFrameRate = deviceEncoders.getSupportedVideoFrameRate(newVideoSize,
mResult.videoFrameRate); mResult.videoFrameRate);
deviceEncoders.tryConfigureVideo(videoType, newVideoSize, newVideoFrameRate,
newVideoBitRate);
if (hasAudio) {
newAudioBitRate = deviceEncoders
.getSupportedAudioBitRate(mResult.audioBitRate);
deviceEncoders.tryConfigureAudio(audioType, newAudioBitRate,
audioConfig.samplingFrequency, audioChannels);
}
encodersFound = true; encodersFound = true;
} catch (DeviceEncoders.VideoException videoException) { } catch (DeviceEncoders.VideoException videoException) {
LOG.i("Got VideoException:", videoException.getMessage());
videoEncoderOffset++; videoEncoderOffset++;
} catch (DeviceEncoders.AudioException audioException) { } catch (DeviceEncoders.AudioException audioException) {
LOG.i("Got AudioException:", audioException.getMessage());
audioEncoderOffset++; audioEncoderOffset++;
} }
} }
@ -178,7 +215,6 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
mResult.videoFrameRate = newVideoFrameRate; mResult.videoFrameRate = newVideoFrameRate;
// Video // Video
TextureConfig videoConfig = new TextureConfig();
videoConfig.width = mResult.size.getWidth(); videoConfig.width = mResult.size.getWidth();
videoConfig.height = mResult.size.getHeight(); videoConfig.height = mResult.size.getHeight();
videoConfig.bitRate = mResult.videoBitRate; videoConfig.bitRate = mResult.videoBitRate;
@ -206,12 +242,9 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
// Audio // Audio
AudioMediaEncoder audioEncoder = null; AudioMediaEncoder audioEncoder = null;
if (mResult.audio == Audio.ON || mResult.audio == Audio.MONO if (hasAudio) {
|| mResult.audio == Audio.STEREO) {
AudioConfig audioConfig = new AudioConfig();
audioConfig.bitRate = mResult.audioBitRate; audioConfig.bitRate = mResult.audioBitRate;
if (mResult.audio == Audio.MONO) audioConfig.channels = 1; audioConfig.channels = audioChannels;
if (mResult.audio == Audio.STEREO) audioConfig.channels = 2;
audioConfig.encoder = deviceEncoders.getAudioEncoder(); audioConfig.encoder = deviceEncoders.getAudioEncoder();
audioEncoder = new AudioMediaEncoder(audioConfig); audioEncoder = new AudioMediaEncoder(audioConfig);
} }
@ -231,16 +264,17 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
} }
if (mCurrentState == STATE_RECORDING) { if (mCurrentState == STATE_RECORDING) {
LOG.v("dispatching frame."); LOG.i("scheduling frame.");
TextureMediaEncoder textureEncoder
= (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
frame.timestampNanos = surfaceTexture.getTimestamp();
// NOTE: this is an approximation but it seems to work:
frame.timestampMillis = System.currentTimeMillis();
surfaceTexture.getTransformMatrix(frame.transform);
synchronized (mEncoderEngineLock) { synchronized (mEncoderEngineLock) {
if (mEncoderEngine != null) { // Can be null on teardown. if (mEncoderEngine != null) { // Can be null on teardown.
LOG.i("dispatching frame.");
TextureMediaEncoder textureEncoder
= (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
frame.timestampNanos = surfaceTexture.getTimestamp();
// NOTE: this is an approximation but it seems to work:
frame.timestampMillis = System.currentTimeMillis();
surfaceTexture.getTransformMatrix(frame.transform);
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame); mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
} }
} }

@ -73,6 +73,7 @@ public abstract class VideoRecorder {
"Ignoring. state:", mState); "Ignoring. state:", mState);
return; return;
} }
LOG.i("start:", "Changed state to STATE_RECORDING");
mState = STATE_RECORDING; mState = STATE_RECORDING;
} }
mResult = stub; mResult = stub;
@ -91,6 +92,7 @@ public abstract class VideoRecorder {
"Ignoring. isCameraShutdown:", isCameraShutdown); "Ignoring. isCameraShutdown:", isCameraShutdown);
return; return;
} }
LOG.i("stop:", "Changed state to STATE_STOPPING");
mState = STATE_STOPPING; mState = STATE_STOPPING;
} }
onStop(isCameraShutdown); onStop(isCameraShutdown);
@ -123,10 +125,15 @@ public abstract class VideoRecorder {
*/ */
protected final void dispatchResult() { protected final void dispatchResult() {
synchronized (mStateLock) { synchronized (mStateLock) {
if (!isRecording()) return; if (!isRecording()) {
LOG.w("dispatchResult:", "Called, but not recording! Aborting.");
return;
}
LOG.i("dispatchResult:", "Changed state to STATE_IDLE.");
mState = STATE_IDLE; mState = STATE_IDLE;
} }
onDispatchResult(); onDispatchResult();
LOG.i("dispatchResult:", "About to dispatch result:", mResult, mError);
if (mListener != null) { if (mListener != null) {
mListener.onVideoResult(mResult, mError); mListener.onVideoResult(mResult, mError);
} }
@ -148,6 +155,7 @@ public abstract class VideoRecorder {
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
@CallSuper @CallSuper
protected void dispatchVideoRecordingStart() { protected void dispatchVideoRecordingStart() {
LOG.i("dispatchVideoRecordingStart:", "About to dispatch.");
if (mListener != null) { if (mListener != null) {
mListener.onVideoRecordingStart(); mListener.onVideoRecordingStart();
} }
@ -160,6 +168,7 @@ public abstract class VideoRecorder {
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
@CallSuper @CallSuper
protected void dispatchVideoRecordingEnd() { protected void dispatchVideoRecordingEnd() {
LOG.i("dispatchVideoRecordingEnd:", "About to dispatch.");
if (mListener != null) { if (mListener != null) {
mListener.onVideoRecordingEnd(); mListener.onVideoRecordingEnd();
} }

@ -16,11 +16,11 @@ public class AudioConfig {
public int channels = 1; public int channels = 1;
public String encoder; public String encoder;
public String mimeType = "audio/mp4a-latm"; public String mimeType = "audio/mp4a-latm";
public int samplingFrequency = 44100; // samples/sec
// Not configurable options (for now) // Not configurable options (for now)
final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
// The 44.1KHz frequency is the only setting guaranteed to be available on all devices. // The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
final int samplingFrequency = 44100; // samples/sec
// If sampleSizePerChannel changes, review noise introduction // If sampleSizePerChannel changes, review noise introduction
final int sampleSizePerChannel = 2; // byte/sample/channel [16bit]. final int sampleSizePerChannel = 2; // byte/sample/channel [16bit].
final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
@ -28,10 +28,11 @@ public class AudioConfig {
@NonNull @NonNull
AudioConfig copy() { AudioConfig copy() {
AudioConfig config = new AudioConfig(); AudioConfig config = new AudioConfig();
config.bitRate = this.bitRate; config.bitRate = bitRate;
config.channels = this.channels; config.channels = channels;
config.encoder = this.encoder; config.encoder = encoder;
config.mimeType = mimeType; config.mimeType = mimeType;
config.samplingFrequency = samplingFrequency;
return config; return config;
} }

@ -349,14 +349,17 @@ public class AudioMediaEncoder extends MediaEncoder {
*/ */
private class AudioEncodingThread extends Thread { private class AudioEncodingThread extends Thread {
private AudioEncodingThread() { private AudioEncodingThread() {
setPriority(Thread.MAX_PRIORITY); // Not sure about this... This thread can do VERY time consuming operations,
// and slowing down the preview/camera threads can break them e.g. hit internal
// timeouts for camera requests to be consumed.
// setPriority(Thread.MAX_PRIORITY);
} }
@Override @Override
public void run() { public void run() {
encoding: while (true) { encoding: while (true) {
if (mInputBufferQueue.isEmpty()) { if (mInputBufferQueue.isEmpty()) {
skipFrames(2); skipFrames(3);
} else { } else {
LOG.v("encoding thread - performing", mInputBufferQueue.size(), LOG.v("encoding thread - performing", mInputBufferQueue.size(),
"pending operations."); "pending operations.");
@ -387,7 +390,7 @@ public class AudioMediaEncoder extends MediaEncoder {
} else if (tryAcquireInputBuffer(inputBuffer)) { } else if (tryAcquireInputBuffer(inputBuffer)) {
encode(inputBuffer); encode(inputBuffer);
} else { } else {
skipFrames(1); skipFrames(3);
} }
} }
} }

@ -397,7 +397,7 @@ public abstract class MediaEncoder {
@SuppressLint("LogNotTimber") @SuppressLint("LogNotTimber")
@SuppressWarnings("WeakerAccess") @SuppressWarnings("WeakerAccess")
protected final void drainOutput(boolean drainAll) { protected final void drainOutput(boolean drainAll) {
LOG.v(mName, "DRAINING - EOS:", drainAll); LOG.i(mName, "DRAINING - EOS:", drainAll);
if (mMediaCodec == null) { if (mMediaCodec == null) {
LOG.e("drain() was called before prepare() or after releasing."); LOG.e("drain() was called before prepare() or after releasing.");
return; return;
@ -407,6 +407,7 @@ public abstract class MediaEncoder {
} }
while (true) { while (true) {
int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, OUTPUT_TIMEOUT_US); int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, OUTPUT_TIMEOUT_US);
LOG.i(mName, "DRAINING - Got status:", encoderStatus);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet // no output available yet
if (!drainAll) break; // out of while if (!drainAll) break; // out of while
@ -534,14 +535,17 @@ public abstract class MediaEncoder {
* max length allowed. We will move to {@link #STATE_LIMIT_REACHED} and request a stop. * max length allowed. We will move to {@link #STATE_LIMIT_REACHED} and request a stop.
*/ */
private void onMaxLengthReached() { private void onMaxLengthReached() {
if (mMaxLengthReached) return; if (mMaxLengthReached) {
mMaxLengthReached = true; LOG.w(mName, "onMaxLengthReached: Called twice.");
if (mState >= STATE_LIMIT_REACHED) {
LOG.w(mName, "onMaxLengthReached: Reached in wrong state. Aborting.", mState);
} else { } else {
LOG.w(mName, "onMaxLengthReached: Requesting a stop."); mMaxLengthReached = true;
setState(STATE_LIMIT_REACHED); if (mState >= STATE_LIMIT_REACHED) {
mController.requestStop(mTrackIndex); LOG.w(mName, "onMaxLengthReached: Reached in wrong state. Aborting.", mState);
} else {
LOG.w(mName, "onMaxLengthReached: Requesting a stop.");
setState(STATE_LIMIT_REACHED);
mController.requestStop(mTrackIndex);
}
} }
} }

@ -119,12 +119,13 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
@Override @Override
protected boolean shouldRenderFrame(long timestampUs) { protected boolean shouldRenderFrame(long timestampUs) {
if (!super.shouldRenderFrame(timestampUs)) { if (!super.shouldRenderFrame(timestampUs)) {
LOG.i("shouldRenderFrame - Dropping frame because of super()");
return false; return false;
} else if (mFrameNumber <= 10) { } else if (mFrameNumber <= 10) {
// Always render the first few frames, or muxer fails. // Always render the first few frames, or muxer fails.
return true; return true;
} else if (getPendingEvents(FRAME_EVENT) > 2) { } else if (getPendingEvents(FRAME_EVENT) > 2) {
LOG.v("shouldRenderFrame - Dropping, we already have too many pending events:", LOG.i("shouldRenderFrame - Dropping, we already have too many pending events:",
getPendingEvents(FRAME_EVENT)); getPendingEvents(FRAME_EVENT));
return false; return false;
} else { } else {
@ -177,17 +178,21 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
} }
// First, drain any previous data. // First, drain any previous data.
LOG.v("onEvent -", LOG.i("onEvent -",
"frameNumber:", mFrameNumber, "frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(), "timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- draining."); "- draining.");
drainOutput(false); drainOutput(false);
// Then draw on the surface. // Then draw on the surface.
LOG.v("onEvent -", LOG.i("onEvent -",
"frameNumber:", mFrameNumber, "frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(), "timestampUs:", frame.timestampUs(),
"- rendering."); "hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- drawing.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some // 1. We must scale this matrix like GlCameraPreview does, because it might have some
// cropping. Scaling takes place with respect to the (0, 0, 0) point, so we must apply // cropping. Scaling takes place with respect to the (0, 0, 0) point, so we must apply
@ -218,6 +223,12 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
Matrix.translateM(mConfig.overlayDrawer.getTransform(), Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, -0.5F, -0.5F, 0); 0, -0.5F, -0.5F, 0);
} }
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- gl rendering.");
mViewport.drawFrame(frame.timestampUs(), mConfig.textureId, transform); mViewport.drawFrame(frame.timestampUs(), mConfig.textureId, transform);
if (mConfig.hasOverlay()) { if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.render(frame.timestampUs()); mConfig.overlayDrawer.render(frame.timestampUs());
@ -225,6 +236,12 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mWindow.setPresentationTime(frame.timestampNanos); mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers(); mWindow.swapBuffers();
mFramePool.recycle(frame); mFramePool.recycle(frame);
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- gl rendered.");
} }
@Override @Override

@ -1,15 +1,15 @@
coverage: coverage:
precision: 1 precision: 1
round: down round: nearest
range: "30...70" range: "10...90"
status: status:
project: project:
default: default:
target: 40% target: 70%
patch: patch:
default: default:
target: 70% target: 60%
changes: no changes: no
comment: comment:

Loading…
Cancel
Save