Testing and stability improvements (#696)

pull/708/head
Mattia Iavarone 5 years ago committed by GitHub
parent 3db6fd3fc0
commit 4a6b9be905
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 43
      .github/workflows/build.yml
  2. 14
      .github/workflows/emulator_script.sh
  3. 25
      cameraview/build.gradle
  4. 14
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java
  5. 3
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
  6. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java
  7. 26
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java
  8. 428
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
  9. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
  10. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/GridLinesLayoutTest.java
  11. 6
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/WorkerHandlerTest.java
  12. 4
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/markers/MarkerLayoutTest.java
  13. 12
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Emulator.java
  14. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/Retry.java
  15. 52
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/RetryRule.java
  16. 2
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExclude.java
  17. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkExcludeFilter.java
  18. 20
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkInclude.java
  19. 47
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/tools/SdkIncludeFilter.java
  20. 7
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraUtils.java
  21. 44
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
  22. 77
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
  23. 168
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
  24. 161
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
  25. 55
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/orchestrator/CameraOrchestrator.java
  26. 30
      cameraview/src/main/java/com/otaliastudios/cameraview/engine/orchestrator/CameraStateOrchestrator.java
  27. 68
      cameraview/src/main/java/com/otaliastudios/cameraview/internal/DeviceEncoders.java
  28. 16
      cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/WorkerHandler.java
  29. 27
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full1PictureRecorder.java
  30. 11
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Full2PictureRecorder.java
  31. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/FullPictureRecorder.java
  32. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java
  33. 3
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot2PictureRecorder.java
  34. 6
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotGlPictureRecorder.java
  35. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/picture/SnapshotPictureRecorder.java
  36. 5
      cameraview/src/main/java/com/otaliastudios/cameraview/video/Full1VideoRecorder.java
  37. 8
      cameraview/src/main/java/com/otaliastudios/cameraview/video/Full2VideoRecorder.java
  38. 96
      cameraview/src/main/java/com/otaliastudios/cameraview/video/FullVideoRecorder.java
  39. 64
      cameraview/src/main/java/com/otaliastudios/cameraview/video/SnapshotVideoRecorder.java
  40. 11
      cameraview/src/main/java/com/otaliastudios/cameraview/video/VideoRecorder.java
  41. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioConfig.java
  42. 9
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/AudioMediaEncoder.java
  43. 20
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/MediaEncoder.java
  44. 25
      cameraview/src/main/java/com/otaliastudios/cameraview/video/encoding/TextureMediaEncoder.java
  45. 8
      codecov.yml

@ -1,4 +1,5 @@
# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
# Renaming ? Change the README badge.
name: Build
on:
push:
@ -37,28 +38,46 @@ jobs:
name: Emulator Tests
runs-on: macOS-latest
strategy:
fail-fast: false
matrix:
# TODO 29 fails due to Mockito issues, probably reproducible locally.
# 22, 23, 24, 25, 26, 27, 28 work - some of them, with SdkExclude restrictions.
# TODO 29 fails due to Mockito issues, probably reproducible locally
# 22-28 work (some of them, with SdkExclude restrictions)
EMULATOR_API: [22, 23, 24, 25, 26, 27, 28]
EMULATOR_ARCH: [x86_64]
include:
- EMULATOR_API: 28
EMULATOR_ARCH: x86_64
- EMULATOR_API: 27
EMULATOR_ARCH: x86_64
- EMULATOR_API: 26
EMULATOR_ARCH: x86_64
- EMULATOR_API: 25
EMULATOR_ARCH: x86
- EMULATOR_API: 24
EMULATOR_ARCH: x86
- EMULATOR_API: 23
EMULATOR_ARCH: x86
- EMULATOR_API: 22
EMULATOR_ARCH: x86
steps:
- uses: actions/checkout@v1
- uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Execute emulator tests
uses: reactivecircus/android-emulator-runner@v2
timeout-minutes: 20
uses: reactivecircus/android-emulator-runner@v2.2.0
with:
api-level: ${{ matrix.EMULATOR_API }}
arch: ${{ matrix.EMULATOR_ARCH }}
disable-animations: true
emulator-options: -no-snapshot -no-window -no-boot-anim -camera-back emulated -camera-front emulated -memory 2048
script: ./gradlew cameraview:connectedCheck
profile: Nexus 5X
emulator-options: -no-snapshot -no-window -no-boot-anim -camera-back emulated -camera-front emulated -gpu swiftshader_indirect
emulator-build: 6031357
script: ./.github/workflows/emulator_script.sh
- name: Upload emulator tests artifact
uses: actions/upload-artifact@v1
with:
name: emulator_tests
name: emulator_tests_${{ matrix.EMULATOR_API }}
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected
CODE_COVERAGE:
name: Code Coverage Report
@ -72,13 +91,15 @@ jobs:
- name: Download unit tests artifact
uses: actions/download-artifact@v1
with:
name: unit_tests
path: ./cameraview/build/jacoco/
name: unit_tests
path: ./cameraview/build/jacoco/
- name: Download emulator tests artifact
uses: actions/download-artifact@v1
with:
name: emulator_tests
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected
# 27 is the EMULATOR_API with less SdkExclude annotations, and should have
# the best possible coverage.
name: emulator_tests_27
path: ./cameraview/build/outputs/code_coverage/debugAndroidTest/connected
- name: Create merged coverage report
run: ./gradlew cameraview:mergeCoverageReports
- name: Upload merged coverage report (GitHub)

@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Core
ADB_TAGS="CameraView:I CameraCallbacks:I CameraOrchestrator:I CameraEngine:I"
ADB_TAGS="$ADB_TAGS CameraUtils:I WorkerHandler:I"
# Recorders
ADB_TAGS="$ADB_TAGS VideoRecorder:I FullVideoRecorder:I SnapshotVideoRecorder:I"
ADB_TAGS="$ADB_TAGS FullPictureRecorder:I SnapshotPictureRecorder:I DeviceEncoders:I"
# Video encoders
ADB_TAGS="$ADB_TAGS MediaEncoderEngine:I MediaEncoder:I AudioMediaEncoder:I VideoMediaEncoder:I TextureMediaEncoder:I"
# Debugging
ADB_TAGS="$ADB_TAGS CameraIntegrationTest:I MessageQueue:W MPEG4Writer:I"
adb logcat -c
adb logcat $ADB_TAGS *:E -v color &
./gradlew cameraview:connectedCheck

@ -17,7 +17,9 @@ android {
versionCode 1
versionName project.version
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
testInstrumentationRunnerArgument "filter", "com.otaliastudios.cameraview.tools.SdkExcludeFilter"
testInstrumentationRunnerArgument "filter", "" +
"com.otaliastudios.cameraview.tools.SdkExcludeFilter," +
"com.otaliastudios.cameraview.tools.SdkIncludeFilter"
}
buildTypes {
@ -241,17 +243,16 @@ task mergeCoverageReports(type: JacocoReport) {
if (isCI) {
// All these classes are tested by the integration tests that we are not able to
// run on the CI emulator.
classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*')
classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*')
classFilter.add('**/com/otaliastudios/cameraview/picture/**.*')
classFilter.add('**/com/otaliastudios/cameraview/video/**.*')
// TODO these below could be easily testable ALSO outside of the integration tests
classFilter.add('**/com/otaliastudios/cameraview/orchestrator/**.*')
classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/CameraEngine**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/Camera1Engine**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/Camera2Engine**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/action/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/lock/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/engine/meter/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/picture/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/video/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/orchestrator/**.*')
// classFilter.add('**/com/otaliastudios/cameraview/video/encoding/**.*')
}
// We don't test OpenGL filters.
classFilter.add('**/com/otaliastudios/cameraview/filters/**.*')

@ -10,6 +10,7 @@ import android.os.PowerManager;
import androidx.annotation.NonNull;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.GrantPermissionRule;
import com.otaliastudios.cameraview.tools.Op;
@ -89,19 +90,6 @@ public class BaseTest {
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
protected static void grantAllPermissions() {
grantPermission("android.permission.CAMERA");
grantPermission("android.permission.RECORD_AUDIO");
grantPermission("android.permission.WRITE_EXTERNAL_STORAGE");
}
@SuppressWarnings("WeakerAccess")
protected static void grantPermission(@NonNull String permission) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) return;
String command = "pm grant " + getContext().getPackageName() + " " + permission;
InstrumentationRegistry.getInstrumentation().getUiAutomation().executeShellCommand(command);
}
@NonNull
protected static Stubber doCountDown(@NonNull final CountDownLatch latch) {
return doAnswer(new Answer<Object>() {

@ -6,6 +6,7 @@ import android.content.res.TypedArray;
import android.graphics.PointF;
import android.location.Location;
import androidx.annotation.NonNull;
import androidx.test.annotation.UiThreadTest;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.MediumTest;
@ -135,7 +136,7 @@ public class CameraViewTest extends BaseTest {
public void testDestroy() {
cameraView.destroy();
verify(mockPreview, times(1)).onDestroy();
verify(mockController, times(1)).destroy();
verify(mockController, times(1)).destroy(true);
}
//region testDefaults

@ -18,8 +18,8 @@ import androidx.test.filters.RequiresDevice;
*/
@RunWith(AndroidJUnit4.class)
@LargeTest
@RequiresDevice
public class Camera1IntegrationTest extends CameraIntegrationTest {
// @RequiresDevice
public class Camera1IntegrationTest extends CameraIntegrationTest<Camera1Engine> {
@NonNull
@Override

@ -1,7 +1,9 @@
package com.otaliastudios.cameraview.engine;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
@ -24,8 +26,8 @@ import java.util.concurrent.CountDownLatch;
*/
@RunWith(AndroidJUnit4.class)
@LargeTest
@RequiresDevice
public class Camera2IntegrationTest extends CameraIntegrationTest {
// @RequiresDevice
public class Camera2IntegrationTest extends CameraIntegrationTest<Camera2Engine> {
@NonNull
@Override
@ -39,7 +41,6 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
// Extra wait for the first frame to be dispatched.
// This is because various classes require getLastResult to be non-null
// and that's typically the case in a real app.
Camera2Engine engine = (Camera2Engine) controller;
final CountDownLatch latch = new CountDownLatch(1);
new BaseAction() {
@Override
@ -50,7 +51,7 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
latch.countDown();
setState(STATE_COMPLETED);
}
}.start(engine);
}.start(controller);
try { latch.await(); } catch (InterruptedException ignore) {}
}
@ -58,4 +59,21 @@ public class Camera2IntegrationTest extends CameraIntegrationTest {
protected long getMeteringTimeoutMillis() {
return Camera2Engine.METER_TIMEOUT;
}
/**
* setMaxDuration can crash on legacy devices (most emulator are), and I don't see
* any way to fix this in code. They shouldn't use Camera2 at all.
* @return true if possible.
*/
@Override
protected boolean canSetVideoMaxDuration() {
if (!super.canSetVideoMaxDuration()) return false;
boolean shouldOpen = !camera.isOpened();
if (shouldOpen) openSync(true);
boolean result = controller.readCharacteristic(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
!= CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY;
if (shouldOpen) closeSync(true);
return result;
}
}

@ -28,14 +28,20 @@ import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.size.SizeSelectors;
import com.otaliastudios.cameraview.tools.Emulator;
import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.tools.Retry;
import com.otaliastudios.cameraview.tools.RetryRule;
import com.otaliastudios.cameraview.tools.SdkExclude;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.test.rule.ActivityTestRule;
import androidx.test.rule.GrantPermissionRule;
import org.junit.After;
import org.junit.Before;
@ -64,89 +70,102 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public abstract class CameraIntegrationTest extends BaseTest {
public abstract class CameraIntegrationTest<E extends CameraEngine> extends BaseTest {
private final static CameraLogger LOG = CameraLogger.create(CameraIntegrationTest.class.getSimpleName());
private final static long DELAY = 8000;
@Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
public ActivityTestRule<TestActivity> activityRule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera;
protected CameraEngine controller;
private CameraListener listener;
private Op<Throwable> uiExceptionOp;
@Rule
public RetryRule retryRule = new RetryRule(3);
@BeforeClass
public static void grant() {
grantAllPermissions();
}
@Rule
public GrantPermissionRule permissionRule = GrantPermissionRule.grant(
"android.permission.CAMERA",
"android.permission.RECORD_AUDIO",
"android.permission.WRITE_EXTERNAL_STORAGE"
);
protected CameraView camera;
protected E controller;
private CameraListener listener;
private Op<Throwable> error;
@NonNull
protected abstract Engine getEngine();
@Before
public void setUp() {
LOG.e("Test started. Setting up camera.");
LOG.w("[TEST STARTED]", "Setting up camera.");
WorkerHandler.destroyAll();
uiSync(new Runnable() {
@Override
public void run() {
camera = new CameraView(rule.getActivity()) {
camera = new CameraView(activityRule.getActivity()) {
@NonNull
@Override
protected CameraEngine instantiateCameraEngine(@NonNull Engine engine, @NonNull CameraEngine.Callback callback) {
controller = super.instantiateCameraEngine(getEngine(), callback);
protected CameraEngine instantiateCameraEngine(
@NonNull Engine engine,
@NonNull CameraEngine.Callback callback) {
//noinspection unchecked
controller = (E) super.instantiateCameraEngine(getEngine(), callback);
return controller;
}
};
listener = mock(CameraListener.class);
camera.setExperimental(true);
camera.setEngine(getEngine());
camera.addCameraListener(listener);
rule.getActivity().inflate(camera);
// Ensure that controller exceptions are thrown on this thread (not on the UI thread).
// TODO this makes debugging for wrong tests very hard, as we don't get the exception
// unless waitForUiException() is called.
uiExceptionOp = new Op<>();
WorkerHandler crashThread = WorkerHandler.get("CrashThread");
crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
uiExceptionOp.controller().end(e);
}
});
controller.mCrashHandler = crashThread.getHandler();
activityRule.getActivity().inflate(camera);
}
});
listener = mock(CameraListener.class);
camera.addCameraListener(listener);
error = new Op<>();
WorkerHandler crashThread = WorkerHandler.get("CrashThread");
crashThread.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(@NonNull Thread thread, @NonNull Throwable exception) {
error.controller().end(exception);
}
});
controller.mCrashHandler = crashThread.getHandler();
camera.addCameraListener(new CameraListener() {
@Override
public void onCameraError(@NonNull CameraException exception) {
super.onCameraError(exception);
if (exception.isUnrecoverable()) {
LOG.e("[UNRECOVERABLE CAMERAEXCEPTION]",
"Got unrecoverable exception, not clear what to do in a test.");
}
}
});
}
@After
public void tearDown() {
LOG.e("Test ended. Tearing down camera.");
LOG.w("[TEST ENDED]", "Tearing down camera.");
camera.destroy();
WorkerHandler.destroyAll();
LOG.w("[TEST ENDED]", "Torn down camera.");
}
private void waitForUiException() throws Throwable {
Throwable throwable = uiExceptionOp.await(DELAY);
if (throwable != null) {
throw throwable;
}
}
private CameraOptions openSync(boolean expectSuccess) {
camera.open();
protected final CameraOptions openSync(boolean expectSuccess) {
final Op<CameraOptions> open = new Op<>();
doEndOp(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
camera.open();
CameraOptions result = open.await(DELAY);
if (expectSuccess) {
LOG.i("[OPEN SYNC]", "Expecting success.");
assertNotNull("Can open", result);
onOpenSync();
} else {
LOG.i("[OPEN SYNC]", "Expecting failure.");
assertNull("Should not open", result);
}
return result;
@ -160,14 +179,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
while (controller.getState() != CameraState.PREVIEW) {}
}
private void closeSync(boolean expectSuccess) {
camera.close();
protected final void closeSync(boolean expectSuccess) {
final Op<Boolean> close = new Op<>();
doEndOp(close, true).when(listener).onCameraClosed();
camera.close();
Boolean result = close.await(DELAY);
if (expectSuccess) {
LOG.i("[CLOSE SYNC]", "Expecting success.");
assertNotNull("Can close", result);
} else {
LOG.i("[CLOSE SYNC]", "Expecting failure.");
assertNull("Should not close", result);
}
}
@ -175,45 +196,64 @@ public abstract class CameraIntegrationTest extends BaseTest {
@SuppressWarnings("UnusedReturnValue")
@Nullable
private VideoResult waitForVideoResult(boolean expectSuccess) {
// CountDownLatch for onVideoRecordingEnd.
CountDownLatch onVideoRecordingEnd = new CountDownLatch(1);
doCountDown(onVideoRecordingEnd).when(listener).onVideoRecordingEnd();
// Op for onVideoTaken.
final Op<VideoResult> video = new Op<>();
doEndOp(video, 0).when(listener).onVideoTaken(any(VideoResult.class));
doEndOp(video, null).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
Op<Boolean> wait1 = new Op<>();
Op<VideoResult> wait2 = new Op<>();
doEndOp(wait1, true).when(listener).onVideoRecordingEnd();
doEndOp(wait1, false).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@Override
public boolean matches(CameraException argument) {
return argument.getReason() == CameraException.REASON_VIDEO_FAILED;
}
}));
doEndOp(wait2, 0).when(listener).onVideoTaken(any(VideoResult.class));
doEndOp(wait2, null).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@Override
public boolean matches(CameraException argument) {
return argument.getReason() == CameraException.REASON_VIDEO_FAILED;
}
}));
int maxLoops = 10;
int loops = 0;
// Wait for onVideoTaken and check.
VideoResult result = video.await(DELAY);
// First wait for onVideoRecordingEnd().
// It seems that when running all the tests together, the device can go in some
// power saving mode which makes the CPU extremely slow. This is especially problematic
// with video snapshots where we do lots of processing. The videoEnd callback can return
// long after the actual stop() call, so if we're still processing, let's wait more.
if (expectSuccess && camera.isTakingVideo()) {
while (camera.isTakingVideo()) {
video.listen();
result = video.await(DELAY);
LOG.i("[WAIT VIDEO]", "Waiting for onVideoRecordingEnd()...");
Boolean wait1Result = wait1.await(DELAY);
if (expectSuccess) {
while (wait1Result == null && loops <= maxLoops) {
LOG.w("[WAIT VIDEO]", "Waiting extra", DELAY, "milliseconds...");
wait1.listen();
wait1Result = wait1.await(DELAY);
loops++;
}
}
// Now wait for onVideoResult(). One cycle should be enough.
LOG.i("[WAIT VIDEO]", "Waiting for onVideoTaken()...");
VideoResult wait2Result = wait2.await(DELAY);
if (expectSuccess) {
while (wait2Result == null && loops <= maxLoops) {
LOG.w("[WAIT VIDEO]", "Waiting extra", DELAY, "milliseconds...");
wait2.listen();
wait2Result = wait2.await(DELAY);
loops++;
}
// Sleep another 1000, because camera.isTakingVideo() might return false even
// if the result still has to be dispatched. Rare but could happen.
try { Thread.sleep(1000); } catch (InterruptedException ignore) {}
}
// Now we should be OK.
// Assert.
if (expectSuccess) {
assertEquals("Should call onVideoRecordingEnd", 0, onVideoRecordingEnd.getCount());
assertNotNull("Should end video", result);
assertNotNull("Should call onVideoRecordingEnd", wait1Result);
assertTrue("Should call onVideoRecordingEnd", wait1Result);
assertNotNull("Should call onVideoTaken", wait2Result);
} else {
assertNull("Should not end video", result);
assertTrue("Should not call onVideoRecordingEnd",
wait1Result == null || !wait1Result);
assertNull("Should not call onVideoTaken", wait2Result);
}
return result;
return wait2Result;
}
@Nullable
@ -228,18 +268,29 @@ public abstract class CameraIntegrationTest extends BaseTest {
}));
PictureResult result = pic.await(DELAY);
if (expectSuccess) {
LOG.i("[WAIT PICTURE]", "Expecting success.");
assertNotNull("Can take picture", result);
} else {
LOG.i("[WAIT PICTURE]", "Expecting failure.");
assertNull("Should not take picture", result);
}
return result;
}
private void takeVideoSync(boolean expectSuccess) {
/**
* Emulators do not respond well to setVideoMaxDuration() with full videos.
* The mediaRecorder.stop() call can hang forever.
* @return true if possible
*/
protected boolean canSetVideoMaxDuration() {
return !Emulator.isEmulator();
}
protected void takeVideoSync(boolean expectSuccess) {
takeVideoSync(expectSuccess,0);
}
private void takeVideoSync(boolean expectSuccess, int duration) {
protected void takeVideoSync(boolean expectSuccess, int duration) {
final Op<Boolean> op = new Op<>();
doEndOp(op, true).when(listener).onVideoRecordingStart();
doEndOp(op, false).when(listener).onCameraError(argThat(new ArgumentMatcher<CameraException>() {
@ -250,15 +301,33 @@ public abstract class CameraIntegrationTest extends BaseTest {
}));
File file = new File(getContext().getFilesDir(), "video.mp4");
if (duration > 0) {
camera.takeVideo(file, duration);
if (canSetVideoMaxDuration()) {
camera.takeVideo(file, duration);
} else {
final int delay = Math.round(duration * 1.2F); // Compensate for thread jumps, ...
uiSync(new Runnable() {
@Override
public void run() {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
camera.stopVideo();
}
}, delay);
}
});
camera.takeVideo(file);
}
} else {
camera.takeVideo(file);
}
Boolean result = op.await(DELAY);
if (expectSuccess) {
LOG.i("[WAIT VIDEO START]", "Expecting success.");
assertNotNull("should start video recording or get CameraError", result);
assertTrue("should start video recording successfully", result);
} else {
LOG.i("[WAIT VIDEO START]", "Expecting failure.");
assertTrue("should not start video recording", result == null || !result);
}
}
@ -285,36 +354,53 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
Boolean result = op.await(DELAY);
if (expectSuccess) {
LOG.i("[WAIT VIDEO SNAP START]", "Expecting success.");
assertNotNull("should start video recording or get CameraError", result);
assertTrue("should start video recording successfully", result);
} else {
LOG.i("[WAIT VIDEO SNAP START]", "Expecting failure.");
assertTrue("should not start video recording", result == null || !result);
}
}
private void waitForError() throws Throwable {
Throwable throwable = error.await(DELAY);
if (throwable != null) {
throw throwable;
}
}
//region test open/close
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOpenClose() {
assertEquals(controller.getState(), CameraState.OFF);
assertEquals(CameraState.OFF, controller.getState());
openSync(true);
assertTrue(controller.getState().isAtLeast(CameraState.ENGINE));
closeSync(true);
assertEquals(controller.getState(), CameraState.OFF);
assertEquals(CameraState.OFF, controller.getState());
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOpenTwice() {
openSync(true);
openSync(false);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCloseTwice() {
closeSync(false);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
// This works great on the device but crashes often on the emulator.
// There must be something wrong with the emulated camera...
// Like stopPreview() and release() are not really sync calls?
@ -333,6 +419,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartInitializesOptions() {
assertNull(camera.getCameraOptions());
openSync(true);
@ -345,6 +433,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
// Test things that should reset the camera.
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetFacing() throws Exception {
CameraOptions o = openSync(true);
int size = o.getSupportedFacing().size();
@ -362,6 +452,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetMode() throws Exception {
camera.setMode(Mode.PICTURE);
openSync(true);
@ -384,6 +476,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
// When camera is open, parameters will be set only if supported.
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetZoom() {
CameraOptions options = openSync(true);
float oldValue = camera.getZoom();
@ -400,6 +494,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetExposureCorrection() {
CameraOptions options = openSync(true);
float oldValue = camera.getExposureCorrection();
@ -416,6 +512,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetFlash() {
CameraOptions options = openSync(true);
Flash[] values = Flash.values();
@ -435,6 +533,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetWhiteBalance() {
CameraOptions options = openSync(true);
WhiteBalance[] values = WhiteBalance.values();
@ -453,6 +553,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetHdr() {
CameraOptions options = openSync(true);
Hdr[] values = Hdr.values();
@ -471,6 +573,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetAudio() {
openSync(true);
Audio[] values = Audio.values();
@ -481,6 +585,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetLocation() {
openSync(true);
camera.setLocation(10d, 2d);
@ -493,15 +599,22 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetPreviewFrameRate() {
openSync(true);
CameraOptions options = openSync(true);
camera.setPreviewFrameRate(30);
Op<Void> op = new Op<>(controller.mPreviewFrameRateTask);
op.await(300);
assertEquals(camera.getPreviewFrameRate(), 30, 0);
assertEquals(camera.getPreviewFrameRate(),
Math.min(options.getPreviewFrameRateMaxValue(),
Math.max(options.getPreviewFrameRateMinValue(), 30)),
0);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testSetPlaySounds() {
boolean oldValue = camera.getPlaySounds();
boolean newValue = !oldValue;
@ -530,17 +643,18 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region test takeVideo
@Test(expected = RuntimeException.class)
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartVideo_whileInPictureMode() throws Throwable {
// Fails on Travis. Some emulators can't deal with MediaRecorder
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
camera.setMode(Mode.PICTURE);
openSync(true);
takeVideoSync(false);
waitForUiException();
waitForError();
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testStartEndVideo() {
camera.setMode(Mode.VIDEO);
openSync(true);
@ -549,6 +663,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartEndVideoSnapshot() {
// TODO should check api level for snapshot?
openSync(true);
@ -557,6 +673,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testStartEndVideo_withManualStop() {
camera.setMode(Mode.VIDEO);
openSync(true);
@ -576,6 +694,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartEndVideoSnapshot_withManualStop() {
openSync(true);
takeVideoSnapshotSync(true);
@ -594,6 +714,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideo_withoutStarting() {
camera.setMode(Mode.VIDEO);
openSync(true);
@ -602,32 +724,54 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testEndVideo_withMaxSize() {
camera.setMode(Mode.VIDEO);
camera.setVideoMaxSize(3000*1000); // Less is risky
camera.setVideoSize(SizeSelectors.maxArea(480 * 360));
openSync(true);
// Assuming video frame rate is 20...
//noinspection ConstantConditions
camera.setVideoBitRate((int) estimateVideoBitRate(camera.getVideoSize(), 20));
camera.setVideoMaxSize(estimateVideoBytes(camera.getVideoBitRate(), 5000));
takeVideoSync(true);
waitForVideoResult(true);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideoSnapshot_withMaxSize() {
camera.setVideoMaxSize(3000*1000);
openSync(true);
camera.setSnapshotMaxWidth(480);
camera.setSnapshotMaxHeight(480);
// We don't want a very low FPS or the video frames are too sparse and recording
// can fail (e.g. audio reaching completion while video still struggling to start)
camera.setPreviewFrameRate(30F);
//noinspection ConstantConditions
camera.setVideoBitRate((int) estimateVideoBitRate(camera.getSnapshotSize(),
(int) camera.getPreviewFrameRate()));
camera.setVideoMaxSize(estimateVideoBytes(camera.getVideoBitRate(), 5000));
takeVideoSnapshotSync(true);
waitForVideoResult(true);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testEndVideo_withMaxDuration() {
camera.setMode(Mode.VIDEO);
camera.setVideoMaxDuration(4000);
openSync(true);
takeVideoSync(true);
waitForVideoResult(true);
if (canSetVideoMaxDuration()) {
camera.setMode(Mode.VIDEO);
camera.setVideoMaxDuration(4000);
openSync(true);
takeVideoSync(true);
waitForVideoResult(true);
}
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testEndVideoSnapshot_withMaxDuration() {
camera.setVideoMaxDuration(4000);
openSync(true);
@ -640,6 +784,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region startAutoFocus
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStartAutoFocus() {
CameraOptions o = openSync(true);
@ -657,6 +803,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testStopAutoFocus() {
CameraOptions o = openSync(true);
@ -681,12 +829,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region capture
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_beforeStarted() {
camera.takePicture();
waitForPictureResult(false);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_concurrentCalls() throws Exception {
// Second take should fail.
openSync(true);
@ -702,34 +854,57 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_size() {
// Decoding can fail for large bitmaps. set a small size.
// camera.setPictureSize(SizeSelectors.smallest());
openSync(true);
Size size = camera.getPictureSize();
assertNotNull(size);
camera.takePicture();
PictureResult result = waitForPictureResult(true);
assertNotNull(result);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
assertNotNull(bitmap);
assertEquals(result.getSize(), size);
assertEquals(bitmap.getWidth(), size.getWidth());
assertEquals(bitmap.getHeight(), size.getHeight());
assertNotNull(result.getData());
assertNull(result.getLocation());
assertFalse(result.isSnapshot());
assertEquals(result.getSize(), size);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(),
Integer.MAX_VALUE, Integer.MAX_VALUE);
if (bitmap != null) {
assertNotNull(bitmap);
String message = LOG.i("[PICTURE SIZE]", "Desired:", size, "Bitmap:",
new Size(bitmap.getWidth(), bitmap.getHeight()));
if (!Emulator.isEmulator()) {
assertEquals(message, bitmap.getWidth(), size.getWidth());
assertEquals(message, bitmap.getHeight(), size.getHeight());
} else {
// Emulator implementation sometimes does not rotate the image correctly.
assertTrue(message, bitmap.getWidth() == size.getWidth()
|| bitmap.getWidth() == size.getHeight());
assertTrue(message, bitmap.getHeight() == size.getWidth()
|| bitmap.getHeight() == size.getHeight());
assertEquals(message, size.getWidth() * size.getHeight(),
bitmap.getWidth() * bitmap.getHeight());
}
}
}
@Test(expected = RuntimeException.class)
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_whileInVideoMode() throws Throwable {
camera.setMode(Mode.VIDEO);
openSync(true);
camera.takePicture();
waitForUiException();
waitForError();
camera.takePicture();
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_withMetering() {
openSync(true);
camera.setPictureMetering(true);
@ -738,6 +913,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCapturePicture_withoutMetering() {
openSync(true);
camera.setPictureMetering(false);
@ -746,12 +923,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_beforeStarted() {
camera.takePictureSnapshot();
waitForPictureResult(false);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_concurrentCalls() throws Exception {
// Second take should fail.
openSync(true);
@ -767,6 +948,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_size() {
openSync(true);
Size size = camera.getSnapshotSize();
@ -775,17 +958,24 @@ public abstract class CameraIntegrationTest extends BaseTest {
PictureResult result = waitForPictureResult(true);
assertNotNull(result);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(), Integer.MAX_VALUE, Integer.MAX_VALUE);
assertNotNull(bitmap);
assertEquals(result.getSize(), size);
assertEquals(bitmap.getWidth(), size.getWidth());
assertEquals(bitmap.getHeight(), size.getHeight());
assertNotNull(result.getData());
assertNull(result.getLocation());
assertTrue(result.isSnapshot());
assertEquals(result.getSize(), size);
Bitmap bitmap = CameraUtils.decodeBitmap(result.getData(),
Integer.MAX_VALUE, Integer.MAX_VALUE);
if (bitmap != null) {
String message = LOG.i("[PICTURE SIZE]", "Desired:", size, "Bitmap:",
new Size(bitmap.getWidth(), bitmap.getHeight()));
assertNotNull(bitmap);
assertEquals(message, bitmap.getWidth(), size.getWidth());
assertEquals(message, bitmap.getHeight(), size.getHeight());
}
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_withMetering() {
openSync(true);
camera.setPictureSnapshotMetering(true);
@ -794,6 +984,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testCaptureSnapshot_withoutMetering() {
openSync(true);
camera.setPictureSnapshotMetering(false);
@ -807,6 +999,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
@SuppressWarnings("ConstantConditions")
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testPictureFormat_DNG() {
openSync(true);
if (camera.getCameraOptions().supports(PictureFormat.DNG)) {
@ -830,24 +1024,28 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region Frame Processing
private void assert30Frames(FrameProcessor mock) throws Exception {
// Expect 30 frames
CountDownLatch latch = new CountDownLatch(30);
private void assert15Frames(@NonNull FrameProcessor mock) throws Exception {
// Expect 15 frames. Time is very high because currently Camera2 keeps a very low FPS.
CountDownLatch latch = new CountDownLatch(15);
doCountDown(latch).when(mock).process(any(Frame.class));
boolean did = latch.await(15, TimeUnit.SECONDS);
boolean did = latch.await(30, TimeUnit.SECONDS);
assertTrue("Latch count should be 0: " + latch.getCount(), did);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_simple() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
openSync(true);
assert30Frames(processor);
assert15Frames(processor);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_afterSnapshot() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
@ -858,10 +1056,12 @@ public abstract class CameraIntegrationTest extends BaseTest {
camera.takePictureSnapshot();
waitForPictureResult(true);
assert30Frames(processor);
assert15Frames(processor);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_afterRestart() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
@ -869,11 +1069,13 @@ public abstract class CameraIntegrationTest extends BaseTest {
closeSync(true);
openSync(true);
assert30Frames(processor);
assert15Frames(processor);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 25, emulatorOnly = true)
public void testFrameProcessing_afterVideo() throws Exception {
FrameProcessor processor = mock(FrameProcessor.class);
camera.addFrameProcessor(processor);
@ -882,10 +1084,12 @@ public abstract class CameraIntegrationTest extends BaseTest {
takeVideoSync(true,4000);
waitForVideoResult(true);
assert30Frames(processor);
assert15Frames(processor);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_freezeRelease() throws Exception {
// Ensure that freeze/release cycles do not cause OOMs.
// There was a bug doing this and it might resurface for any improper
@ -895,7 +1099,7 @@ public abstract class CameraIntegrationTest extends BaseTest {
camera.addFrameProcessor(processor);
openSync(true);
assert30Frames(processor);
assert15Frames(processor);
}
public class FreezeReleaseFrameProcessor implements FrameProcessor {
@ -910,6 +1114,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
//region Overlays
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOverlay_forPictureSnapshot() {
Overlay overlay = mock(Overlay.class);
when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
@ -922,6 +1128,8 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testOverlay_forVideoSnapshot() {
Overlay overlay = mock(Overlay.class);
when(overlay.drawsOn(any(Overlay.Target.class))).thenReturn(true);
@ -934,4 +1142,16 @@ public abstract class CameraIntegrationTest extends BaseTest {
}
//endregion
@SuppressWarnings("SameParameterValue")
private static long estimateVideoBitRate(@NonNull Size size, int frameRate) {
// Nasty estimate for a LQ video
return Math.round(0.05D * size.getWidth() * size.getHeight() * frameRate);
}
@SuppressWarnings("SameParameterValue")
private static long estimateVideoBytes(long videoBitRate, long millis) {
// 1.3F accounts for audio.
return Math.round((videoBitRate * 1.3F) * (millis / 1000D) / 8D);
}
}

@ -40,8 +40,8 @@ public class MockCameraEngine extends CameraEngine {
@NonNull
@Override
protected Task<Void> onStartEngine() {
return Tasks.forResult(null);
protected Task<CameraOptions> onStartEngine() {
return Tasks.forResult(mCameraOptions);
}
@NonNull

@ -5,6 +5,8 @@ import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.TestActivity;
import com.otaliastudios.cameraview.controls.Grid;
import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.tools.Retry;
import com.otaliastudios.cameraview.tools.RetryRule;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -26,6 +28,9 @@ public class GridLinesLayoutTest extends BaseTest {
@Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
@Rule
public RetryRule retryRule = new RetryRule(3);
private GridLinesLayout layout;
@NonNull
@ -59,24 +64,28 @@ public class GridLinesLayoutTest extends BaseTest {
return result;
}
@Retry
@Test
public void testOff() {
int linesDrawn = setGridAndWait(Grid.OFF);
assertEquals(0, linesDrawn);
}
@Retry
@Test
public void test3x3() {
int linesDrawn = setGridAndWait(Grid.DRAW_3X3);
assertEquals(2, linesDrawn);
}
@Retry
@Test
public void testPhi() {
int linesDrawn = setGridAndWait(Grid.DRAW_PHI);
assertEquals(2, linesDrawn);
}
@Retry
@Test
public void test4x4() {
int linesDrawn = setGridAndWait(Grid.DRAW_4X4);

@ -220,12 +220,12 @@ public class WorkerHandlerTest extends BaseTest {
final WorkerHandler handler = WorkerHandler.get("handler");
assertTrue(handler.getThread().isAlive());
handler.destroy();
// Wait for the thread to die.
try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
assertFalse(handler.getThread().isAlive());
WorkerHandler newHandler = WorkerHandler.get("handler");
assertNotSame(handler, newHandler);
assertTrue(newHandler.getThread().isAlive());
// Ensure old thread dies at some point.
try { handler.getThread().join(500); } catch (InterruptedException ignore) {}
assertFalse(handler.getThread().isAlive());
}
@Test

@ -27,10 +27,10 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Not clear why, but for some reason on API 28 the UiThreadTests here crash for an internal NPE
* Not clear why, but for some reason on API 28+ the UiThreadTests here crash for an internal NPE
* in FrameLayout.onMeasure.
*/
@SdkExclude(minSdkVersion = 28, maxSdkVersion = 28)
@SdkExclude(minSdkVersion = 28, maxSdkVersion = 29)
@TargetApi(17)
public class MarkerLayoutTest extends BaseTest {

@ -0,0 +1,12 @@
package com.otaliastudios.cameraview.tools;
import android.os.Build;
public class Emulator {
public static boolean isEmulator() {
// From Android's RequiresDeviceFilter
return Build.HARDWARE.equals("goldfish")
|| Build.HARDWARE.equals("ranchu")
|| Build.HARDWARE.equals("gce_x86");
}
}

@ -0,0 +1,9 @@
package com.otaliastudios.cameraview.tools;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Marks a test method so that {@link RetryRule} will re-run it on failure,
 * as long as the rule still has retries available.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface Retry {
    /** When true, failed runs are only retried while executing on an emulator. */
    boolean emulatorOnly() default false;
}

@ -0,0 +1,52 @@
package com.otaliastudios.cameraview.tools;

import android.os.Build;

import com.otaliastudios.cameraview.CameraLogger;

import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * A {@link TestRule} that re-runs tests annotated with {@link Retry} when they fail.
 * The retry budget is shared across all tests using the same rule instance, so a
 * very flaky suite cannot retry indefinitely.
 */
public class RetryRule implements TestRule {

    private final static String TAG = RetryRule.class.getSimpleName();
    private final static CameraLogger LOG = CameraLogger.create(TAG);

    // Remaining extra attempts, shared across tests on purpose.
    private final AtomicInteger retries;

    public RetryRule(int retries) {
        this.retries = new AtomicInteger(retries);
    }

    @Override
    public Statement apply(final Statement base, final Description description) {
        return new Statement() {
            @Override
            public void evaluate() throws Throwable {
                Retry retry = description.getAnnotation(Retry.class);
                if (retry == null || (retry.emulatorOnly() && !Emulator.isEmulator())) {
                    // Not annotated (or emulator-only retries on a real device):
                    // run exactly once, with no retry handling.
                    base.evaluate();
                    return;
                }
                // FIX: the previous implementation looped on the shared counter, so once
                // the budget was exhausted the loop body never ran, nothing was thrown,
                // and the test silently "passed" without ever executing. We now always
                // evaluate at least once, and only spend budget when a run fails.
                while (true) {
                    try {
                        base.evaluate();
                        return;
                    } catch (Throwable throwable) {
                        if (retries.getAndDecrement() > 0) {
                            LOG.e("[RETRY] Test failed.", retries.get(),
                                    "retries available...");
                            LOG.e("*******************************************************");
                        } else {
                            // Budget exhausted: surface the last failure.
                            throw throwable;
                        }
                    }
                }
            }
        };
    }
}

@ -15,4 +15,6 @@ public @interface SdkExclude {
int minSdkVersion() default 1;
/** The maximum API level to drop (inclusive) */
int maxSdkVersion() default Integer.MAX_VALUE;
/** Whether this filter only applies to emulators */
boolean emulatorOnly() default false;
}

@ -15,10 +15,11 @@ import org.junit.runner.Description;
public class SdkExcludeFilter extends ParentFilter {
protected boolean evaluateTest(Description description) {
final SdkExclude sdkSuppress = getAnnotationForTest(description);
if (sdkSuppress != null) {
if (Build.VERSION.SDK_INT >= sdkSuppress.minSdkVersion()
&& Build.VERSION.SDK_INT <= sdkSuppress.maxSdkVersion()) {
SdkExclude annotation = getAnnotationForTest(description);
if (annotation != null) {
if ((!annotation.emulatorOnly() || Emulator.isEmulator())
&& Build.VERSION.SDK_INT >= annotation.minSdkVersion()
&& Build.VERSION.SDK_INT <= annotation.maxSdkVersion()) {
return false; // exclude the test
}
return true; // run the test

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.tools;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Like {@link androidx.test.filters.SdkSuppress}, but with emulatorOnly().
 * Tests (or test classes) carrying this annotation are only run when the device
 * API level falls inside the given range; evaluated by {@code SdkIncludeFilter}.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface SdkInclude {
    /** The minimum API level to run (inclusive) */
    int minSdkVersion() default 1;
    /** The maximum API level to run (inclusive) */
    int maxSdkVersion() default Integer.MAX_VALUE;
    /** Whether this filter only applies to emulators */
    boolean emulatorOnly() default false;
}

@ -0,0 +1,47 @@
package com.otaliastudios.cameraview.tools;

import android.os.Build;

import androidx.annotation.Nullable;
import androidx.test.internal.runner.filters.ParentFilter;

import org.junit.runner.Description;

/**
 * Filter for {@link SdkInclude}, based on
 * {@link androidx.test.internal.runner.TestRequestBuilder}'s SdkSuppressFilter.
 */
public class SdkIncludeFilter extends ParentFilter {

    protected boolean evaluateTest(Description description) {
        SdkInclude include = getAnnotationForTest(description);
        if (include == null) {
            // No annotation: always run the test.
            return true;
        }
        // Run only when the emulator requirement (if any) and the SDK range both match.
        boolean emulatorMatches = !include.emulatorOnly() || Emulator.isEmulator();
        boolean sdkMatches = Build.VERSION.SDK_INT >= include.minSdkVersion()
                && Build.VERSION.SDK_INT <= include.maxSdkVersion();
        return emulatorMatches && sdkMatches;
    }

    @Nullable
    private SdkInclude getAnnotationForTest(Description description) {
        // Method-level annotation wins; fall back to the class-level one.
        SdkInclude annotation = description.getAnnotation(SdkInclude.class);
        if (annotation == null) {
            Class<?> testClass = description.getTestClass();
            if (testClass != null) {
                annotation = testClass.getAnnotation(SdkInclude.class);
            }
        }
        return annotation;
    }

    @Override
    public String describe() {
        return "Skip tests annotated with SdkInclude";
    }
}

@ -33,6 +33,8 @@ import java.io.OutputStream;
@SuppressWarnings("unused")
public class CameraUtils {
private final static String TAG = CameraUtils.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
/**
* Determines whether the device has valid camera sensors, so the library
@ -289,9 +291,9 @@ public class CameraUtils {
exifOrientation == ExifInterface.ORIENTATION_FLIP_VERTICAL ||
exifOrientation == ExifInterface.ORIENTATION_TRANSPOSE ||
exifOrientation == ExifInterface.ORIENTATION_TRANSVERSE;
LOG.i("decodeBitmap:", "got orientation from EXIF.", orientation);
} catch (IOException e) {
e.printStackTrace();
LOG.e("decodeBitmap:", "could not get orientation from EXIF.", e);
orientation = 0;
flip = false;
} finally {
@ -304,6 +306,7 @@ public class CameraUtils {
} else {
orientation = rotation;
flip = false;
LOG.i("decodeBitmap:", "got orientation from constructor.", orientation);
}
Bitmap bitmap;

@ -125,6 +125,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private CameraPreview mCameraPreview;
private OrientationHelper mOrientationHelper;
private CameraEngine mCameraEngine;
private Size mLastPreviewStreamSize;
private MediaActionSound mSound;
private AutoFocusMarker mAutoFocusMarker;
@VisibleForTesting List<CameraListener> mListeners = new CopyOnWriteArrayList<>();
@ -367,6 +368,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
protected void onDetachedFromWindow() {
if (!mInEditor) mOrientationHelper.disable();
mLastPreviewStreamSize = null;
super.onDetachedFromWindow();
}
@ -409,8 +411,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
return;
}
Size previewSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) {
mLastPreviewStreamSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
if (mLastPreviewStreamSize == null) {
LOG.w("onMeasure:", "surface is not ready. Calling default behavior.");
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
return;
@ -421,8 +423,8 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
final int widthValue = MeasureSpec.getSize(widthMeasureSpec);
final int heightValue = MeasureSpec.getSize(heightMeasureSpec);
final float previewWidth = previewSize.getWidth();
final float previewHeight = previewSize.getHeight();
final float previewWidth = mLastPreviewStreamSize.getWidth();
final float previewHeight = mLastPreviewStreamSize.getHeight();
// Pre-process specs
final ViewGroup.LayoutParams lp = getLayoutParams();
@ -437,10 +439,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (heightMode == AT_MOST && lp.height == MATCH_PARENT) heightMode = EXACTLY;
}
LOG.i("onMeasure:", "requested dimensions are (" + widthValue + "[" + ms(widthMode)
+ "]x" + heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "(" + previewWidth + "x"
+ previewHeight + ")");
LOG.i("onMeasure:", "requested dimensions are ("
+ widthValue + "[" + ms(widthMode) + "]x"
+ heightValue + "[" + ms(heightMode) + "])");
LOG.i("onMeasure:", "previewSize is", "("
+ previewWidth + "x" + previewHeight + ")");
// (1) If we have fixed dimensions (either 300dp or MATCH_PARENT), there's nothing we
// should do, other than respect it. The preview will eventually be cropped at the sides
@ -808,7 +811,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
if (mInEditor) return;
clearCameraListeners();
clearFrameProcessors();
mCameraEngine.destroy();
mCameraEngine.destroy(true);
if (mCameraPreview != null) mCameraPreview.onDestroy();
}
@ -2036,17 +2039,26 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@Override
public void onCameraPreviewStreamSizeChanged() {
mLogger.i("onCameraPreviewStreamSizeChanged");
// Camera preview size has changed.
// Request a layout pass for onMeasure() to do its stuff.
// Potentially this will change CameraView size, which changes Surface size,
// which triggers a new Preview size. But hopefully it will converge.
mUiHandler.post(new Runnable() {
@Override
public void run() {
requestLayout();
}
});
Size previewSize = mCameraEngine.getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) {
throw new RuntimeException("Preview stream size should not be null here.");
} else if (previewSize.equals(mLastPreviewStreamSize)) {
mLogger.i("onCameraPreviewStreamSizeChanged:",
"swallowing because the preview size has not changed.", previewSize);
} else {
mLogger.i("onCameraPreviewStreamSizeChanged: posting a requestLayout call.",
"Preview stream size:", previewSize);
mUiHandler.post(new Runnable() {
@Override
public void run() {
requestLayout();
}
});
}
}
@Override

@ -19,6 +19,7 @@ import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.engine.offset.Axis;
@ -54,10 +55,6 @@ public class Camera1Engine extends CameraEngine implements
Camera.PreviewCallback,
Camera.ErrorCallback,
FrameManager.BufferCallback {
private static final String TAG = Camera1Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final String JOB_FOCUS_RESET = "focus reset";
private static final String JOB_FOCUS_END = "focus end";
@ -76,20 +73,15 @@ public class Camera1Engine extends CameraEngine implements
@Override
public void onError(int error, Camera camera) {
if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
// Looks like this is recoverable.
LOG.w("Recoverable error inside the onError callback.",
"CAMERA_ERROR_SERVER_DIED");
restart();
return;
}
String message = LOG.e("Internal Camera1 error.", error);
Exception runtime = new RuntimeException(message);
int reason;
switch (error) {
case Camera.CAMERA_ERROR_EVICTED: reason = CameraException.REASON_DISCONNECTED; break;
case Camera.CAMERA_ERROR_UNKNOWN: reason = CameraException.REASON_UNKNOWN; break;
case Camera.CAMERA_ERROR_SERVER_DIED:
case Camera.CAMERA_ERROR_EVICTED:
reason = CameraException.REASON_DISCONNECTED; break;
case Camera.CAMERA_ERROR_UNKNOWN: // Pass DISCONNECTED which is considered unrecoverable
reason = CameraException.REASON_DISCONNECTED; break;
default: reason = CameraException.REASON_UNKNOWN;
}
throw new CameraException(runtime, reason);
@ -146,7 +138,7 @@ public class Camera1Engine extends CameraEngine implements
@NonNull
@EngineThread
@Override
protected Task<Void> onStartEngine() {
protected Task<CameraOptions> onStartEngine() {
try {
mCamera = Camera.open(mCameraId);
} catch (Exception e) {
@ -165,7 +157,7 @@ public class Camera1Engine extends CameraEngine implements
mCamera.setDisplayOrientation(getAngles().offset(Reference.SENSOR, Reference.VIEW,
Axis.ABSOLUTE)); // <- not allowed during preview
LOG.i("onStartEngine:", "Ended");
return Tasks.forResult(null);
return Tasks.forResult(mCameraOptions);
}
@EngineThread
@ -248,15 +240,19 @@ public class Camera1Engine extends CameraEngine implements
@NonNull
@Override
protected Task<Void> onStopPreview() {
LOG.i("onStopPreview:", "Started.");
if (mVideoRecorder != null) {
mVideoRecorder.stop(true);
mVideoRecorder = null;
}
mPictureRecorder = null;
getFrameManager().release();
LOG.i("onStopPreview:", "Releasing preview buffers.");
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
try {
LOG.i("onStopPreview:", "Stopping preview.");
mCamera.stopPreview();
LOG.i("onStopPreview:", "Stopped preview.");
} catch (Exception e) {
LOG.e("stopPreview", "Could not stop preview", e);
}
@ -278,7 +274,9 @@ public class Camera1Engine extends CameraEngine implements
throw new RuntimeException("Unknown CameraPreview output class.");
}
} catch (IOException e) {
LOG.e("unbindFromSurface", "Could not release surface", e);
// NOTE: when this happens, the next onStopEngine() call hangs on camera.release(),
// Not sure for how long. This causes the destroy() flow to fail the timeout.
LOG.e("onStopBind", "Could not release surface", e);
}
return Tasks.forResult(null);
}
@ -293,6 +291,20 @@ public class Camera1Engine extends CameraEngine implements
if (mCamera != null) {
try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
// Just like Camera2Engine, this call can hang (at least on emulators) and if
// we don't find a way around the lock, it leaves the camera in a bad state.
// This is anticipated by the exception in onStopBind() (see above).
//
// 12:29:32.163 E Camera3-Device: Camera 0: clearStreamingRequest: Device has encountered a serious error
// 12:29:32.163 E Camera2-StreamingProcessor: stopStream: Camera 0: Can't clear stream request: Function not implemented (-38)
// 12:29:32.163 E Camera2Client: stopPreviewL: Camera 0: Can't stop streaming: Function not implemented (-38)
// 12:29:32.273 E Camera2-StreamingProcessor: deletePreviewStream: Unable to delete old preview stream: Device or resource busy (-16)
// 12:29:32.274 E Camera2-CallbackProcessor: deleteStream: Unable to delete callback stream: Device or resource busy (-16)
// 12:29:32.274 E Camera3-Device: Camera 0: disconnect: Shutting down in an error state
//
// I believe there is a thread deadlock due to this call internally waiting to
// dispatch some callback to us (pending captures, ...), but the callback thread
// is blocked here. We try to workaround this in CameraEngine.destroy().
mCamera.release();
LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) {
@ -315,11 +327,13 @@ public class Camera1Engine extends CameraEngine implements
@EngineThread
@Override
protected void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering) {
LOG.i("onTakePicture:", "executing.");
stub.rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
stub.size = getPictureSize(Reference.OUTPUT);
mPictureRecorder = new Full1PictureRecorder(stub, Camera1Engine.this, mCamera);
mPictureRecorder.take();
LOG.i("onTakePicture:", "executed.");
}
@EngineThread
@ -327,6 +341,7 @@ public class Camera1Engine extends CameraEngine implements
protected void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering) {
LOG.i("onTakePictureSnapshot:", "executing.");
// Not the real size: it will be cropped to match the view ratio
stub.size = getUncroppedSnapshotSize(Reference.OUTPUT);
// Actually it will be rotated and set to 0.
@ -337,6 +352,7 @@ public class Camera1Engine extends CameraEngine implements
mPictureRecorder = new Snapshot1PictureRecorder(stub, this, mCamera, outputRatio);
}
mPictureRecorder.take();
LOG.i("onTakePictureSnapshot:", "executed.");
}
//endregion
@ -457,7 +473,7 @@ public class Camera1Engine extends CameraEngine implements
public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash;
mFlash = flash;
mFlashTask = mOrchestrator.scheduleStateful("flash",
mFlashTask = mOrchestrator.scheduleStateful("flash (" + flash + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -508,7 +524,8 @@ public class Camera1Engine extends CameraEngine implements
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
mWhiteBalanceTask = mOrchestrator.scheduleStateful("white balance",
mWhiteBalanceTask = mOrchestrator.scheduleStateful(
"white balance (" + whiteBalance + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -522,7 +539,11 @@ public class Camera1Engine extends CameraEngine implements
private boolean applyWhiteBalance(@NonNull Camera.Parameters params,
@NonNull WhiteBalance oldWhiteBalance) {
if (mCameraOptions.supports(mWhiteBalance)) {
// If this lock key is present, the engine can throw when applying the
// parameters, not sure why. Since we never lock it, this should be
// harmless for the rest of the engine.
params.setWhiteBalance(mMapper.mapWhiteBalance(mWhiteBalance));
params.remove("auto-whitebalance-lock");
return true;
}
mWhiteBalance = oldWhiteBalance;
@ -533,7 +554,7 @@ public class Camera1Engine extends CameraEngine implements
public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr;
mHdr = hdr;
mHdrTask = mOrchestrator.scheduleStateful("hdr",
mHdrTask = mOrchestrator.scheduleStateful("hdr (" + hdr + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -557,7 +578,7 @@ public class Camera1Engine extends CameraEngine implements
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
final float old = mZoomValue;
mZoomValue = zoom;
mZoomTask = mOrchestrator.scheduleStateful("zoom",
mZoomTask = mOrchestrator.scheduleStateful("zoom (" + zoom + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -589,7 +610,8 @@ public class Camera1Engine extends CameraEngine implements
@Nullable final PointF[] points, final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mExposureCorrectionTask = mOrchestrator.scheduleStateful("exposure correction",
mExposureCorrectionTask = mOrchestrator.scheduleStateful(
"exposure correction (" + EVvalue + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -629,7 +651,8 @@ public class Camera1Engine extends CameraEngine implements
public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds;
mPlaySounds = playSounds;
mPlaySoundsTask = mOrchestrator.scheduleStateful("play sounds",
mPlaySoundsTask = mOrchestrator.scheduleStateful(
"play sounds (" + playSounds + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -665,7 +688,8 @@ public class Camera1Engine extends CameraEngine implements
public void setPreviewFrameRate(float previewFrameRate) {
final float old = previewFrameRate;
mPreviewFrameRate = previewFrameRate;
mPreviewFrameRateTask = mOrchestrator.scheduleStateful("preview fps",
mPreviewFrameRateTask = mOrchestrator.scheduleStateful(
"preview fps (" + previewFrameRate + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@ -738,9 +762,8 @@ public class Camera1Engine extends CameraEngine implements
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (data == null) {
// Let's test this with an exception.
throw new RuntimeException("Camera1 returns null data from onPreviewFrame! " +
"This would make the frame processors crash later.");
// Seen this happen in logs.
return;
}
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),

@ -35,6 +35,7 @@ import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Facing;
@ -80,10 +81,6 @@ import java.util.concurrent.ExecutionException;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
ActionHolder {
private static final String TAG = Camera2Engine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
@VisibleForTesting static final long METER_TIMEOUT = 2500;
@ -129,16 +126,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
//region Utilities
@VisibleForTesting
@NonNull
private <T> T readCharacteristic(@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
<T> T readCharacteristic(@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
return readCharacteristic(mCameraCharacteristics, key, fallback);
}
@NonNull
private <T> T readCharacteristic(@NonNull CameraCharacteristics characteristics,
@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
@NonNull CameraCharacteristics.Key<T> key,
@NonNull T fallback) {
T value = characteristics.get(key);
return value == null ? fallback : value;
}
@ -239,11 +237,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
* it should be set before calling this method, for example by calling
* {@link #createRepeatingRequestBuilder(int)}.
*/
@EngineThread
@SuppressWarnings("WeakerAccess")
protected void applyRepeatingRequestBuilder() {
// Default overload: checks that we are in the started (PREVIEW) state and
// reports any failure as a REASON_DISCONNECTED CameraException.
applyRepeatingRequestBuilder(true, CameraException.REASON_DISCONNECTED);
}
@EngineThread
private void applyRepeatingRequestBuilder(boolean checkStarted, int errorReason) {
if ((getState() == CameraState.PREVIEW && !isChangingState()) || !checkStarted) {
try {
@ -329,6 +329,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@EngineThread
@Override
protected void onPreviewStreamSizeChanged() {
// A changed preview stream size invalidates the current surfaces/session,
// so the whole bind step must be redone.
LOG.i("onPreviewStreamSizeChanged:", "Calling restartBind().");
restartBind();
}
@ -374,8 +375,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@SuppressLint("MissingPermission")
@NonNull
@Override
protected Task<Void> onStartEngine() {
final TaskCompletionSource<Void> task = new TaskCompletionSource<>();
protected Task<CameraOptions> onStartEngine() {
final TaskCompletionSource<CameraOptions> task = new TaskCompletionSource<>();
try {
// We have a valid camera for this Facing. Go on.
mManager.openCamera(mCameraId, new CameraDevice.StateCallback() {
@ -385,7 +386,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// Set parameters that might have been set before the camera was opened.
try {
LOG.i("createCamera:", "Applying default parameters.");
LOG.i("onStartEngine:", "Opened camera device.");
mCameraCharacteristics = mManager.getCameraCharacteristics(mCameraId);
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
int format;
@ -401,24 +402,34 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
task.trySetException(createCameraException(e));
return;
}
task.trySetResult(null);
task.trySetResult(mCameraOptions);
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
// Not sure if this is called INSTEAD of onOpened() or can be called after
// as well. However, using trySetException should address this problem -
// it will only trigger if the task has no result.
//
// Docs say to release this camera instance, however, since we throw an
// unrecoverable CameraException, this will trigger a stop() through the
// exception handler.
task.trySetException(new CameraException(CameraException.REASON_DISCONNECTED));
// as well. Cover both cases with an unrecoverable exception so that the
// engine is properly destroyed.
CameraException exception
= new CameraException(CameraException.REASON_DISCONNECTED);
if (!task.getTask().isComplete()) {
task.trySetException(exception);
} else {
LOG.i("CameraDevice.StateCallback reported disconnection.");
throw exception;
}
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
task.trySetException(createCameraException(error));
if (!task.getTask().isComplete()) {
task.trySetException(createCameraException(error));
} else {
// This happened while the engine is running. Throw unrecoverable exception
// so that engine is properly destroyed.
LOG.e("CameraDevice.StateCallback reported an error:", error);
throw new CameraException(CameraException.REASON_DISCONNECTED);
}
}
}, null);
} catch (CameraAccessException e) {
@ -555,6 +566,12 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
String message = LOG.e("onConfigureFailed! Session", session);
throw new RuntimeException(message);
}
@Override
public void onReady(@NonNull CameraCaptureSession session) {
super.onReady(session);
// Logged for debugging purposes only; no extra work is done here.
LOG.i("CameraCaptureSession.StateCallback reported onReady.");
}
}, null);
} catch (CameraAccessException e) {
throw createCameraException(e);
@ -566,7 +583,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull
@Override
protected Task<Void> onStartPreview() {
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
LOG.i("onStartPreview:", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged();
Size previewSizeForView = getPreviewStreamSize(Reference.VIEW);
@ -579,11 +596,11 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
getFrameManager().setUp(FRAME_PROCESSING_FORMAT, mFrameProcessingSize);
}
LOG.i("onStartPreview", "Starting preview.");
LOG.i("onStartPreview:", "Starting preview.");
addRepeatingRequestBuilderSurfaces();
applyRepeatingRequestBuilder(false,
CameraException.REASON_FAILED_TO_START_PREVIEW);
LOG.i("onStartPreview", "Started preview.");
LOG.i("onStartPreview:", "Started preview.");
// Start delayed video if needed.
if (mFullVideoPendingStub != null) {
@ -610,7 +627,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@NonNull
@Override
protected Task<Void> onStopPreview() {
LOG.i("onStopPreview:", "About to clean up.");
LOG.i("onStopPreview:", "Started.");
if (mVideoRecorder != null) {
// This should synchronously call onVideoResult that will reset the repeating builder
// to the PREVIEW template. This is very important.
@ -621,15 +638,25 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (hasFrameProcessors()) {
getFrameManager().release();
}
try {
// NOTE: should we wait for onReady() like docs say?
// Leaving this synchronous for now.
mSession.stopRepeating();
} catch (CameraAccessException e) {
// This tells us that we should stop everything. It's better to throw an unrecoverable
// exception rather than just swallow this, so everything gets stopped.
LOG.w("stopRepeating failed!", e);
throw createCameraException(e);
// Removing the part below for now. It hangs on emulators and can take a lot of time
// in real devices, for benefits that I'm not 100% sure about.
if (false) {
try {
// Preferring abortCaptures() over stopRepeating(): it makes sure that all
// in-flight operations are discarded as fast as possible, which is what we want.
// NOTE: this call is asynchronous. Should find a good way to wait for the outcome.
LOG.i("onStopPreview:", "calling abortCaptures().");
mSession.abortCaptures();
LOG.i("onStopPreview:", "called abortCaptures().");
} catch (CameraAccessException e) {
// This tells us that we should stop everything. It's better to throw an
// unrecoverable exception rather than just swallow, so everything gets stopped.
LOG.w("onStopPreview:", "abortCaptures failed!", e);
throw createCameraException(e);
} catch (IllegalStateException e) {
// This tells us that the session was already closed.
// Not sure if this can happen, but we can swallow it.
}
}
removeRepeatingRequestBuilderSurfaces();
mLastRepeatingResult = null;
@ -672,6 +699,18 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
protected Task<Void> onStopEngine() {
try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
// Just like Camera1Engine, this call can hang (at least on emulators) and if
// we don't find a way around the lock, it leaves the camera in a bad state.
//
// 12:33:28.152 2888 5470 I CameraEngine: onStopEngine: Clean up. Releasing camera.
// 12:33:29.476 1384 1555 E audio_hw_generic: pcm_write failed cannot write stream data: I/O error
// 12:33:33.206 1512 3616 E Camera3-Device: Camera 0: waitUntilDrainedLocked: Error waiting for HAL to drain: Connection timed out (-110)
// 12:33:33.242 1512 3616 E CameraDeviceClient: detachDevice: waitUntilDrained failed with code 0xffffff92
// 12:33:33.243 1512 3616 E Camera3-Device: Camera 0: disconnect: Shutting down in an error state
//
// I believe there is a thread deadlock due to this call internally waiting to
// dispatch some callback to us (pending captures, ...), but the callback thread
// is blocked here. We try to workaround this in CameraEngine.destroy().
mCamera.close();
LOG.i("onStopEngine:", "Clean up.", "Released camera.");
} catch (Exception e) {
@ -790,7 +829,14 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
boolean unlock = (fullPicture && getPictureMetering())
|| (!fullPicture && getPictureSnapshotMetering());
if (unlock) {
unlockAndResetMetering();
mOrchestrator.scheduleStateful("reset metering after picture",
CameraState.PREVIEW,
new Runnable() {
@Override
public void run() {
unlockAndResetMetering();
}
});
}
}
@ -880,12 +926,16 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void onVideoRecordingEnd() {
super.onVideoRecordingEnd();
// SnapshotRecorder will invoke this on its own thread which is risky, but if it was a
// snapshot, this function returns early so its safe.
// snapshot, this function does nothing so it's safe.
boolean needsIssue549Workaround = (mVideoRecorder instanceof Full2VideoRecorder) &&
(readCharacteristic(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, -1)
== CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
if (needsIssue549Workaround) {
LOG.w("Applying the Issue549 workaround.", Thread.currentThread());
maybeRestorePreviewTemplateAfterVideo();
LOG.w("Applied the Issue549 workaround. Sleeping...");
try { Thread.sleep(600); } catch (InterruptedException ignore) {}
LOG.w("Applied the Issue549 workaround. Slept!");
}
}
@ -1364,7 +1414,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
Image image = null;
try {
image = reader.acquireLatestImage();
} catch (IllegalStateException ignore) { }
} catch (Exception ignore) { }
if (image == null) {
LOG.w("onImageAvailable", "we have a byte buffer but no Image!");
getFrameManager().onBufferUnused(data);
@ -1447,7 +1497,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mOrchestrator.remove("reset metering");
if (shouldResetAutoFocus()) {
mOrchestrator.scheduleDelayed("reset metering",
mOrchestrator.scheduleStatefulDelayed("reset metering",
CameraState.PREVIEW,
getAutoFocusResetDelay(),
new Runnable() {
@Override
@ -1478,26 +1529,26 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return mMeterAction;
}
@EngineThread
private void unlockAndResetMetering() {
if (getState() == CameraState.PREVIEW && !isChangingState()) {
Actions.sequence(
new BaseAction() {
@Override
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
applyDefaultFocus(holder.getBuilder(this));
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.applyBuilder(this);
setState(STATE_COMPLETED);
// TODO should wait results?
}
},
new MeterResetAction()
).start(Camera2Engine.this);
}
// Needs the PREVIEW state!
Actions.sequence(
new BaseAction() {
@Override
protected void onStart(@NonNull ActionHolder holder) {
super.onStart(holder);
applyDefaultFocus(holder.getBuilder(this));
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AE_LOCK, false);
holder.getBuilder(this)
.set(CaptureRequest.CONTROL_AWB_LOCK, false);
holder.applyBuilder(this);
setState(STATE_COMPLETED);
// TODO should wait results?
}
},
new MeterResetAction()
).start(Camera2Engine.this);
}
//endregion
@ -1534,14 +1585,19 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
return mRepeatingRequestBuilder;
}
@EngineThread
@Override
public void applyBuilder(@NonNull Action source) {
// Re-applies the current repeating request builder on behalf of an Action.
// NOTE: Should never be called on a non-engine thread!
// Non-engine threads are not protected by the uncaught exception handler
// and can make the process crash.
applyRepeatingRequestBuilder();
}
@Override
public void applyBuilder(@NonNull Action source, @NonNull CaptureRequest.Builder builder)
throws CameraAccessException {
// Risky - would be better to ensure that thread is the engine one.
if (getState() == CameraState.PREVIEW && !isChangingState()) {
mSession.capture(builder.build(), mRepeatingRequestCallback, null);
}

@ -8,8 +8,10 @@ import android.location.Location;
import android.os.Handler;
import android.os.Looper;
import com.google.android.gms.tasks.Continuation;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.SuccessContinuation;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
@ -83,8 +85,9 @@ import java.util.concurrent.TimeUnit;
* S3 and S4 are also performed.
* - {@link #stop(boolean)}: ASYNC - stops everything: undoes S4, then S3, then S2.
* - {@link #restart()}: ASYNC - completes a stop then a start.
* - {@link #destroy()}: SYNC - performs a {@link #stop(boolean)} that will go on no matter what,
* without throwing. Makes the engine unusable and clears resources.
* - {@link #destroy(boolean)}: SYNC - performs a {@link #stop(boolean)} that will go on no matter
* what, without throwing. Makes the engine unusable and clears
* resources.
*
* THREADING
* Subclasses should always execute code on the thread given by {@link #mHandler}.
@ -131,8 +134,11 @@ public abstract class CameraEngine implements
void dispatchOnVideoRecordingEnd();
}
private static final String TAG = CameraEngine.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
protected static final String TAG = CameraEngine.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
// If this is 2, this means we'll try to run destroy() twice.
private static final int DESTROY_RETRIES = 2;
// Need to be protected
@SuppressWarnings("WeakerAccess") protected final Callback mCallback;
@ -211,10 +217,9 @@ public abstract class CameraEngine implements
protected CameraEngine(@NonNull Callback callback) {
mCallback = callback;
mCrashHandler = new Handler(Looper.getMainLooper());
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
mFrameManager = instantiateFrameManager();
mAngles = new Angles();
recreateHandler(false);
}
public void setPreview(@NonNull CameraPreview cameraPreview) {
@ -248,7 +253,8 @@ public abstract class CameraEngine implements
// Exception handler that swallows (only logs) uncaught errors. Installed while
// destroying the engine, when failures are expected and must not be re-dispatched
// or crash the process.
private static class NoOpExceptionHandler implements Thread.UncaughtExceptionHandler {
@Override
public void uncaughtException(@NonNull Thread thread, @NonNull Throwable throwable) {
// Just log; intentionally do not rethrow or dispatch anywhere.
LOG.w("EXCEPTION:", "In the NoOpExceptionHandler, probably while destroying.",
"Thread:", thread, "Error:", throwable);
}
}
@ -265,43 +271,57 @@ public abstract class CameraEngine implements
* @param throwable the throwable
* @param fromExceptionHandler true if coming from exception handler
*/
private void handleException(@NonNull Thread thread,
private void handleException(@NonNull final Thread thread,
final @NonNull Throwable throwable,
final boolean fromExceptionHandler) {
if (!(throwable instanceof CameraException)) {
// This is unexpected, either a bug or something the developer should know.
// Release and crash the UI thread so we get bug reports.
LOG.e("uncaughtException:", "Unexpected exception:", throwable);
mCrashHandler.post(new Runnable() {
@Override
public void run() {
destroy();
// Throws an unchecked exception without unnecessary wrapping.
// 1. If this comes from the exception handler, the thread has crashed. Replace it.
// Most actions are wrapped into Tasks so don't go here, but some callbacks do
// (at least in Camera1, e.g. onError).
if (fromExceptionHandler) {
LOG.e("EXCEPTION:", "Handler thread is gone. Replacing.");
recreateHandler(false);
}
// 2. Depending on the exception, we must destroy(false|true) to release resources, and
// notify the outside, either with the callback or by crashing the app.
LOG.e("EXCEPTION:", "Scheduling on the crash handler...");
mCrashHandler.post(new Runnable() {
@Override
public void run() {
if (throwable instanceof CameraException) {
CameraException exception = (CameraException) throwable;
if (exception.isUnrecoverable()) {
LOG.e("EXCEPTION:", "Got CameraException. " +
"Since it is unrecoverable, executing destroy(false).");
destroy(false);
}
LOG.e("EXCEPTION:", "Got CameraException. Dispatching to callback.");
mCallback.dispatchError(exception);
} else {
LOG.e("EXCEPTION:", "Unexpected error! Executing destroy(true).");
destroy(true);
LOG.e("EXCEPTION:", "Unexpected error! Throwing.");
if (throwable instanceof RuntimeException) {
throw (RuntimeException) throwable;
} else {
throw new RuntimeException(throwable);
}
}
});
return;
}
final CameraException cameraException = (CameraException) throwable;
LOG.e("uncaughtException:", "Got CameraException:", cameraException,
"on state:", getState());
if (fromExceptionHandler) {
// Got to restart the handler.
thread.interrupt();
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
}
}
});
}
mCallback.dispatchError(cameraException);
if (cameraException.isUnrecoverable()) {
// Stop everything (if needed) without notifying teardown errors.
stop(true);
}
/**
 * Recreates the background handler, to ensure we use a fresh one from now on.
 * The old handler (if any) is destroyed first, and the crash-reporting
 * uncaught exception handler is installed on the new thread.
 *
 * If we suspect that the old handler thread is currently stuck, the orchestrator
 * should be reset as well, because it hosts a chain of tasks and the last one
 * will never complete.
 *
 * @param resetOrchestrator true to also clear the orchestrator's pending jobs
 */
private void recreateHandler(boolean resetOrchestrator) {
if (mHandler != null) mHandler.destroy();
mHandler = WorkerHandler.get("CameraViewEngine");
mHandler.getThread().setUncaughtExceptionHandler(new CrashExceptionHandler());
if (resetOrchestrator) mOrchestrator.reset();
}
//endregion
@ -326,31 +346,53 @@ public abstract class CameraEngine implements
* Calls {@link #stop(boolean)} and waits for it.
* Not final due to mockito requirements.
*
* If unrecoverably is true, this also releases resources and the engine will not be in a
* functional state after. If forever is false, this really is just a synchronous stop.
*
* NOTE: Should not be called on the orchestrator thread! This would cause deadlocks due to us
* awaiting for {@link #stop(boolean)} to return.
*/
public void destroy() {
LOG.i("DESTROY:", "state:", getState(), "thread:", Thread.currentThread());
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
// Stop if needed, synchronously and silently.
public void destroy(boolean unrecoverably) {
// Entry point for the recursive implementation: start at retry depth 0.
destroy(unrecoverably, 0);
}
private void destroy(boolean unrecoverably, int depth) {
LOG.i("DESTROY:", "state:", getState(),
"thread:", Thread.currentThread(),
"depth:", depth,
"unrecoverably:", unrecoverably);
if (unrecoverably) {
// Prevent CameraEngine leaks. Don't set to null, or exceptions
// inside the standard stop() method might crash the main thread.
mHandler.getThread().setUncaughtExceptionHandler(new NoOpExceptionHandler());
}
// Cannot use Tasks.await() because we might be on the UI thread.
final CountDownLatch latch = new CountDownLatch(1);
stop(true).addOnCompleteListener(
WorkerHandler.get().getExecutor(),
mHandler.getExecutor(),
new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
latch.countDown();
}
});
@Override
public void onComplete(@NonNull Task<Void> task) {
latch.countDown();
}
});
try {
boolean success = latch.await(3, TimeUnit.SECONDS);
boolean success = latch.await(6, TimeUnit.SECONDS);
if (!success) {
LOG.e("Probably some deadlock in destroy.",
// This thread is likely stuck. The reason might be deadlock issues in the internal
// camera implementation, at least in emulators: see Camera1Engine and Camera2Engine
// onStopEngine() implementation and comments.
LOG.e("DESTROY: Could not destroy synchronously after 6 seconds.",
"Current thread:", Thread.currentThread(),
"Handler thread: ", mHandler.getThread());
"Handler thread:", mHandler.getThread());
depth++;
if (depth < DESTROY_RETRIES) {
recreateHandler(true);
LOG.e("DESTROY: Trying again on thread:", mHandler.getThread());
destroy(unrecoverably, depth);
} else {
LOG.w("DESTROY: Giving up because DESTROY_RETRIES was reached.");
}
}
} catch (InterruptedException ignore) {}
}
@ -406,19 +448,24 @@ public abstract class CameraEngine implements
private Task<Void> startEngine() {
return mOrchestrator.scheduleStateChange(CameraState.OFF, CameraState.ENGINE,
true,
new Callable<Task<Void>>() {
new Callable<Task<CameraOptions>>() {
@Override
public Task<Void> call() {
public Task<CameraOptions> call() {
if (!collectCameraInfo(mFacing)) {
LOG.e("onStartEngine:", "No camera available for facing", mFacing);
throw new CameraException(CameraException.REASON_NO_CAMERA);
}
return onStartEngine();
}
}).addOnSuccessListener(new OnSuccessListener<Void>() {
}).onSuccessTask(new SuccessContinuation<CameraOptions, Void>() {
@NonNull
@Override
public void onSuccess(Void aVoid) {
mCallback.dispatchOnCameraOpened(mCameraOptions);
public Task<Void> then(@Nullable CameraOptions cameraOptions) {
// Put this on the outer task so we're sure it's called after getState() is changed.
// This was breaking some tests on rare occasions.
if (cameraOptions == null) throw new RuntimeException("Null options!");
mCallback.dispatchOnCameraOpened(cameraOptions);
return Tasks.forResult(null);
}
});
}
@ -436,6 +483,8 @@ public abstract class CameraEngine implements
}).addOnSuccessListener(new OnSuccessListener<Void>() {
@Override
public void onSuccess(Void aVoid) {
// Put this on the outer task so we're sure it's called after getState() is OFF.
// This was breaking some tests on rare occasions.
mCallback.dispatchOnCameraClosed();
}
});
@ -447,7 +496,7 @@ public abstract class CameraEngine implements
*/
@NonNull
@EngineThread
protected abstract Task<Void> onStartEngine();
protected abstract Task<CameraOptions> onStartEngine();
/**
* Stops the engine.

@ -11,7 +11,9 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
@ -40,9 +42,9 @@ public class CameraOrchestrator {
protected static class Token {
public final String name;
public final Task<Void> task;
public final Task<?> task;
private Token(@NonNull String name, @NonNull Task<Void> task) {
private Token(@NonNull String name, @NonNull Task<?> task) {
this.name = name;
this.task = task;
}
@ -76,39 +78,42 @@ public class CameraOrchestrator {
});
}
@SuppressWarnings("unchecked")
@NonNull
public Task<Void> schedule(@NonNull final String name,
final boolean dispatchExceptions,
@NonNull final Callable<Task<Void>> job) {
public <T> Task<T> schedule(@NonNull final String name,
final boolean dispatchExceptions,
@NonNull final Callable<Task<T>> job) {
LOG.i(name.toUpperCase(), "- Scheduling.");
final TaskCompletionSource<Void> source = new TaskCompletionSource<>();
final TaskCompletionSource<T> source = new TaskCompletionSource<>();
final WorkerHandler handler = mCallback.getJobWorker(name);
synchronized (mLock) {
applyCompletionListener(mJobs.getLast().task, handler,
new OnCompleteListener<Void>() {
new OnCompleteListener() {
@Override
public void onComplete(@NonNull Task<Void> task) {
public void onComplete(@NonNull Task task) {
synchronized (mLock) {
mJobs.removeFirst();
ensureToken();
}
try {
LOG.i(name.toUpperCase(), "- Executing.");
Task<Void> inner = job.call();
applyCompletionListener(inner, handler, new OnCompleteListener<Void>() {
Task<T> inner = job.call();
applyCompletionListener(inner, handler, new OnCompleteListener<T>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
public void onComplete(@NonNull Task<T> task) {
Exception e = task.getException();
LOG.i(name.toUpperCase(), "- Finished.", e);
if (e != null) {
LOG.w(name.toUpperCase(), "- Finished with ERROR.", e);
if (dispatchExceptions) {
mCallback.handleJobException(name, e);
}
source.trySetException(e);
} else if (task.isCanceled()) {
LOG.i(name.toUpperCase(), "- Finished because ABORTED.");
source.trySetException(new CancellationException());
} else {
source.trySetResult(null);
LOG.i(name.toUpperCase(), "- Finished.");
source.trySetResult(task.getResult());
}
}
});
@ -151,24 +156,38 @@ public class CameraOrchestrator {
mCallback.getJobWorker(name).remove(mDelayedJobs.get(name));
mDelayedJobs.remove(name);
}
Token token = new Token(name, Tasks.<Void>forResult(null));
Token token = new Token(name, Tasks.forResult(null));
//noinspection StatementWithEmptyBody
while (mJobs.remove(token)) { /* do nothing */ }
ensureToken();
}
}
public void reset() {
    synchronized (mLock) {
        // Snapshot every known job name before removing anything: remove()
        // mutates mJobs and mDelayedJobs, so we must not call it while
        // iterating over those collections directly.
        List<String> names = new ArrayList<>(mDelayedJobs.keySet());
        for (Token token : mJobs) {
            names.add(token.name);
        }
        for (String name : names) {
            remove(name);
        }
    }
}
private void ensureToken() {
    // Keep the job deque non-empty at all times by seeding it with an
    // already-completed "BASE" token, so callers can always chain onto
    // the last task in the deque.
    synchronized (mLock) {
        if (!mJobs.isEmpty()) return;
        mJobs.add(new Token("BASE", Tasks.forResult(null)));
    }
}
private static void applyCompletionListener(@NonNull final Task<Void> task,
@NonNull WorkerHandler handler,
@NonNull final OnCompleteListener<Void> listener) {
private static <T> void applyCompletionListener(@NonNull final Task<T> task,
@NonNull WorkerHandler handler,
@NonNull final OnCompleteListener<T> listener) {
if (task.isComplete()) {
handler.run(new Runnable() {
@Override

@ -37,7 +37,8 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
public boolean hasPendingStateChange() {
synchronized (mLock) {
for (Token token : mJobs) {
if (token.name.contains(" > ") && !token.task.isComplete()) {
if ((token.name.contains(" >> ") || token.name.contains(" << "))
&& !token.task.isComplete()) {
return true;
}
}
@ -46,28 +47,29 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
}
@NonNull
public Task<Void> scheduleStateChange(@NonNull final CameraState fromState,
@NonNull final CameraState toState,
boolean dispatchExceptions,
@NonNull final Callable<Task<Void>> stateChange) {
public <T> Task<T> scheduleStateChange(@NonNull final CameraState fromState,
@NonNull final CameraState toState,
boolean dispatchExceptions,
@NonNull final Callable<Task<T>> stateChange) {
final int changeCount = ++mStateChangeCount;
mTargetState = toState;
final boolean isTearDown = !toState.isAtLeast(fromState);
final String changeName = fromState.name() + " > " + toState.name();
return schedule(changeName, dispatchExceptions, new Callable<Task<Void>>() {
final String name = isTearDown ? fromState.name() + " << " + toState.name()
: fromState.name() + " >> " + toState.name();
return schedule(name, dispatchExceptions, new Callable<Task<T>>() {
@Override
public Task<Void> call() throws Exception {
public Task<T> call() throws Exception {
if (getCurrentState() != fromState) {
LOG.w(changeName.toUpperCase(), "- State mismatch, aborting. current:",
LOG.w(name.toUpperCase(), "- State mismatch, aborting. current:",
getCurrentState(), "from:", fromState, "to:", toState);
return Tasks.forCanceled();
} else {
Executor executor = mCallback.getJobWorker(changeName).getExecutor();
Executor executor = mCallback.getJobWorker(name).getExecutor();
return stateChange.call().continueWithTask(executor,
new Continuation<Void, Task<Void>>() {
new Continuation<T, Task<T>>() {
@Override
public Task<Void> then(@NonNull Task<Void> task) {
public Task<T> then(@NonNull Task<T> task) {
if (task.isSuccessful() || isTearDown) {
mCurrentState = toState;
}
@ -76,9 +78,9 @@ public class CameraStateOrchestrator extends CameraOrchestrator {
});
}
}
}).addOnCompleteListener(new OnCompleteListener<Void>() {
}).addOnCompleteListener(new OnCompleteListener<T>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
public void onComplete(@NonNull Task<T> task) {
if (changeCount == mStateChangeCount) {
mTargetState = mCurrentState;
}

@ -1,8 +1,11 @@
package com.otaliastudios.cameraview.internal;
import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import androidx.annotation.NonNull;
@ -49,8 +52,9 @@ import java.util.List;
* - MediaCodecList (https://android.googlesource.com/platform/frameworks/av/+/master/media/libstagefright/MediaCodecList.cpp#322)
*
* To be fair, what {@link android.media.MediaRecorder} does is actually choose the first one
* that configures itself without errors. We currently do not offer this option here.
* TODO add a tryConfigure() step, that throws AudioException/VideoException ?
* that configures itself without errors. We offer this option through
* {@link #tryConfigureVideo(String, Size, int, int)} and
* {@link #tryConfigureAudio(String, int, int, int)}.
*
* 2. {@link #MODE_PREFER_HARDWARE}
*
@ -350,4 +354,64 @@ public class DeviceEncoders {
}
}
@SuppressLint("NewApi")
public void tryConfigureVideo(@NonNull String mimeType,
@NonNull Size size,
int frameRate,
int bitRate) {
if (mVideoEncoder != null) {
MediaCodec codec = null;
try {
MediaFormat format = MediaFormat.createVideoFormat(mimeType, size.getWidth(),
size.getHeight());
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
codec = MediaCodec.createByCodecName(mVideoEncoder.getName());
codec.configure(format, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
throw new VideoException("Failed to configure video codec: " + e.getMessage());
} finally {
if (codec != null) {
try {
codec.release();
} catch (Exception ignore) {}
}
}
}
}
@SuppressLint("NewApi")
public void tryConfigureAudio(@NonNull String mimeType,
int bitRate,
int sampleRate,
int channels) {
if (mAudioEncoder != null) {
MediaCodec codec = null;
try {
final MediaFormat format = MediaFormat.createAudioFormat(mimeType, sampleRate,
channels);
int channelMask = channels == 2 ? AudioFormat.CHANNEL_IN_STEREO
: AudioFormat.CHANNEL_IN_MONO;
format.setInteger(MediaFormat.KEY_CHANNEL_MASK, channelMask);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
codec = MediaCodec.createByCodecName(mAudioEncoder.getName());
codec.configure(format, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
throw new AudioException("Failed to configure video audio: " + e.getMessage());
} finally {
if (codec != null) {
try {
codec.release();
} catch (Exception ignore) {}
}
}
}
}
}

@ -89,18 +89,26 @@ public class WorkerHandler {
get().post(action);
}
private String mName;
private HandlerThread mThread;
private Handler mHandler;
private Executor mExecutor;
private WorkerHandler(@NonNull String name) {
mThread = new HandlerThread(name);
mName = name;
mThread = new HandlerThread(name) {
@NonNull
@Override
public String toString() {
return super.toString() + "[" + getThreadId() + "]";
}
};
mThread.setDaemon(true);
mThread.start();
mHandler = new Handler(mThread.getLooper());
mExecutor = new Executor() {
@Override
public void execute(Runnable command) {
public void execute(@NonNull Runnable command) {
WorkerHandler.this.run(command);
}
};
@ -245,6 +253,10 @@ public class WorkerHandler {
// after quit(), the thread will die at some point in the future. Might take some ms.
// try { handler.getThread().join(); } catch (InterruptedException ignore) {}
}
// This should not be needed, but just to be sure, let's remove it from cache.
// For example, interrupt() won't interrupt the thread if it's blocked - it will throw
// an exception instead.
sCache.remove(mName);
}
/**

@ -2,8 +2,8 @@ package com.otaliastudios.cameraview.picture;
import android.hardware.Camera;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.internal.utils.ExifHelper;
import androidx.annotation.NonNull;
@ -16,18 +16,16 @@ import java.io.IOException;
/**
* A {@link PictureResult} that uses standard APIs.
*/
public class Full1PictureRecorder extends PictureRecorder {
public class Full1PictureRecorder extends FullPictureRecorder {
private static final String TAG = Full1PictureRecorder.class.getSimpleName();
@SuppressWarnings("unused")
private static final CameraLogger LOG = CameraLogger.create(TAG);
private Camera mCamera;
private final Camera mCamera;
private final Camera1Engine mEngine;
public Full1PictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener,
@NonNull Camera1Engine engine,
@NonNull Camera camera) {
super(stub, listener);
super(stub, engine);
mEngine = engine;
mCamera = camera;
// We set the rotation to the camera parameters, but we don't know if the result will be
@ -39,10 +37,15 @@ public class Full1PictureRecorder extends PictureRecorder {
@Override
public void take() {
LOG.i("take() called.");
// Stopping the preview callback is important on older APIs / emulators,
// or takePicture can hang and leave the camera in a bad state.
mCamera.setPreviewCallbackWithBuffer(null);
mCamera.takePicture(
new Camera.ShutterCallback() {
@Override
public void onShutter() {
LOG.i("take(): got onShutter callback.");
dispatchOnShutter(true);
}
},
@ -51,6 +54,7 @@ public class Full1PictureRecorder extends PictureRecorder {
new Camera.PictureCallback() {
@Override
public void onPictureTaken(byte[] data, final Camera camera) {
LOG.i("take(): got picture callback.");
int exifRotation;
try {
ExifInterface exif = new ExifInterface(new ByteArrayInputStream(data));
@ -63,16 +67,19 @@ public class Full1PictureRecorder extends PictureRecorder {
}
mResult.data = data;
mResult.rotation = exifRotation;
LOG.i("take(): starting preview again. ", Thread.currentThread());
camera.setPreviewCallbackWithBuffer(mEngine);
camera.startPreview(); // This is needed, read somewhere in the docs.
dispatchResult();
}
}
);
LOG.i("take() returned.");
}
@Override
protected void dispatchResult() {
mCamera = null;
LOG.i("dispatching result. Thread:", Thread.currentThread());
super.dispatchResult();
}
}

@ -9,7 +9,6 @@ import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.Camera2Engine;
@ -33,12 +32,9 @@ import androidx.exifinterface.media.ExifInterface;
* A {@link PictureResult} that uses standard APIs.
*/
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2PictureRecorder extends PictureRecorder
public class Full2PictureRecorder extends FullPictureRecorder
implements ImageReader.OnImageAvailableListener {
private static final String TAG = Full2PictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private final ActionHolder mHolder;
private final Action mAction;
private final ImageReader mPictureReader;
@ -71,6 +67,7 @@ public class Full2PictureRecorder extends PictureRecorder
mResult = null;
mError = e;
dispatchResult();
setState(STATE_COMPLETED);
}
}
@ -148,7 +145,9 @@ public class Full2PictureRecorder extends PictureRecorder
int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
mResult.rotation = ExifHelper.getOrientation(exifOrientation);
} catch (IOException ignore) { }
} catch (IOException ignore) {
// Should not happen
}
}
private void readRawImage(@NonNull Image image) {

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.picture;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
/**
* Helps with logging.
*/
public abstract class FullPictureRecorder extends PictureRecorder {
private static final String TAG = FullPictureRecorder.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
public FullPictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
super(stub, listener);
}
}

@ -1,14 +1,11 @@
package com.otaliastudios.cameraview.picture;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.engine.CameraEngine;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.RotationHelper;
@ -23,11 +20,7 @@ import java.io.ByteArrayOutputStream;
/**
* A {@link PictureRecorder} that uses standard APIs.
*/
public class Snapshot1PictureRecorder extends PictureRecorder {
private static final String TAG = Snapshot1PictureRecorder.class.getSimpleName();
@SuppressWarnings("unused")
private static final CameraLogger LOG = CameraLogger.create(TAG);
public class Snapshot1PictureRecorder extends SnapshotPictureRecorder {
private Camera1Engine mEngine1;
private Camera mCamera;

@ -10,7 +10,6 @@ import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.engine.Camera2Engine;
import com.otaliastudios.cameraview.engine.action.Action;
@ -44,8 +43,6 @@ import com.otaliastudios.cameraview.size.AspectRatio;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Snapshot2PictureRecorder extends SnapshotGlPictureRecorder {
private final static String TAG = Snapshot2PictureRecorder.class.getSimpleName();
private final static CameraLogger LOG = CameraLogger.create(TAG);
private final static long LOCK_TIMEOUT = 2500;
private class FlashAction extends BaseAction {

@ -9,7 +9,6 @@ import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.os.Build;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.internal.egl.EglBaseSurface;
import com.otaliastudios.cameraview.overlay.Overlay;
@ -54,10 +53,7 @@ import android.view.Surface;
* 2. We have overlays to be drawn - we don't want to draw them on the preview surface,
* not even for a frame.
*/
public class SnapshotGlPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotGlPictureRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
public class SnapshotGlPictureRecorder extends SnapshotPictureRecorder {
private CameraEngine mEngine;
private GlCameraPreview mPreview;

@ -0,0 +1,20 @@
package com.otaliastudios.cameraview.picture;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.PictureResult;
/**
* Helps with logging.
*/
public abstract class SnapshotPictureRecorder extends PictureRecorder {
private static final String TAG = SnapshotPictureRecorder.class.getSimpleName();
protected static final CameraLogger LOG = CameraLogger.create(TAG);
public SnapshotPictureRecorder(@NonNull PictureResult.Stub stub,
@Nullable PictureResultListener listener) {
super(stub, listener);
}
}

@ -3,9 +3,7 @@ package com.otaliastudios.cameraview.video;
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.util.Log;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.Camera1Engine;
import com.otaliastudios.cameraview.internal.utils.CamcorderProfiles;
@ -19,9 +17,6 @@ import androidx.annotation.NonNull;
*/
public class Full1VideoRecorder extends FullVideoRecorder {
private static final String TAG = Full1VideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private final Camera1Engine mEngine;
private final Camera mCamera;
private final int mCameraId;

@ -1,19 +1,14 @@
package com.otaliastudios.cameraview.video;
import android.annotation.SuppressLint;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Build;
import android.view.Surface;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.engine.Camera2Engine;
import com.otaliastudios.cameraview.engine.action.Action;
import com.otaliastudios.cameraview.engine.action.ActionCallback;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.BaseAction;
import com.otaliastudios.cameraview.engine.action.CompletionCallback;
@ -31,9 +26,6 @@ import androidx.annotation.RequiresApi;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Full2VideoRecorder extends FullVideoRecorder {
private static final String TAG = Full2VideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
private ActionHolder mHolder;
private final String mCameraId;
private Surface mInputSurface;

@ -2,6 +2,7 @@ package com.otaliastudios.cameraview.video;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Handler;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.VideoResult;
@ -29,7 +30,7 @@ import androidx.annotation.Nullable;
public abstract class FullVideoRecorder extends VideoRecorder {
private static final String TAG = FullVideoRecorder.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
protected static final CameraLogger LOG = CameraLogger.create(TAG);
@SuppressWarnings("WeakerAccess") protected MediaRecorder mMediaRecorder;
private CamcorderProfile mProfile;
@ -76,15 +77,22 @@ public abstract class FullVideoRecorder extends VideoRecorder {
@SuppressWarnings("SameParameterValue")
private boolean prepareMediaRecorder(@NonNull VideoResult.Stub stub,
boolean applyEncodersConstraints) {
LOG.i("prepareMediaRecorder:", "Preparing on thread", Thread.currentThread());
// 1. Create reference and ask for the CamcorderProfile
mMediaRecorder = new MediaRecorder();
mProfile = getCamcorderProfile(stub);
// 2. Set the video and audio sources.
applyVideoSource(stub, mMediaRecorder);
boolean hasAudio = stub.audio == Audio.ON
|| stub.audio == Audio.MONO
|| stub.audio == Audio.STEREO;
int audioChannels = 0;
if (stub.audio == Audio.ON) {
audioChannels = mProfile.audioChannels;
} else if (stub.audio == Audio.MONO) {
audioChannels = 1;
} else if (stub.audio == Audio.STEREO) {
audioChannels = 2;
}
boolean hasAudio = audioChannels > 0;
if (hasAudio) {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
}
@ -149,18 +157,35 @@ public abstract class FullVideoRecorder extends VideoRecorder {
LOG.i("prepareMediaRecorder:", "Checking DeviceEncoders...",
"videoOffset:", videoEncoderOffset,
"audioOffset:", audioEncoderOffset);
DeviceEncoders encoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
DeviceEncoders encoders;
try {
encoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
} catch (RuntimeException e) {
LOG.w("prepareMediaRecorder:", "Could not respect encoders parameters.",
"Trying again without checking encoders.");
return prepareMediaRecorder(stub, false);
}
try {
newVideoSize = encoders.getSupportedVideoSize(stub.size);
newVideoBitRate = encoders.getSupportedVideoBitRate(stub.videoBitRate);
newAudioBitRate = encoders.getSupportedAudioBitRate(stub.audioBitRate);
newVideoFrameRate = encoders.getSupportedVideoFrameRate(newVideoSize,
stub.videoFrameRate);
encoders.tryConfigureVideo(videoType, newVideoSize, newVideoFrameRate,
newVideoBitRate);
if (hasAudio) {
newAudioBitRate = encoders.getSupportedAudioBitRate(stub.audioBitRate);
encoders.tryConfigureAudio(audioType, newAudioBitRate,
mProfile.audioSampleRate, audioChannels);
}
encodersFound = true;
} catch (DeviceEncoders.VideoException videoException) {
LOG.i("prepareMediaRecorder:", "Got VideoException:",
videoException.getMessage());
videoEncoderOffset++;
} catch (DeviceEncoders.AudioException audioException) {
LOG.i("prepareMediaRecorder:", "Got AudioException:",
audioException.getMessage());
audioEncoderOffset++;
}
}
@ -183,13 +208,7 @@ public abstract class FullVideoRecorder extends VideoRecorder {
// 6B. Configure MediaRecorder from stub and from profile (audio)
if (hasAudio) {
if (stub.audio == Audio.ON) {
mMediaRecorder.setAudioChannels(mProfile.audioChannels);
} else if (stub.audio == Audio.MONO) {
mMediaRecorder.setAudioChannels(1);
} else if (stub.audio == Audio.STEREO) {
mMediaRecorder.setAudioChannels(2);
}
mMediaRecorder.setAudioChannels(audioChannels);
mMediaRecorder.setAudioSamplingRate(mProfile.audioSampleRate);
mMediaRecorder.setAudioEncoder(mProfile.audioCodec);
mMediaRecorder.setAudioEncodingBitRate(stub.audioBitRate);
@ -203,21 +222,49 @@ public abstract class FullVideoRecorder extends VideoRecorder {
}
mMediaRecorder.setOutputFile(stub.file.getAbsolutePath());
mMediaRecorder.setOrientationHint(stub.rotation);
mMediaRecorder.setMaxFileSize(stub.maxSize);
// When using MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED, the recorder might have stopped
// before calling it. But this creates issues on Camera2 Legacy devices - they need a
// callback BEFORE the recorder stops (see Camera2Engine). For this reason, we increase
// the max size and use MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING instead.
// Would do this with max duration as well but there's no such callback.
mMediaRecorder.setMaxFileSize(stub.maxSize <= 0 ? stub.maxSize
: Math.round(stub.maxSize / 0.9D));
LOG.i("prepareMediaRecorder:", "Increased max size from", stub.maxSize, "to",
Math.round(stub.maxSize / 0.9D));
mMediaRecorder.setMaxDuration(stub.maxDuration);
mMediaRecorder.setOnInfoListener(new MediaRecorder.OnInfoListener() {
@Override
public void onInfo(MediaRecorder mediaRecorder, int what, int extra) {
LOG.i("OnInfoListener:", "Received info", what, extra,
"Thread: ", Thread.currentThread());
boolean shouldStop = false;
switch (what) {
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED:
mResult.endReason = VideoResult.REASON_MAX_DURATION_REACHED;
stop(false);
shouldStop = true;
break;
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING:
case MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED:
// On rare occasions APPROACHING is not called. Make sure we listen to
// REACHED as well.
mResult.endReason = VideoResult.REASON_MAX_SIZE_REACHED;
stop(false);
shouldStop = true;
break;
}
if (shouldStop) {
LOG.i("OnInfoListener:", "Stopping");
stop(false);
}
}
});
mMediaRecorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
@Override
public void onError(MediaRecorder mr, int what, int extra) {
LOG.e("OnErrorListener: got error", what, extra, ". Stopping.");
mResult = null;
mError = new RuntimeException("MediaRecorder error: " + what + " " + extra);
LOG.i("OnErrorListener:", "Stopping");
stop(false);
}
});
@ -259,7 +306,10 @@ public abstract class FullVideoRecorder extends VideoRecorder {
if (mMediaRecorder != null) {
dispatchVideoRecordingEnd();
try {
LOG.i("stop:", "Stopping MediaRecorder...");
// TODO HANGS (rare, emulator only)
mMediaRecorder.stop();
LOG.i("stop:", "Stopped MediaRecorder.");
} catch (Exception e) {
// This can happen if stopVideo() is called right after takeVideo()
// (in which case we don't care). Or when prepare()/start() have failed for
@ -271,7 +321,17 @@ public abstract class FullVideoRecorder extends VideoRecorder {
mError = e;
}
}
mMediaRecorder.release();
try {
LOG.i("stop:", "Releasing MediaRecorder...");
mMediaRecorder.release();
LOG.i("stop:", "Released MediaRecorder.");
} catch (Exception e) {
mResult = null;
if (mError == null) {
LOG.w("stop:", "Error while releasing media recorder.", e);
mError = e;
}
}
}
mProfile = null;
mMediaRecorder = null;

@ -146,6 +146,19 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
case DEVICE_DEFAULT: videoType = "video/avc"; break;
}
String audioType = "audio/mp4a-latm";
TextureConfig videoConfig = new TextureConfig();
AudioConfig audioConfig = new AudioConfig();
// See if we have audio
int audioChannels = 0;
if (mResult.audio == Audio.ON) {
audioChannels = audioConfig.channels;
} else if (mResult.audio == Audio.MONO) {
audioChannels = 1;
} else if (mResult.audio == Audio.STEREO) {
audioChannels = 2;
}
boolean hasAudio = audioChannels > 0;
// Check the availability of values
Size newVideoSize = null;
@ -157,18 +170,42 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
boolean encodersFound = false;
DeviceEncoders deviceEncoders = null;
while (!encodersFound) {
LOG.i("Checking DeviceEncoders...",
"videoOffset:", videoEncoderOffset,
"audioOffset:", audioEncoderOffset);
try {
deviceEncoders = new DeviceEncoders(DeviceEncoders.MODE_RESPECT_ORDER,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
} catch (RuntimeException e) {
LOG.w("Could not respect encoders parameters.",
"Going on again without checking encoders, possibly failing.");
newVideoSize = mResult.size;
newVideoBitRate = mResult.videoBitRate;
newVideoFrameRate = mResult.videoFrameRate;
newAudioBitRate = mResult.audioBitRate;
break;
}
deviceEncoders = new DeviceEncoders(DeviceEncoders.MODE_PREFER_HARDWARE,
videoType, audioType, videoEncoderOffset, audioEncoderOffset);
try {
newVideoSize = deviceEncoders.getSupportedVideoSize(mResult.size);
newVideoBitRate = deviceEncoders.getSupportedVideoBitRate(mResult.videoBitRate);
newAudioBitRate = deviceEncoders.getSupportedAudioBitRate(mResult.audioBitRate);
newVideoFrameRate = deviceEncoders.getSupportedVideoFrameRate(newVideoSize,
mResult.videoFrameRate);
deviceEncoders.tryConfigureVideo(videoType, newVideoSize, newVideoFrameRate,
newVideoBitRate);
if (hasAudio) {
newAudioBitRate = deviceEncoders
.getSupportedAudioBitRate(mResult.audioBitRate);
deviceEncoders.tryConfigureAudio(audioType, newAudioBitRate,
audioConfig.samplingFrequency, audioChannels);
}
encodersFound = true;
} catch (DeviceEncoders.VideoException videoException) {
LOG.i("Got VideoException:", videoException.getMessage());
videoEncoderOffset++;
} catch (DeviceEncoders.AudioException audioException) {
LOG.i("Got AudioException:", audioException.getMessage());
audioEncoderOffset++;
}
}
@ -178,7 +215,6 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
mResult.videoFrameRate = newVideoFrameRate;
// Video
TextureConfig videoConfig = new TextureConfig();
videoConfig.width = mResult.size.getWidth();
videoConfig.height = mResult.size.getHeight();
videoConfig.bitRate = mResult.videoBitRate;
@ -206,12 +242,9 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
// Audio
AudioMediaEncoder audioEncoder = null;
if (mResult.audio == Audio.ON || mResult.audio == Audio.MONO
|| mResult.audio == Audio.STEREO) {
AudioConfig audioConfig = new AudioConfig();
if (hasAudio) {
audioConfig.bitRate = mResult.audioBitRate;
if (mResult.audio == Audio.MONO) audioConfig.channels = 1;
if (mResult.audio == Audio.STEREO) audioConfig.channels = 2;
audioConfig.channels = audioChannels;
audioConfig.encoder = deviceEncoders.getAudioEncoder();
audioEncoder = new AudioMediaEncoder(audioConfig);
}
@ -231,16 +264,17 @@ public class SnapshotVideoRecorder extends VideoRecorder implements RendererFram
}
if (mCurrentState == STATE_RECORDING) {
LOG.v("dispatching frame.");
TextureMediaEncoder textureEncoder
= (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
frame.timestampNanos = surfaceTexture.getTimestamp();
// NOTE: this is an approximation but it seems to work:
frame.timestampMillis = System.currentTimeMillis();
surfaceTexture.getTransformMatrix(frame.transform);
LOG.i("scheduling frame.");
synchronized (mEncoderEngineLock) {
if (mEncoderEngine != null) { // Can be null on teardown.
LOG.i("dispatching frame.");
TextureMediaEncoder textureEncoder
= (TextureMediaEncoder) mEncoderEngine.getVideoEncoder();
TextureMediaEncoder.Frame frame = textureEncoder.acquireFrame();
frame.timestampNanos = surfaceTexture.getTimestamp();
// NOTE: this is an approximation but it seems to work:
frame.timestampMillis = System.currentTimeMillis();
surfaceTexture.getTransformMatrix(frame.transform);
mEncoderEngine.notify(TextureMediaEncoder.FRAME_EVENT, frame);
}
}

@ -73,6 +73,7 @@ public abstract class VideoRecorder {
"Ignoring. state:", mState);
return;
}
LOG.i("start:", "Changed state to STATE_RECORDING");
mState = STATE_RECORDING;
}
mResult = stub;
@ -91,6 +92,7 @@ public abstract class VideoRecorder {
"Ignoring. isCameraShutdown:", isCameraShutdown);
return;
}
LOG.i("stop:", "Changed state to STATE_STOPPING");
mState = STATE_STOPPING;
}
onStop(isCameraShutdown);
@ -123,10 +125,15 @@ public abstract class VideoRecorder {
*/
protected final void dispatchResult() {
synchronized (mStateLock) {
if (!isRecording()) return;
if (!isRecording()) {
LOG.w("dispatchResult:", "Called, but not recording! Aborting.");
return;
}
LOG.i("dispatchResult:", "Changed state to STATE_IDLE.");
mState = STATE_IDLE;
}
onDispatchResult();
LOG.i("dispatchResult:", "About to dispatch result:", mResult, mError);
if (mListener != null) {
mListener.onVideoResult(mResult, mError);
}
@ -148,6 +155,7 @@ public abstract class VideoRecorder {
@SuppressWarnings("WeakerAccess")
@CallSuper
protected void dispatchVideoRecordingStart() {
LOG.i("dispatchVideoRecordingStart:", "About to dispatch.");
if (mListener != null) {
mListener.onVideoRecordingStart();
}
@ -160,6 +168,7 @@ public abstract class VideoRecorder {
@SuppressWarnings("WeakerAccess")
@CallSuper
protected void dispatchVideoRecordingEnd() {
LOG.i("dispatchVideoRecordingEnd:", "About to dispatch.");
if (mListener != null) {
mListener.onVideoRecordingEnd();
}

@ -16,11 +16,11 @@ public class AudioConfig {
public int channels = 1;
public String encoder;
public String mimeType = "audio/mp4a-latm";
public int samplingFrequency = 44100; // samples/sec
// Not configurable options (for now)
final int encoding = AudioFormat.ENCODING_PCM_16BIT; // Determines the sampleSizePerChannel
// The 44.1KHz frequency is the only setting guaranteed to be available on all devices.
final int samplingFrequency = 44100; // samples/sec
// If sampleSizePerChannel changes, review noise introduction
final int sampleSizePerChannel = 2; // byte/sample/channel [16bit].
final int byteRatePerChannel = samplingFrequency * sampleSizePerChannel; // byte/sec/channel
@ -28,10 +28,11 @@ public class AudioConfig {
@NonNull
AudioConfig copy() {
AudioConfig config = new AudioConfig();
config.bitRate = this.bitRate;
config.channels = this.channels;
config.encoder = this.encoder;
config.bitRate = bitRate;
config.channels = channels;
config.encoder = encoder;
config.mimeType = mimeType;
config.samplingFrequency = samplingFrequency;
return config;
}

@ -349,14 +349,17 @@ public class AudioMediaEncoder extends MediaEncoder {
*/
private class AudioEncodingThread extends Thread {
private AudioEncodingThread() {
setPriority(Thread.MAX_PRIORITY);
// Not sure about this... This thread can do VERY time consuming operations,
// and slowing down the preview/camera threads can break them e.g. hit internal
// timeouts for camera requests to be consumed.
// setPriority(Thread.MAX_PRIORITY);
}
@Override
public void run() {
encoding: while (true) {
if (mInputBufferQueue.isEmpty()) {
skipFrames(2);
skipFrames(3);
} else {
LOG.v("encoding thread - performing", mInputBufferQueue.size(),
"pending operations.");
@ -387,7 +390,7 @@ public class AudioMediaEncoder extends MediaEncoder {
} else if (tryAcquireInputBuffer(inputBuffer)) {
encode(inputBuffer);
} else {
skipFrames(1);
skipFrames(3);
}
}
}

@ -397,7 +397,7 @@ public abstract class MediaEncoder {
@SuppressLint("LogNotTimber")
@SuppressWarnings("WeakerAccess")
protected final void drainOutput(boolean drainAll) {
LOG.v(mName, "DRAINING - EOS:", drainAll);
LOG.i(mName, "DRAINING - EOS:", drainAll);
if (mMediaCodec == null) {
LOG.e("drain() was called before prepare() or after releasing.");
return;
@ -407,6 +407,7 @@ public abstract class MediaEncoder {
}
while (true) {
int encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, OUTPUT_TIMEOUT_US);
LOG.i(mName, "DRAINING - Got status:", encoderStatus);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
if (!drainAll) break; // out of while
@ -534,14 +535,17 @@ public abstract class MediaEncoder {
* max length allowed. We will move to {@link #STATE_LIMIT_REACHED} and request a stop.
*/
private void onMaxLengthReached() {
if (mMaxLengthReached) return;
mMaxLengthReached = true;
if (mState >= STATE_LIMIT_REACHED) {
LOG.w(mName, "onMaxLengthReached: Reached in wrong state. Aborting.", mState);
if (mMaxLengthReached) {
LOG.w(mName, "onMaxLengthReached: Called twice.");
} else {
LOG.w(mName, "onMaxLengthReached: Requesting a stop.");
setState(STATE_LIMIT_REACHED);
mController.requestStop(mTrackIndex);
mMaxLengthReached = true;
if (mState >= STATE_LIMIT_REACHED) {
LOG.w(mName, "onMaxLengthReached: Reached in wrong state. Aborting.", mState);
} else {
LOG.w(mName, "onMaxLengthReached: Requesting a stop.");
setState(STATE_LIMIT_REACHED);
mController.requestStop(mTrackIndex);
}
}
}

@ -119,12 +119,13 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
@Override
protected boolean shouldRenderFrame(long timestampUs) {
if (!super.shouldRenderFrame(timestampUs)) {
LOG.i("shouldRenderFrame - Dropping frame because of super()");
return false;
} else if (mFrameNumber <= 10) {
// Always render the first few frames, or muxer fails.
return true;
} else if (getPendingEvents(FRAME_EVENT) > 2) {
LOG.v("shouldRenderFrame - Dropping, we already have too many pending events:",
LOG.i("shouldRenderFrame - Dropping, we already have too many pending events:",
getPendingEvents(FRAME_EVENT));
return false;
} else {
@ -177,17 +178,21 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
}
// First, drain any previous data.
LOG.v("onEvent -",
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- draining.");
drainOutput(false);
// Then draw on the surface.
LOG.v("onEvent -",
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"- rendering.");
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- drawing.");
// 1. We must scale this matrix like GlCameraPreview does, because it might have some
// cropping. Scaling takes place with respect to the (0, 0, 0) point, so we must apply
@ -218,6 +223,12 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
Matrix.translateM(mConfig.overlayDrawer.getTransform(),
0, -0.5F, -0.5F, 0);
}
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- gl rendering.");
mViewport.drawFrame(frame.timestampUs(), mConfig.textureId, transform);
if (mConfig.hasOverlay()) {
mConfig.overlayDrawer.render(frame.timestampUs());
@ -225,6 +236,12 @@ public class TextureMediaEncoder extends VideoMediaEncoder<TextureConfig> {
mWindow.setPresentationTime(frame.timestampNanos);
mWindow.swapBuffers();
mFramePool.recycle(frame);
LOG.i("onEvent -",
"frameNumber:", mFrameNumber,
"timestampUs:", frame.timestampUs(),
"hasReachedMaxLength:", hasReachedMaxLength(),
"thread:", Thread.currentThread(),
"- gl rendered.");
}
@Override

@ -1,15 +1,15 @@
coverage:
precision: 1
round: down
range: "30...70"
round: nearest
range: "10...90"
status:
project:
default:
target: 40%
target: 70%
patch:
default:
target: 70%
target: 60%
changes: no
comment:

Loading…
Cancel
Save