New tests, add CameraController.STATE (#37)

* Add set/get parameters tests

* CameraCallbacks tests

* Nits

* Nits 2

* Nits 3

* Nits 4

* CameraController integration tests

* Fix some bugs

* Add CameraController.STATE, fix CameraView.isStarted()

* Fix old tests

* Video and focus tests

* Remove video test, fails on Travis

* Add picture tests

* Remove other video test

* Add audio tests

* new travis.yml

* WorkerHandler changes

* Better GridLinesLayoutTest

* Try add logcat to travis build

* Debug failing test

* Add logs

* Grant permissions, bump tests to N

* Add wake locks

* Add wake lock permission

* Remove integration tests

* Change view matcher

* Refinements
pull/43/head
Mattia Iavarone 7 years ago committed by GitHub
parent 0a8e2f3de7
commit cf3bfc1ed0
  1. 52
      .travis.yml
  2. 1
      README.md
  3. 4
      cameraview/src/androidTest/AndroidManifest.xml
  4. 20
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/AspectRatioTest.java
  5. 134
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/BaseTest.java
  6. 304
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraCallbacksTest.java
  7. 2
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraLoggerTest.java
  8. 21
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraOptions1Test.java
  9. 24
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraUtilsTest.java
  10. 181
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
  11. 50
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/CropHelperTest.java
  12. 2
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/ExtraProperties1Test.java
  13. 5
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/GestureLayoutTest.java
  14. 44
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/GridLinesLayoutTest.java
  15. 507
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/IntegrationTest.java
  16. 2
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/Mapper1Test.java
  17. 14
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockCameraController.java
  18. 5
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/MockPreview.java
  19. 5
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/OrientationHelperTest.java
  20. 22
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/PinchGestureLayoutTest.java
  21. 6
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/PreviewTest.java
  22. 9
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/ScrollGestureLayoutTest.java
  23. 10
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/TapGestureLayoutTest.java
  24. 1
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/TestActivity.java
  25. 18
      cameraview/src/androidTest/java/com/otaliastudios/cameraview/WorkerHandlerTest.java
  26. 2
      cameraview/src/main/java/com/otaliastudios/cameraview/AspectRatio.java
  27. 186
      cameraview/src/main/java/com/otaliastudios/cameraview/Camera1.java
  28. 159
      cameraview/src/main/java/com/otaliastudios/cameraview/Camera2.java
  29. 162
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraController.java
  30. 135
      cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
  31. 4
      cameraview/src/main/utils/com/otaliastudios/cameraview/CameraUtils.java
  32. 16
      cameraview/src/main/utils/com/otaliastudios/cameraview/Task.java
  33. 23
      cameraview/src/main/utils/com/otaliastudios/cameraview/WorkerHandler.java
  34. 8
      cameraview/src/main/views/com/otaliastudios/cameraview/GridLinesLayout.java
  35. 11
      cameraview/src/test/java/com/otaliastudios/cameraview/SizeTest.java
  36. 6
      codecov.yml

@ -1,3 +1,5 @@
# https://github.com/andstatus/andstatus/blob/master/.travis.yml
language: android
branches:
@ -12,9 +14,11 @@ jdk:
env:
global:
# Run android tests on api level 22
- EMULATOR_API=22
# Where to run androidTests
- EMULATOR_API=22 # 24 has some issues, probably some overlayed window
- EMULATOR_ABI=armeabi-v7a
- EMULATOR_TAG=default
- PATH=$ANDROID_HOME:$ANDROID_HOME/emulator:$ANDROID_HOME/platform-tools:$PATH
android:
components:
@ -23,16 +27,44 @@ android:
- build-tools-26.0.1
- android-26
- doc-26
# Android tests
- android-$EMULATOR_API
- sys-img-$EMULATOR_ABI-android-$EMULATOR_API
# sys-img-x86-google_apis-26
install:
# Setup
- echo $ANDROID_HOME # We assume this is correctly set when setting path
- sdkmanager --list || true # Look at the packages
- echo yes | sdkmanager "tools" # Ensure tools is updated
- echo yes | sdkmanager "emulator" # Ensure emulator is present
# Install emulator
- export EMULATOR="system-images;android-$EMULATOR_API;$EMULATOR_TAG;$EMULATOR_ABI"
- echo yes | sdkmanager "platforms;android-$EMULATOR_API" # Install sdk
- echo yes | sdkmanager "$EMULATOR" # Install system image
- sdkmanager --list || true # Check everything is updated
# Create adn start emulator
- echo no | avdmanager create avd -n test -k "$EMULATOR" -f # Create emulator
- which emulator # ensure we are using the right emulator (home/emulator/)
- emulator -avd test -no-window -camera-back emulated -camera-front emulated -memory 2048 -writable-system & # Launch
- adb wait-for-device # Wait for adb process
- adb remount # Mount as writable
before_script:
- echo no | android create avd --force --name test --target android-$EMULATOR_API --abi $EMULATOR_ABI
- emulator -avd test -no-audio -no-window &
- android-wait-for-emulator
- adb shell input keyevent 82 &
# Wait for emulator
- android-wait-for-emulator # Wait for emulator ready to interact
- adb shell settings put global window_animation_scale 0 & # Disable animations
- adb shell settings put global transition_animation_scale 0 & # Disable animations
- adb shell settings put global animator_duration_scale 0 & # Disable animations
# Unlock and configure logs.
# Would be great to use -v color to adb logcat but looks not supported on travis.
- sleep 20 # Sleep 20 seconds just in case
- adb shell input keyevent 82 & # Dispatch unlock event
- adb logcat --help # See if this version supports color
- adb logcat -c # Clear logcat
- adb logcat Test:V TestRunner:V CameraView:V CameraController:V Camera1:V WorkerHandler:V THREAD_STATE:S *:E &
# - export LOGCAT_PID=$! # Save PID of the logcat process. Should kill later with kill $LOGCAT_PID
script:
- ./gradlew clean testDebugUnitTest connectedCheck mergedCoverageReport

@ -433,6 +433,7 @@ Other APIs not mentioned above are provided, and are well documented and comment
|`toggleFlash()`|Toggles the flash value between `Flash.OFF`, `Flash.ON`, and `Flash.AUTO`.|
|`setLocation(Location)`|Sets location data to be appended to picture/video metadata.|
|`setLocation(double, double)`|Sets latitude and longitude to be appended to picture/video metadata.|
|`getLocation()`|Retrieves location data previously applied with setLocation().|
|`startAutoFocus(float, float)`|Starts an autofocus process at the given coordinates, with respect to the view dimensions.|
|`getPreviewSize()`|Returns the size of the preview surface. If CameraView was not constrained in its layout phase (e.g. it was `wrap_content`), this will return the same aspect ratio of CameraView.|
|`getSnapshotSize()`|Returns `getPreviewSize()`, since a snapshot is a preview frame.|

@ -3,9 +3,13 @@
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WAKE_LOCK"/>
<uses-permission android:name="android.permission.DISABLE_KEYGUARD"/>
<application>
<activity
android:configChanges="orientation|screenLayout|keyboardHidden"
android:hardwareAccelerated="true"
android:name=".TestActivity"/>
</application>

@ -16,7 +16,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class AspectRatioTest {
public class AspectRatioTest extends BaseTest {
@Test
public void testConstructor() {
@ -29,13 +29,31 @@ public class AspectRatioTest {
@Test
public void testEquals() {
AspectRatio ratio = AspectRatio.of(50, 10);
assertFalse(ratio.equals(null));
assertTrue(ratio.equals(ratio));
AspectRatio ratio1 = AspectRatio.of(5, 1);
assertTrue(ratio.equals(ratio1));
AspectRatio.sCache.clear();
AspectRatio ratio2 = AspectRatio.of(500, 100);
assertTrue(ratio.equals(ratio2));
Size size = new Size(500, 100);
assertTrue(ratio.matches(size));
}
@Test
public void testCompare() {
AspectRatio ratio1 = AspectRatio.of(10, 10);
AspectRatio ratio2 = AspectRatio.of(10, 2);
AspectRatio ratio3 = AspectRatio.of(2, 10);
assertTrue(ratio1.compareTo(ratio2) < 0);
assertTrue(ratio1.compareTo(ratio3) > 0);
assertTrue(ratio1.compareTo(ratio1) == 0);
assertNotEquals(ratio1.hashCode(), ratio2.hashCode());
}
@Test
public void testInverse() {
AspectRatio ratio = AspectRatio.of(50, 10);

@ -1,19 +1,76 @@
package com.otaliastudios.cameraview;
import android.annotation.SuppressLint;
import android.app.KeyguardManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.PowerManager;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.espresso.core.internal.deps.guava.collect.ObjectArrays;
import android.support.test.rule.ActivityTestRule;
import android.view.View;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.Stubber;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.concurrent.CountDownLatch;
import static android.content.Context.KEYGUARD_SERVICE;
import static android.content.Context.POWER_SERVICE;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class BaseTest {
public static CameraLogger LOG = CameraLogger.create("Test");
private static KeyguardManager.KeyguardLock keyguardLock;
private static PowerManager.WakeLock wakeLock;
// https://github.com/linkedin/test-butler/blob/bc2bb4df13d0a554d2e2b0ea710795017717e710/test-butler-app/src/main/java/com/linkedin/android/testbutler/ButlerService.java#L121
@BeforeClass
@SuppressWarnings("MissingPermission")
public static void wakeUp() {
CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
// Acquire a keyguard lock to prevent the lock screen from randomly appearing and breaking tests
KeyguardManager keyguardManager = (KeyguardManager) context().getSystemService(KEYGUARD_SERVICE);
keyguardLock = keyguardManager.newKeyguardLock("CameraViewLock");
keyguardLock.disableKeyguard();
// Acquire a wake lock to prevent the cpu from going to sleep and breaking tests
PowerManager powerManager = (PowerManager) context().getSystemService(POWER_SERVICE);
wakeLock = powerManager.newWakeLock(PowerManager.FULL_WAKE_LOCK
| PowerManager.ACQUIRE_CAUSES_WAKEUP
| PowerManager.ON_AFTER_RELEASE, "CameraViewLock");
wakeLock.acquire();
}
@AfterClass
@SuppressWarnings("MissingPermission")
public static void releaseWakeUp() {
wakeLock.release();
keyguardLock.reenableKeyguard();
}
public static void ui(Runnable runnable) {
InstrumentationRegistry.getInstrumentation().runOnMainSync(runnable);
}
@ -35,7 +92,82 @@ public class BaseTest {
});
}
public static void waitUi() {
public static void idle() {
InstrumentationRegistry.getInstrumentation().waitForIdleSync();
}
public static void sleep(long time) {
try {
Thread.sleep(time);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public static void grantPermissions() {
grantPermission("android.permission.CAMERA");
grantPermission("android.permission.RECORD_AUDIO");
grantPermission("android.permission.WRITE_EXTERNAL_STORAGE");
}
public static void grantPermission(String permission) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) return;
String command = "pm grant " + context().getPackageName() + " " + permission;
InstrumentationRegistry.getInstrumentation().getUiAutomation().executeShellCommand(command);
}
public static byte[] mockJpeg(int width, int height) {
Bitmap source = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
ByteArrayOutputStream os = new ByteArrayOutputStream();
source.compress(Bitmap.CompressFormat.JPEG, 100, os);
return os.toByteArray();
}
public static YuvImage mockYuv(int width, int height) {
YuvImage y = mock(YuvImage.class);
when(y.getWidth()).thenReturn(width);
when(y.getHeight()).thenReturn(height);
when(y.compressToJpeg(any(Rect.class), anyInt(), any(OutputStream.class))).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
Rect rect = (Rect) invocation.getArguments()[0];
OutputStream stream = (OutputStream) invocation.getArguments()[2];
stream.write(mockJpeg(rect.width(), rect.height()));
return true;
}
});
return y;
}
public static Stubber doCountDown(final CountDownLatch latch) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
latch.countDown();
return null;
}
});
}
public static <T> Stubber doEndTask(final Task<T> task, final T response) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
task.end(response);
return null;
}
});
}
public static Stubber doEndTask(final Task task, final int withReturnArgument) {
return doAnswer(new Answer<Object>() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
Object o = invocation.getArguments()[withReturnArgument];
//noinspection unchecked
task.end(o);
return null;
}
});
}
}

@ -0,0 +1,304 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.support.test.filters.MediumTest;
import android.support.test.runner.AndroidJUnit4;
import android.view.ViewGroup;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import static junit.framework.Assert.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyFloat;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@MediumTest
public class CameraCallbacksTest extends BaseTest {
private CameraView camera;
private CameraView.CameraCallbacks callbacks;
private CameraListener listener;
private MockCameraController mockController;
private MockPreview mockPreview;
private Task<Boolean> task;
@Before
public void setUp() {
ui(new Runnable() {
@Override
public void run() {
Context context = context();
listener = mock(CameraListener.class);
camera = new CameraView(context) {
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks, Preview preview) {
mockController = new MockCameraController(callbacks, preview);
return mockController;
}
@Override
protected Preview instantiatePreview(Context context, ViewGroup container) {
mockPreview = new MockPreview(context, container);
return mockPreview;
}
@Override
protected boolean checkPermissions(SessionType sessionType, Audio audio) {
return true;
}
};
camera.addCameraListener(listener);
callbacks = camera.mCameraCallbacks;
task = new Task<>();
task.listen();
}
});
}
@After
public void tearDown() {
camera = null;
mockController = null;
mockPreview = null;
callbacks = null;
listener = null;
}
// Completes our task.
private Answer completeTask() {
return new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
task.end(true);
return null;
}
};
}
@Test
public void testDontDispatchIfRemoved() {
camera.removeCameraListener(listener);
doAnswer(completeTask()).when(listener).onCameraOpened(null);
callbacks.dispatchOnCameraOpened(null);
assertNull(task.await(200));
verify(listener, never()).onCameraOpened(null);
}
@Test
public void testDontDispatchIfCleared() {
camera.clearCameraListeners();
doAnswer(completeTask()).when(listener).onCameraOpened(null);
callbacks.dispatchOnCameraOpened(null);
assertNull(task.await(200));
verify(listener, never()).onCameraOpened(null);
}
@Test
public void testDispatchOnCameraOpened() {
doAnswer(completeTask()).when(listener).onCameraOpened(null);
callbacks.dispatchOnCameraOpened(null);
assertNotNull(task.await(200));
verify(listener, times(1)).onCameraOpened(null);
}
@Test
public void testDispatchOnCameraClosed() {
doAnswer(completeTask()).when(listener).onCameraClosed();
callbacks.dispatchOnCameraClosed();
assertNotNull(task.await(200));
verify(listener, times(1)).onCameraClosed();
}
@Test
public void testDispatchOnVideoTaken() {
doAnswer(completeTask()).when(listener).onVideoTaken(null);
callbacks.dispatchOnVideoTaken(null);
assertNotNull(task.await(200));
verify(listener, times(1)).onVideoTaken(null);
}
@Test
public void testDispatchOnZoomChanged() {
doAnswer(completeTask()).when(listener).onZoomChanged(anyFloat(), any(float[].class), any(PointF[].class));
callbacks.dispatchOnZoomChanged(0f, null);
assertNotNull(task.await(200));
verify(listener, times(1)).onZoomChanged(anyFloat(), any(float[].class), any(PointF[].class));
}
@Test
public void testDispatchOnExposureCorrectionChanged() {
doAnswer(completeTask()).when(listener).onExposureCorrectionChanged(0f, null, null);
callbacks.dispatchOnExposureCorrectionChanged(0f, null, null);
assertNotNull(task.await(200));
verify(listener, times(1)).onExposureCorrectionChanged(0f, null, null);
}
@Test
public void testDispatchOnFocusStart() {
// Enable tap gesture.
// Can't mock package protected. camera.mTapGestureLayout = mock(TapGestureLayout.class);
camera.mapGesture(Gesture.TAP, GestureAction.FOCUS_WITH_MARKER);
PointF point = new PointF();
doAnswer(completeTask()).when(listener).onFocusStart(point);
callbacks.dispatchOnFocusStart(Gesture.TAP, point);
assertNotNull(task.await(200));
verify(listener, times(1)).onFocusStart(point);
// Can't mock package protected. verify(camera.mTapGestureLayout, times(1)).onFocusStart(point);
}
@Test
public void testDispatchOnFocusEnd() {
// Enable tap gesture.
// Can't mock package protected. camera.mTapGestureLayout = mock(TapGestureLayout.class);
camera.mapGesture(Gesture.TAP, GestureAction.FOCUS_WITH_MARKER);
PointF point = new PointF();
boolean success = true;
doAnswer(completeTask()).when(listener).onFocusEnd(success, point);
callbacks.dispatchOnFocusEnd(Gesture.TAP, success, point);
assertNotNull(task.await(200));
verify(listener, times(1)).onFocusEnd(success, point);
// Can't mock package protected. verify(camera.mTapGestureLayout, times(1)).onFocusEnd(success);
}
@Test
public void testOrientationCallbacks_deviceOnly() {
doAnswer(completeTask()).when(listener).onOrientationChanged(anyInt());
// Assert not called. Both methods must be called.
callbacks.onDeviceOrientationChanged(0);
assertNull(task.await(200));
verify(listener, never()).onOrientationChanged(anyInt());
}
@Test
public void testOrientationCallbacks_displayOnly() {
doAnswer(completeTask()).when(listener).onOrientationChanged(anyInt());
// Assert not called. Both methods must be called.
callbacks.onDisplayOffsetChanged(0);
assertNull(task.await(200));
verify(listener, never()).onOrientationChanged(anyInt());
}
@Test
public void testOrientationCallbacks_both() {
doAnswer(completeTask()).when(listener).onOrientationChanged(anyInt());
// Assert called.
callbacks.onDisplayOffsetChanged(0);
callbacks.onDeviceOrientationChanged(90);
assertNotNull(task.await(200));
verify(listener, times(1)).onOrientationChanged(anyInt());
}
@Test
public void testProcessJpeg() {
int[] viewDim = new int[]{ 200, 200 };
int[] imageDim = new int[]{ 1000, 1600 };
// With crop flag: expect a 1:1 ratio.
int[] output = testProcessImage(true, true, viewDim, imageDim);
LOG.i("testProcessJpeg", output);
assertEquals(output[0], 1000);
assertEquals(output[1], 1000);
// Without crop flag: expect original ratio.
output = testProcessImage(true, false, viewDim, imageDim);
LOG.i("testProcessJpeg", output);
assertEquals(output[0], imageDim[0]);
assertEquals(output[1], imageDim[1]);
}
@Test
public void testProcessYuv() {
int[] viewDim = new int[]{ 200, 200 };
int[] imageDim = new int[]{ 1000, 1600 };
// With crop flag: expect a 1:1 ratio.
int[] output = testProcessImage(false, true, viewDim, imageDim);
LOG.i("testProcessYuv", output);
assertEquals(output[0], 1000);
assertEquals(output[1], 1000);
// Without crop flag: expect original ratio.
output = testProcessImage(false, false, viewDim, imageDim);
LOG.i("testProcessYuv", output);
assertEquals(output[0], imageDim[0]);
assertEquals(output[1], imageDim[1]);
}
private int[] testProcessImage(boolean jpeg, boolean crop, int[] viewDim, int[] imageDim) {
// End our task when onPictureTaken is called. Take note of the result.
final Task<byte[]> jpegTask = new Task<>();
jpegTask.listen();
doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
jpegTask.end((byte[]) invocation.getArguments()[0]);
return null;
}
}).when(listener).onPictureTaken(any(byte[].class));
// Fake our own dimensions.
camera.setTop(0);
camera.setBottom(viewDim[1]);
camera.setLeft(0);
camera.setRight(viewDim[0]);
// Ensure the image will (not) be cropped.
camera.setCropOutput(crop);
mockPreview.setIsCropping(crop);
// Create fake JPEG array and trigger the process.
if (jpeg) {
callbacks.processImage(mockJpeg(imageDim[0], imageDim[1]), true, false);
} else {
callbacks.processSnapshot(mockYuv(imageDim[0], imageDim[1]), true, false);
}
// Wait for result and get out dimensions.
byte[] result = jpegTask.await(3000);
assertNotNull("Image was processed", result);
Bitmap bitmap = BitmapFactory.decodeByteArray(result, 0, result.length);
return new int[]{ bitmap.getWidth(), bitmap.getHeight() };
}
}

@ -11,7 +11,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class CameraLoggerTest {
public class CameraLoggerTest extends BaseTest {
@Test
public void testLoggerLevels() {

@ -11,6 +11,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@ -20,7 +21,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class CameraOptions1Test {
public class CameraOptions1Test extends BaseTest {
@Test
public void testEmpty() {
@ -51,9 +52,27 @@ public class CameraOptions1Test {
assertEquals(o.getSupportedFacing().size(), supported.size());
for (Facing facing : s) {
assertTrue(supported.contains(m.<Integer>map(facing)));
assertTrue(o.supports(facing));
}
}
@Test
public void testGestureActions() {
Camera.Parameters params = mock(Camera.Parameters.class);
when(params.getSupportedFocusModes()).thenReturn(Collections.<String>emptyList());
when(params.isZoomSupported()).thenReturn(true);
when(params.getMaxExposureCompensation()).thenReturn(0);
when(params.getMinExposureCompensation()).thenReturn(0);
CameraOptions o = new CameraOptions(params);
assertFalse(o.supports(GestureAction.FOCUS));
assertFalse(o.supports(GestureAction.FOCUS_WITH_MARKER));
assertTrue(o.supports(GestureAction.CAPTURE));
assertTrue(o.supports(GestureAction.NONE));
assertTrue(o.supports(GestureAction.ZOOM));
assertFalse(o.supports(GestureAction.EXPOSURE_CORRECTION));
}
@Test
public void testWhiteBalance() {
Camera.Parameters params = mock(Camera.Parameters.class);

@ -22,7 +22,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class CameraUtilsTest {
public class CameraUtilsTest extends BaseTest {
@Test
public void testHasCameras() {
@ -41,13 +41,29 @@ public class CameraUtilsTest {
int w = 100, h = 200, color = Color.WHITE;
Bitmap source = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
source.setPixel(0, 0, color);
ByteArrayOutputStream os = new ByteArrayOutputStream();
final ByteArrayOutputStream os = new ByteArrayOutputStream();
// Using lossy JPG we can't have strict comparison of values after compression.
source.compress(Bitmap.CompressFormat.PNG, 100, os);
// No orientation.
Bitmap other = CameraUtils.decodeBitmap(os.toByteArray());
final Task<Bitmap> decode = new Task<>();
decode.listen();
final CameraUtils.BitmapCallback callback = new CameraUtils.BitmapCallback() {
@Override
public void onBitmapReady(Bitmap bitmap) {
decode.end(bitmap);
}
};
// Run on ui because it involves handlers.
ui(new Runnable() {
@Override
public void run() {
CameraUtils.decodeBitmap(os.toByteArray(), callback);
}
});
Bitmap other = decode.await(800);
assertNotNull(other);
assertEquals(100, w);
assertEquals(200, h);
assertEquals(color, other.getPixel(0, 0));

@ -1,16 +1,11 @@
package com.otaliastudios.cameraview;
import android.app.Instrumentation;
import android.content.Context;
import android.hardware.Camera;
import android.location.Location;
import android.os.Looper;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.MediumTest;
import android.support.test.runner.AndroidJUnit4;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import org.junit.After;
@ -34,13 +29,12 @@ public class CameraViewTest extends BaseTest {
private Preview mockPreview;
private boolean hasPermissions;
@Before
public void setUp() {
ui(new Runnable() {
@Override
public void run() {
Context context = InstrumentationRegistry.getContext();
Context context = context();
cameraView = new CameraView(context) {
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks, Preview preview) {
@ -92,7 +86,9 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getWhiteBalance(), WhiteBalance.DEFAULT);
assertEquals(cameraView.getSessionType(), SessionType.DEFAULT);
assertEquals(cameraView.getHdr(), Hdr.DEFAULT);
assertEquals(cameraView.getAudio(), Audio.DEFAULT);
assertEquals(cameraView.getVideoQuality(), VideoQuality.DEFAULT);
assertEquals(cameraView.getLocation(), null);
// Self managed
assertEquals(cameraView.getExposureCorrection(), 0f, 0f);
@ -106,23 +102,6 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getGestureAction(Gesture.SCROLL_VERTICAL), GestureAction.DEFAULT_SCROLL_VERTICAL);
}
@Test
public void testStartWithPermissions() {
hasPermissions = true;
cameraView.start();
assertTrue(cameraView.isStarted());
cameraView.stop();
assertFalse(cameraView.isStarted());
}
@Test
public void testStartWithoutPermissions() {
hasPermissions = false;
cameraView.start();
assertFalse(cameraView.isStarted());
}
//endregion
//region testGesture
@ -176,6 +155,7 @@ public class CameraViewTest extends BaseTest {
@Test
public void testGestureAction_capture() {
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
@ -194,6 +174,7 @@ public class CameraViewTest extends BaseTest {
@Test
public void testGestureAction_focus() {
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
@ -218,6 +199,7 @@ public class CameraViewTest extends BaseTest {
@Test
public void testGestureAction_zoom() {
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@Override
@ -241,6 +223,7 @@ public class CameraViewTest extends BaseTest {
when(o.getExposureCorrectionMinValue()).thenReturn(-10f);
when(o.getExposureCorrectionMaxValue()).thenReturn(10f);
mockController.setMockCameraOptions(o);
mockController.mockStarted(true);
MotionEvent event = MotionEvent.obtain(0L, 0L, 0, 0f, 0f, 0);
ui(new Runnable() {
@ -267,6 +250,16 @@ public class CameraViewTest extends BaseTest {
mockController.setMockPreviewSize(size);
}
@Test
public void testMeasure_early() {
mockController.setMockPreviewSize(null);
cameraView.measure(
makeMeasureSpec(500, EXACTLY),
makeMeasureSpec(500, EXACTLY));
assertEquals(cameraView.getMeasuredWidth(), 500);
assertEquals(cameraView.getMeasuredHeight(), 500);
}
@Test
public void testMeasure_matchParentBoth() {
mockPreviewSize();
@ -400,7 +393,7 @@ public class CameraViewTest extends BaseTest {
source.setLongitude(-10d);
source.setAltitude(50d);
cameraView.setLocation(source);
Location other = mockController.mLocation;
Location other = cameraView.getLocation();
assertEquals(10d, other.getLatitude(), 0d);
assertEquals(-10d, other.getLongitude(), 0d);
assertEquals(50d, other.getAltitude(), 0d);
@ -410,8 +403,142 @@ public class CameraViewTest extends BaseTest {
//endregion
// TODO: test permissions
//region test autofocus
@Test(expected = IllegalArgumentException.class)
public void testStartAutoFocus_illegal() {
cameraView.startAutoFocus(-1, -1);
}
@Test(expected = IllegalArgumentException.class)
public void testStartAutoFocus_illegal2() {
cameraView.setLeft(0);
cameraView.setRight(100);
cameraView.setTop(0);
cameraView.setBottom(100);
cameraView.startAutoFocus(200, 200);
}
@Test
public void testStartAutoFocus() {
cameraView.setLeft(0);
cameraView.setRight(100);
cameraView.setTop(0);
cameraView.setBottom(100);
cameraView.startAutoFocus(50, 50);
assertTrue(mockController.mFocusStarted);
}
//endregion
// TODO: test CameraCallbacks
//region test setParameters
@Test
public void testSetCropOutput() {
cameraView.setCropOutput(true);
assertTrue(cameraView.getCropOutput());
cameraView.setCropOutput(false);
assertFalse(cameraView.getCropOutput());
}
@Test
public void testSetJpegQuality() {
cameraView.setJpegQuality(10);
assertEquals(cameraView.getJpegQuality(), 10);
cameraView.setJpegQuality(100);
assertEquals(cameraView.getJpegQuality(), 100);
}
@Test(expected = IllegalArgumentException.class)
public void testSetJpegQuality_illegal() {
cameraView.setJpegQuality(-10);
}
@Test
public void testSetFlash() {
cameraView.setFlash(Flash.TORCH);
assertEquals(cameraView.getFlash(), Flash.TORCH);
cameraView.setFlash(Flash.OFF);
assertEquals(cameraView.getFlash(), Flash.OFF);
}
@Test
public void testToggleFlash() {
cameraView.setFlash(Flash.OFF);
cameraView.toggleFlash();
assertEquals(cameraView.getFlash(), Flash.ON);
cameraView.toggleFlash();
assertEquals(cameraView.getFlash(), Flash.AUTO);
cameraView.toggleFlash();
assertEquals(cameraView.getFlash(), Flash.OFF);
}
@Test
public void testSetFacing() {
cameraView.setFacing(Facing.FRONT);
assertEquals(cameraView.getFacing(), Facing.FRONT);
cameraView.setFacing(Facing.BACK);
assertEquals(cameraView.getFacing(), Facing.BACK);
}
@Test
public void testToggleFacing() {
cameraView.setFacing(Facing.FRONT);
cameraView.toggleFacing();
assertEquals(cameraView.getFacing(), Facing.BACK);
cameraView.toggleFacing();
assertEquals(cameraView.getFacing(), Facing.FRONT);
}
@Test
public void testSetGrid() {
cameraView.setGrid(Grid.DRAW_3X3);
assertEquals(cameraView.getGrid(), Grid.DRAW_3X3);
cameraView.setGrid(Grid.OFF);
assertEquals(cameraView.getGrid(), Grid.OFF);
}
@Test
public void testSetWhiteBalance() {
cameraView.setWhiteBalance(WhiteBalance.CLOUDY);
assertEquals(cameraView.getWhiteBalance(), WhiteBalance.CLOUDY);
cameraView.setWhiteBalance(WhiteBalance.AUTO);
assertEquals(cameraView.getWhiteBalance(), WhiteBalance.AUTO);
}
@Test
public void testSessionType() {
cameraView.setSessionType(SessionType.VIDEO);
assertEquals(cameraView.getSessionType(), SessionType.VIDEO);
cameraView.setSessionType(SessionType.PICTURE);
assertEquals(cameraView.getSessionType(), SessionType.PICTURE);
}
@Test
public void testHdr() {
cameraView.setHdr(Hdr.ON);
assertEquals(cameraView.getHdr(), Hdr.ON);
cameraView.setHdr(Hdr.OFF);
assertEquals(cameraView.getHdr(), Hdr.OFF);
}
@Test
public void testAudio() {
cameraView.setAudio(Audio.ON);
assertEquals(cameraView.getAudio(), Audio.ON);
cameraView.setAudio(Audio.OFF);
assertEquals(cameraView.getAudio(), Audio.OFF);
}
@Test
public void testVideoQuality() {
cameraView.setVideoQuality(VideoQuality.MAX_1080P);
assertEquals(cameraView.getVideoQuality(), VideoQuality.MAX_1080P);
cameraView.setVideoQuality(VideoQuality.LOWEST);
assertEquals(cameraView.getVideoQuality(), VideoQuality.LOWEST);
}
//endregion
// TODO: test permissions
}

@ -30,7 +30,7 @@ import static org.mockito.Mockito.when;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class CropHelperTest {
public class CropHelperTest extends BaseTest {
@Test
public void testCropFromYuv() {
@ -38,46 +38,30 @@ public class CropHelperTest {
testCropFromYuv(1600, 1600, AspectRatio.of(9, 16));
}
private void testCropFromYuv(final int w, final int h, final AspectRatio target) {
final boolean wider = target.toFloat() > ((float) w / (float) h);
// Not sure how to test YuvImages...
YuvImage i = mock(YuvImage.class);
when(i.getWidth()).thenReturn(w);
when(i.getHeight()).thenReturn(h);
when(i.compressToJpeg(any(Rect.class), anyInt(), any(OutputStream.class))).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock iom) throws Throwable {
Object[] args = iom.getArguments();
Rect rect = (Rect) args[0];
// Assert.
AspectRatio ratio = AspectRatio.of(rect.width(), rect.height());
assertEquals(target, ratio);
if (wider) { // width must match.
assertEquals(rect.width(), w);
} else {
assertEquals(rect.height(), h);
}
return true;
}
});
CropHelper.cropToJpeg(i, target, 100);
}
@Test
public void testCropFromJpeg() {
testCropFromJpeg(1600, 1600, AspectRatio.of(16, 9));
testCropFromJpeg(1600, 1600, AspectRatio.of(9, 16));
}
private void testCropFromJpeg(int w, int h, AspectRatio target) {
private void testCropFromYuv(final int w, final int h, final AspectRatio target) {
final boolean wider = target.toFloat() > ((float) w / (float) h);
byte[] b = CropHelper.cropToJpeg(mockYuv(w, h), target, 100);
Bitmap result = BitmapFactory.decodeByteArray(b, 0, b.length);
Bitmap source = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
ByteArrayOutputStream os = new ByteArrayOutputStream();
source.compress(Bitmap.CompressFormat.JPEG, 100, os);
byte[] b = CropHelper.cropToJpeg(os.toByteArray(), target, 100);
// Assert.
AspectRatio ratio = AspectRatio.of(result.getWidth(), result.getHeight());
assertEquals(target, ratio);
if (wider) { // width must match.
assertEquals(result.getWidth(), w);
} else {
assertEquals(result.getHeight(), h);
}
}
private void testCropFromJpeg(int w, int h, AspectRatio target) {
final boolean wider = target.toFloat() > ((float) w / (float) h);
byte[] b = CropHelper.cropToJpeg(mockJpeg(w, h), target, 100);
Bitmap result = BitmapFactory.decodeByteArray(b, 0, b.length);
// Assert.

@ -17,7 +17,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class ExtraProperties1Test {
public class ExtraProperties1Test extends BaseTest {
@Test
public void testConstructor1() {

@ -8,6 +8,7 @@ import android.support.test.espresso.Root;
import android.support.test.espresso.ViewAssertion;
import android.support.test.espresso.ViewInteraction;
import android.support.test.espresso.assertion.ViewAssertions;
import android.support.test.espresso.matcher.RootMatchers;
import android.support.test.espresso.matcher.ViewMatchers;
import android.support.test.rule.ActivityTestRule;
import android.view.MotionEvent;
@ -57,6 +58,8 @@ public abstract class GestureLayoutTest<T extends GestureLayout> extends BaseTes
}
protected final ViewInteraction onLayout() {
return onView(Matchers.<View>is(layout)).inRoot(any(Root.class));
return onView(Matchers.<View>is(layout))
.inRoot(RootMatchers.withDecorView(
Matchers.is(rule.getActivity().getWindow().getDecorView())));
}
}

@ -2,8 +2,6 @@ package com.otaliastudios.cameraview;
import android.graphics.Canvas;
import android.graphics.drawable.ColorDrawable;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.MediumTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
@ -14,6 +12,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@MediumTest
@ -32,47 +31,44 @@ public class GridLinesLayoutTest extends BaseTest {
TestActivity a = rule.getActivity();
layout = new GridLinesLayout(a);
layout.setGridMode(Grid.OFF);
layout.drawTask.listen();
a.getContentView().addView(layout);
layout.vert = mock(ColorDrawable.class);
layout.horiz = mock(ColorDrawable.class);
}
});
// Wait for first draw.
layout.drawTask.await(1000);
}
private int setGridAndWait(Grid value) {
layout.drawTask.listen();
layout.setGridMode(value);
Integer result = layout.drawTask.await(1000);
assertNotNull(result);
return result;
}
@Test
public void testOff() {
layout.drawTask.listen();
layout.setGridMode(Grid.OFF);
layout.drawTask.await();
verify(layout.vert, never()).draw(any(Canvas.class));
verify(layout.horiz, never()).draw(any(Canvas.class));
int linesDrawn = setGridAndWait(Grid.OFF);
assertEquals(linesDrawn, 0);
}
@Test
public void test3x3() {
layout.drawTask.listen();
layout.setGridMode(Grid.DRAW_3X3);
layout.drawTask.await();
verify(layout.vert, times(2)).draw(any(Canvas.class));
verify(layout.horiz, times(2)).draw(any(Canvas.class));
int linesDrawn = setGridAndWait(Grid.DRAW_3X3);
assertEquals(linesDrawn, 2);
}
@Test
public void testPhi() {
layout.drawTask.listen();
layout.setGridMode(Grid.DRAW_PHI);
layout.drawTask.await();
verify(layout.vert, times(2)).draw(any(Canvas.class));
verify(layout.horiz, times(2)).draw(any(Canvas.class));
int linesDrawn = setGridAndWait(Grid.DRAW_PHI);
assertEquals(linesDrawn, 2);
}
@Test
public void test4x4() {
layout.drawTask.listen();
layout.setGridMode(Grid.DRAW_4X4);
layout.drawTask.await();
verify(layout.vert, times(3)).draw(any(Canvas.class));
verify(layout.horiz, times(3)).draw(any(Canvas.class));
int linesDrawn = setGridAndWait(Grid.DRAW_4X4);
assertEquals(linesDrawn, 3);
}
}

@ -0,0 +1,507 @@
package com.otaliastudios.cameraview;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.support.test.filters.MediumTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.view.ViewGroup;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.File;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
/**
* These tests work great on real devices, and are the only way to test actual CameraController
* implementation - we really need to open the camera device.
* Unfortunately they fail unreliably on emulated devices, due to some bug with the
* emulated camera controller. Waiting for it to be fixed.
*/
@RunWith(AndroidJUnit4.class)
@MediumTest
@Ignore
public class IntegrationTest extends BaseTest {
@Rule
public ActivityTestRule<TestActivity> rule = new ActivityTestRule<>(TestActivity.class);
private CameraView camera;
private Camera1 controller;
private CameraListener listener;
@BeforeClass
public static void grant() {
grantPermissions();
}
@Before
public void setUp() {
WorkerHandler.destroy();
ui(new Runnable() {
@Override
public void run() {
camera = new CameraView(rule.getActivity()) {
@Override
protected CameraController instantiateCameraController(CameraCallbacks callbacks, Preview preview) {
controller = new Camera1(callbacks, preview);
return controller;
}
};
listener = mock(CameraListener.class);
camera.addCameraListener(listener);
rule.getActivity().inflate(camera);
}
});
}
@After
public void tearDown() throws Exception {
camera.stopCapturingVideo();
camera.destroy();
WorkerHandler.destroy();
}
private CameraOptions waitForOpen(boolean expectSuccess) {
final Task<CameraOptions> open = new Task<>();
open.listen();
doEndTask(open, 0).when(listener).onCameraOpened(any(CameraOptions.class));
CameraOptions result = open.await(4000);
if (expectSuccess) {
assertNotNull("Can open", result);
} else {
assertNull("Should not open", result);
}
return result;
}
private Boolean waitForClose(boolean expectSuccess) {
final Task<Boolean> close = new Task<>();
close.listen();
doEndTask(close, true).when(listener).onCameraClosed();
Boolean result = close.await(4000);
if (expectSuccess) {
assertNotNull("Can close", result);
} else {
assertNull("Should not close", result);
}
return result;
}
private Boolean waitForVideo(boolean expectSuccess) {
final Task<Boolean> video = new Task<>();
video.listen();
doEndTask(video, true).when(listener).onVideoTaken(any(File.class));
Boolean result = video.await(2000);
if (expectSuccess) {
assertNotNull("Can take video", result);
} else {
assertNull("Should not take video", result);
}
return result;
}
private byte[] waitForPicture(boolean expectSuccess) {
final Task<byte[]> pic = new Task<>();
pic.listen();
doEndTask(pic, 0).when(listener).onPictureTaken(any(byte[].class));
byte[] result = pic.await(5000);
if (expectSuccess) {
assertNotNull("Can take picture", result);
} else {
assertNull("Should not take picture", result);
}
return result;
}
//region test open/close
//-@Test
public void testOpenClose() throws Exception {
// Starting and stopping are hard to get since they happen on another thread.
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
camera.start();
waitForOpen(true);
assertEquals(controller.getState(), CameraController.STATE_STARTED);
camera.stop();
waitForClose(true);
assertEquals(controller.getState(), CameraController.STATE_STOPPED);
}
//-@Test
public void testOpenTwice() {
camera.start();
waitForOpen(true);
camera.start();
waitForOpen(false);
}
//-@Test
public void testCloseTwice() {
camera.stop();
waitForClose(false);
}
@Test
// This works great on the device but crashes often on the emulator.
// There must be something wrong with the emulated camera...
// Like stopPreview() and release() are not really sync calls?
public void testConcurrentCalls() throws Exception {
final CountDownLatch latch = new CountDownLatch(4);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.start();
camera.stop();
camera.start();
camera.stop();
boolean did = latch.await(10, TimeUnit.SECONDS);
assertTrue("Handles concurrent calls to start & stop, " + latch.getCount(), did);
}
@Test
public void testStartInitializesOptions() {
assertNull(camera.getCameraOptions());
assertNull(camera.getExtraProperties());
camera.start();
waitForOpen(true);
assertNotNull(camera.getCameraOptions());
assertNotNull(camera.getExtraProperties());
}
//endregion
//region test Facing/SessionType
// Test things that should reset the camera.
@Test
public void testSetFacing() throws Exception {
camera.start();
CameraOptions o = waitForOpen(true);
int size = o.getSupportedFacing().size();
if (size > 1) {
// set facing should call stop and start again.
final CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.toggleFacing();
boolean did = latch.await(2, TimeUnit.SECONDS);
assertTrue("Handles setFacing while active", did);
}
}
@Test
public void testSetSessionType() throws Exception {
camera.setSessionType(SessionType.PICTURE);
camera.start();
waitForOpen(true);
// set session type should call stop and start again.
final CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onCameraOpened(any(CameraOptions.class));
doCountDown(latch).when(listener).onCameraClosed();
camera.setSessionType(SessionType.VIDEO);
boolean did = latch.await(2, TimeUnit.SECONDS);
assertTrue("Handles setSessionType while active", did);
assertEquals(camera.getSessionType(), SessionType.VIDEO);
}
//endregion
//region test Set Parameters
// When camera is open, parameters will be set only if supported.
@Test
public void testSetZoom() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isZoomSupported();
float oldValue = camera.getZoom();
float newValue = 0.65f;
camera.setZoom(newValue);
assertEquals(can ? newValue : oldValue, camera.getZoom(), 0f);
}
@Test
public void testSetExposureCorrection() {
camera.start();
CameraOptions options = waitForOpen(true);
boolean can = options.isExposureCorrectionSupported();
float oldValue = camera.getExposureCorrection();
float newValue = options.getExposureCorrectionMaxValue();
camera.setExposureCorrection(newValue);
assertEquals(can ? newValue : oldValue, camera.getExposureCorrection(), 0f);
}
@Test
public void testSetFlash() {
camera.start();
CameraOptions options = waitForOpen(true);
Flash[] values = Flash.values();
Flash oldValue = camera.getFlash();
for (Flash value : values) {
camera.setFlash(value);
if (options.supports(value)) {
assertEquals(camera.getFlash(), value);
oldValue = value;
} else {
assertEquals(camera.getFlash(), oldValue);
}
}
}
@Test
public void testSetWhiteBalance() {
camera.start();
CameraOptions options = waitForOpen(true);
WhiteBalance[] values = WhiteBalance.values();
WhiteBalance oldValue = camera.getWhiteBalance();
for (WhiteBalance value : values) {
camera.setWhiteBalance(value);
if (options.supports(value)) {
assertEquals(camera.getWhiteBalance(), value);
oldValue = value;
} else {
assertEquals(camera.getWhiteBalance(), oldValue);
}
}
}
@Test
public void testSetHdr() {
camera.start();
CameraOptions options = waitForOpen(true);
Hdr[] values = Hdr.values();
Hdr oldValue = camera.getHdr();
for (Hdr value : values) {
camera.setHdr(value);
if (options.supports(value)) {
assertEquals(camera.getHdr(), value);
oldValue = value;
} else {
assertEquals(camera.getHdr(), oldValue);
}
}
}
@Test
public void testSetAudio() {
// TODO: when permissions are managed, check that Audio.ON triggers the audio permission
camera.start();
waitForOpen(true);
Audio[] values = Audio.values();
for (Audio value : values) {
camera.setAudio(value);
assertEquals(camera.getAudio(), value);
}
}
@Test
public void testSetLocation() {
camera.start();
waitForOpen(true);
camera.setLocation(10d, 2d);
assertNotNull(camera.getLocation());
assertEquals(camera.getLocation().getLatitude(), 10d, 0d);
assertEquals(camera.getLocation().getLongitude(), 2d, 0d);
// This also ensures there are no crashes when attaching it to camera parameters.
}
//endregion
//region testSetVideoQuality
// This can be tricky because can trigger layout changes.
// TODO: @Test(expected = IllegalStateException.class)
// Can't run on Travis, MediaRecorder not supported.
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
public void testSetVideoQuality_whileRecording() {
camera.setSessionType(SessionType.VIDEO);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null);
camera.setVideoQuality(VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_whileInPictureSessionType() {
camera.setSessionType(SessionType.PICTURE);
camera.setVideoQuality(VideoQuality.HIGHEST);
camera.start();
waitForOpen(true);
camera.setVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_whileNotStarted() {
camera.setVideoQuality(VideoQuality.HIGHEST);
assertEquals(camera.getVideoQuality(), VideoQuality.HIGHEST);
camera.setVideoQuality(VideoQuality.LOWEST);
assertEquals(camera.getVideoQuality(), VideoQuality.LOWEST);
}
@Test
public void testSetVideoQuality_shouldRecompute() {
// If video quality changes bring to a new capture size,
// this might bring to a new aspect ratio,
// which might bring to a new preview size. No idea how to test.
assertTrue(true);
}
//endregion
//region test startVideo
// TODO: @Test(expected = IllegalStateException.class)
// Fails on Travis. Some emulators can't deal with MediaRecorder
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
public void testStartVideo_whileInPictureMode() {
camera.setSessionType(SessionType.PICTURE);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null);
}
// TODO: @Test
// Fails on Travis. Some emulators can't deal with MediaRecorder,
// Error while starting MediaRecorder. java.lang.RuntimeException: start failed.
// as documented. This works locally though.
public void testStartEndVideo() {
camera.setSessionType(SessionType.VIDEO);
camera.start();
waitForOpen(true);
camera.startCapturingVideo(null, 1000);
waitForVideo(true); // waits 2000
}
@Test
public void testEndVideo_withoutStarting() {
camera.setSessionType(SessionType.VIDEO);
camera.start();
waitForOpen(true);
camera.stopCapturingVideo();
waitForVideo(false);
}
//endregion
//region startAutoFocus
// TODO: won't test onStopAutoFocus because that is not guaranteed to be called
@Test
public void testStartAutoFocus() {
camera.start();
CameraOptions o = waitForOpen(true);
camera.startAutoFocus(1, 1);
if (o.isAutoFocusSupported()) {
verify(listener, times(1)).onFocusStart(new PointF(1, 1));
} else {
verify(listener, never()).onFocusStart(any(PointF.class));
}
}
//endregion
//region capture
@Test
public void testCapturePicture_beforeStarted() {
camera.capturePicture();
waitForPicture(false);
}
@Test
public void testCapturePicture_concurrentCalls() throws Exception {
// Second take should fail.
camera.start();
waitForOpen(true);
CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onPictureTaken(any(byte[].class));
camera.capturePicture();
camera.capturePicture();
boolean did = latch.await(4, TimeUnit.SECONDS);
assertFalse(did);
assertEquals(latch.getCount(), 1);
}
@Test
public void testCapturePicture_size() throws Exception {
camera.setCropOutput(false);
camera.start();
waitForOpen(true);
Size size = camera.getCaptureSize();
camera.capturePicture();
byte[] jpeg = waitForPicture(true);
Bitmap b = CameraUtils.decodeBitmap(jpeg);
// Result can actually have swapped dimensions
// Which one, depends on factors including device physical orientation
assertTrue(b.getWidth() == size.getHeight() || b.getWidth() == size.getWidth());
assertTrue(b.getHeight() == size.getHeight() || b.getHeight() == size.getWidth());
}
@Test
public void testCaptureSnapshot_beforeStarted() {
camera.captureSnapshot();
waitForPicture(false);
}
@Test
public void testCaptureSnapshot_concurrentCalls() throws Exception {
// Second take should fail.
camera.start();
waitForOpen(true);
CountDownLatch latch = new CountDownLatch(2);
doCountDown(latch).when(listener).onPictureTaken(any(byte[].class));
camera.captureSnapshot();
camera.captureSnapshot();
boolean did = latch.await(4, TimeUnit.SECONDS);
assertFalse(did);
assertEquals(latch.getCount(), 1);
}
@Test
public void testCaptureSnapshot_size() throws Exception {
camera.setCropOutput(false);
camera.start();
waitForOpen(true);
Size size = camera.getPreviewSize();
camera.captureSnapshot();
byte[] jpeg = waitForPicture(true);
Bitmap b = CameraUtils.decodeBitmap(jpeg);
// Result can actually have swapped dimensions
// Which one, depends on factors including device physical orientation
assertTrue(b.getWidth() == size.getHeight() || b.getWidth() == size.getWidth());
assertTrue(b.getHeight() == size.getHeight() || b.getHeight() == size.getWidth());
}
}

@ -13,7 +13,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class Mapper1Test {
public class Mapper1Test extends BaseTest {
private Mapper mapper = new Mapper.Mapper1();

@ -10,7 +10,6 @@ import java.io.File;
public class MockCameraController extends CameraController {
Location mLocation;
boolean mPictureCaptured;
boolean mFocusStarted;
boolean mZoomChanged;
@ -28,12 +27,16 @@ public class MockCameraController extends CameraController {
mPreviewSize = size;
}
void mockStarted(boolean started) {
mState = started ? STATE_STARTED : STATE_STOPPED;
}
@Override
void onStart() {
void onStart() throws Exception {
}
@Override
void onStop() {
void onStop() throws Exception {
}
@Override
@ -114,10 +117,7 @@ public class MockCameraController extends CameraController {
return false;
}
@Override
boolean isCameraOpened() {
return true;
}
@Override
boolean startAutoFocus(@Nullable Gesture gesture, PointF point) {

@ -13,6 +13,11 @@ public class MockPreview extends Preview<View, Void> {
super(context, parent, null);
}
public void setIsCropping(boolean crop) {
getView().setScaleX(crop ? 2 : 1);
getView().setScaleY(crop ? 2 : 1);
}
@NonNull
@Override
protected View onCreateView(Context context, ViewGroup parent) {

@ -64,9 +64,10 @@ public class OrientationHelperTest extends BaseTest {
@Test
public void testRotation() {
helper.enable(context());
reset(callbacks); // Reset counts.
// Sometimes (on some APIs) the helper will trigger an update to 0
// right after enabling. But that's fine for us, times(1) will be OK either way.
helper.enable(context());
helper.mListener.onOrientationChanged(OrientationEventListener.ORIENTATION_UNKNOWN);
assertEquals(helper.mLastOrientation, 0);
helper.mListener.onOrientationChanged(10);

@ -2,33 +2,13 @@ package com.otaliastudios.cameraview;
import android.content.Context;
import android.support.test.espresso.Espresso;
import android.support.test.espresso.UiController;
import android.support.test.espresso.ViewAction;
import android.support.test.espresso.action.CoordinatesProvider;
import android.support.test.espresso.action.GeneralLocation;
import android.support.test.espresso.action.GeneralSwipeAction;
import android.support.test.espresso.action.MotionEvents;
import android.support.test.espresso.action.PrecisionDescriber;
import android.support.test.espresso.action.Press;
import android.support.test.espresso.action.Swipe;
import android.support.test.espresso.action.ViewActions;
import android.support.test.espresso.assertion.ViewAssertions;
import android.support.test.espresso.matcher.ViewMatchers;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@ -69,7 +49,7 @@ public class PinchGestureLayoutTest extends GestureLayoutTest<PinchGestureLayout
touch.listen();
touch.start();
onLayout().perform(action);
Gesture found = touch.await(10000, TimeUnit.MILLISECONDS);
Gesture found = touch.await(10000);
assertNotNull(found);
// How will this move our parameter?

@ -13,8 +13,6 @@ import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@ -57,7 +55,7 @@ public abstract class PreviewTest extends BaseTest {
// Wait for surface to be available.
protected void ensureAvailable() {
assertNotNull(availability.await(2, TimeUnit.SECONDS));
assertNotNull(availability.await(2000));
}
// Trigger a destroy.
@ -68,7 +66,7 @@ public abstract class PreviewTest extends BaseTest {
rule.getActivity().getContentView().removeView(preview.getView());
}
});
waitUi();
idle();
}
@After

@ -3,17 +3,12 @@ package com.otaliastudios.cameraview;
import android.content.Context;
import android.support.test.espresso.ViewAction;
import android.support.test.espresso.assertion.ViewAssertions;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.action.ViewActions.swipeDown;
import static android.support.test.espresso.action.ViewActions.swipeLeft;
@ -50,7 +45,7 @@ public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayo
touch.listen();
touch.start();
onLayout().perform(swipeUp());
Gesture found = touch.await(500, TimeUnit.MILLISECONDS);
Gesture found = touch.await(500);
assertNull(found);
}
@ -58,7 +53,7 @@ public class ScrollGestureLayoutTest extends GestureLayoutTest<ScrollGestureLayo
touch.listen();
touch.start();
onLayout().perform(scroll);
Gesture found = touch.await(500, TimeUnit.MILLISECONDS);
Gesture found = touch.await(500);
assertEquals(found, expected);
// How will this move our parameter?

@ -14,10 +14,6 @@ import android.view.MotionEvent;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.TimeUnit;
import static android.support.test.espresso.Espresso.*;
import static android.support.test.espresso.matcher.ViewMatchers.*;
import static android.support.test.espresso.action.ViewActions.*;
import static org.junit.Assert.*;
@ -46,7 +42,7 @@ public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> {
Tap.SINGLE, GeneralLocation.CENTER, Press.FINGER,
InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
onLayout().perform(a);
Gesture found = touch.await(500, TimeUnit.MILLISECONDS);
Gesture found = touch.await(500);
assertEquals(found, Gesture.TAP);
Size size = rule.getActivity().getContentSize();
@ -60,7 +56,7 @@ public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> {
touch.listen();
touch.start();
onLayout().perform(click());
Gesture found = touch.await(500, TimeUnit.MILLISECONDS);
Gesture found = touch.await(500);
assertNull(found);
}
@ -72,7 +68,7 @@ public class TapGestureLayoutTest extends GestureLayoutTest<TapGestureLayout> {
Tap.LONG, GeneralLocation.CENTER, Press.FINGER,
InputDevice.SOURCE_UNKNOWN, MotionEvent.BUTTON_PRIMARY);
onLayout().perform(a);
Gesture found = touch.await(500, TimeUnit.MILLISECONDS);
Gesture found = touch.await(500);
assertEquals(found, Gesture.LONG_TAP);
Size size = rule.getActivity().getContentSize();
assertEquals(layout.getPoints()[0].x, (size.getWidth() / 2f), 1f);

@ -24,6 +24,7 @@ public class TestActivity extends Activity {
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
CameraLogger.setLogLevel(CameraLogger.LEVEL_VERBOSE);
wakeScreen();
// Match parent decor view.

@ -11,7 +11,7 @@ import static org.junit.Assert.*;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class WorkerHandlerTest {
public class WorkerHandlerTest extends BaseTest {
@Test
public void testCache() {
@ -21,4 +21,20 @@ public class WorkerHandlerTest {
assertTrue(w1 == w1a);
assertFalse(w1 == w2);
}
@Test
public void testStaticRun() {
final Task<Boolean> task = new Task<>();
task.listen();
Runnable action = new Runnable() {
@Override
public void run() {
task.end(true);
}
};
WorkerHandler.run(action);
Boolean result = task.await(500);
assertNotNull(result);
assertTrue(result);
}
}

@ -7,7 +7,7 @@ import android.util.SparseArray;
public class AspectRatio implements Comparable<AspectRatio>, Parcelable {
private final static SparseArray<SparseArray<AspectRatio>> sCache = new SparseArray<>(16);
final static SparseArray<SparseArray<AspectRatio>> sCache = new SparseArray<>(16);
private final int mX;
private final int mY;

@ -9,11 +9,9 @@ import android.location.Location;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.File;
@ -36,13 +34,11 @@ class Camera1 extends CameraController {
private int mSensorOffset;
private Location mLocation;
private final int mPostFocusResetDelay = 3000;
private Runnable mPostFocusResetRunnable = new Runnable() {
@Override
public void run() {
if (!isCameraOpened()) return;
if (!isCameraAvailable()) return;
mCamera.cancelAutoFocus();
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
@ -70,8 +66,21 @@ class Camera1 extends CameraController {
*/
@Override
public void onSurfaceAvailable() {
LOG.i("onSurfaceAvailable, size is", mPreview.getSurfaceSize());
if (shouldSetup()) setup();
LOG.i("onSurfaceAvailable:", "Size is", mPreview.getSurfaceSize());
if (!shouldSetup()) return;
mHandler.post(new Runnable() {
@Override
public void run() {
if (!shouldSetup()) return;
LOG.i("onSurfaceAvailable:", "Inside handler. About to bind.");
try {
setup();
} catch (Exception e) {
LOG.w("onSurfaceAvailable:", "Exception while binding camera to preview.", e);
throw new RuntimeException(e);
}
}
});
}
/**
@ -85,10 +94,13 @@ class Camera1 extends CameraController {
// Compute a new camera preview size.
Size newSize = computePreviewSize();
if (!newSize.equals(mPreviewSize)) {
LOG.i("onSurfaceChanged:", "Computed a new preview size. Dispatching.");
mPreviewSize = newSize;
mCameraCallbacks.onCameraPreviewSizeChanged();
synchronized (mLock) {
LOG.i("onSurfaceChanged:", "Stopping preview.");
mCamera.stopPreview();
LOG.i("onSurfaceChanged:", "Stopped preview.");
Camera.Parameters params = mCamera.getParameters();
params.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mCamera.setParameters(params);
@ -98,33 +110,33 @@ class Camera1 extends CameraController {
invertPreviewSizes ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
invertPreviewSizes ? mPreviewSize.getWidth() : mPreviewSize.getHeight()
);
LOG.i("onSurfaceChanged:", "Restarting preview.");
mCamera.startPreview();
LOG.i("onSurfaceChanged:", "Restarted preview.");
}
}
}
private boolean shouldSetup() {
return isCameraOpened() && mPreview.isReady() && !mIsSetup;
return isCameraAvailable() && mPreview.isReady() && !mIsSetup;
}
// The act of binding an "open" camera to a "ready" preview.
// These can happen at different times but we want to end up here.
private void setup() {
try {
Object output = mPreview.getOutput();
if (mPreview.getOutputClass() == SurfaceHolder.class) {
mCamera.setPreviewDisplay((SurfaceHolder) output);
} else {
mCamera.setPreviewTexture((SurfaceTexture) output);
}
} catch (IOException e) {
LOG.e("Error while trying to setup Camera1.", e);
throw new RuntimeException(e);
@WorkerThread
private void setup() throws Exception {
LOG.i("setup:", "Started");
Object output = mPreview.getOutput();
if (mPreview.getOutputClass() == SurfaceHolder.class) {
mCamera.setPreviewDisplay((SurfaceHolder) output);
} else {
mCamera.setPreviewTexture((SurfaceTexture) output);
}
boolean invertPreviewSizes = shouldFlipSizes(); // mDisplayOffset % 180 != 0;
boolean invertPreviewSizes = shouldFlipSizes();
mCaptureSize = computeCaptureSize();
mPreviewSize = computePreviewSize();
LOG.i("setup:", "Dispatching onCameraPreviewSizeChanged.");
mCameraCallbacks.onCameraPreviewSizeChanged();
mPreview.setDesiredSize(
invertPreviewSizes ? mPreviewSize.getHeight() : mPreviewSize.getWidth(),
@ -136,20 +148,26 @@ class Camera1 extends CameraController {
params.setPictureSize(mCaptureSize.getWidth(), mCaptureSize.getHeight()); // <- allowed
mCamera.setParameters(params);
}
LOG.i("setup:", "Starting preview with startPreview().");
mCamera.startPreview();
LOG.i("setup:", "Started preview with startPreview().");
mIsSetup = true;
}
@WorkerThread
@Override
void onStart() {
if (isCameraOpened()) onStop();
void onStart() throws Exception {
if (isCameraAvailable()) {
LOG.w("onStart:", "Camera not available. Should not happen.");
onStop(); // Should not happen.
}
if (collectCameraId()) {
mCamera = Camera.open(mCameraId);
// Set parameters that might have been set before the camera was opened.
synchronized (mLock) {
LOG.i("onStart:", "Applying default parameters.");
Camera.Parameters params = mCamera.getParameters();
mExtraProperties = new ExtraProperties(params);
mOptions = new CameraOptions(params);
@ -164,19 +182,37 @@ class Camera1 extends CameraController {
// Try starting preview.
mCamera.setDisplayOrientation(computeSensorToDisplayOffset()); // <- not allowed during preview
if (shouldSetup()) setup();
mCameraCallbacks.dispatchOnCameraOpened(mOptions);
LOG.i("onStart:", "Ended");
}
}
@WorkerThread
@Override
void onStop() {
void onStop() throws Exception {
Exception error = null;
LOG.i("onStop:", "About to clean up.");
mHandler.get().removeCallbacks(mPostFocusResetRunnable);
if (isCameraOpened()) {
if (isCameraAvailable()) {
LOG.i("onStop:", "Clean up.", "Ending video?", mIsCapturingVideo);
if (mIsCapturingVideo) endVideo();
mCamera.stopPreview();
mCamera.release();
mCameraCallbacks.dispatchOnCameraClosed();
try {
LOG.i("onStop:", "Clean up.", "Stopping preview.");
mCamera.stopPreview();
LOG.i("onStop:", "Clean up.", "Stopped preview.");
} catch (Exception e) {
LOG.w("onStop:", "Clean up.", "Exception while stopping preview.");
error = e;
}
try {
LOG.i("onStop:", "Clean up.", "Releasing camera.");
mCamera.release();
LOG.i("onStop:", "Clean up.", "Released camera.");
} catch (Exception e) {
LOG.w("onStop:", "Clean up.", "Exception while releasing camera.");
error = e;
}
}
mExtraProperties = null;
mOptions = null;
@ -184,6 +220,8 @@ class Camera1 extends CameraController {
mPreviewSize = null;
mCaptureSize = null;
mIsSetup = false;
if (error != null) throw error;
}
private boolean collectCameraId() {
@ -205,8 +243,8 @@ class Camera1 extends CameraController {
void setSessionType(SessionType sessionType) {
if (sessionType != mSessionType) {
mSessionType = sessionType;
if (isCameraOpened()) {
start();
if (isCameraAvailable()) {
restart();
}
}
}
@ -215,7 +253,7 @@ class Camera1 extends CameraController {
void setLocation(Location location) {
Location oldLocation = mLocation;
mLocation = location;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeLocation(params, oldLocation)) mCamera.setParameters(params);
@ -243,8 +281,8 @@ class Camera1 extends CameraController {
void setFacing(Facing facing) {
if (facing != mFacing) {
mFacing = facing;
if (collectCameraId() && isCameraOpened()) {
start();
if (collectCameraId() && isCameraAvailable()) {
restart();
}
}
}
@ -253,7 +291,7 @@ class Camera1 extends CameraController {
void setWhiteBalance(WhiteBalance whiteBalance) {
WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeWhiteBalance(params, old)) mCamera.setParameters(params);
@ -274,7 +312,7 @@ class Camera1 extends CameraController {
void setHdr(Hdr hdr) {
Hdr old = mHdr;
mHdr = hdr;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeHdr(params, old)) mCamera.setParameters(params);
@ -306,7 +344,7 @@ class Camera1 extends CameraController {
void setFlash(Flash flash) {
Flash old = mFlash;
mFlash = flash;
if (isCameraOpened()) {
if (isCameraAvailable()) {
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
if (mergeFlash(params, old)) mCamera.setParameters(params);
@ -355,11 +393,13 @@ class Camera1 extends CameraController {
@Override
void setVideoQuality(VideoQuality videoQuality) {
if (mIsCapturingVideo) {
// TODO: actually any call to getParameters() could fail while recording a video.
// See. https://stackoverflow.com/questions/14941625/correct-handling-of-exception-getparameters-failed-empty-parameters
throw new IllegalStateException("Can't change video quality while recording a video.");
}
mVideoQuality = videoQuality;
if (isCameraOpened() && mSessionType == SessionType.VIDEO) {
if (isCameraAvailable() && mSessionType == SessionType.VIDEO) {
// Change capture size to a size that fits the video aspect ratio.
Size oldSize = mCaptureSize;
mCaptureSize = computeCaptureSize();
@ -373,15 +413,15 @@ class Camera1 extends CameraController {
}
onSurfaceChanged();
}
LOG.i("captureSize: "+mCaptureSize);
LOG.i("previewSize: "+mPreviewSize);
LOG.i("setVideoQuality:", "captureSize:", mCaptureSize);
LOG.i("setVideoQuality:", "previewSize:", mPreviewSize);
}
}
@Override
boolean capturePicture() {
if (mIsCapturingImage) return false;
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (mSessionType == SessionType.VIDEO && mIsCapturingVideo) {
if (!mOptions.isVideoSnapshotSupported()) return false;
}
@ -421,7 +461,7 @@ class Camera1 extends CameraController {
@Override
boolean captureSnapshot() {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (mIsCapturingImage) return false;
// This won't work while capturing a video.
// Switch to capturePicture.
@ -446,7 +486,7 @@ class Camera1 extends CameraController {
final int postWidth = flip ? preHeight : preWidth;
final int postHeight = flip ? preWidth : preHeight;
final int format = params.getPreviewFormat();
new Thread(new Runnable() {
WorkerHandler.run(new Runnable() {
@Override
public void run() {
@ -456,7 +496,7 @@ class Camera1 extends CameraController {
mCameraCallbacks.processSnapshot(yuv, consistentWithView, exifFlip);
mIsCapturingImage = false;
}
}).start();
});
}
});
return true;
@ -465,13 +505,12 @@ class Camera1 extends CameraController {
@Override
boolean shouldFlipSizes() {
int offset = computeSensorToDisplayOffset();
LOG.i("shouldFlip:", "mDeviceOrientation=", mDeviceOrientation, "mSensorOffset=", mSensorOffset);
LOG.i("shouldFlip:", "sensorToDisplay=", offset);
LOG.i("shouldFlipSizes:", "mDeviceOrientation=", mDeviceOrientation, "mSensorOffset=", mSensorOffset);
LOG.i("shouldFlipSizes:", "sensorToDisplay=", offset);
return offset % 180 != 0;
}
@Override
boolean isCameraOpened() {
private boolean isCameraAvailable() {
return mCamera != null;
}
@ -527,7 +566,7 @@ class Camera1 extends CameraController {
// Choose the max size.
List<Size> captureSizes = sizesFromList(params.getSupportedPictureSizes());
Size maxSize = Collections.max(captureSizes);
LOG.i("computeCaptureSize:", "computed", maxSize);
LOG.i("size:", "computeCaptureSize:", "computed", maxSize);
return Collections.max(captureSizes);
} else {
// Choose according to developer choice in setVideoQuality.
@ -536,7 +575,7 @@ class Camera1 extends CameraController {
List<Size> captureSizes = sizesFromList(params.getSupportedPictureSizes());
CamcorderProfile profile = getCamcorderProfile(mVideoQuality);
AspectRatio targetRatio = AspectRatio.of(profile.videoFrameWidth, profile.videoFrameHeight);
LOG.i("computeCaptureSize:", "videoQuality:", mVideoQuality, "targetRatio:", targetRatio);
LOG.i("size:", "computeCaptureSize:", "videoQuality:", mVideoQuality, "targetRatio:", targetRatio);
return matchSize(captureSizes, targetRatio, new Size(0, 0), true);
}
}
@ -546,7 +585,7 @@ class Camera1 extends CameraController {
List<Size> previewSizes = sizesFromList(params.getSupportedPreviewSizes());
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
Size biggerThan = mPreview.getSurfaceSize();
LOG.i("computePreviewSize:", "targetRatio:", targetRatio, "surface size:", biggerThan);
LOG.i("size:", "computePreviewSize:", "targetRatio:", targetRatio, "surface size:", biggerThan);
return matchSize(previewSizes, targetRatio, biggerThan, false);
}
@ -557,24 +596,23 @@ class Camera1 extends CameraController {
@Override
boolean startVideo(@NonNull File videoFile) {
mVideoFile = videoFile;
if (mIsCapturingVideo) return false;
if (!isCameraOpened()) return false;
Camera.Parameters params = mCamera.getParameters();
params.setVideoStabilization(false);
if (!isCameraAvailable()) return false;
if (mSessionType == SessionType.VIDEO) {
mVideoFile = videoFile;
mIsCapturingVideo = true;
initMediaRecorder();
try {
mMediaRecorder.prepare();
mMediaRecorder.start();
return true;
} catch (Exception e) {
e.printStackTrace();
LOG.e("Error while starting MediaRecorder. Swallowing.", e);
mVideoFile = null;
mCamera.lock();
endVideo();
return false;
}
mMediaRecorder.start();
return true;
} else {
throw new IllegalStateException("Can't record video while session type is picture");
}
@ -584,9 +622,17 @@ class Camera1 extends CameraController {
boolean endVideo() {
if (mIsCapturingVideo) {
mIsCapturingVideo = false;
mMediaRecorder.stop();
mMediaRecorder.release();
mMediaRecorder = null;
if (mMediaRecorder != null) {
try {
mMediaRecorder.stop();
mMediaRecorder.release();
} catch (Exception e) {
// This can happen if endVideo() is called right after startVideo().
// We don't care.
LOG.w("Error while closing media recorder. Swallowing", e);
}
mMediaRecorder = null;
}
if (mVideoFile != null) {
mCameraCallbacks.dispatchOnVideoTaken(mVideoFile);
mVideoFile = null;
@ -678,7 +724,7 @@ class Camera1 extends CameraController {
@Override
boolean setZoom(float zoom) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isZoomSupported()) return false;
synchronized (mLock) {
Camera.Parameters params = mCamera.getParameters();
@ -692,7 +738,7 @@ class Camera1 extends CameraController {
@Override
boolean setExposureCorrection(float EVvalue) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isExposureCorrectionSupported()) return false;
float max = mOptions.getExposureCorrectionMaxValue();
float min = mOptions.getExposureCorrectionMinValue();
@ -712,7 +758,7 @@ class Camera1 extends CameraController {
@Override
boolean startAutoFocus(@Nullable final Gesture gesture, PointF point) {
if (!isCameraOpened()) return false;
if (!isCameraAvailable()) return false;
if (!mOptions.isAutoFocusSupported()) return false;
final PointF p = new PointF(point.x, point.y); // copy.
List<Camera.Area> meteringAreas2 = computeMeteringAreas(p.x, p.y);
@ -756,8 +802,8 @@ class Camera1 extends CameraController {
double theta = ((double) displayToSensor) * Math.PI / 180;
double sensorClickX = viewClickX * Math.cos(theta) - viewClickY * Math.sin(theta);
double sensorClickY = viewClickX * Math.sin(theta) + viewClickY * Math.cos(theta);
LOG.i("viewClickX:", viewClickX, "viewClickY:", viewClickY);
LOG.i("sensorClickX:", sensorClickX, "sensorClickY:", sensorClickY);
LOG.i("focus:", "viewClickX:", viewClickX, "viewClickY:", viewClickY);
LOG.i("focus:", "sensorClickX:", sensorClickX, "sensorClickY:", sensorClickY);
// Compute the rect bounds.
Rect rect1 = computeMeteringArea(sensorClickX, sensorClickY, 150d);
@ -778,7 +824,7 @@ class Camera1 extends CameraController {
int bottom = (int) Math.min(centerY + delta, 1000);
int left = (int) Math.max(centerX - delta, -1000);
int right = (int) Math.min(centerX + delta, 1000);
LOG.i("metering area:", "top:", top, "left:", left, "bottom:", bottom, "right:", right);
LOG.i("focus:", "computeMeteringArea:", "top:", top, "left:", left, "bottom:", bottom, "right:", right);
return new Rect(left, top, right, bottom);
}
@ -797,7 +843,7 @@ class Camera1 extends CameraController {
for (Camera.Size size : sizes) {
result.add(new Size(size.width, size.height));
}
LOG.i("sizesFromList:", result);
LOG.i("size:", "sizesFromList:", result);
return result;
}
@ -827,8 +873,8 @@ class Camera1 extends CameraController {
}
}
LOG.i("matchSize:", "found consistent:", consistent.size());
LOG.i("matchSize:", "found big enough and consistent:", bigEnoughAndConsistent.size());
LOG.i("size:", "matchSize:", "found consistent:", consistent.size());
LOG.i("size:", "matchSize:", "found big enough and consistent:", bigEnoughAndConsistent.size());
Size result;
if (biggestPossible) {
if (bigEnoughAndConsistent.size() > 0) {
@ -847,7 +893,7 @@ class Camera1 extends CameraController {
result = Collections.max(sizes);
}
}
LOG.i("matchSize:", "returning result", result);
LOG.i("size", "matchSize:", "returning result", result);
return result;
}

@ -13,36 +13,22 @@ import android.location.Location;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import android.util.Log;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TreeSet;
@TargetApi(21)
class Camera2 extends CameraController {
private CameraDevice mCamera;
private CameraCharacteristics mCameraCharacteristics;
private CameraManager mCameraManager;
private String mCameraId;
private Mapper mMapper = new Mapper.Mapper2();
private final HashMap<String, ExtraProperties> mExtraPropertiesMap = new HashMap<>();
@Override
boolean setExposureCorrection(float EVvalue) {
return false;
public Camera2(CameraView.CameraCallbacks callback, Preview preview) {
super(callback, preview);
}
@Override
boolean setZoom(float zoom) {
return false;
public void onSurfaceAvailable() {
}
@Override
@ -51,98 +37,42 @@ class Camera2 extends CameraController {
}
@Override
public void onSurfaceAvailable() {
void onStart() throws Exception {
}
Camera2(CameraView.CameraCallbacks callback, Preview preview, Context context) {
super(callback, preview);
mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
// Get all view angles
try {
for (final String cameraId : mCameraManager.getCameraIdList()) {
CameraCharacteristics characteristics =
mCameraManager.getCameraCharacteristics(cameraId);
@SuppressWarnings("ConstantConditions")
int orientation = characteristics.get(CameraCharacteristics.LENS_FACING);
if (orientation == CameraCharacteristics.LENS_FACING_BACK) {
ExtraProperties props = new ExtraProperties(characteristics);
mExtraPropertiesMap.put(cameraId, props);
}
}
} catch (CameraAccessException e) {
throw new RuntimeException("Failed to get camera view angles", e);
}
}
// CameraImpl:
@WorkerThread
@Override
void onStart() {
void onStop() throws Exception {
}
@WorkerThread
@Override
void onStop() {
void setSessionType(SessionType sessionType) {
}
@Override
void onDisplayOffset(int displayOrientation) {
void setFacing(Facing facing) {
}
@Override
void onDeviceOrientation(int deviceOrientation) {
boolean setZoom(float zoom) {
return false;
}
@Override
void setFacing(Facing facing) {
int internalFacing = mMapper.map(facing);
final String[] ids;
try {
ids = mCameraManager.getCameraIdList();
} catch (CameraAccessException e) {
return;
}
if (ids.length == 0) {
throw new RuntimeException("No camera available.");
}
//
// for (String id : ids) {
// CameraCharacteristics cameraCharacteristics = mCameraManager.getCameraCharacteristics(id);
// Integer level = cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
// if (level == null || level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
// continue;
// }
// Integer internal = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
// if (internal == null) {
// throw new NullPointerException("Unexpected state: LENS_FACING null");
// }
// if (internal == internalFacing) {
// mCameraId = id;
// mCameraCharacteristics = cameraCharacteristics;
// return true;
// }
// }
if (mFacing == facing && isCameraOpened()) {
stop();
start();
}
boolean setExposureCorrection(float EVvalue) {
return false;
}
@Override
void setFlash(Flash flash) {
}
@Override
void setSessionType(SessionType sessionType) {
void setWhiteBalance(WhiteBalance whiteBalance) {
}
@ -161,11 +91,6 @@ class Camera2 extends CameraController {
}
@Override
void setWhiteBalance(WhiteBalance whiteBalance) {
}
@Override
void setVideoQuality(VideoQuality videoQuality) {
@ -173,16 +98,16 @@ class Camera2 extends CameraController {
@Override
boolean capturePicture() {
return true;
return false;
}
@Override
boolean captureSnapshot() {
return true;
return false;
}
@Override
boolean startVideo(@NonNull File videoFile) {
boolean startVideo(@NonNull File file) {
return false;
}
@ -191,59 +116,13 @@ class Camera2 extends CameraController {
return false;
}
@Override
boolean shouldFlipSizes() {
return false;
}
@Override
boolean isCameraOpened() {
return mCamera != null;
}
@Override
boolean startAutoFocus(Gesture gesture, PointF point) {
return true;
}
// Internal
private List<Size> getAvailableCaptureResolutions() {
List<Size> output = new ArrayList<>();
if (mCameraCharacteristics != null) {
StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
}
for (android.util.Size size : map.getOutputSizes(ImageFormat.JPEG)) {
output.add(new Size(size.getWidth(), size.getHeight()));
}
}
return output;
}
private List<Size> getAvailablePreviewResolutions() {
List<Size> output = new ArrayList<>();
if (mCameraCharacteristics != null) {
StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
}
for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
output.add(new Size(size.getWidth(), size.getHeight()));
}
}
return output;
boolean startAutoFocus(@Nullable Gesture gesture, PointF point) {
return false;
}
}

@ -2,6 +2,7 @@ package com.otaliastudios.cameraview;
import android.graphics.PointF;
import android.location.Location;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
@ -10,6 +11,14 @@ import java.io.File;
abstract class CameraController implements Preview.SurfaceCallback {
private static final String TAG = CameraController.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
static final int STATE_STOPPING = -1; // Camera is about to be stopped.
static final int STATE_STOPPED = 0; // Camera is stopped.
static final int STATE_STARTING = 1; // Camera is about to start.
static final int STATE_STARTED = 2; // Camera is available and we can set parameters.
protected final CameraView.CameraCallbacks mCameraCallbacks;
protected final Preview mPreview;
@ -19,6 +28,7 @@ abstract class CameraController implements Preview.SurfaceCallback {
protected VideoQuality mVideoQuality;
protected SessionType mSessionType;
protected Hdr mHdr;
protected Location mLocation;
protected Audio mAudio;
protected Size mCaptureSize;
@ -29,6 +39,7 @@ abstract class CameraController implements Preview.SurfaceCallback {
protected int mDisplayOffset;
protected int mDeviceOrientation;
protected int mState = STATE_STOPPED;
protected WorkerHandler mHandler;
@ -37,37 +48,148 @@ abstract class CameraController implements Preview.SurfaceCallback {
mPreview = preview;
mPreview.setSurfaceCallback(this);
mHandler = WorkerHandler.get("CameraViewController");
mHandler.getThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread thread, Throwable throwable) {
// Something went wrong. Thread is terminated (about to?).
// Move to other thread and stop resources.
LOG.w("Interrupting thread, due to exception.", throwable);
thread.interrupt();
LOG.w("Interrupted thread. Posting a stopImmediately.", ss());
mHandler = WorkerHandler.get("CameraViewController");
mHandler.post(new Runnable() {
@Override
public void run() {
stopImmediately();
}
});
}
});
}
//region Start&Stop
private String ss() {
switch (mState) {
case STATE_STOPPING: return "STATE_STOPPING";
case STATE_STOPPED: return "STATE_STOPPED";
case STATE_STARTING: return "STATE_STARTING";
case STATE_STARTED: return "STATE_STARTED";
}
return "null";
}
// Starts the preview asynchronously.
final void start() {
LOG.i("Start:", "posting runnable. State:", ss());
mHandler.post(new Runnable() {
@Override
public void run() {
onStart();
try {
LOG.i("Start:", "executing. State:", ss());
if (mState >= STATE_STARTING) return;
mState = STATE_STARTING;
LOG.i("Start:", "about to call onStart()", ss());
onStart();
LOG.i("Start:", "returned from onStart().", "Dispatching.", ss());
mState = STATE_STARTED;
mCameraCallbacks.dispatchOnCameraOpened(mOptions);
} catch (Exception e) {
LOG.e("Error while starting the camera engine.", e);
throw new RuntimeException(e);
}
}
});
}
// Stops the preview asynchronously.
final void stop() {
LOG.i("Stop:", "posting runnable. State:", ss());
mHandler.post(new Runnable() {
@Override
public void run() {
onStop();
try {
LOG.i("Stop:", "executing. State:", ss());
if (mState <= STATE_STOPPED) return;
mState = STATE_STOPPING;
LOG.i("Stop:", "about to call onStop()");
onStop();
LOG.i("Stop:", "returned from onStop().", "Dispatching.");
mState = STATE_STOPPED;
mCameraCallbacks.dispatchOnCameraClosed();
} catch (Exception e) {
LOG.e("Error while stopping the camera engine.", e);
throw new RuntimeException(e);
}
}
});
}
// Stops the preview synchronously, ensuring no exceptions are thrown.
void stopImmediately() {
try {
// Don't check, try stop again.
LOG.i("Stop immediately. State was:", ss());
mState = STATE_STOPPING;
onStop();
mState = STATE_STOPPED;
LOG.i("Stop immediately. Stopped. State is:", ss());
} catch (Exception e) {
// Do nothing.
LOG.i("Stop immediately. Exception while stopping.", e);
mState = STATE_STOPPED;
}
}
// Forces a restart.
protected final void restart() {
LOG.i("Restart:", "posting runnable");
mHandler.post(new Runnable() {
@Override
public void run() {
try {
LOG.i("Restart:", "executing. Needs stopping:", mState > STATE_STOPPED, ss());
// Don't stop if stopped.
if (mState > STATE_STOPPED) {
mState = STATE_STOPPING;
onStop();
mState = STATE_STOPPED;
LOG.i("Restart:", "stopped. Dispatching.", ss());
mCameraCallbacks.dispatchOnCameraClosed();
}
LOG.i("Restart: about to start. State:", ss());
mState = STATE_STARTING;
onStart();
mState = STATE_STARTED;
LOG.i("Restart: returned from start. Dispatching. State:", ss());
mCameraCallbacks.dispatchOnCameraOpened(mOptions);
} catch (Exception e) {
LOG.e("Error while restarting the camera engine.", e);
throw new RuntimeException(e);
}
}
});
}
// Starts the preview.
// At the end of this method camera must be available, e.g. for setting parameters.
@WorkerThread
abstract void onStart();
abstract void onStart() throws Exception;
// Stops the preview.
@WorkerThread
abstract void onStop();
abstract void onStop() throws Exception;
// Returns current state.
final int getState() {
return mState;
}
//endregion
@ -86,25 +208,36 @@ abstract class CameraController implements Preview.SurfaceCallback {
//region Abstract setParameters
// Should restart the session if active.
abstract void setSessionType(SessionType sessionType);
// Should restart the session if active.
abstract void setFacing(Facing facing);
// If opened and supported, apply and return true.
abstract boolean setZoom(float zoom);
// If opened and supported, apply and return true.
abstract boolean setExposureCorrection(float EVvalue);
abstract void setFacing(Facing facing);
// If closed, keep. If opened, check supported and apply.
abstract void setFlash(Flash flash);
// If closed, keep. If opened, check supported and apply.
abstract void setWhiteBalance(WhiteBalance whiteBalance);
abstract void setVideoQuality(VideoQuality videoQuality);
abstract void setSessionType(SessionType sessionType);
// If closed, keep. If opened, check supported and apply.
abstract void setHdr(Hdr hdr);
// If closed, keep. If opened, check supported and apply.
abstract void setLocation(Location location);
// Just set.
abstract void setAudio(Audio audio);
abstract void setLocation(Location location);
// Throw if capturing. If in video session, recompute capture size, and, if needed, preview size.
abstract void setVideoQuality(VideoQuality videoQuality);
//endregion
@ -118,11 +251,8 @@ abstract class CameraController implements Preview.SurfaceCallback {
abstract boolean endVideo();
abstract boolean shouldFlipSizes(); // Wheter the Sizes should be flipped to match the view orientation.
abstract boolean isCameraOpened();
abstract boolean startAutoFocus(@Nullable Gesture gesture, PointF point);
//endregion
@ -163,6 +293,10 @@ abstract class CameraController implements Preview.SurfaceCallback {
return mHdr;
}
final Location getLocation() {
return mLocation;
}
final Audio getAudio() {
return mAudio;
}

@ -16,16 +16,12 @@ import android.location.Location;
import android.media.MediaActionSound;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.ByteArrayOutputStream;
@ -59,17 +55,18 @@ public class CameraView extends FrameLayout {
private HashMap<Gesture, GestureAction> mGestureMap = new HashMap<>(4);
// Components
private CameraCallbacks mCameraCallbacks;
/* for tests */ CameraCallbacks mCameraCallbacks;
private OrientationHelper mOrientationHelper;
private CameraController mCameraController;
private Preview mPreviewImpl;
private ArrayList<CameraListener> mListeners = new ArrayList<>(2);
// Views
GridLinesLayout mGridLinesLayout;
PinchGestureLayout mPinchGestureLayout;
TapGestureLayout mTapGestureLayout;
ScrollGestureLayout mScrollGestureLayout;
private boolean mIsStarted;
private boolean mKeepScreenOn;
// Threading
@ -114,7 +111,7 @@ public class CameraView extends FrameLayout {
a.recycle();
// Components
mCameraCallbacks = new CameraCallbacks();
mCameraCallbacks = new Callbacks();
mPreviewImpl = instantiatePreview(context, this);
mCameraController = instantiateCameraController(mCameraCallbacks, mPreviewImpl);
mUiHandler = new Handler(Looper.getMainLooper());
@ -130,8 +127,6 @@ public class CameraView extends FrameLayout {
addView(mTapGestureLayout);
addView(mScrollGestureLayout);
mIsStarted = false;
// Apply self managed
setCropOutput(cropOutput);
setJpegQuality(jpegQuality);
@ -410,7 +405,7 @@ public class CameraView extends FrameLayout {
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mCameraController.isCameraOpened()) return true;
if (!isStarted()) return true;
// Pass to our own GestureLayouts
CameraOptions options = mCameraController.getCameraOptions(); // Non null
@ -480,7 +475,11 @@ public class CameraView extends FrameLayout {
* @return whether the camera has started
*/
public boolean isStarted() {
return mIsStarted;
return mCameraController.getState() >= CameraController.STATE_STARTED;
}
private boolean isStopped() {
return mCameraController.getState() == CameraController.STATE_STOPPED;
}
@ -489,13 +488,9 @@ public class CameraView extends FrameLayout {
* This should be called onResume(), or when you are ready with permissions.
*/
public void start() {
if (mIsStarted || !isEnabled()) {
// Already started, do nothing.
return;
}
if (!isEnabled()) return;
if (checkPermissions(getSessionType(), getAudio())) {
mIsStarted = true;
// Update display orientation for current CameraController
mOrientationHelper.enable(getContext());
mCameraController.start();
@ -559,17 +554,13 @@ public class CameraView extends FrameLayout {
* This should be called onPause().
*/
public void stop() {
if (!mIsStarted) {
// Already stopped, do nothing.
return;
}
mIsStarted = false;
mCameraController.stop();
}
public void destroy() {
// TODO: this is not strictly needed
mCameraCallbacks.clearListeners(); // Release inner listener.
clearCameraListeners(); // Release
mCameraController.stopImmediately();
}
//endregion
@ -737,6 +728,16 @@ public class CameraView extends FrameLayout {
}
/**
* Retrieves the location previously applied with setLocation().
*
* @return the current location, if any.
*/
@Nullable
public Location getLocation() {
return mCameraController.getLocation();
}
/**
* Sets desired white balance to current camera session.
*
@ -867,7 +868,7 @@ public class CameraView extends FrameLayout {
*/
public void setAudio(Audio audio) {
if (audio == getAudio() || !mIsStarted) {
if (audio == getAudio() || isStopped()) {
// Check did took place, or will happen on start().
mCameraController.setAudio(audio);
@ -921,7 +922,7 @@ public class CameraView extends FrameLayout {
*/
public void setSessionType(SessionType sessionType) {
if (sessionType == getSessionType() || !mIsStarted) {
if (sessionType == getSessionType() || isStopped()) {
// Check did took place, or will happen on start().
mCameraController.setSessionType(sessionType);
@ -1032,10 +1033,8 @@ public class CameraView extends FrameLayout {
*/
@Deprecated
public void setCameraListener(CameraListener cameraListener) {
mCameraCallbacks.clearListeners();
if (cameraListener != null) {
mCameraCallbacks.addListener(cameraListener);
}
mListeners.clear();
addCameraListener(cameraListener);
}
@ -1047,7 +1046,7 @@ public class CameraView extends FrameLayout {
*/
public void addCameraListener(CameraListener cameraListener) {
if (cameraListener != null) {
mCameraCallbacks.addListener(cameraListener);
mListeners.add(cameraListener);
}
}
@ -1059,7 +1058,7 @@ public class CameraView extends FrameLayout {
*/
public void removeCameraListener(CameraListener cameraListener) {
if (cameraListener != null) {
mCameraCallbacks.removeListener(cameraListener);
mListeners.remove(cameraListener);
}
}
@ -1069,7 +1068,7 @@ public class CameraView extends FrameLayout {
* to camera events.
*/
public void clearCameraListeners() {
mCameraCallbacks.clearListeners();
mListeners.clear();
}
@ -1129,11 +1128,16 @@ public class CameraView extends FrameLayout {
*/
public void startCapturingVideo(File file) {
if (file == null) {
file = new File(getContext().getExternalFilesDir(null), "video.mp4");
file = new File(getContext().getFilesDir(), "video.mp4");
}
if (mCameraController.startVideo(file)) {
mKeepScreenOn = getKeepScreenOn();
if (!mKeepScreenOn) setKeepScreenOn(true);
mUiHandler.post(new Runnable() {
@Override
public void run() {
mKeepScreenOn = getKeepScreenOn();
if (!mKeepScreenOn) setKeepScreenOn(true);
}
});
}
}
@ -1154,7 +1158,7 @@ public class CameraView extends FrameLayout {
throw new IllegalArgumentException("Video duration can't be < 500 milliseconds");
}
startCapturingVideo(file);
postDelayed(new Runnable() {
mUiHandler.postDelayed(new Runnable() {
@Override
public void run() {
stopCapturingVideo();
@ -1172,7 +1176,12 @@ public class CameraView extends FrameLayout {
*/
public void stopCapturingVideo() {
if (mCameraController.endVideo()) {
if (getKeepScreenOn() != mKeepScreenOn) setKeepScreenOn(mKeepScreenOn);
mUiHandler.post(new Runnable() {
@Override
public void run() {
if (getKeepScreenOn() != mKeepScreenOn) setKeepScreenOn(mKeepScreenOn);
}
});
}
}
@ -1247,20 +1256,31 @@ public class CameraView extends FrameLayout {
}
}
interface CameraCallbacks extends OrientationHelper.Callbacks {
void dispatchOnCameraOpened(CameraOptions options);
void dispatchOnCameraClosed();
void onCameraPreviewSizeChanged();
void processImage(byte[] jpeg, boolean consistentWithView, boolean flipHorizontally);
void processSnapshot(YuvImage image, boolean consistentWithView, boolean flipHorizontally);
void dispatchOnVideoTaken(File file);
void dispatchOnFocusStart(@Nullable Gesture trigger, PointF where);
void dispatchOnFocusEnd(@Nullable Gesture trigger, boolean success, PointF where);
void dispatchOnZoomChanged(final float newValue, final PointF[] fingers);
void dispatchOnExposureCorrectionChanged(float newValue, float[] bounds, PointF[] fingers);
}
class CameraCallbacks implements OrientationHelper.Callbacks {
private class Callbacks implements CameraCallbacks {
// Outer listeners
private ArrayList<CameraListener> mListeners = new ArrayList<>(2);
private CameraLogger mLogger = CameraLogger.create(CameraCallbacks.class.getSimpleName());
// Orientation TODO: move this logic into OrientationHelper
private Integer mDisplayOffset;
private Integer mDeviceOrientation;
CameraCallbacks() {}
Callbacks() {}
@Override
public void dispatchOnCameraOpened(final CameraOptions options) {
mLogger.i("dispatchOnCameraOpened", options);
mUiHandler.post(new Runnable() {
@ -1273,7 +1293,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnCameraClosed() {
mLogger.i("dispatchOnCameraClosed");
mUiHandler.post(new Runnable() {
@ -1286,7 +1306,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void onCameraPreviewSizeChanged() {
mLogger.i("onCameraPreviewSizeChanged");
// Camera preview size, as returned by getPreviewSize(), has changed.
@ -1319,6 +1339,7 @@ public class CameraView extends FrameLayout {
* @param flipHorizontally whether this picture should be flipped horizontally after decoding,
* because it was taken with the front camera.
*/
@Override
public void processImage(final byte[] jpeg, final boolean consistentWithView, final boolean flipHorizontally) {
mLogger.i("processImage");
mWorkerHandler.post(new Runnable() {
@ -1340,7 +1361,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void processSnapshot(final YuvImage yuv, final boolean consistentWithView, boolean flipHorizontally) {
mLogger.i("processSnapshot");
mWorkerHandler.post(new Runnable() {
@ -1378,7 +1399,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnVideoTaken(final File video) {
mLogger.i("dispatchOnVideoTaken", video);
mUiHandler.post(new Runnable() {
@ -1391,7 +1412,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnFocusStart(@Nullable final Gesture gesture, final PointF point) {
mLogger.i("dispatchOnFocusStart", gesture, point);
mUiHandler.post(new Runnable() {
@ -1408,7 +1429,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnFocusEnd(@Nullable final Gesture gesture, final boolean success,
final PointF point) {
mLogger.i("dispatchOnFocusEnd", gesture, success, point);
@ -1466,7 +1487,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnZoomChanged(final float newValue, final PointF[] fingers) {
mLogger.i("dispatchOnZoomChanged", newValue);
mUiHandler.post(new Runnable() {
@ -1479,7 +1500,7 @@ public class CameraView extends FrameLayout {
});
}
@Override
public void dispatchOnExposureCorrectionChanged(final float newValue,
final float[] bounds,
final PointF[] fingers) {
@ -1493,24 +1514,6 @@ public class CameraView extends FrameLayout {
}
});
}
private void addListener(@NonNull CameraListener cameraListener) {
mLogger.i("addListener");
mListeners.add(cameraListener);
}
private void removeListener(@NonNull CameraListener cameraListener) {
mLogger.i("removeListener");
mListeners.remove(cameraListener);
}
private void clearListeners() {
mLogger.i("clearListeners");
mListeners.clear();
}
}
//endregion

@ -68,7 +68,7 @@ public class CameraUtils {
*/
public static void decodeBitmap(final byte[] source, final BitmapCallback callback) {
final Handler ui = new Handler();
new Thread(new Runnable() {
WorkerHandler.run(new Runnable() {
@Override
public void run() {
final Bitmap bitmap = decodeBitmap(source);
@ -79,7 +79,7 @@ public class CameraUtils {
}
});
}
}).start();
});
}

@ -44,19 +44,15 @@ class Task<T> {
}
}
T await(long millis) {
return await(millis, TimeUnit.MILLISECONDS);
}
T await() {
try {
mLatch.await();
} catch (Exception e) {
e.printStackTrace();
}
T result = mResult;
mResult = null;
mLatch = null;
return result;
return await(1, TimeUnit.MINUTES);
}
T await(long time, @NonNull TimeUnit unit) {
private T await(long time, @NonNull TimeUnit unit) {
try {
mLatch.await(time, unit);
} catch (Exception e) {

@ -36,6 +36,13 @@ class WorkerHandler {
return handler;
}
// Handy util to perform action in a fallback thread.
// Not to be used for long-running operations since they will
// block the fallback thread.
public static void run(Runnable action) {
get("FallbackCameraThread").post(action);
}
private HandlerThread mThread;
private Handler mHandler;
@ -53,4 +60,20 @@ class WorkerHandler {
public void post(Runnable runnable) {
mHandler.post(runnable);
}
public Thread getThread() {
return mThread;
}
public static void destroy() {
for (String key : sCache.keySet()) {
WeakReference<WorkerHandler> ref = sCache.get(key);
WorkerHandler handler = ref.get();
if (handler != null && handler.getThread().isAlive()) {
handler.getThread().interrupt();
}
ref.clear();
}
sCache.clear();
}
}

@ -16,11 +16,11 @@ class GridLinesLayout extends View {
private Grid gridMode;
Drawable horiz;
Drawable vert;
private Drawable horiz;
private Drawable vert;
private final float width;
Task<Void> drawTask = new Task<>();
Task<Integer> drawTask = new Task<>();
private final static float GOLDEN_RATIO_INV = 0.61803398874989f;
@ -92,6 +92,6 @@ class GridLinesLayout extends View {
vert.draw(canvas);
canvas.translate(- pos * getWidth(), 0);
}
drawTask.end(null);
drawTask.end(count);
}
}

@ -19,12 +19,23 @@ public class SizeTest {
@Test
public void testEquals() {
Size s1 = new Size(10, 20);
assertTrue(s1.equals(s1));
assertFalse(s1.equals(null));
assertFalse(s1.equals(""));
Size s2 = new Size(10, 0);
Size s3 = new Size(10, 20);
assertTrue(s1.equals(s3));
assertFalse(s1.equals(s2));
}
@Test
public void testHashCode() {
Size s1 = new Size(10, 20);
Size s2 = new Size(10, 0);
assertNotEquals(s1.hashCode(), s2.hashCode());
}
@Test
public void testCompare() {
Size s1 = new Size(10, 20);

@ -1,15 +1,15 @@
coverage:
precision: 1
round: down
range: "30...100"
range: "40...100"
status:
project:
default:
target: 40%
target: 50%
patch:
default:
target: 60%
target: 70%
changes: no
comment:

Loading…
Cancel
Save