Frame Processing maxWidth, maxHeight and format (#704)

* Create CameraEngine and CameraBaseEngine

* Promote filters to stable - no experimental flag

* Fix setSnapshotMaxWidth / Height bugs

* Add setFrameProcessingMaxWidth and setFrameProcessingMaxHeight

* Add setFrameProcessingMaxWidth and setFrameProcessingMaxHeight (docs)

* Prepare Frame for Images, abstract FrameManager, create ByteBufferFrameManager

* Fix tests

* Fix unit tests

* Send Images for Camera2

* Tests

* Add CameraView.setFrameProcessingFormat(int), tests, docs

* Add CameraOptions.getSupportedFrameProcessingFormats(), tests

* Add CameraEngine support, integration tests

* Fix demo app, add getFrameProcessingPoolSize

* Fix tests

* Fix tests
Branch: pull/708/head
Author: Mattia Iavarone, committed via GitHub
Commit: e1721bb77d (parent 4a6b9be905)
30 changed files (lines changed, path):

    2    .github/workflows/build.yml
    3    README.md
   51    cameraview/src/androidTest/java/com/otaliastudios/cameraview/CameraViewTest.java
   17    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera1IntegrationTest.java
    7    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/Camera2IntegrationTest.java
   53    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/CameraIntegrationTest.java
   28    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/MockCameraEngine.java
    7    cameraview/src/androidTest/java/com/otaliastudios/cameraview/engine/options/Camera1OptionsTest.java
  105    cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java
   98    cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java
  100    cameraview/src/androidTest/java/com/otaliastudios/cameraview/internal/utils/ImageHelperTest.java
   13    cameraview/src/main/java/com/otaliastudios/cameraview/CameraOptions.java
  191    cameraview/src/main/java/com/otaliastudios/cameraview/CameraView.java
   82    cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java
  191    cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java
  927    cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraBaseEngine.java
  862    cameraview/src/main/java/com/otaliastudios/cameraview/engine/CameraEngine.java
    4    cameraview/src/main/java/com/otaliastudios/cameraview/engine/options/Camera1Options.java
   10    cameraview/src/main/java/com/otaliastudios/cameraview/engine/options/Camera2Options.java
  162    cameraview/src/main/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManager.java
   34    cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java
  191    cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java
   30    cameraview/src/main/java/com/otaliastudios/cameraview/frame/ImageFrameManager.java
  100    cameraview/src/main/java/com/otaliastudios/cameraview/internal/utils/ImageHelper.java
    4    cameraview/src/main/res/values/attrs.xml
   40    cameraview/src/test/java/com/otaliastudios/cameraview/frame/FrameTest.java
   36    demo/src/main/java/com/otaliastudios/cameraview/demo/CameraActivity.java
   42    demo/src/main/java/com/otaliastudios/cameraview/demo/Option.java
   62    docs/_posts/2018-12-20-frame-processing.md
    6    docs/_posts/2019-08-06-filters.md

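Before the diffs, a sketch of how the new options are meant to combine from client code. This is illustrative only and not part of the commit: the setter and processor methods are the ones added below, while the wrapper class and the 640/format values are arbitrary.

    import android.graphics.ImageFormat;
    import androidx.annotation.NonNull;
    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.frame.Frame;
    import com.otaliastudios.cameraview.frame.FrameProcessor;
    import com.otaliastudios.cameraview.size.Size;

    class FrameProcessingSetup {

        // Constrain the frame stream and pick a format before opening the camera.
        // Per the javadocs in this commit, the max width/height only take effect
        // on Engine.CAMERA2; Camera1 always delivers NV21 frames at the preview
        // stream size.
        static void configure(@NonNull CameraView cameraView) {
            cameraView.setFrameProcessingMaxWidth(640);
            cameraView.setFrameProcessingMaxHeight(640);
            cameraView.setFrameProcessingFormat(ImageFormat.YUV_420_888);
            cameraView.addFrameProcessor(new FrameProcessor() {
                @Override
                public void process(@NonNull Frame frame) {
                    Size size = frame.getSize();    // <= 640x640 on Camera2
                    int format = frame.getFormat(); // YUV_420_888 here; NV21 on Camera1
                    // ... analyze the frame; do not hold it after returning,
                    // since frames are pooled and recycled by the FrameManager.
                }
            });
        }
    }

The same constraints can be set from XML via the new cameraFrameProcessing* attributes shown in the README diff below.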
@@ -64,7 +64,7 @@ jobs:
with:
java-version: 1.8
- name: Execute emulator tests
timeout-minutes: 20
timeout-minutes: 30
uses: reactivecircus/android-emulator-runner@v2.2.0
with:
api-level: ${{ matrix.EMULATOR_API }}

@@ -114,6 +114,9 @@ Using CameraView is extremely simple:
app:cameraVideoSizeAspectRatio="@string/video_ratio"
app:cameraSnapshotMaxWidth="@integer/snapshot_max_width"
app:cameraSnapshotMaxHeight="@integer/snapshot_max_height"
app:cameraFrameProcessingMaxWidth="@integer/processing_max_width"
app:cameraFrameProcessingMaxHeight="@integer/processing_max_height"
app:cameraFrameProcessingFormat="@integer/processing_format"
app:cameraVideoBitRate="@integer/video_bit_rate"
app:cameraAudioBitRate="@integer/audio_bit_rate"
app:cameraGestureTap="none|autoFocus|takePicture"

@@ -3,6 +3,7 @@ package com.otaliastudios.cameraview;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.ImageFormat;
import android.graphics.PointF;
import android.location.Location;
import androidx.annotation.NonNull;
@@ -168,8 +169,13 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getLocation(), null);
assertEquals(cameraView.getExposureCorrection(), 0f, 0f);
assertEquals(cameraView.getZoom(), 0f, 0f);
assertEquals(cameraView.getVideoMaxDuration(), 0, 0);
assertEquals(cameraView.getVideoMaxSize(), 0, 0);
assertEquals(cameraView.getVideoMaxDuration(), 0);
assertEquals(cameraView.getVideoMaxSize(), 0);
assertEquals(cameraView.getSnapshotMaxWidth(), 0);
assertEquals(cameraView.getSnapshotMaxHeight(), 0);
assertEquals(cameraView.getFrameProcessingMaxWidth(), 0);
assertEquals(cameraView.getFrameProcessingMaxHeight(), 0);
assertEquals(cameraView.getFrameProcessingFormat(), 0);
// Self managed
GestureParser gestures = new GestureParser(empty);
@@ -801,6 +807,30 @@ public class CameraViewTest extends BaseTest {
assertEquals(cameraView.getPreviewFrameRate(), 60, 0);
}
@Test
public void testSnapshotMaxSize() {
cameraView.setSnapshotMaxWidth(500);
assertEquals(500, cameraView.getSnapshotMaxWidth());
cameraView.setSnapshotMaxHeight(700);
assertEquals(700, cameraView.getSnapshotMaxHeight());
}
@Test
public void testFrameProcessingMaxSize() {
cameraView.setFrameProcessingMaxWidth(500);
assertEquals(500, cameraView.getFrameProcessingMaxWidth());
cameraView.setFrameProcessingMaxHeight(700);
assertEquals(700, cameraView.getFrameProcessingMaxHeight());
}
@Test
public void testFrameProcessingFormat() {
cameraView.setFrameProcessingFormat(ImageFormat.YUV_420_888);
assertEquals(ImageFormat.YUV_420_888, cameraView.getFrameProcessingFormat());
cameraView.setFrameProcessingFormat(ImageFormat.YUV_422_888);
assertEquals(ImageFormat.YUV_422_888, cameraView.getFrameProcessingFormat());
}
//endregion
//region Lists of listeners and processors
@@ -975,26 +1005,11 @@ public class CameraViewTest extends BaseTest {
}
//endregion
// TODO: test permissions
//region Filter
@Test(expected = RuntimeException.class)
public void testSetFilter_notExperimental() {
cameraView.setExperimental(false);
cameraView.setFilter(Filters.AUTO_FIX.newInstance());
}
@Test
public void testSetFilter_notExperimental_noFilter() {
cameraView.setExperimental(false);
cameraView.setFilter(Filters.NONE.newInstance());
// no exception thrown
}
@Test
public void testSetFilter() {
cameraView.setExperimental(true);
Filter filter = Filters.AUTO_FIX.newInstance();
cameraView.setFilter(filter);
verify(mockPreview, times(1)).setFilter(filter);
@@ -1002,4 +1017,6 @@ public class CameraViewTest extends BaseTest {
//noinspection ResultOfMethodCallIgnored
verify(mockPreview, times(1)).getCurrentFilter();
}
//endregion
}

@@ -1,6 +1,13 @@
package com.otaliastudios.cameraview.engine;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.tools.Retry;
import com.otaliastudios.cameraview.tools.SdkExclude;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -10,6 +17,10 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.filters.RequiresDevice;
import java.util.Collection;
import static org.junit.Assert.assertNotNull;
/**
* These tests work great on real devices, and are the only way to test actual CameraEngine
* implementation - we really need to open the camera device.
@@ -31,4 +42,10 @@ public class Camera1IntegrationTest extends CameraIntegrationTest<Camera1Engine>
protected long getMeteringTimeoutMillis() {
return Camera1Engine.AUTOFOCUS_END_DELAY_MILLIS;
}
@Override
public void testFrameProcessing_maxSize() {
// Camera1Engine does not support different sizes.
// super.testFrameProcessing_maxSize();
}
}

@@ -9,6 +9,7 @@ import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.engine.action.ActionHolder;
import com.otaliastudios.cameraview.engine.action.BaseAction;
import org.junit.Test;
import org.junit.runner.RunWith;
import androidx.annotation.NonNull;
@@ -76,4 +77,10 @@ public class Camera2IntegrationTest extends CameraIntegrationTest<Camera2Engine>
if (shouldOpen) closeSync(true);
return result;
}
@Override
public void testFrameProcessing_freezeRelease() {
// Camera2 Frames are not freezable.
// super.testFrameProcessing_freezeRelease();
}
}

@@ -51,6 +51,7 @@ import org.junit.Test;
import org.mockito.ArgumentMatcher;
import java.io.File;
import java.util.Collection;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -70,7 +71,7 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public abstract class CameraIntegrationTest<E extends CameraEngine> extends BaseTest {
public abstract class CameraIntegrationTest<E extends CameraBaseEngine> extends BaseTest {
private final static CameraLogger LOG = CameraLogger.create(CameraIntegrationTest.class.getSimpleName());
private final static long DELAY = 8000;
@@ -1043,6 +1044,27 @@ public abstract class CameraIntegrationTest<E extends CameraEngine> extends Base
assert15Frames(processor);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_maxSize() {
final int max = 600;
camera.setFrameProcessingMaxWidth(max);
camera.setFrameProcessingMaxHeight(max);
final Op<Size> sizeOp = new Op<>();
camera.addFrameProcessor(new FrameProcessor() {
@Override
public void process(@NonNull Frame frame) {
sizeOp.controller().end(frame.getSize());
}
});
openSync(true);
Size size = sizeOp.await(2000);
assertNotNull(size);
assertTrue(size.getWidth() <= max);
assertTrue(size.getHeight() <= max);
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
@@ -1109,6 +1131,35 @@ public abstract class CameraIntegrationTest<E extends CameraEngine> extends Base
}
}
@Test
@Retry(emulatorOnly = true)
@SdkExclude(maxSdkVersion = 22, emulatorOnly = true)
public void testFrameProcessing_format() {
CameraOptions o = openSync(true);
Collection<Integer> formats = o.getSupportedFrameProcessingFormats();
for (int format : formats) {
LOG.i("[TEST FRAME FORMAT]", "Testing", format, "...");
Op<Boolean> op = testFrameProcessorFormat(format);
assertNotNull(op.await(DELAY));
}
}
@NonNull
private Op<Boolean> testFrameProcessorFormat(final int format) {
final Op<Boolean> op = new Op<>();
camera.setFrameProcessingFormat(format);
camera.addFrameProcessor(new FrameProcessor() {
@Override
public void process(@NonNull Frame frame) {
if (frame.getFormat() == format) {
op.controller().start();
op.controller().end(true);
}
}
});
return op;
}
//endregion
//region Overlays

@@ -13,6 +13,7 @@ import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.ByteBufferFrameManager;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.controls.Hdr;
@@ -27,7 +28,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
public class MockCameraEngine extends CameraEngine {
public class MockCameraEngine extends CameraBaseEngine {
public boolean mPictureCaptured;
public boolean mFocusStarted;
@@ -83,7 +84,7 @@ public class MockCameraEngine extends CameraEngine {
}
public void setMockState(@NonNull CameraState state) {
Task<Void> change = mOrchestrator.scheduleStateChange(getState(),
Task<Void> change = getOrchestrator().scheduleStateChange(getState(),
state,
false,
new Callable<Task<Void>>() {
@@ -109,7 +110,6 @@ public class MockCameraEngine extends CameraEngine {
mExposureCorrectionChanged = true;
}
@Override
public void setFlash(@NonNull Flash flash) {
mFlash = flash;
@@ -135,6 +135,16 @@ public class MockCameraEngine extends CameraEngine {
mPictureFormat = pictureFormat;
}
@Override
public void setHasFrameProcessors(boolean hasFrameProcessors) {
mHasFrameProcessors = hasFrameProcessors;
}
@Override
public void setFrameProcessingFormat(int format) {
mFrameProcessingFormat = format;
}
@Override
public void takePicture(@NonNull PictureResult.Stub stub) {
super.takePicture(stub);
@@ -172,6 +182,12 @@ public class MockCameraEngine extends CameraEngine {
return new ArrayList<>();
}
@NonNull
@Override
protected List<Size> getFrameProcessingAvailableSizes() {
return new ArrayList<>();
}
@Override
public void startAutoFocus(@Nullable Gesture gesture, @NonNull PointF point) {
mFocusStarted = true;
@@ -180,13 +196,11 @@ public class MockCameraEngine extends CameraEngine {
@NonNull
@Override
protected FrameManager instantiateFrameManager() {
return new FrameManager(2, null);
return new ByteBufferFrameManager(2, null);
}
@Override
public void setPlaySounds(boolean playSounds) {
}
public void setPlaySounds(boolean playSounds) { }
@Override
protected boolean collectCameraInfo(@NonNull Facing facing) {

@@ -1,6 +1,7 @@
package com.otaliastudios.cameraview.engine.options;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import com.otaliastudios.cameraview.BaseTest;
@@ -8,6 +9,7 @@ import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
import com.otaliastudios.cameraview.gesture.GestureAction;
import com.otaliastudios.cameraview.controls.Grid;
@@ -54,6 +56,11 @@ public class Camera1OptionsTest extends BaseTest {
assertFalse(o.isZoomSupported());
assertEquals(o.getExposureCorrectionMaxValue(), 0f, 0);
assertEquals(o.getExposureCorrectionMinValue(), 0f, 0);
// Static
assertEquals(1, o.getSupportedPictureFormats().size());
assertTrue(o.getSupportedPictureFormats().contains(PictureFormat.JPEG));
assertEquals(1, o.getSupportedFrameProcessingFormats().size());
assertTrue(o.getSupportedFrameProcessingFormats().contains(ImageFormat.NV21));
}
private Camera.Size mockCameraSize(int width, int height) {

@@ -0,0 +1,105 @@
package com.otaliastudios.cameraview.frame;
import android.graphics.ImageFormat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class ByteBufferFrameManagerTest extends BaseTest {
private ByteBufferFrameManager.BufferCallback callback;
@Before
public void setUp() {
callback = mock(ByteBufferFrameManager.BufferCallback.class);
}
@After
public void tearDown() {
callback = null;
}
@Test
public void testAllocate() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
verify(callback, times(1)).onBufferAvailable(any(byte[].class));
reset(callback);
manager = new ByteBufferFrameManager(5, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
verify(callback, times(5)).onBufferAvailable(any(byte[].class));
}
@Test
public void testOnFrameReleased_alreadyFull() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
int length = manager.getFrameBytes();
Frame frame1 = manager.getFrame(new byte[length], 0, 0);
// Since frame1 is already taken and poolSize = 1, a new Frame is created.
Frame frame2 = manager.getFrame(new byte[length], 0, 0);
// Release the first frame so it goes back into the pool.
manager.onFrameReleased(frame1, (byte[]) frame1.getData());
reset(callback);
// Release the second. The pool is already full, so onBufferAvailable should not be called
// since this Frame instance will NOT be reused.
manager.onFrameReleased(frame2, (byte[]) frame2.getData());
verify(callback, never()).onBufferAvailable((byte[]) frame2.getData());
}
@Test
public void testOnFrameReleased_sameLength() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
int length = manager.getFrameBytes();
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
// Release the frame and ensure that onBufferAvailable is called.
reset(callback);
manager.onFrameReleased(frame, (byte[]) frame.getData());
verify(callback, times(1)).onBufferAvailable(picture);
}
@Test
public void testOnFrameReleased_differentLength() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
int length = manager.getFrameBytes();
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
// Don't release the frame. Change the allocation size.
manager.setUp(ImageFormat.NV16, new Size(15, 15));
// Now release the old frame and ensure that onBufferAvailable is NOT called,
// because the released data has wrong length.
manager.onFrameReleased(frame, (byte[]) frame.getData());
reset(callback);
verify(callback, never()).onBufferAvailable(picture);
}
}

@@ -3,116 +3,44 @@ package com.otaliastudios.cameraview.frame;
import android.graphics.ImageFormat;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class FrameManagerTest extends BaseTest {
private FrameManager.BufferCallback callback;
@Before
public void setUp() {
callback = mock(FrameManager.BufferCallback.class);
}
@After
public void tearDown() {
callback = null;
}
@Test
public void testAllocate() {
FrameManager manager = new FrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
verify(callback, times(1)).onBufferAvailable(any(byte[].class));
reset(callback);
manager = new FrameManager(5, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
verify(callback, times(5)).onBufferAvailable(any(byte[].class));
}
@Test
public void testFrameRecycling() {
// A 1-pool manager will always recycle the same frame.
FrameManager manager = new FrameManager(1, callback);
FrameManager<String> manager = new FrameManager<String>(1, String.class) {
@Override
protected void onFrameDataReleased(@NonNull String data, boolean recycled) { }
@NonNull
@Override
protected String onCloneFrameData(@NonNull String data) {
return data;
}
};
manager.setUp(ImageFormat.NV21, new Size(50, 50));
Frame first = manager.getFrame(null, 0, 0);
Frame first = manager.getFrame("foo", 0, 0);
first.release();
Frame second = manager.getFrame(null, 0, 0);
Frame second = manager.getFrame("bar", 0, 0);
second.release();
assertEquals(first, second);
}
@Test
public void testOnFrameReleased_alreadyFull() {
FrameManager manager = new FrameManager(1, callback);
int length = manager.setUp(ImageFormat.NV21, new Size(50, 50));
Frame frame1 = manager.getFrame(new byte[length], 0, 0);
// Since frame1 is already taken and poolSize = 1, a new Frame is created.
Frame frame2 = manager.getFrame(new byte[length], 0, 0);
// Release the first frame so it goes back into the pool.
manager.onFrameReleased(frame1, frame1.getData());
reset(callback);
// Release the second. The pool is already full, so onBufferAvailable should not be called
// since this Frame instance will NOT be reused.
manager.onFrameReleased(frame2, frame2.getData());
verify(callback, never()).onBufferAvailable(frame2.getData());
}
@Test
public void testOnFrameReleased_sameLength() {
FrameManager manager = new FrameManager(1, callback);
int length = manager.setUp(ImageFormat.NV21, new Size(50, 50));
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
// Release the frame and ensure that onBufferAvailable is called.
reset(callback);
manager.onFrameReleased(frame, frame.getData());
verify(callback, times(1)).onBufferAvailable(picture);
}
@Test
public void testOnFrameReleased_differentLength() {
FrameManager manager = new FrameManager(1, callback);
int length = manager.setUp(ImageFormat.NV21, new Size(50, 50));
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
// Don't release the frame. Change the allocation size.
manager.setUp(ImageFormat.NV16, new Size(15, 15));
// Now release the old frame and ensure that onBufferAvailable is NOT called,
// because the released data has wrong length.
manager.onFrameReleased(frame, frame.getData());
reset(callback);
verify(callback, never()).onBufferAvailable(picture);
}
}

@@ -1,100 +0,0 @@
package com.otaliastudios.cameraview.internal.utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.Looper;
import android.view.Surface;
import androidx.annotation.NonNull;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.tools.Op;
import com.otaliastudios.cameraview.tools.SdkExclude;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.ByteArrayOutputStream;
import static org.junit.Assert.assertNotNull;
/**
* Starting from API 29, surface.lockCanvas() sets the surface format to RGBA_8888:
* https://github.com/aosp-mirror/platform_frameworks_base/blob/android10-release/core/jni/android_view_Surface.cpp#L215-L217 .
* For this reason, acquireLatestImage crashes because we requested a different format.
*/
@SdkExclude(minSdkVersion = 29)
@RunWith(AndroidJUnit4.class)
@SmallTest
public class ImageHelperTest extends BaseTest {
@NonNull
private Image getImage() {
ImageReader reader = ImageReader.newInstance(100, 100, ImageFormat.YUV_420_888, 1);
Surface readerSurface = reader.getSurface();
final Op<Image> imageOp = new Op<>();
reader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image != null) imageOp.controller().end(image);
}
}, new Handler(Looper.getMainLooper()));
// Write on reader surface.
Canvas readerCanvas = readerSurface.lockCanvas(null);
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.RED);
readerCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.MULTIPLY);
readerCanvas.drawCircle(50, 50, 50, paint);
readerSurface.unlockCanvasAndPost(readerCanvas);
// Wait
Image image = imageOp.await(5000);
assertNotNull(image);
return image;
}
@Test
public void testImage() {
Image image = getImage();
int width = image.getWidth();
int height = image.getHeight();
int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
int sizeBits = width * height * bitsPerPixel;
int sizeBytes = (int) Math.ceil(sizeBits / 8.0d);
byte[] bytes = new byte[sizeBytes];
ImageHelper.convertToNV21(image, bytes);
image.close();
// Read the image
YuvImage yuvImage = new YuvImage(bytes, ImageFormat.NV21, width, height, null);
ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, jpegStream);
byte[] jpegByteArray = jpegStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
assertNotNull(bitmap);
// Wanted to do assertions on the color here but it doesn't work. There must be an issue
// with how we are drawing the image in this test, since with a real camera the algorithm works well.
// So for now let's just test that nothing crashes during this process.
// int color = bitmap.getPixel(bitmap.getWidth() - 1, bitmap.getHeight() - 1);
// assertEquals(Color.red(color), 255, 5);
// assertEquals(Color.green(color), 0, 5);
// assertEquals(Color.blue(color), 0, 5);
// assertEquals(Color.alpha(color), 0, 5);
}
}

@@ -56,6 +56,7 @@ public abstract class CameraOptions {
protected Set<AspectRatio> supportedPictureAspectRatio = new HashSet<>(4);
protected Set<AspectRatio> supportedVideoAspectRatio = new HashSet<>(3);
protected Set<PictureFormat> supportedPictureFormats = new HashSet<>(2);
protected Set<Integer> supportedFrameProcessingFormats = new HashSet<>(2);
protected boolean zoomSupported;
protected boolean exposureCorrectionSupported;
@@ -237,6 +238,18 @@ public abstract class CameraOptions {
return Collections.unmodifiableSet(supportedPictureFormats);
}
/**
* Set of supported formats for frame processing,
* as {@link ImageFormat} constants.
*
* @see CameraView#setFrameProcessingFormat(int)
* @return a collection of supported values.
*/
@NonNull
public final Collection<Integer> getSupportedFrameProcessingFormats() {
return Collections.unmodifiableSet(supportedFrameProcessingFormats);
}
/**
* Whether zoom is supported. If this is false, pinch-to-zoom
* will not work and {@link CameraView#setZoom(float)} will have no effect.

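The javadoc above asks callers to check this collection before applying a format. A hedged sketch of that negotiation, using the library's existing CameraListener#onCameraOpened callback; the wrapper class, the listener body, and the preference for YUV_422_888 are illustrative assumptions, not part of this commit:

    import android.graphics.ImageFormat;
    import androidx.annotation.NonNull;
    import com.otaliastudios.cameraview.CameraListener;
    import com.otaliastudios.cameraview.CameraOptions;
    import com.otaliastudios.cameraview.CameraView;

    class FormatNegotiation {

        // Wait for the camera to open, then only apply a supported format.
        static void negotiate(@NonNull final CameraView cameraView) {
            cameraView.addCameraListener(new CameraListener() {
                @Override
                public void onCameraOpened(@NonNull CameraOptions options) {
                    if (options.getSupportedFrameProcessingFormats()
                            .contains(ImageFormat.YUV_422_888)) {
                        cameraView.setFrameProcessingFormat(ImageFormat.YUV_422_888);
                    }
                }
            });
        }
    }

On Camera1 this collection is static and only contains ImageFormat.NV21, as the Camera1OptionsTest above asserts.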
@@ -201,6 +201,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
boolean pictureSnapshotMetering = a.getBoolean(
R.styleable.CameraView_cameraPictureSnapshotMetering,
DEFAULT_PICTURE_SNAPSHOT_METERING);
int snapshotMaxWidth = a.getInteger(R.styleable.CameraView_cameraSnapshotMaxWidth, 0);
int snapshotMaxHeight = a.getInteger(R.styleable.CameraView_cameraSnapshotMaxHeight, 0);
int frameMaxWidth = a.getInteger(R.styleable.CameraView_cameraFrameProcessingMaxWidth, 0);
int frameMaxHeight = a.getInteger(R.styleable.CameraView_cameraFrameProcessingMaxHeight, 0);
int frameFormat = a.getInteger(R.styleable.CameraView_cameraFrameProcessingFormat, 0);
// Size selectors and gestures
SizeSelectorParser sizeSelectors = new SizeSelectorParser(a);
@@ -257,6 +262,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
setVideoBitRate(videoBitRate);
setAutoFocusResetDelay(autoFocusResetDelay);
setPreviewFrameRate(videoFrameRate);
setSnapshotMaxWidth(snapshotMaxWidth);
setSnapshotMaxHeight(snapshotMaxHeight);
setFrameProcessingMaxWidth(frameMaxWidth);
setFrameProcessingMaxHeight(frameMaxHeight);
setFrameProcessingFormat(frameFormat);
mCameraEngine.setHasFrameProcessors(!mFrameProcessors.isEmpty());
// Apply gestures
mapGesture(Gesture.TAP, gestures.getTapAction());
@@ -667,7 +678,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
break;
case FILTER_CONTROL_1:
if (!mExperimental) break;
if (getFilter() instanceof OneParameterFilter) {
OneParameterFilter filter = (OneParameterFilter) getFilter();
oldValue = filter.getParameter1();
@@ -679,7 +689,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
break;
case FILTER_CONTROL_2:
if (!mExperimental) break;
if (getFilter() instanceof TwoParameterFilter) {
TwoParameterFilter filter = (TwoParameterFilter) getFilter();
oldValue = filter.getParameter2();
@@ -968,6 +977,11 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
setVideoBitRate(oldEngine.getVideoBitRate());
setAutoFocusResetDelay(oldEngine.getAutoFocusResetDelay());
setPreviewFrameRate(oldEngine.getPreviewFrameRate());
setSnapshotMaxWidth(oldEngine.getSnapshotMaxWidth());
setSnapshotMaxHeight(oldEngine.getSnapshotMaxHeight());
setFrameProcessingMaxWidth(oldEngine.getFrameProcessingMaxWidth());
setFrameProcessingMaxHeight(oldEngine.getFrameProcessingMaxHeight());
setFrameProcessingFormat(0 /* this is very engine specific, so do not pass */);
}
/**
@@ -1555,47 +1569,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mListeners.clear();
}
/**
* Adds a {@link FrameProcessor} instance to be notified of
* new frames in the preview stream.
*
* @param processor a frame processor.
*/
public void addFrameProcessor(@Nullable FrameProcessor processor) {
if (processor != null) {
mFrameProcessors.add(processor);
if (mFrameProcessors.size() == 1) {
mCameraEngine.setHasFrameProcessors(true);
}
}
}
/**
* Remove a {@link FrameProcessor} that was previously registered.
*
* @param processor a frame processor
*/
public void removeFrameProcessor(@Nullable FrameProcessor processor) {
if (processor != null) {
mFrameProcessors.remove(processor);
if (mFrameProcessors.size() == 0) {
mCameraEngine.setHasFrameProcessors(false);
}
}
}
/**
* Clears the list of {@link FrameProcessor} that have been registered
* to preview frames.
*/
public void clearFrameProcessors() {
boolean had = mFrameProcessors.size() > 0;
mFrameProcessors.clear();
if (had) {
mCameraEngine.setHasFrameProcessors(false);
}
}
/**
* Asks the camera to capture an image of the current scene.
* This will trigger {@link CameraListener#onPictureTaken(PictureResult)} if a listener
@@ -1767,6 +1740,24 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mCameraEngine.setSnapshotMaxHeight(maxHeight);
}
/**
* The max width for snapshots.
* @see #setSnapshotMaxWidth(int)
* @return max width
*/
public int getSnapshotMaxWidth() {
return mCameraEngine.getSnapshotMaxWidth();
}
/**
* The max height for snapshots.
* @see #setSnapshotMaxHeight(int)
* @return max height
*/
public int getSnapshotMaxHeight() {
return mCameraEngine.getSnapshotMaxHeight();
}
/**
* Returns the size used for snapshots, or null if it hasn't been computed
* (for example if the surface is not ready). This is the preview size, rotated to match
@@ -2277,6 +2268,110 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
//endregion
//region Frame Processing
/**
* Adds a {@link FrameProcessor} instance to be notified of
* new frames in the preview stream.
*
* @param processor a frame processor.
*/
public void addFrameProcessor(@Nullable FrameProcessor processor) {
if (processor != null) {
mFrameProcessors.add(processor);
if (mFrameProcessors.size() == 1) {
mCameraEngine.setHasFrameProcessors(true);
}
}
}
/**
* Remove a {@link FrameProcessor} that was previously registered.
*
* @param processor a frame processor
*/
public void removeFrameProcessor(@Nullable FrameProcessor processor) {
if (processor != null) {
mFrameProcessors.remove(processor);
if (mFrameProcessors.size() == 0) {
mCameraEngine.setHasFrameProcessors(false);
}
}
}
/**
* Clears the list of {@link FrameProcessor} that have been registered
* to preview frames.
*/
public void clearFrameProcessors() {
boolean had = mFrameProcessors.size() > 0;
mFrameProcessors.clear();
if (had) {
mCameraEngine.setHasFrameProcessors(false);
}
}
/**
* Sets the max width for frame processing {@link Frame}s.
* This option is only supported by {@link Engine#CAMERA2} and will have no effect
* on other engines.
*
* @param maxWidth max width for frames
*/
public void setFrameProcessingMaxWidth(int maxWidth) {
mCameraEngine.setFrameProcessingMaxWidth(maxWidth);
}
/**
* Sets the max height for frame processing {@link Frame}s.
* This option is only supported by {@link Engine#CAMERA2} and will have no effect
* on other engines.
*
* @param maxHeight max height for frames
*/
public void setFrameProcessingMaxHeight(int maxHeight) {
mCameraEngine.setFrameProcessingMaxHeight(maxHeight);
}
/**
* The max width for frame processing frames.
* @see #setFrameProcessingMaxWidth(int)
* @return max width
*/
public int getFrameProcessingMaxWidth() {
return mCameraEngine.getFrameProcessingMaxWidth();
}
/**
* The max height for frame processing frames.
* @see #setFrameProcessingMaxHeight(int)
* @return max height
*/
public int getFrameProcessingMaxHeight() {
return mCameraEngine.getFrameProcessingMaxHeight();
}
/**
* Sets the {@link android.graphics.ImageFormat} for frame processing.
* Before applying you should check {@link CameraOptions#getSupportedFrameProcessingFormats()}.
*
* @param format image format
*/
public void setFrameProcessingFormat(int format) {
mCameraEngine.setFrameProcessingFormat(format);
}
/**
* Returns the current frame processing format.
* @see #setFrameProcessingFormat(int)
* @return image format
*/
public int getFrameProcessingFormat() {
return mCameraEngine.getFrameProcessingFormat();
}
//endregion
//region Overlays
@Override
@@ -2333,11 +2428,6 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
} else {
boolean isNoFilter = filter instanceof NoFilter;
boolean isFilterPreview = mCameraPreview instanceof FilterCameraPreview;
// If not experimental, we only allow NoFilter (called on creation).
if (!isNoFilter && !mExperimental) {
throw new RuntimeException("Filters are an experimental features and" +
" need the experimental flag set.");
}
// If not a filter preview, we only allow NoFilter (called on creation).
if (!isNoFilter && !isFilterPreview) {
throw new RuntimeException("Filters are only supported by the GL_SURFACE preview." +
@@ -2362,10 +2452,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
*/
@NonNull
public Filter getFilter() {
if (!mExperimental) {
throw new RuntimeException("Filters are an experimental features and need " +
"the experimental flag set.");
} else if (mCameraPreview == null) {
if (mCameraPreview == null) {
return mPendingFilter;
} else if (mCameraPreview instanceof FilterCameraPreview) {
return ((FilterCameraPreview) mCameraPreview).getCurrentFilter();

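The CameraView changes above also finish promoting filters to stable: the experimental-flag RuntimeException is removed from setFilter() and getFilter(). A short sketch of what is now legal without setExperimental(true); the wrapper class is illustrative, Filters/Filter are assumed to live in the library's filter package, and AUTO_FIX is an arbitrary choice:

    import com.otaliastudios.cameraview.CameraView;
    import com.otaliastudios.cameraview.filter.Filter;
    import com.otaliastudios.cameraview.filter.Filters;

    class FilterUsage {

        // No experimental flag needed anymore. Filters still require the
        // GL_SURFACE preview, as the remaining guard above enforces.
        static void apply(CameraView cameraView) {
            Filter filter = Filters.AUTO_FIX.newInstance();
            cameraView.setFilter(filter);
            Filter current = cameraView.getFilter(); // no longer throws
        }
    }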
@@ -18,7 +18,6 @@ import android.view.SurfaceHolder;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.engine.mappers.Camera1Mapper;
@@ -26,6 +25,7 @@ import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.engine.options.Camera1Options;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.ByteBufferFrameManager;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
@@ -48,13 +48,14 @@ import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class Camera1Engine extends CameraEngine implements
public class Camera1Engine extends CameraBaseEngine implements
Camera.PreviewCallback,
Camera.ErrorCallback,
FrameManager.BufferCallback {
ByteBufferFrameManager.BufferCallback {
private static final String JOB_FOCUS_RESET = "focus reset";
private static final String JOB_FOCUS_END = "focus end";
@@ -105,6 +106,15 @@ public class Camera1Engine extends CameraEngine implements
return result;
}
@EngineThread
@NonNull
@Override
protected List<Size> getFrameProcessingAvailableSizes() {
// We don't choose the frame processing size.
// It comes from the preview stream.
return Collections.singletonList(mPreviewStreamSize);
}
@EngineThread
@Override
protected void onPreviewStreamSizeChanged() {
@@ -189,7 +199,7 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected Task<Void> onStartPreview() {
LOG.i("onStartPreview", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged();
getCallback().onCameraPreviewStreamSizeChanged();
Size previewSize = getPreviewStreamSize(Reference.VIEW);
if (previewSize == null) {
@@ -286,8 +296,8 @@ public class Camera1Engine extends CameraEngine implements
@Override
protected Task<Void> onStopEngine() {
LOG.i("onStopEngine:", "About to clean up.");
mOrchestrator.remove(JOB_FOCUS_RESET);
mOrchestrator.remove(JOB_FOCUS_END);
getOrchestrator().remove(JOB_FOCUS_RESET);
getOrchestrator().remove(JOB_FOCUS_END);
if (mCamera != null) {
try {
LOG.i("onStopEngine:", "Clean up.", "Releasing camera.");
@@ -473,7 +483,7 @@ public class Camera1Engine extends CameraEngine implements
public void setFlash(@NonNull Flash flash) {
final Flash old = mFlash;
mFlash = flash;
mFlashTask = mOrchestrator.scheduleStateful("flash (" + flash + ")",
mFlashTask = getOrchestrator().scheduleStateful("flash (" + flash + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -497,7 +507,7 @@ public class Camera1Engine extends CameraEngine implements
public void setLocation(@Nullable Location location) {
final Location oldLocation = mLocation;
mLocation = location;
mLocationTask = mOrchestrator.scheduleStateful("location",
mLocationTask = getOrchestrator().scheduleStateful("location",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -524,7 +534,7 @@ public class Camera1Engine extends CameraEngine implements
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
mWhiteBalanceTask = mOrchestrator.scheduleStateful(
mWhiteBalanceTask = getOrchestrator().scheduleStateful(
"white balance (" + whiteBalance + ")",
CameraState.ENGINE,
new Runnable() {
@@ -554,7 +564,7 @@ public class Camera1Engine extends CameraEngine implements
public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr;
mHdr = hdr;
mHdrTask = mOrchestrator.scheduleStateful("hdr (" + hdr + ")",
mHdrTask = getOrchestrator().scheduleStateful("hdr (" + hdr + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -578,7 +588,7 @@ public class Camera1Engine extends CameraEngine implements
public void setZoom(final float zoom, @Nullable final PointF[] points, final boolean notify) {
final float old = mZoomValue;
mZoomValue = zoom;
mZoomTask = mOrchestrator.scheduleStateful("zoom (" + zoom + ")",
mZoomTask = getOrchestrator().scheduleStateful("zoom (" + zoom + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -587,7 +597,7 @@ public class Camera1Engine extends CameraEngine implements
if (applyZoom(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnZoomChanged(mZoomValue, points);
getCallback().dispatchOnZoomChanged(mZoomValue, points);
}
}
}
@@ -610,7 +620,7 @@ public class Camera1Engine extends CameraEngine implements
@Nullable final PointF[] points, final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mExposureCorrectionTask = mOrchestrator.scheduleStateful(
mExposureCorrectionTask = getOrchestrator().scheduleStateful(
"exposure correction (" + EVvalue + ")",
CameraState.ENGINE,
new Runnable() {
@@ -620,7 +630,7 @@ public class Camera1Engine extends CameraEngine implements
if (applyExposureCorrection(params, old)) {
mCamera.setParameters(params);
if (notify) {
mCallback.dispatchOnExposureCorrectionChanged(mExposureCorrectionValue,
getCallback().dispatchOnExposureCorrectionChanged(mExposureCorrectionValue,
bounds, points);
}
}
@@ -651,7 +661,7 @@ public class Camera1Engine extends CameraEngine implements
public void setPlaySounds(boolean playSounds) {
final boolean old = mPlaySounds;
mPlaySounds = playSounds;
mPlaySoundsTask = mOrchestrator.scheduleStateful(
mPlaySoundsTask = getOrchestrator().scheduleStateful(
"play sounds (" + playSounds + ")",
CameraState.ENGINE,
new Runnable() {
@@ -688,7 +698,7 @@ public class Camera1Engine extends CameraEngine implements
public void setPreviewFrameRate(float previewFrameRate) {
final float old = previewFrameRate;
mPreviewFrameRate = previewFrameRate;
mPreviewFrameRateTask = mOrchestrator.scheduleStateful(
mPreviewFrameRateTask = getOrchestrator().scheduleStateful(
"preview fps (" + previewFrameRate + ")",
CameraState.ENGINE,
new Runnable() {
@@ -748,7 +758,25 @@ public class Camera1Engine extends CameraEngine implements
@NonNull
@Override
protected FrameManager instantiateFrameManager() {
return new FrameManager(2, this);
return new ByteBufferFrameManager(2, this);
}
@NonNull
@Override
public ByteBufferFrameManager getFrameManager() {
return (ByteBufferFrameManager) super.getFrameManager();
}
@Override
public void setHasFrameProcessors(boolean hasFrameProcessors) {
// we don't care, FP is always on
mHasFrameProcessors = hasFrameProcessors;
}
@Override
public void setFrameProcessingFormat(int format) {
// Ignore input: we only support NV21.
mFrameProcessingFormat = ImageFormat.NV21;
}
@Override
@@ -768,7 +796,7 @@ public class Camera1Engine extends CameraEngine implements
Frame frame = getFrameManager().getFrame(data,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT, Axis.RELATIVE_TO_SENSOR));
mCallback.dispatchFrame(frame);
getCallback().dispatchFrame(frame);
}
//endregion
@@ -786,7 +814,7 @@ public class Camera1Engine extends CameraEngine implements
}
final int viewWidthF = viewWidth;
final int viewHeightF = viewHeight;
mOrchestrator.scheduleStateful("auto focus", CameraState.ENGINE, new Runnable() {
getOrchestrator().scheduleStateful("auto focus", CameraState.ENGINE, new Runnable() {
@Override
public void run() {
if (!mCameraOptions.isAutoFocusSupported()) return;
@@ -805,16 +833,16 @@ public class Camera1Engine extends CameraEngine implements
if (maxAE > 0) params.setMeteringAreas(maxAE > 1 ? meteringAreas2 : meteringAreas1);
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
mCamera.setParameters(params);
mCallback.dispatchOnFocusStart(gesture, p);
getCallback().dispatchOnFocusStart(gesture, p);
// The auto focus callback is not guaranteed to be called, but we really want it
// to be. So we remove the old runnable if still present and post a new one.
mOrchestrator.remove(JOB_FOCUS_END);
mOrchestrator.scheduleDelayed(JOB_FOCUS_END, AUTOFOCUS_END_DELAY_MILLIS,
getOrchestrator().remove(JOB_FOCUS_END);
getOrchestrator().scheduleDelayed(JOB_FOCUS_END, AUTOFOCUS_END_DELAY_MILLIS,
new Runnable() {
@Override
public void run() {
mCallback.dispatchOnFocusEnd(gesture, false, p);
getCallback().dispatchOnFocusEnd(gesture, false, p);
}
});
@@ -824,11 +852,11 @@ public class Camera1Engine extends CameraEngine implements
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
mOrchestrator.remove(JOB_FOCUS_END);
mOrchestrator.remove(JOB_FOCUS_RESET);
mCallback.dispatchOnFocusEnd(gesture, success, p);
getOrchestrator().remove(JOB_FOCUS_END);
getOrchestrator().remove(JOB_FOCUS_RESET);
getCallback().dispatchOnFocusEnd(gesture, success, p);
if (shouldResetAutoFocus()) {
mOrchestrator.scheduleStatefulDelayed(
getOrchestrator().scheduleStatefulDelayed(
JOB_FOCUS_RESET,
CameraState.ENGINE,
getAutoFocusResetDelay(),

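Camera1Engine above pins the frame format to NV21 and takes the frame size straight from the preview stream. Given that, a processor can decode Camera1 frames with the stock YuvImage API, much like the (now removed) ImageHelperTest did; a minimal sketch, with the byte[] cast taken from the test code above and the class name and JPEG quality arbitrary:

    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;
    import androidx.annotation.NonNull;
    import com.otaliastudios.cameraview.frame.Frame;
    import com.otaliastudios.cameraview.frame.FrameProcessor;
    import com.otaliastudios.cameraview.size.Size;
    import java.io.ByteArrayOutputStream;

    class Nv21FrameProcessor implements FrameProcessor {

        @Override
        public void process(@NonNull Frame frame) {
            // On Camera1 the data is always an NV21 byte array at preview size.
            byte[] nv21 = (byte[]) frame.getData();
            Size size = frame.getSize();
            YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21,
                    size.getWidth(), size.getHeight(), null);
            ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
            yuv.compressToJpeg(new Rect(0, 0, size.getWidth(), size.getHeight()),
                    90, jpeg);
            // Hand jpeg.toByteArray() to the analysis code of your choice.
        }
    }

Camera2 frames carry an android.media.Image in the new YUV_420_888 format instead, as the Camera2Engine diff below shows, so a processor there reads the Image planes rather than a byte array.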
@@ -34,7 +34,6 @@ import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.TaskCompletionSource;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
@@ -59,16 +58,15 @@ import com.otaliastudios.cameraview.engine.options.Camera2Options;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.frame.ImageFrameManager;
import com.otaliastudios.cameraview.gesture.Gesture;
import com.otaliastudios.cameraview.internal.utils.CropHelper;
import com.otaliastudios.cameraview.internal.utils.ImageHelper;
import com.otaliastudios.cameraview.internal.utils.WorkerHandler;
import com.otaliastudios.cameraview.picture.Full2PictureRecorder;
import com.otaliastudios.cameraview.picture.Snapshot2PictureRecorder;
import com.otaliastudios.cameraview.preview.GlCameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelectors;
import com.otaliastudios.cameraview.video.Full2VideoRecorder;
import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
@@ -79,10 +77,12 @@ import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAvailableListener,
public class Camera2Engine extends CameraBaseEngine implements
ImageReader.OnImageAvailableListener,
ActionHolder {
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.NV21;
private static final int FRAME_PROCESSING_INPUT_FORMAT = ImageFormat.YUV_420_888;
private static final int FRAME_PROCESSING_POOL_SIZE = 2;
private static final int FRAME_PROCESSING_FORMAT = ImageFormat.YUV_420_888;
@VisibleForTesting static final long METER_TIMEOUT = 2500;
private final CameraManager mManager;
@@ -95,10 +95,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
private final Camera2Mapper mMapper = Camera2Mapper.get();
// Frame processing
private Size mFrameProcessingSize;
private ImageReader mFrameProcessingReader; // need this or the reader surface is collected
private final WorkerHandler mFrameConversionHandler;
private final Object mFrameProcessingImageLock = new Object();
private Surface mFrameProcessingSurface;
// Preview
@@ -119,8 +116,8 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public Camera2Engine(Callback callback) {
super(callback);
mManager = (CameraManager) mCallback.getContext().getSystemService(Context.CAMERA_SERVICE);
mFrameConversionHandler = WorkerHandler.get("CameraFrameConversion");
mManager = (CameraManager) getCallback().getContext()
.getSystemService(Context.CAMERA_SERVICE);
new LogAction().start(this);
}
@@ -326,6 +323,29 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
}
}
@EngineThread
@NonNull
@Override
protected List<Size> getFrameProcessingAvailableSizes() {
try {
CameraCharacteristics characteristics = mManager.getCameraCharacteristics(mCameraId);
StreamConfigurationMap streamMap =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
android.util.Size[] sizes = streamMap.getOutputSizes(mFrameProcessingFormat);
List<Size> candidates = new ArrayList<>(sizes.length);
for (android.util.Size size : sizes) {
Size add = new Size(size.getWidth(), size.getHeight());
if (!candidates.contains(add)) candidates.add(add);
}
return candidates;
} catch (CameraAccessException e) {
throw createCameraException(e);
}
}
@EngineThread
@Override
protected void onPreviewStreamSizeChanged() {
@@ -520,28 +540,13 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// 4. FRAME PROCESSING
if (hasFrameProcessors()) {
// Choose the size.
StreamConfigurationMap streamMap = mCameraCharacteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (streamMap == null) {
throw new RuntimeException("StreamConfigurationMap is null. Should not happen.");
}
android.util.Size[] aSizes = streamMap.getOutputSizes(FRAME_PROCESSING_INPUT_FORMAT);
List<Size> sizes = new ArrayList<>();
for (android.util.Size aSize : aSizes) {
sizes.add(new Size(aSize.getWidth(), aSize.getHeight()));
}
mFrameProcessingSize = SizeSelectors.and(
SizeSelectors.maxWidth(Math.min(640, mPreviewStreamSize.getWidth())),
SizeSelectors.maxHeight(Math.min(640, mPreviewStreamSize.getHeight())),
SizeSelectors.biggest()).select(sizes).get(0);
mFrameProcessingSize = computeFrameProcessingSize();
mFrameProcessingReader = ImageReader.newInstance(
mFrameProcessingSize.getWidth(),
mFrameProcessingSize.getHeight(),
FRAME_PROCESSING_INPUT_FORMAT,
2);
mFrameProcessingReader.setOnImageAvailableListener(this,
mFrameConversionHandler.getHandler());
mFrameProcessingFormat,
getFrameProcessingPoolSize());
mFrameProcessingReader.setOnImageAvailableListener(this, null);
mFrameProcessingSurface = mFrameProcessingReader.getSurface();
outputSurfaces.add(mFrameProcessingSurface);
} else {
@@ -584,7 +589,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
@Override
protected Task<Void> onStartPreview() {
LOG.i("onStartPreview:", "Dispatching onCameraPreviewStreamSizeChanged.");
mCallback.onCameraPreviewStreamSizeChanged();
getCallback().onCameraPreviewStreamSizeChanged();
Size previewSizeForView = getPreviewStreamSize(Reference.VIEW);
if (previewSizeForView == null) {
@@ -593,7 +598,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mPreview.setStreamSize(previewSizeForView.getWidth(), previewSizeForView.getHeight());
mPreview.setDrawRotation(getAngles().offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
if (hasFrameProcessors()) {
getFrameManager().setUp(FRAME_PROCESSING_FORMAT, mFrameProcessingSize);
getFrameManager().setUp(mFrameProcessingFormat, mFrameProcessingSize);
}
LOG.i("onStartPreview:", "Starting preview.");
@@ -608,7 +613,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// the recorder sets. Also we are posting so that doTakeVideo sees a started preview.
final VideoResult.Stub stub = mFullVideoPendingStub;
mFullVideoPendingStub = null;
mOrchestrator.scheduleStateful("do take video", CameraState.PREVIEW,
getOrchestrator().scheduleStateful("do take video", CameraState.PREVIEW,
new Runnable() {
@Override
public void run() {
@@ -675,12 +680,9 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
mCaptureSize = null;
mFrameProcessingSize = null;
if (mFrameProcessingReader != null) {
synchronized (mFrameProcessingImageLock) {
// This call synchronously releases all Images and their underlying properties.
// This can cause a segmentation fault while converting the Image to NV21.
// So we use this lock for the two operations.
mFrameProcessingReader.close();
}
// WARNING: This call synchronously releases all Images and their underlying
// properties. This can cause issues if the Image is being used.
mFrameProcessingReader.close();
mFrameProcessingReader = null;
}
if (mPictureReader != null) {
@@ -829,7 +831,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
boolean unlock = (fullPicture && getPictureMetering())
|| (!fullPicture && getPictureSnapshotMetering());
if (unlock) {
mOrchestrator.scheduleStateful("reset metering after picture",
getOrchestrator().scheduleStateful("reset metering after picture",
CameraState.PREVIEW,
new Runnable() {
@Override
@@ -945,7 +947,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// SnapshotRecorder will invoke this on its own thread, so let's post in our own thread
// and check camera state before trying to restore the preview. Engine might have been
// torn down in the engine thread while this was still being called.
mOrchestrator.scheduleStateful("restore preview template", CameraState.BIND,
getOrchestrator().scheduleStateful("restore preview template", CameraState.BIND,
new Runnable() {
@Override
public void run() {
@@ -1078,7 +1080,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setFlash(@NonNull final Flash flash) {
final Flash old = mFlash;
mFlash = flash;
mFlashTask = mOrchestrator.scheduleStateful("flash (" + flash + ")",
mFlashTask = getOrchestrator().scheduleStateful("flash (" + flash + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -1153,7 +1155,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setLocation(@Nullable Location location) {
final Location old = mLocation;
mLocation = location;
mLocationTask = mOrchestrator.scheduleStateful("location",
mLocationTask = getOrchestrator().scheduleStateful("location",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -1178,7 +1180,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setWhiteBalance(@NonNull WhiteBalance whiteBalance) {
final WhiteBalance old = mWhiteBalance;
mWhiteBalance = whiteBalance;
mWhiteBalanceTask = mOrchestrator.scheduleStateful(
mWhiteBalanceTask = getOrchestrator().scheduleStateful(
"white balance (" + whiteBalance + ")",
CameraState.ENGINE,
new Runnable() {
@@ -1207,7 +1209,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setHdr(@NonNull Hdr hdr) {
final Hdr old = mHdr;
mHdr = hdr;
mHdrTask = mOrchestrator.scheduleStateful("hdr (" + hdr + ")",
mHdrTask = getOrchestrator().scheduleStateful("hdr (" + hdr + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -1234,7 +1236,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setZoom(final float zoom, final @Nullable PointF[] points, final boolean notify) {
final float old = mZoomValue;
mZoomValue = zoom;
mZoomTask = mOrchestrator.scheduleStateful(
mZoomTask = getOrchestrator().scheduleStateful(
"zoom (" + zoom + ")",
CameraState.ENGINE,
new Runnable() {
@@ -1243,7 +1245,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (applyZoom(mRepeatingRequestBuilder, old)) {
applyRepeatingRequestBuilder();
if (notify) {
mCallback.dispatchOnZoomChanged(zoom, points);
getCallback().dispatchOnZoomChanged(zoom, points);
}
}
}
@@ -1290,7 +1292,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
final boolean notify) {
final float old = mExposureCorrectionValue;
mExposureCorrectionValue = EVvalue;
mExposureCorrectionTask = mOrchestrator.scheduleStateful(
mExposureCorrectionTask = getOrchestrator().scheduleStateful(
"exposure correction (" + EVvalue + ")",
CameraState.ENGINE,
new Runnable() {
@@ -1299,7 +1301,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (applyExposureCorrection(mRepeatingRequestBuilder, old)) {
applyRepeatingRequestBuilder();
if (notify) {
mCallback.dispatchOnExposureCorrectionChanged(EVvalue, bounds, points);
getCallback().dispatchOnExposureCorrectionChanged(EVvalue, bounds, points);
}
}
}
@@ -1332,7 +1334,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setPreviewFrameRate(float previewFrameRate) {
final float oldPreviewFrameRate = mPreviewFrameRate;
mPreviewFrameRate = previewFrameRate;
mPreviewFrameRateTask = mOrchestrator.scheduleStateful(
mPreviewFrameRateTask = getOrchestrator().scheduleStateful(
"preview fps (" + previewFrameRate + ")",
CameraState.ENGINE,
new Runnable() {
@@ -1382,7 +1384,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setPictureFormat(final @NonNull PictureFormat pictureFormat) {
if (pictureFormat != mPictureFormat) {
mPictureFormat = pictureFormat;
mOrchestrator.scheduleStateful("picture format (" + pictureFormat + ")",
getOrchestrator().scheduleStateful("picture format (" + pictureFormat + ")",
CameraState.ENGINE,
new Runnable() {
@Override
@@ -1397,50 +1399,36 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
//region Frame Processing
protected int getFrameProcessingPoolSize() {
return FRAME_PROCESSING_POOL_SIZE;
}
@NonNull
@Override
protected FrameManager instantiateFrameManager() {
return new FrameManager(2, null);
return new ImageFrameManager(getFrameProcessingPoolSize());
}
@Override
public void onImageAvailable(ImageReader reader) {
byte[] data = getFrameManager().getBuffer();
if (data == null) {
LOG.w("onImageAvailable", "no byte buffer!");
return;
}
LOG.v("onImageAvailable", "trying to acquire Image.");
Image image = null;
try {
image = reader.acquireLatestImage();
} catch (Exception ignore) { }
if (image == null) {
LOG.w("onImageAvailable", "we have a byte buffer but no Image!");
getFrameManager().onBufferUnused(data);
return;
}
LOG.v("onImageAvailable", "we have both a byte buffer and an Image.");
try {
synchronized (mFrameProcessingImageLock) {
ImageHelper.convertToNV21(image, data);
}
} catch (Exception e) {
LOG.w("onImageAvailable", "error while converting.");
getFrameManager().onBufferUnused(data);
image.close();
return;
}
image.close();
if (getState() == CameraState.PREVIEW && !isChangingState()) {
LOG.w("onImageAvailable", "failed to acquire Image!");
} else if (getState() == CameraState.PREVIEW && !isChangingState()) {
// After preview, the frame manager is correctly set up
Frame frame = getFrameManager().getFrame(data,
//noinspection unchecked
Frame frame = getFrameManager().getFrame(image,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
getAngles().offset(Reference.SENSOR,
Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR));
mCallback.dispatchFrame(frame);
getCallback().dispatchFrame(frame);
} else {
getFrameManager().onBufferUnused(data);
image.close();
}
}
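Downstream, these Image-based frames must be consumed synchronously, because releasing the Frame closes the underlying Image (see ImageFrameManager below). A minimal sketch of a consumer, assuming a hypothetical LuminanceProcessor class; FrameProcessor, Frame, getDataClass() and getData() are the library API:

```java
import android.media.Image;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.frame.Frame;
import com.otaliastudios.cameraview.frame.FrameProcessor;
import java.nio.ByteBuffer;

// Hypothetical example class, not part of this commit.
public class LuminanceProcessor implements FrameProcessor {
    @Override
    public void process(@NonNull Frame frame) {
        if (frame.getDataClass() == Image.class) {
            Image image = frame.getData();
            // Plane 0 of a YUV_420_888 Image is the Y (luminance) plane.
            ByteBuffer y = image.getPlanes()[0].getBuffer();
            long sum = 0;
            while (y.hasRemaining()) sum += (y.get() & 0xFF);
            // ... use sum / (width * height) as the average luminance ...
        }
        // Do not keep a reference to the Image past this method:
        // the engine closes it once the Frame goes back to the pool.
    }
}
```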
@ -1448,7 +1436,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
public void setHasFrameProcessors(final boolean hasFrameProcessors) {
// Frame processing is set up partially when binding and partially when starting
// the preview. If the value is changed between the two, the preview step can crash.
mOrchestrator.schedule("has frame processors (" + hasFrameProcessors + ")",
getOrchestrator().schedule("has frame processors (" + hasFrameProcessors + ")",
true, new Runnable() {
@Override
public void run() {
@ -1456,13 +1444,36 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// Extremely rare case in which this was called in between startBind and
// startPreview. This can cause issues. Try later.
setHasFrameProcessors(hasFrameProcessors);
} else if (getState().isAtLeast(CameraState.BIND)) {
// Apply and restart.
Camera2Engine.super.setHasFrameProcessors(hasFrameProcessors);
return;
}
// Apply and restart.
mHasFrameProcessors = hasFrameProcessors;
if (getState().isAtLeast(CameraState.BIND)) {
restartBind();
}
}
});
}
@Override
public void setFrameProcessingFormat(final int format) {
// This is called during initialization. Set our default first.
if (mFrameProcessingFormat == 0) mFrameProcessingFormat = FRAME_PROCESSING_FORMAT;
// Frame processing format is used both when binding and when starting the preview.
// If the value is changed between the two, the preview step can crash.
getOrchestrator().schedule("frame processing format (" + format + ")",
true, new Runnable() {
@Override
public void run() {
if (getState().isAtLeast(CameraState.BIND) && isChangingState()) {
// Extremely rare case in which this was called in between startBind and
// startPreview. This can cause issues. Try later.
setFrameProcessingFormat(format);
return;
}
mFrameProcessingFormat = format > 0 ? format : FRAME_PROCESSING_FORMAT;
if (getState().isAtLeast(CameraState.BIND)) {
restartBind();
} else {
// Just apply.
Camera2Engine.super.setHasFrameProcessors(hasFrameProcessors);
}
}
});
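On the client side, a format can be chosen among the values exposed by CameraOptions once the camera opens. A hedged sketch (FormatPicker is hypothetical; the listener and the two format methods are the public API touched by this PR):

```java
import android.graphics.ImageFormat;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.CameraListener;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.CameraView;

// Hypothetical helper, not part of this commit.
final class FormatPicker {
    static void attach(final CameraView cameraView) {
        cameraView.addCameraListener(new CameraListener() {
            @Override
            public void onCameraOpened(@NonNull CameraOptions options) {
                // Prefer YUV_420_888 when supported, else keep the engine default.
                if (options.getSupportedFrameProcessingFormats()
                        .contains(ImageFormat.YUV_420_888)) {
                    cameraView.setFrameProcessingFormat(ImageFormat.YUV_420_888);
                }
            }
        });
    }
}
```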
@ -1477,7 +1488,7 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
// This will only work when we have a preview, since it launches the preview
// in the end. Even without this it would need the bind state at least,
// since we need the preview size.
mOrchestrator.scheduleStateful("autofocus (" + gesture + ")",
getOrchestrator().scheduleStateful("autofocus (" + gesture + ")",
CameraState.PREVIEW,
new Runnable() {
@Override
@ -1487,17 +1498,17 @@ public class Camera2Engine extends CameraEngine implements ImageReader.OnImageAv
if (!mCameraOptions.isAutoFocusSupported()) return;
// Create the meter and start.
mCallback.dispatchOnFocusStart(gesture, point);
getCallback().dispatchOnFocusStart(gesture, point);
final MeterAction action = createMeterAction(point);
Action wrapper = Actions.timeout(METER_TIMEOUT, action);
wrapper.start(Camera2Engine.this);
wrapper.addCallback(new CompletionCallback() {
@Override
protected void onActionCompleted(@NonNull Action a) {
mCallback.dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
mOrchestrator.remove("reset metering");
getCallback().dispatchOnFocusEnd(gesture, action.isSuccessful(), point);
getOrchestrator().remove("reset metering");
if (shouldResetAutoFocus()) {
mOrchestrator.scheduleStatefulDelayed("reset metering",
getOrchestrator().scheduleStatefulDelayed("reset metering",
CameraState.PREVIEW,
getAutoFocusResetDelay(),
new Runnable() {

@ -0,0 +1,927 @@
package com.otaliastudios.cameraview.engine;
import android.location.Location;
import androidx.annotation.CallSuper;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.gms.tasks.Task;
import com.google.android.gms.tasks.Tasks;
import com.otaliastudios.cameraview.CameraException;
import com.otaliastudios.cameraview.CameraOptions;
import com.otaliastudios.cameraview.PictureResult;
import com.otaliastudios.cameraview.VideoResult;
import com.otaliastudios.cameraview.controls.Audio;
import com.otaliastudios.cameraview.controls.Facing;
import com.otaliastudios.cameraview.controls.Flash;
import com.otaliastudios.cameraview.controls.Hdr;
import com.otaliastudios.cameraview.controls.Mode;
import com.otaliastudios.cameraview.controls.PictureFormat;
import com.otaliastudios.cameraview.controls.VideoCodec;
import com.otaliastudios.cameraview.controls.WhiteBalance;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.engine.orchestrator.CameraState;
import com.otaliastudios.cameraview.frame.FrameManager;
import com.otaliastudios.cameraview.overlay.Overlay;
import com.otaliastudios.cameraview.picture.PictureRecorder;
import com.otaliastudios.cameraview.preview.CameraPreview;
import com.otaliastudios.cameraview.size.AspectRatio;
import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.size.SizeSelector;
import com.otaliastudios.cameraview.size.SizeSelectors;
import com.otaliastudios.cameraview.video.VideoRecorder;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Abstract implementation of {@link CameraEngine} that helps in common tasks.
*/
public abstract class CameraBaseEngine extends CameraEngine {
@SuppressWarnings("WeakerAccess") protected CameraPreview mPreview;
@SuppressWarnings("WeakerAccess") protected CameraOptions mCameraOptions;
@SuppressWarnings("WeakerAccess") protected PictureRecorder mPictureRecorder;
@SuppressWarnings("WeakerAccess") protected VideoRecorder mVideoRecorder;
@SuppressWarnings("WeakerAccess") protected Size mCaptureSize;
@SuppressWarnings("WeakerAccess") protected Size mPreviewStreamSize;
@SuppressWarnings("WeakerAccess") protected Size mFrameProcessingSize;
@SuppressWarnings("WeakerAccess") protected int mFrameProcessingFormat;
@SuppressWarnings("WeakerAccess") protected boolean mHasFrameProcessors;
@SuppressWarnings("WeakerAccess") protected Flash mFlash;
@SuppressWarnings("WeakerAccess") protected WhiteBalance mWhiteBalance;
@SuppressWarnings("WeakerAccess") protected VideoCodec mVideoCodec;
@SuppressWarnings("WeakerAccess") protected Hdr mHdr;
@SuppressWarnings("WeakerAccess") protected PictureFormat mPictureFormat;
@SuppressWarnings("WeakerAccess") protected Location mLocation;
@SuppressWarnings("WeakerAccess") protected float mZoomValue;
@SuppressWarnings("WeakerAccess") protected float mExposureCorrectionValue;
@SuppressWarnings("WeakerAccess") protected boolean mPlaySounds;
@SuppressWarnings("WeakerAccess") protected boolean mPictureMetering;
@SuppressWarnings("WeakerAccess") protected boolean mPictureSnapshotMetering;
@SuppressWarnings("WeakerAccess") protected float mPreviewFrameRate;
private final FrameManager mFrameManager;
private final Angles mAngles;
@Nullable private SizeSelector mPreviewStreamSizeSelector;
private SizeSelector mPictureSizeSelector;
private SizeSelector mVideoSizeSelector;
private Facing mFacing;
private Mode mMode;
private Audio mAudio;
private long mVideoMaxSize;
private int mVideoMaxDuration;
private int mVideoBitRate;
private int mAudioBitRate;
private long mAutoFocusResetDelayMillis;
private int mSnapshotMaxWidth; // in REF_VIEW like SizeSelectors
private int mSnapshotMaxHeight; // in REF_VIEW like SizeSelectors
private int mFrameProcessingMaxWidth; // in REF_VIEW like SizeSelectors
private int mFrameProcessingMaxHeight; // in REF_VIEW like SizeSelectors
private Overlay mOverlay;
// Ops used for testing.
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mZoomTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mExposureCorrectionTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mFlashTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mWhiteBalanceTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mHdrTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mLocationTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPlaySoundsTask
= Tasks.forResult(null);
@VisibleForTesting(otherwise = VisibleForTesting.PROTECTED) Task<Void> mPreviewFrameRateTask
= Tasks.forResult(null);
@SuppressWarnings("WeakerAccess")
protected CameraBaseEngine(@NonNull Callback callback) {
super(callback);
mFrameManager = instantiateFrameManager();
mAngles = new Angles();
}
/**
* Called at construction time to get a frame manager that can later be
* accessed through {@link #getFrameManager()}.
* @return a frame manager
*/
@NonNull
protected abstract FrameManager instantiateFrameManager();
@NonNull
@Override
public final Angles getAngles() {
return mAngles;
}
@NonNull
@Override
public FrameManager getFrameManager() {
return mFrameManager;
}
@Nullable
@Override
public final CameraOptions getCameraOptions() {
return mCameraOptions;
}
@Override
public final void setPreview(@NonNull CameraPreview cameraPreview) {
if (mPreview != null) mPreview.setSurfaceCallback(null);
mPreview = cameraPreview;
mPreview.setSurfaceCallback(this);
}
@NonNull
@Override
public final CameraPreview getPreview() {
return mPreview;
}
@Override
public final void setOverlay(@Nullable Overlay overlay) {
mOverlay = overlay;
}
@Nullable
@Override
public final Overlay getOverlay() {
return mOverlay;
}
@Override
public final void setPreviewStreamSizeSelector(@Nullable SizeSelector selector) {
mPreviewStreamSizeSelector = selector;
}
@Nullable
@Override
public final SizeSelector getPreviewStreamSizeSelector() {
return mPreviewStreamSizeSelector;
}
@Override
public final void setPictureSizeSelector(@NonNull SizeSelector selector) {
mPictureSizeSelector = selector;
}
@NonNull
@Override
public final SizeSelector getPictureSizeSelector() {
return mPictureSizeSelector;
}
@Override
public final void setVideoSizeSelector(@NonNull SizeSelector selector) {
mVideoSizeSelector = selector;
}
@NonNull
@Override
public final SizeSelector getVideoSizeSelector() {
return mVideoSizeSelector;
}
@Override
public final void setVideoMaxSize(long videoMaxSizeBytes) {
mVideoMaxSize = videoMaxSizeBytes;
}
@Override
public final long getVideoMaxSize() {
return mVideoMaxSize;
}
@Override
public final void setVideoMaxDuration(int videoMaxDurationMillis) {
mVideoMaxDuration = videoMaxDurationMillis;
}
@Override
public final int getVideoMaxDuration() {
return mVideoMaxDuration;
}
@Override
public final void setVideoCodec(@NonNull VideoCodec codec) {
mVideoCodec = codec;
}
@NonNull
@Override
public final VideoCodec getVideoCodec() {
return mVideoCodec;
}
@Override
public final void setVideoBitRate(int videoBitRate) {
mVideoBitRate = videoBitRate;
}
@Override
public final int getVideoBitRate() {
return mVideoBitRate;
}
@Override
public final void setAudioBitRate(int audioBitRate) {
mAudioBitRate = audioBitRate;
}
@Override
public final int getAudioBitRate() {
return mAudioBitRate;
}
@Override
public final void setSnapshotMaxWidth(int maxWidth) {
mSnapshotMaxWidth = maxWidth;
}
@Override
public final int getSnapshotMaxWidth() {
return mSnapshotMaxWidth;
}
@Override
public final void setSnapshotMaxHeight(int maxHeight) {
mSnapshotMaxHeight = maxHeight;
}
@Override
public final int getSnapshotMaxHeight() {
return mSnapshotMaxHeight;
}
@Override
public final void setFrameProcessingMaxWidth(int maxWidth) {
mFrameProcessingMaxWidth = maxWidth;
}
@Override
public final int getFrameProcessingMaxWidth() {
return mFrameProcessingMaxWidth;
}
@Override
public final void setFrameProcessingMaxHeight(int maxHeight) {
mFrameProcessingMaxHeight = maxHeight;
}
@Override
public final int getFrameProcessingMaxHeight() {
return mFrameProcessingMaxHeight;
}
@Override
public final int getFrameProcessingFormat() {
return mFrameProcessingFormat;
}
@Override
public final void setAutoFocusResetDelay(long delayMillis) {
mAutoFocusResetDelayMillis = delayMillis;
}
@Override
public final long getAutoFocusResetDelay() {
return mAutoFocusResetDelayMillis;
}
/**
* Helper function for subclasses.
* @return true if AF should be reset
*/
@SuppressWarnings("WeakerAccess")
protected final boolean shouldResetAutoFocus() {
return mAutoFocusResetDelayMillis > 0 && mAutoFocusResetDelayMillis != Long.MAX_VALUE;
}
/**
* Sets a new facing value. This will restart the engine session (if there's any)
* so that we can open the new facing camera.
* @param facing facing
*/
@Override
public final void setFacing(final @NonNull Facing facing) {
final Facing old = mFacing;
if (facing != old) {
mFacing = facing;
getOrchestrator().scheduleStateful("facing", CameraState.ENGINE,
new Runnable() {
@Override
public void run() {
if (collectCameraInfo(facing)) {
restart();
} else {
mFacing = old;
}
}
});
}
}
@NonNull
@Override
public final Facing getFacing() {
return mFacing;
}
/**
* Sets a new audio value that will be used for video recordings.
* @param audio desired audio
*/
@Override
public final void setAudio(@NonNull Audio audio) {
if (mAudio != audio) {
if (isTakingVideo()) {
LOG.w("Audio setting was changed while recording. " +
"Changes will take place starting from next video");
}
mAudio = audio;
}
}
@NonNull
@Override
public final Audio getAudio() {
return mAudio;
}
/**
* Sets the desired mode (either picture or video).
* @param mode desired mode.
*/
@Override
public final void setMode(@NonNull Mode mode) {
if (mode != mMode) {
mMode = mode;
getOrchestrator().scheduleStateful("mode", CameraState.ENGINE,
new Runnable() {
@Override
public void run() {
restart();
}
});
}
}
@NonNull
@Override
public final Mode getMode() {
return mMode;
}
@Override
public final float getZoomValue() {
return mZoomValue;
}
@Override
public final float getExposureCorrectionValue() {
return mExposureCorrectionValue;
}
@NonNull
@Override
public final Flash getFlash() {
return mFlash;
}
@NonNull
@Override
public final WhiteBalance getWhiteBalance() {
return mWhiteBalance;
}
@NonNull
@Override
public final Hdr getHdr() {
return mHdr;
}
@Nullable
@Override
public final Location getLocation() {
return mLocation;
}
@NonNull
@Override
public final PictureFormat getPictureFormat() {
return mPictureFormat;
}
@Override
public final float getPreviewFrameRate() {
return mPreviewFrameRate;
}
@Override
public final boolean hasFrameProcessors() {
return mHasFrameProcessors;
}
@Override
public final void setPictureMetering(boolean enable) {
mPictureMetering = enable;
}
@Override
public final boolean getPictureMetering() {
return mPictureMetering;
}
@Override
public final void setPictureSnapshotMetering(boolean enable) {
mPictureSnapshotMetering = enable;
}
@Override
public final boolean getPictureSnapshotMetering() {
return mPictureSnapshotMetering;
}
//region Picture and video control
@Override
public final boolean isTakingPicture() {
return mPictureRecorder != null;
}
@Override
public /* final */ void takePicture(final @NonNull PictureResult.Stub stub) {
// Save boolean before scheduling! See how Camera2Engine calls this with a temp value.
final boolean metering = mPictureMetering;
getOrchestrator().scheduleStateful("take picture", CameraState.BIND,
new Runnable() {
@Override
public void run() {
LOG.i("takePicture:", "running. isTakingPicture:", isTakingPicture());
if (isTakingPicture()) return;
if (mMode == Mode.VIDEO) {
throw new IllegalStateException("Can't take hq pictures while in VIDEO mode");
}
stub.isSnapshot = false;
stub.location = mLocation;
stub.facing = mFacing;
stub.format = mPictureFormat;
onTakePicture(stub, metering);
}
});
}
/**
* The snapshot size is the {@link #getPreviewStreamSize(Reference)}, but cropped based on the
* view/surface aspect ratio.
* @param stub a picture stub
*/
@Override
public /* final */ void takePictureSnapshot(final @NonNull PictureResult.Stub stub) {
// Save boolean before scheduling! See how Camera2Engine calls this with a temp value.
final boolean metering = mPictureSnapshotMetering;
getOrchestrator().scheduleStateful("take picture snapshot", CameraState.BIND,
new Runnable() {
@Override
public void run() {
LOG.i("takePictureSnapshot:", "running. isTakingPicture:", isTakingPicture());
if (isTakingPicture()) return;
stub.location = mLocation;
stub.isSnapshot = true;
stub.facing = mFacing;
stub.format = PictureFormat.JPEG;
// Leave the other parameters to subclasses.
//noinspection ConstantConditions
AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
onTakePictureSnapshot(stub, ratio, metering);
}
});
}
@Override
public void onPictureShutter(boolean didPlaySound) {
getCallback().onShutter(!didPlaySound);
}
@Override
public void onPictureResult(@Nullable PictureResult.Stub result, @Nullable Exception error) {
mPictureRecorder = null;
if (result != null) {
getCallback().dispatchOnPictureTaken(result);
} else {
LOG.e("onPictureResult", "result is null: something went wrong.", error);
getCallback().dispatchError(new CameraException(error,
CameraException.REASON_PICTURE_FAILED));
}
}
@Override
public final boolean isTakingVideo() {
return mVideoRecorder != null && mVideoRecorder.isRecording();
}
@Override
public final void takeVideo(final @NonNull VideoResult.Stub stub, final @NonNull File file) {
getOrchestrator().scheduleStateful("take video", CameraState.BIND, new Runnable() {
@Override
public void run() {
LOG.i("takeVideo:", "running. isTakingVideo:", isTakingVideo());
if (isTakingVideo()) return;
if (mMode == Mode.PICTURE) {
throw new IllegalStateException("Can't record video while in PICTURE mode");
}
stub.file = file;
stub.isSnapshot = false;
stub.videoCodec = mVideoCodec;
stub.location = mLocation;
stub.facing = mFacing;
stub.audio = mAudio;
stub.maxSize = mVideoMaxSize;
stub.maxDuration = mVideoMaxDuration;
stub.videoBitRate = mVideoBitRate;
stub.audioBitRate = mAudioBitRate;
onTakeVideo(stub);
}
});
}
/**
* @param stub a video stub
* @param file the output file
*/
@Override
public final void takeVideoSnapshot(@NonNull final VideoResult.Stub stub,
@NonNull final File file) {
getOrchestrator().scheduleStateful("take video snapshot", CameraState.BIND,
new Runnable() {
@Override
public void run() {
LOG.i("takeVideoSnapshot:", "running. isTakingVideo:", isTakingVideo());
stub.file = file;
stub.isSnapshot = true;
stub.videoCodec = mVideoCodec;
stub.location = mLocation;
stub.facing = mFacing;
stub.videoBitRate = mVideoBitRate;
stub.audioBitRate = mAudioBitRate;
stub.audio = mAudio;
stub.maxSize = mVideoMaxSize;
stub.maxDuration = mVideoMaxDuration;
//noinspection ConstantConditions
AspectRatio ratio = AspectRatio.of(getPreviewSurfaceSize(Reference.OUTPUT));
onTakeVideoSnapshot(stub, ratio);
}
});
}
@Override
public final void stopVideo() {
getOrchestrator().schedule("stop video", true, new Runnable() {
@Override
public void run() {
LOG.i("stopVideo", "running. isTakingVideo?", isTakingVideo());
onStopVideo();
}
});
}
@EngineThread
@SuppressWarnings("WeakerAccess")
protected void onStopVideo() {
if (mVideoRecorder != null) {
mVideoRecorder.stop(false);
// Do not null this, so we respond correctly to isTakingVideo(),
// which checks for recorder presence and recorder.isRecording().
// It will be nulled in onVideoResult.
}
}
@CallSuper
@Override
public void onVideoResult(@Nullable VideoResult.Stub result, @Nullable Exception exception) {
mVideoRecorder = null;
if (result != null) {
getCallback().dispatchOnVideoTaken(result);
} else {
LOG.e("onVideoResult", "result is null: something went wrong.", exception);
getCallback().dispatchError(new CameraException(exception,
CameraException.REASON_VIDEO_FAILED));
}
}
@Override
public void onVideoRecordingStart() {
getCallback().dispatchOnVideoRecordingStart();
}
@Override
public void onVideoRecordingEnd() {
getCallback().dispatchOnVideoRecordingEnd();
}
@EngineThread
protected abstract void onTakePicture(@NonNull PictureResult.Stub stub, boolean doMetering);
@EngineThread
protected abstract void onTakePictureSnapshot(@NonNull PictureResult.Stub stub,
@NonNull AspectRatio outputRatio,
boolean doMetering);
@EngineThread
protected abstract void onTakeVideoSnapshot(@NonNull VideoResult.Stub stub,
@NonNull AspectRatio outputRatio);
@EngineThread
protected abstract void onTakeVideo(@NonNull VideoResult.Stub stub);
//endregion
//region Size / Surface
@Override
public final void onSurfaceChanged() {
LOG.i("onSurfaceChanged:", "Size is", getPreviewSurfaceSize(Reference.VIEW));
getOrchestrator().scheduleStateful("surface changed", CameraState.BIND,
new Runnable() {
@Override
public void run() {
// Compute a new camera preview size and apply.
Size newSize = computePreviewStreamSize();
if (newSize.equals(mPreviewStreamSize)) {
LOG.i("onSurfaceChanged:",
"The computed preview size is identical. No op.");
} else {
LOG.i("onSurfaceChanged:",
"Computed a new preview size. Calling onPreviewStreamSizeChanged().");
mPreviewStreamSize = newSize;
onPreviewStreamSizeChanged();
}
}
});
}
/**
* The preview stream size has changed. At this point, some engine might want to
* simply call {@link #restartPreview()}, others to {@link #restartBind()}.
*
* It basically depends on the step at which the preview stream size is actually used.
*/
@EngineThread
protected abstract void onPreviewStreamSizeChanged();
@Nullable
@Override
public final Size getPictureSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
Size size = mCaptureSize;
if (size == null || mMode == Mode.VIDEO) return null;
return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
}
@Nullable
@Override
public final Size getVideoSize(@SuppressWarnings("SameParameterValue") @NonNull Reference reference) {
Size size = mCaptureSize;
if (size == null || mMode == Mode.PICTURE) return null;
return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
}
@Nullable
@Override
public final Size getPreviewStreamSize(@NonNull Reference reference) {
Size size = mPreviewStreamSize;
if (size == null) return null;
return getAngles().flip(Reference.SENSOR, reference) ? size.flip() : size;
}
@SuppressWarnings("SameParameterValue")
@Nullable
private Size getPreviewSurfaceSize(@NonNull Reference reference) {
CameraPreview preview = mPreview;
if (preview == null) return null;
return getAngles().flip(Reference.VIEW, reference) ? preview.getSurfaceSize().flip()
: preview.getSurfaceSize();
}
/**
* Returns the snapshot size, not yet cropped to the view dimensions; that cropping
* will be done before creating the snapshot. However, cropping is performed at various
* levels, so we don't want to perform the op here.
*
* The base snapshot size is based on PreviewStreamSize (later cropped with view ratio). Why?
* One might be tempted to say that it's the SurfaceSize (which already matches the view ratio).
*
* The camera sensor will capture preview frames with PreviewStreamSize and that's it. Then they
* are hardware-scaled by the preview surface, but this does not affect the snapshot, as the
* snapshot recorder simply creates another surface.
*
* Tests were done to ensure that this is true, by using:
* 1. a small SurfaceSize and biggest() PreviewStreamSize: output is not low quality
* 2. a big SurfaceSize and smallest() PreviewStreamSize: output is low quality
* In both cases, the result size here was set to the bigger of the two.
*
* I could not find the same evidence for videos, but I would say that the same things should
* apply, despite the capturing mechanism being different.
*
* @param reference the reference system
* @return the uncropped snapshot size
*/
@Nullable
@Override
public final Size getUncroppedSnapshotSize(@NonNull Reference reference) {
Size baseSize = getPreviewStreamSize(reference);
if (baseSize == null) return null;
boolean flip = getAngles().flip(reference, Reference.VIEW);
int maxWidth = flip ? mSnapshotMaxHeight : mSnapshotMaxWidth;
int maxHeight = flip ? mSnapshotMaxWidth : mSnapshotMaxHeight;
if (maxWidth <= 0) maxWidth = Integer.MAX_VALUE;
if (maxHeight <= 0) maxHeight = Integer.MAX_VALUE;
float baseRatio = AspectRatio.of(baseSize).toFloat();
float maxValuesRatio = AspectRatio.of(maxWidth, maxHeight).toFloat();
if (maxValuesRatio >= baseRatio) {
// Height is the real constraint.
int outHeight = Math.min(baseSize.getHeight(), maxHeight);
int outWidth = (int) Math.floor((float) outHeight * baseRatio);
return new Size(outWidth, outHeight);
} else {
// Width is the real constraint.
int outWidth = Math.min(baseSize.getWidth(), maxWidth);
int outHeight = (int) Math.floor((float) outWidth / baseRatio);
return new Size(outWidth, outHeight);
}
}
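A worked example of the width-constrained branch, with illustrative numbers: a 1080x1920 base size and setSnapshotMaxWidth(720) with no height cap.

```java
import com.otaliastudios.cameraview.size.Size;

// Illustrative numbers, not part of this commit.
static Size uncroppedSnapshotExample() {
    float baseRatio = 1080f / 1920f; // 0.5625
    // maxValuesRatio = 720 / Integer.MAX_VALUE ~ 0, which is < baseRatio,
    // so width is the real constraint:
    int outWidth = Math.min(1080, 720);                     // 720
    int outHeight = (int) Math.floor(outWidth / baseRatio); // 1280
    return new Size(outWidth, outHeight);                   // 720x1280
}
```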
/**
* This is called either on cameraView.start(), or when the underlying surface changes.
* It is possible that in the first call the preview surface has not already computed its
* dimensions.
* But when it does, the {@link CameraPreview.SurfaceCallback} should be called,
* and this should be refreshed.
*
* @return the capture size
*/
@NonNull
@SuppressWarnings("WeakerAccess")
protected final Size computeCaptureSize() {
return computeCaptureSize(mMode);
}
@NonNull
@SuppressWarnings("WeakerAccess")
protected final Size computeCaptureSize(@NonNull Mode mode) {
// We want to pass stuff into the REF_VIEW reference, not the sensor one.
// This is already managed by CameraOptions, so we just flip again at the end.
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
SizeSelector selector;
Collection<Size> sizes;
if (mode == Mode.PICTURE) {
selector = mPictureSizeSelector;
sizes = mCameraOptions.getSupportedPictureSizes();
} else {
selector = mVideoSizeSelector;
sizes = mCameraOptions.getSupportedVideoSizes();
}
selector = SizeSelectors.or(selector, SizeSelectors.biggest());
List<Size> list = new ArrayList<>(sizes);
Size result = selector.select(list).get(0);
if (!list.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
LOG.i("computeCaptureSize:", "result:", result, "flip:", flip, "mode:", mode);
if (flip) result = result.flip(); // Go back to REF_SENSOR
return result;
}
/**
* This is called anytime {@link #computePreviewStreamSize()} is called.
* This means that it should be called during the binding process, when
* we can be sure that the camera is available (engineState == STARTED).
* @return a list of available sizes for preview
*/
@EngineThread
@NonNull
protected abstract List<Size> getPreviewStreamAvailableSizes();
@EngineThread
@NonNull
@SuppressWarnings("WeakerAccess")
protected final Size computePreviewStreamSize() {
@NonNull List<Size> previewSizes = getPreviewStreamAvailableSizes();
// These sizes come in REF_SENSOR. Since there is an external selector involved,
// we must convert all of them to REF_VIEW, then flip back when returning.
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
List<Size> sizes = new ArrayList<>(previewSizes.size());
for (Size size : previewSizes) {
sizes.add(flip ? size.flip() : size);
}
// Create our own default selector, which will be used if the external
// mPreviewStreamSizeSelector is null, or if it fails in finding a size.
Size targetMinSize = getPreviewSurfaceSize(Reference.VIEW);
if (targetMinSize == null) {
throw new IllegalStateException("targetMinSize should not be null here.");
}
AspectRatio targetRatio = AspectRatio.of(mCaptureSize.getWidth(), mCaptureSize.getHeight());
if (flip) targetRatio = targetRatio.flip();
LOG.i("computePreviewStreamSize:",
"targetRatio:", targetRatio,
"targetMinSize:", targetMinSize);
SizeSelector matchRatio = SizeSelectors.and( // Match this aspect ratio and sort by biggest
SizeSelectors.aspectRatio(targetRatio, 0),
SizeSelectors.biggest());
SizeSelector matchSize = SizeSelectors.and( // Bigger than this size, and sort by smallest
SizeSelectors.minHeight(targetMinSize.getHeight()),
SizeSelectors.minWidth(targetMinSize.getWidth()),
SizeSelectors.smallest());
SizeSelector matchAll = SizeSelectors.or(
SizeSelectors.and(matchRatio, matchSize), // Try to respect both constraints.
matchSize, // If couldn't match aspect ratio, at least respect the size
matchRatio, // If couldn't respect size, at least match aspect ratio
SizeSelectors.biggest() // If couldn't match any, take the biggest.
);
// Apply the external selector with this as a fallback,
// and return a size in REF_SENSOR reference.
SizeSelector selector;
if (mPreviewStreamSizeSelector != null) {
selector = SizeSelectors.or(mPreviewStreamSizeSelector, matchAll);
} else {
selector = matchAll;
}
Size result = selector.select(sizes).get(0);
if (!sizes.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
if (flip) result = result.flip();
LOG.i("computePreviewStreamSize:", "result:", result, "flip:", flip);
return result;
}
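The external selector mentioned above comes from the public API. A sketch of supplying one, using the library's real SizeSelectors helpers (the 1920 cap is illustrative):

```java
import com.otaliastudios.cameraview.CameraView;
import com.otaliastudios.cameraview.size.SizeSelectors;

// If this selector finds no match, the matchAll fallback above kicks in.
void configurePreviewStream(CameraView cameraView) {
    cameraView.setPreviewStreamSize(SizeSelectors.and(
            SizeSelectors.maxWidth(1920),
            SizeSelectors.biggest()));
}
```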
/**
* This is called anytime {@link #computeFrameProcessingSize()} is called.
* Implementors can return an empty list if the frame processing size is not selectable.
* @return a list of available sizes for frame processing
*/
@EngineThread
@NonNull
protected abstract List<Size> getFrameProcessingAvailableSizes();
@EngineThread
@NonNull
@SuppressWarnings("WeakerAccess")
protected final Size computeFrameProcessingSize() {
@NonNull List<Size> frameSizes = getFrameProcessingAvailableSizes();
// These sizes come in REF_SENSOR. Since there is an external selector involved,
// we must convert all of them to REF_VIEW, then flip back when returning.
boolean flip = getAngles().flip(Reference.SENSOR, Reference.VIEW);
List<Size> sizes = new ArrayList<>(frameSizes.size());
for (Size size : frameSizes) {
sizes.add(flip ? size.flip() : size);
}
AspectRatio targetRatio = AspectRatio.of(
mPreviewStreamSize.getWidth(),
mPreviewStreamSize.getHeight());
if (flip) targetRatio = targetRatio.flip();
int maxWidth = mFrameProcessingMaxWidth;
int maxHeight = mFrameProcessingMaxHeight;
if (maxWidth <= 0 || maxWidth == Integer.MAX_VALUE) maxWidth = 640;
if (maxHeight <= 0 || maxHeight == Integer.MAX_VALUE) maxHeight = 640;
Size targetMaxSize = new Size(maxWidth, maxHeight);
LOG.i("computeFrameProcessingSize:",
"targetRatio:", targetRatio,
"targetMaxSize:", targetMaxSize);
SizeSelector matchRatio = SizeSelectors.aspectRatio(targetRatio, 0);
SizeSelector matchSize = SizeSelectors.and(
SizeSelectors.maxHeight(targetMaxSize.getHeight()),
SizeSelectors.maxWidth(targetMaxSize.getWidth()),
SizeSelectors.biggest());
SizeSelector matchAll = SizeSelectors.or(
SizeSelectors.and(matchRatio, matchSize), // Try to respect both constraints.
matchSize, // If couldn't match aspect ratio, at least respect the size
SizeSelectors.smallest() // If couldn't match any, take the smallest.
);
Size result = matchAll.select(sizes).get(0);
if (!sizes.contains(result)) {
throw new RuntimeException("SizeSelectors must not return Sizes other than " +
"those in the input list.");
}
if (flip) result = result.flip();
LOG.i("computeFrameProcessingSize:", "result:", result, "flip:", flip);
return result;
}
//endregion
}
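The two caps consumed by computeFrameProcessingSize() are set through the new CameraView API (or the XML attributes added further below). A usage sketch, mirroring the 640 defaults above:

```java
import com.otaliastudios.cameraview.CameraView;

void configureFrameProcessing(CameraView cameraView) {
    // Values are in the View reference, like the size selectors.
    cameraView.setFrameProcessingMaxWidth(640);
    cameraView.setFrameProcessingMaxHeight(640);
}
```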

@ -1,5 +1,6 @@
package com.otaliastudios.cameraview.engine.options;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.media.CamcorderProfile;
@ -128,5 +129,8 @@ public class Camera1Options extends CameraOptions {
// Picture formats
supportedPictureFormats.add(PictureFormat.JPEG);
// Frame processing formats
supportedFrameProcessingFormats.add(ImageFormat.NV21);
}
}

@ -174,5 +174,15 @@ public class Camera2Options extends CameraOptions {
}
}
}
// Frame processing formats
supportedFrameProcessingFormats.add(ImageFormat.YUV_420_888);
int[] outputFormats = streamMap.getOutputFormats();
for (int outputFormat : outputFormats) {
// Ensure it is a raw format
if (ImageFormat.getBitsPerPixel(outputFormat) > 0) {
supportedFrameProcessingFormats.add(outputFormat);
}
}
}
}
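The bits-per-pixel check works because ImageFormat.getBitsPerPixel() returns -1 for compressed or opaque formats, keeping only raw formats that an ImageReader can stream. For instance:

```java
import android.graphics.ImageFormat;

// Illustrative values:
int yuv = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888); // 12 -> added
int jpeg = ImageFormat.getBitsPerPixel(ImageFormat.JPEG);       // -1 -> skipped
```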

@ -0,0 +1,162 @@
package com.otaliastudios.cameraview.frame;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.size.Size;
import java.util.concurrent.LinkedBlockingQueue;
/**
* This class manages the allocation of byte buffers and {@link Frame} objects.
* We are interested in recycling both of them, especially byte[] buffers which can create a lot
* of overhead.
*
* The pool size applies to both the {@link Frame} pool and the byte[] pool - it makes sense to use
* the same number since they are consumed at the same time.
*
* We can work in two modes, depending on whether a
* {@link BufferCallback} is passed to the constructor. The mode changes the buffer behavior.
*
* 1. {@link #BUFFER_MODE_DISPATCH}: in this mode, as soon as we have a buffer, it is dispatched to
* the {@link BufferCallback}. The callback should then fill the buffer, and finally call
* {@link FrameManager#getFrame(Object, long, int)} to receive a frame.
* This is used for Camera1.
*
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte
* buffers, instead of handing them to the callback. The users can ask for buffers through
* {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame through
* {@link FrameManager#getFrame(Object, long, int)}, or, in case it was not filled, returned to
* the queue using {@link #onBufferUnused(byte[])}.
* This was used for Camera2 before it moved to {@link ImageFrameManager}.
*/
public class ByteBufferFrameManager extends FrameManager<byte[]> {
/**
* Receives callbacks on buffer availability
* (when a Frame is released, we reuse its buffer).
*/
public interface BufferCallback {
void onBufferAvailable(@NonNull byte[] buffer);
}
/**
* In this mode, we have a {@link #mBufferCallback} and dispatch
* new buffers to the callback.
*/
private final static int BUFFER_MODE_DISPATCH = 0;
/**
* In this mode, we have a {@link #mBufferQueue} where we store
* buffers and only dispatch when requested.
*/
private final static int BUFFER_MODE_ENQUEUE = 1;
private LinkedBlockingQueue<byte[]> mBufferQueue;
private BufferCallback mBufferCallback;
private final int mBufferMode;
/**
* Construct a new frame manager.
* The construction must be followed by an {@link #setUp(int, Size)} call
* as soon as the parameters are known.
*
* @param poolSize the size of the backing pool.
* @param callback a callback
*/
public ByteBufferFrameManager(int poolSize, @Nullable BufferCallback callback) {
super(poolSize, byte[].class);
if (callback != null) {
mBufferCallback = callback;
mBufferMode = BUFFER_MODE_DISPATCH;
} else {
mBufferQueue = new LinkedBlockingQueue<>(poolSize);
mBufferMode = BUFFER_MODE_ENQUEUE;
}
}
@Override
public void setUp(int format, @NonNull Size size) {
super.setUp(format, size);
int bytes = getFrameBytes();
for (int i = 0; i < getPoolSize(); i++) {
if (mBufferMode == BUFFER_MODE_DISPATCH) {
mBufferCallback.onBufferAvailable(new byte[bytes]);
} else {
mBufferQueue.offer(new byte[bytes]);
}
}
}
/**
* Returns a new byte buffer that can be filled.
* This can only be called in {@link #BUFFER_MODE_ENQUEUE} mode, where the frame
* manager also holds a queue of the byte buffers.
*
* If not null, the buffer returned by this method can be filled and used to get
* a new frame through {@link FrameManager#getFrame(Object, long, int)}.
*
* @return a buffer, or null
*/
@Nullable
public byte[] getBuffer() {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call getBuffer() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
return mBufferQueue.poll();
}
/**
* Can be called if the buffer obtained by {@link #getBuffer()}
* was not used to construct a frame, so it can be put back into the queue.
* @param buffer a buffer
*/
public void onBufferUnused(@NonNull byte[] buffer) {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call onBufferUnused() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
if (isSetUp()) {
mBufferQueue.offer(buffer);
} else {
LOG.w("onBufferUnused: buffer was returned but we're not set up anymore.");
}
}
@Override
protected void onFrameDataReleased(@NonNull byte[] data, boolean recycled) {
if (recycled && data.length == getFrameBytes()) {
if (mBufferMode == BUFFER_MODE_DISPATCH) {
mBufferCallback.onBufferAvailable(data);
} else {
mBufferQueue.offer(data);
}
}
}
@NonNull
@Override
protected byte[] onCloneFrameData(@NonNull byte[] data) {
byte[] clone = new byte[data.length];
System.arraycopy(data, 0, clone, 0, data.length);
return clone;
}
/**
* Releases all frames controlled by this manager and
* clears the pool.
* In BUFFER_MODE_ENQUEUE, this also releases all the buffers.
*/
@Override
public void release() {
super.release();
if (mBufferMode == BUFFER_MODE_ENQUEUE) {
mBufferQueue.clear();
}
}
}
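A hedged sketch of constructing the manager in both modes; the pool size, format and size values are illustrative, and in the library this wiring is done by the engines:

```java
import android.graphics.ImageFormat;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.frame.ByteBufferFrameManager;
import com.otaliastudios.cameraview.size.Size;

void frameManagerSketch() {
    // 1. Dispatch mode (Camera1): buffers flow to the callback, which can
    //    hand them to the camera, e.g. through Camera.addCallbackBuffer().
    ByteBufferFrameManager dispatching = new ByteBufferFrameManager(2,
            new ByteBufferFrameManager.BufferCallback() {
                @Override
                public void onBufferAvailable(@NonNull byte[] buffer) {
                    // camera.addCallbackBuffer(buffer);
                }
            });
    dispatching.setUp(ImageFormat.NV21, new Size(640, 480));

    // 2. Enqueue mode (null callback): poll buffers on demand.
    ByteBufferFrameManager queueing = new ByteBufferFrameManager(2, null);
    queueing.setUp(ImageFormat.NV21, new Size(640, 480));
    byte[] buffer = queueing.getBuffer(); // null when the pool is exhausted
    if (buffer != null) {
        // ... fill it, then getFrame(buffer, ...) or onBufferUnused(buffer).
    }
}
```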

@ -1,6 +1,9 @@
package com.otaliastudios.cameraview.frame;
import android.annotation.SuppressLint;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.controls.Engine;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
@ -14,8 +17,9 @@ public class Frame {
private final static CameraLogger LOG = CameraLogger.create(TAG);
private final FrameManager mManager;
private final Class<?> mDataClass;
private byte[] mData = null;
private Object mData = null;
private long mTime = -1;
private long mLastTime = -1;
private int mRotation = 0;
@ -24,9 +28,10 @@ public class Frame {
Frame(@NonNull FrameManager manager) {
mManager = manager;
mDataClass = manager.getFrameDataClass();
}
void setContent(@NonNull byte[] data, long time, int rotation, @NonNull Size size, int format) {
void setContent(@NonNull Object data, long time, int rotation, @NonNull Size size, int format) {
this.mData = data;
this.mTime = time;
this.mLastTime = time;
@ -63,12 +68,13 @@ public class Frame {
*
* @return a frozen Frame
*/
@SuppressLint("NewApi")
@NonNull
public Frame freeze() {
ensureHasContent();
byte[] data = new byte[mData.length];
System.arraycopy(mData, 0, data, 0, mData.length);
Frame other = new Frame(mManager);
//noinspection unchecked
Object data = mManager.cloneFrameData(getData());
other.setContent(data, mTime, mRotation, mSize, mFormat);
return other;
}
@ -80,7 +86,7 @@ public class Frame {
public void release() {
if (!hasContent()) return;
LOG.v("Frame with time", mTime, "is being released.");
byte[] data = mData;
Object data = mData;
mData = null;
mRotation = 0;
mTime = -1;
@ -89,6 +95,7 @@ public class Frame {
// After the manager is notified, this frame instance can be taken by
// someone else, possibly from another thread. So this should be the
// last call in this method. If we null data after, we can have issues.
//noinspection unchecked
mManager.onFrameReleased(this, data);
}
@ -96,10 +103,23 @@ public class Frame {
* Returns the frame data.
* @return the frame data
*/
@SuppressWarnings("unchecked")
@NonNull
public byte[] getData() {
public <T> T getData() {
ensureHasContent();
return mData;
return (T) mData;
}
/**
* Returns the class returned by {@link #getData()}.
* This class depends on the engine that produced this frame.
* - {@link Engine#CAMERA1} will produce byte[] arrays
* - {@link Engine#CAMERA2} will produce {@link android.media.Image}s
* @return the data class
*/
@NonNull
public Class<?> getDataClass() {
return mDataClass;
}
/**

@ -7,76 +7,33 @@ import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.concurrent.LinkedBlockingQueue;
/**
* This class manages the allocation of byte buffers and {@link Frame} objects.
* We are interested in recycling both of them, especially byte[] buffers which can create a lot
* of overhead.
*
* This class manages the allocation of {@link Frame} objects.
* The FrameManager keeps a {@link #mPoolSize} integer that defines the number of instances to keep.
* The pool size applies to both the {@link Frame} pool and the byte[] pool - it makes sense to use
* the same number since they are consumed at the same time.
*
* Main methods are:
* - {@link #setUp(int, Size)}: to set up with size and allocate buffers
* - {@link #release()}: to release. After release, a manager can be setUp again.
* - {@link #getFrame(byte[], long, int)}: gets a new {@link Frame}.
* - {@link #getFrame(Object, long, int)}: gets a new {@link Frame}.
*
* For both byte buffers and frames to get back to the FrameManager pool, all you have to do
* For frames to get back to the FrameManager pool, all you have to do
* is call {@link Frame#release()} when done.
*
* Other than this, the FrameManager can work in two modes, depending on whether a
* {@link BufferCallback} is passed to the constructor. The mode changes the buffer behavior.
*
* 1. {@link #BUFFER_MODE_DISPATCH}: in this mode, as soon as we have a buffer, it is dispatched to
* the {@link BufferCallback}. The callback should then fill the buffer, and finally call
* {@link #getFrame(byte[], long, int)} to receive a frame.
* This is used for Camera1.
*
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte
* buffers, instead of handing them to the callback. The users can ask for buffers through
* {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame through
* {@link #getFrame(byte[], long, int)}, or, in case it was not filled, returned to the queue
* using {@link #onBufferUnused(byte[])}.
* This is used for Camera2.
*/
public class FrameManager {
public abstract class FrameManager<T> {
private static final String TAG = FrameManager.class.getSimpleName();
private static final CameraLogger LOG = CameraLogger.create(TAG);
/**
* Receives callbacks on buffer availability
* (when a Frame is released, we reuse its buffer).
*/
public interface BufferCallback {
void onBufferAvailable(@NonNull byte[] buffer);
}
protected static final CameraLogger LOG = CameraLogger.create(TAG);
private final int mPoolSize;
private int mBufferSize = -1;
private int mFrameBytes = -1;
private Size mFrameSize = null;
private int mFrameFormat = -1;
private final Class<T> mFrameDataClass;
private LinkedBlockingQueue<Frame> mFrameQueue;
private LinkedBlockingQueue<byte[]> mBufferQueue;
private BufferCallback mBufferCallback;
private final int mBufferMode;
/**
* In this mode, we have a {@link #mBufferCallback} and dispatch
* new buffers to the callback.
*/
private final static int BUFFER_MODE_DISPATCH = 0;
/**
* In this mode, we have a {@link #mBufferQueue} where we store
* buffers and only dispatch when requested.
*/
private final static int BUFFER_MODE_ENQUEUE = 1;
/**
* Construct a new frame manager.
@ -84,18 +41,37 @@ public class FrameManager {
* as soon as the parameters are known.
*
* @param poolSize the size of the backing pool.
* @param callback a callback
* @param dataClass the class of the frame data
*/
public FrameManager(int poolSize, @Nullable BufferCallback callback) {
protected FrameManager(int poolSize, @NonNull Class<T> dataClass) {
mPoolSize = poolSize;
mFrameDataClass = dataClass;
mFrameQueue = new LinkedBlockingQueue<>(mPoolSize);
if (callback != null) {
mBufferCallback = callback;
mBufferMode = BUFFER_MODE_DISPATCH;
} else {
mBufferQueue = new LinkedBlockingQueue<>(mPoolSize);
mBufferMode = BUFFER_MODE_ENQUEUE;
}
}
/**
* Returns the pool size.
* @return pool size
*/
@SuppressWarnings("WeakerAccess")
public final int getPoolSize() {
return mPoolSize;
}
/**
* Returns the frame size in bytes.
* @return frame size in bytes
*/
@SuppressWarnings("WeakerAccess")
public final int getFrameBytes() {
return mFrameBytes;
}
/**
* Returns the frame data class.
* @return frame data class
*/
public final Class<T> getFrameDataClass() {
return mFrameDataClass;
}
/**
@ -106,9 +82,8 @@ public class FrameManager {
*
* @param format the image format
* @param size the frame size
* @return the buffer size
*/
public int setUp(int format, @NonNull Size size) {
public void setUp(int format, @NonNull Size size) {
if (isSetUp()) {
// TODO throw or just reconfigure?
}
@ -116,15 +91,7 @@ public class FrameManager {
mFrameFormat = format;
int bitsPerPixel = ImageFormat.getBitsPerPixel(format);
long sizeInBits = size.getHeight() * size.getWidth() * bitsPerPixel;
mBufferSize = (int) Math.ceil(sizeInBits / 8.0d);
for (int i = 0; i < mPoolSize; i++) {
if (mBufferMode == BUFFER_MODE_DISPATCH) {
mBufferCallback.onBufferAvailable(new byte[mBufferSize]);
} else {
mBufferQueue.offer(new byte[mBufferSize]);
}
}
return mBufferSize;
mFrameBytes = (int) Math.ceil(sizeInBits / 8.0d);
}
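A quick check of this math for the common NV21 case, where getBitsPerPixel() returns 12: a 640x480 frame needs 3/2 * width * height bytes.

```java
import android.graphics.ImageFormat;

int bpp = ImageFormat.getBitsPerPixel(ImageFormat.NV21); // 12
long sizeInBits = 480L * 640L * bpp;                     // 3,686,400
int frameBytes = (int) Math.ceil(sizeInBits / 8.0d);     // 460,800 == 3/2 * 640 * 480
```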
/**
@ -134,47 +101,10 @@ public class FrameManager {
*
* @return true if set up
*/
private boolean isSetUp() {
protected boolean isSetUp() {
return mFrameSize != null;
}
/**
* Returns a new byte buffer that can be filled.
* This can only be called in {@link #BUFFER_MODE_ENQUEUE} mode, where the frame
* manager also holds a queue of the byte buffers.
*
* If not null, the buffer returned by this method can be filled and used to get
* a new frame through {@link #getFrame(byte[], long, int)}.
*
* @return a buffer, or null
*/
@Nullable
public byte[] getBuffer() {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call getBuffer() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
return mBufferQueue.poll();
}
/**
* Can be called if the buffer obtained by {@link #getBuffer()}
* was not used to construct a frame, so it can be put back into the queue.
* @param buffer a buffer
*/
public void onBufferUnused(@NonNull byte[] buffer) {
if (mBufferMode != BUFFER_MODE_ENQUEUE) {
throw new IllegalStateException("Can't call onBufferUnused() " +
"when not in BUFFER_MODE_ENQUEUE.");
}
if (isSetUp()) {
mBufferQueue.offer(buffer);
} else {
LOG.w("onBufferUnused: buffer was returned but we're not set up anymore.");
}
}
/**
* Returns a new Frame for the given data. This must be called
* - after {@link #setUp(int, Size)}, which sets the buffer size
@ -190,7 +120,7 @@ public class FrameManager {
* @return a new frame
*/
@NonNull
public Frame getFrame(@NonNull byte[] data, long time, int rotation) {
public Frame getFrame(@NonNull T data, long time, int rotation) {
if (!isSetUp()) {
throw new IllegalStateException("Can't call getFrame() after releasing " +
"or before setUp.");
@ -209,33 +139,37 @@ public class FrameManager {
/**
* Called by child frames when they are released.
* This might be called from old Frames that belong to an old 'setUp'
* of this FrameManager instance, so the buffer size, for instance, might be different.
*
* @param frame the released frame
*/
void onFrameReleased(@NonNull Frame frame, @NonNull byte[] buffer) {
void onFrameReleased(@NonNull Frame frame, @NonNull T data) {
if (!isSetUp()) return;
// If frame queue is full, let's drop everything.
// If frame queue accepts this frame, let's recycle the buffer as well.
if (mFrameQueue.offer(frame)) {
int currSize = buffer.length;
int reqSize = mBufferSize;
if (currSize == reqSize) {
if (mBufferMode == BUFFER_MODE_DISPATCH) {
mBufferCallback.onBufferAvailable(buffer);
} else {
mBufferQueue.offer(buffer);
}
}
}
boolean recycled = mFrameQueue.offer(frame);
onFrameDataReleased(data, recycled);
}
/**
* Called when a Frame was released and its data is now available.
* This might be called from old Frames that belong to an old 'setUp'
* of this FrameManager instance, so the buffer size, for instance, might be different.
* @param data the frame data
* @param recycled whether the frame was recycled back into the frame pool
*/
protected abstract void onFrameDataReleased(@NonNull T data, boolean recycled);
@NonNull
final T cloneFrameData(@NonNull T data) {
return onCloneFrameData(data);
}
@NonNull
protected abstract T onCloneFrameData(@NonNull T data);
/**
* Releases all frames controlled by this manager and
* clears the pool.
* In BUFFER_MODE_ENQUEUE, releases also all the buffers.
*/
public void release() {
if (!isSetUp()) {
@ -245,10 +179,7 @@ public class FrameManager {
LOG.i("release: Clearing the frame and buffer queue.");
mFrameQueue.clear();
if (mBufferMode == BUFFER_MODE_ENQUEUE) {
mBufferQueue.clear();
}
mBufferSize = -1;
mFrameBytes = -1;
mFrameSize = null;
mFrameFormat = -1;
}

@ -0,0 +1,30 @@
package com.otaliastudios.cameraview.frame;
import android.media.Image;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
@RequiresApi(Build.VERSION_CODES.KITKAT)
public class ImageFrameManager extends FrameManager<Image> {
public ImageFrameManager(int poolSize) {
super(poolSize, Image.class);
}
@Override
protected void onFrameDataReleased(@NonNull Image data, boolean recycled) {
try {
data.close();
} catch (Exception ignore) {}
}
@NonNull
@Override
protected Image onCloneFrameData(@NonNull Image data) {
throw new RuntimeException("Cannot freeze() an Image Frame. " +
"Please consider using the frame synchronously in your process() method, " +
"which also gives better performance.");
}
}
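As a consequence, Frame.freeze() is only legal for byte[] frames, while Image frames must be consumed synchronously inside process(). A hedged sketch of both paths (the processor body is illustrative):

```java
import android.media.Image;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.frame.Frame;

void process(@NonNull Frame frame) {
    if (frame.getDataClass() == byte[].class) {
        Frame frozen = frame.freeze(); // safe: the byte[] data is copied
        // ... hand frozen to a worker thread, then call frozen.release() ...
    } else {
        Image image = frame.getData();
        // Use the Image here, synchronously: releasing the frame closes it,
        // and calling freeze() on this frame would throw (see above).
    }
}
```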

@ -1,100 +0,0 @@
package com.otaliastudios.cameraview.internal.utils;
import android.graphics.ImageFormat;
import android.media.Image;
import java.nio.ByteBuffer;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
/**
* Conversions for {@link android.media.Image}s into byte arrays.
*/
@RequiresApi(19)
public class ImageHelper {
/**
* From https://stackoverflow.com/a/52740776/4288782 .
* The result array should have a size that is at least 3/2 * w * h.
* This is correctly computed by {@link com.otaliastudios.cameraview.frame.FrameManager}.
*
* @param image input image
* @param result output array
*/
public static void convertToNV21(@NonNull Image image, @NonNull byte[] result) {
if (image.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalStateException("CAn only convert from YUV_420_888.");
}
int width = image.getWidth();
int height = image.getHeight();
int ySize = width * height;
int uvSize = width * height / 4;
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer(); // Y
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer(); // U
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer(); // V
int rowStride = image.getPlanes()[0].getRowStride();
if (image.getPlanes()[0].getPixelStride() != 1) {
throw new AssertionError("Something wrong in convertToNV21");
}
int pos = 0;
if (rowStride == width) { // likely
yBuffer.get(result, 0, ySize);
pos += ySize;
}
else {
int yBufferPos = width - rowStride; // not an actual position
for (; pos<ySize; pos+=width) {
yBufferPos += rowStride - width;
yBuffer.position(yBufferPos);
yBuffer.get(result, pos, width);
}
}
rowStride = image.getPlanes()[2].getRowStride();
int pixelStride = image.getPlanes()[2].getPixelStride();
if (rowStride != image.getPlanes()[1].getRowStride()) {
throw new AssertionError("Something wrong in convertToNV21");
}
if (pixelStride != image.getPlanes()[1].getPixelStride()) {
throw new AssertionError("Something wrong in convertToNV21");
}
if (pixelStride == 2 && rowStride == width && uBuffer.get(0) == vBuffer.get(1)) {
// maybe V and U planes overlap as per NV21, which means vBuffer[1]
// is alias of uBuffer[0]
byte savePixel = vBuffer.get(1);
vBuffer.put(1, (byte)0);
if (uBuffer.get(0) == 0) {
vBuffer.put(1, (byte)255);
//noinspection ConstantConditions
if (uBuffer.get(0) == 255) {
vBuffer.put(1, savePixel);
vBuffer.get(result, ySize, uvSize);
return; // shortcut
}
}
// unfortunately, the check failed. We must save U and V pixel by pixel
vBuffer.put(1, savePixel);
}
// other optimizations could check if (pixelStride == 1) or (pixelStride == 2),
// but performance gain would be less significant
for (int row=0; row<height/2; row++) {
for (int col=0; col<width/2; col++) {
int vuPos = col*pixelStride + row*rowStride;
result[pos++] = vBuffer.get(vuPos);
result[pos++] = uBuffer.get(vuPos);
}
}
}
}

@ -25,6 +25,10 @@
<attr name="cameraSnapshotMaxWidth" format="integer|reference" />
<attr name="cameraSnapshotMaxHeight" format="integer|reference" />
<attr name="cameraFrameProcessingMaxWidth" format="integer|reference" />
<attr name="cameraFrameProcessingMaxHeight" format="integer|reference" />
<attr name="cameraFrameProcessingFormat" format="integer|reference" />
<attr name="cameraVideoBitRate" format="integer|reference" />
<attr name="cameraAudioBitRate" format="integer|reference" />
<attr name="cameraPreviewFrameRate" format="integer|reference" />

@ -3,31 +3,37 @@ package com.otaliastudios.cameraview.frame;
import android.graphics.ImageFormat;
import androidx.annotation.NonNull;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static junit.framework.Assert.assertNotNull;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
public class FrameTest {
private FrameManager manager;
private FrameManager<String> manager;
@Before
public void setUp() {
manager = mock(FrameManager.class);
manager = spy(new FrameManager<String>(1, String.class) {
@Override
protected void onFrameDataReleased(@NonNull String data, boolean recycled) { }
@NonNull
@Override
protected String onCloneFrameData(@NonNull String data) {
return data;
}
});
}
@After
@ -40,21 +46,21 @@ public class FrameTest {
// Only time should count.
Frame f1 = new Frame(manager);
long time = 1000;
f1.setContent(new byte[3], time, 90, new Size(5, 5), ImageFormat.NV21);
f1.setContent("foo", time, 90, new Size(5, 5), ImageFormat.NV21);
Frame f2 = new Frame(manager);
f2.setContent(new byte[2], time, 0, new Size(10, 10), ImageFormat.NV21);
f2.setContent("bar", time, 0, new Size(10, 10), ImageFormat.NV21);
assertEquals(f1, f2);
f2.setContent(new byte[2], time + 1, 0, new Size(10, 10), ImageFormat.NV21);
f2.setContent("foo", time + 1, 0, new Size(10, 10), ImageFormat.NV21);
assertNotEquals(f1, f2);
}
@Test
public void testReleaseThrows() {
final Frame frame = new Frame(manager);
frame.setContent(new byte[2], 1000, 90, new Size(10, 10), ImageFormat.NV21);
frame.setContent("foo", 1000, 90, new Size(10, 10), ImageFormat.NV21);
frame.release();
verify(manager, times(1)).onFrameReleased(eq(frame), any(byte[].class));
verify(manager, times(1)).onFrameReleased(frame, "foo");
assertThrows(new Runnable() { public void run() { frame.getTime(); }});
assertThrows(new Runnable() { public void run() { frame.getFormat(); }});
@ -75,7 +81,7 @@ public class FrameTest {
@Test
public void testFreeze() {
Frame frame = new Frame(manager);
byte[] data = new byte[]{0, 1, 5, 0, 7, 3, 4, 5};
String data = "test data";
long time = 1000;
int rotation = 90;
Size size = new Size(10, 10);
@ -83,14 +89,14 @@ public class FrameTest {
frame.setContent(data, time, rotation, size, format);
Frame frozen = frame.freeze();
assertArrayEquals(data, frozen.getData());
assertEquals(data, frozen.getData());
assertEquals(time, frozen.getTime());
assertEquals(rotation, frozen.getRotation());
assertEquals(size, frozen.getSize());
// Mutate the first, ensure that frozen is not affected
frame.setContent(new byte[]{3, 2, 1}, 50, 180, new Size(1, 1), ImageFormat.JPEG);
assertArrayEquals(data, frozen.getData());
frame.setContent("new data", 50, 180, new Size(1, 1), ImageFormat.JPEG);
assertEquals(data, frozen.getData());
assertEquals(time, frozen.getTime());
assertEquals(rotation, frozen.getRotation());
assertEquals(size, frozen.getSize());

@@ -47,7 +47,7 @@ import java.util.List;
public class CameraActivity extends AppCompatActivity implements View.OnClickListener, OptionView.Callback {
private final static CameraLogger LOG = CameraLogger.create("DemoApp");
private final static boolean USE_FRAME_PROCESSOR = false;
private final static boolean USE_FRAME_PROCESSOR = true;
private final static boolean DECODE_BITMAP = true;
private CameraView camera;
@@ -78,18 +78,24 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
lastTime = newTime;
LOG.e("Frame delayMillis:", delay, "FPS:", 1000 / delay);
if (DECODE_BITMAP) {
YuvImage yuvImage = new YuvImage(frame.getData(), ImageFormat.NV21,
frame.getSize().getWidth(),
frame.getSize().getHeight(),
null);
ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0,
frame.getSize().getWidth(),
frame.getSize().getHeight()), 100, jpegStream);
byte[] jpegByteArray = jpegStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
//noinspection ResultOfMethodCallIgnored
bitmap.toString();
if (frame.getFormat() == ImageFormat.NV21
&& frame.getDataClass() == byte[].class) {
byte[] data = frame.getData();
YuvImage yuvImage = new YuvImage(data,
frame.getFormat(),
frame.getSize().getWidth(),
frame.getSize().getHeight(),
null);
ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0,
frame.getSize().getWidth(),
frame.getSize().getHeight()), 100, jpegStream);
byte[] jpegByteArray = jpegStream.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegByteArray,
0, jpegByteArray.length);
//noinspection ResultOfMethodCallIgnored
bitmap.toString();
}
}
}
});
@@ -125,6 +131,8 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
new Option.OverlayInPreview(watermark),
new Option.OverlayInPictureSnapshot(watermark),
new Option.OverlayInVideoSnapshot(watermark),
// Frame Processing
new Option.FrameProcessingFormat(),
// Other
new Option.Grid(), new Option.GridColor(), new Option.UseDeviceOrientation()
);
@@ -141,6 +149,8 @@ public class CameraActivity extends AppCompatActivity implements View.OnClickLis
false, false, false, false, true,
// Watermarks
false, false, true,
// Frame Processing
true,
// Other
false, false, true
);

@@ -4,6 +4,7 @@ import android.graphics.Color;
import androidx.annotation.NonNull;
import androidx.core.util.Pair;
import android.graphics.ImageFormat;
import android.view.View;
import android.view.ViewGroup;
@@ -554,4 +555,45 @@ public abstract class Option<T> {
}
}
public static class FrameProcessingFormat extends Option<Integer> {
FrameProcessingFormat() {
super("Frame Processing Format");
}
@Override
public void set(@NonNull CameraView view, @NonNull Integer value) {
view.setFrameProcessingFormat(value);
}
@NonNull
@Override
public Integer get(@NonNull CameraView view) {
return view.getFrameProcessingFormat();
}
@NonNull
@Override
public Collection<Integer> getAll(@NonNull CameraView view, @NonNull CameraOptions options) {
return options.getSupportedFrameProcessingFormats();
}
@NonNull
@Override
public String toString(@NonNull Integer value) {
switch (value) {
case ImageFormat.NV21: return "NV21";
case ImageFormat.NV16: return "NV16";
case ImageFormat.JPEG: return "JPEG";
case ImageFormat.YUY2: return "YUY2";
case ImageFormat.YUV_420_888: return "YUV_420_888";
case ImageFormat.YUV_422_888: return "YUV_422_888";
case ImageFormat.YUV_444_888: return "YUV_444_888";
case ImageFormat.RAW10: return "RAW10";
case ImageFormat.RAW12: return "RAW12";
case ImageFormat.RAW_SENSOR: return "RAW_SENSOR";
}
return super.toString(value);
}
}
}

@@ -19,12 +19,17 @@ cameraView.addFrameProcessor(new FrameProcessor() {
@Override
@WorkerThread
public void process(@NonNull Frame frame) {
byte[] data = frame.getData();
int rotation = frame.getRotation();
long time = frame.getTime();
Size size = frame.getSize();
int format = frame.getFormat();
// Process...
if (frame.getDataClass() == byte[].class) {
byte[] data = frame.getData();
// Process byte array...
} else if (frame.getDataClass() == Image.class) {
Image data = frame.getData();
// Process android.media.Image...
}
}
}
```
@@ -78,13 +83,64 @@ public void process(@NonNull Frame frame) {
latch.await();
}
```
### Frame Data
Starting from `v2.5.0`, the type of data offered by `frame.getData()` depends on the camera engine
that created this frame:
- The Camera1 engine will offer `byte[]` arrays
- The Camera2 engine will offer `android.media.Image` objects
You can check this at runtime by inspecting the data class using `frame.getDataClass()`.
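For example, here is a minimal sketch of a processor that reads the luminance plane of a Camera2 frame, assuming the default `ImageFormat.YUV_420_888` format and assuming that the library recycles the `Image` when the frame is released (so the processor should not close it):
```java
@Override
@WorkerThread
public void process(@NonNull Frame frame) {
    if (frame.getDataClass() == Image.class) {
        Image image = frame.getData();
        // With YUV_420_888, the first plane holds the luminance (Y) data.
        ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
        byte[] yBytes = new byte[yBuffer.remaining()];
        yBuffer.get(yBytes);
        // Process yBytes... do not close the Image here.
    }
}
```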
### Frame Size
The Camera2 engine offers the option to set size constraints for the incoming frames.
```java
cameraView.setFrameProcessingMaxWidth(maxWidth);
cameraView.setFrameProcessingMaxHeight(maxHeight);
```
With other engines, these APIs have no effect.
### Frame Format
The Camera2 engine offers the option to set the frame format as one of the ImageFormat
constants. The default is `ImageFormat.YUV_420_888`.
```java
cameraView.setFrameProcessingFormat(ImageFormat.YUV_420_888);
cameraView.setFrameProcessingFormat(ImageFormat.YUV_422_888);
```
With the Camera1 engine, the incoming format will always be `ImageFormat.NV21`.
You can check which formats are available for use through `CameraOptions.getSupportedFrameProcessingFormats()`.
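For example, here is a minimal sketch that waits for the camera to open and then applies one of the supported formats, assuming the view is called `cameraView`:
```java
cameraView.addCameraListener(new CameraListener() {
    @Override
    public void onCameraOpened(@NonNull CameraOptions options) {
        // Prefer YUV_422_888 when available; otherwise keep the default.
        if (options.getSupportedFrameProcessingFormats()
                .contains(ImageFormat.YUV_422_888)) {
            cameraView.setFrameProcessingFormat(ImageFormat.YUV_422_888);
        }
    }
});
```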
### XML Attributes
```xml
<com.otaliastudios.cameraview.CameraView
app:cameraFrameProcessingMaxWidth="640"
app:cameraFrameProcessingMaxHeight="640"
app:cameraFrameProcessingFormat="0x23"/>
```
### Related APIs
|Frame API|Type|Description|
|---------|----|-----------|
|`camera.addFrameProcessor(FrameProcessor)`|`-`|Register a `FrameProcessor`.|
|`frame.getData()`|`byte[]`|The current preview frame, in its original orientation.|
|`camera.removeFrameProcessor(FrameProcessor)`|`-`|Removes a `FrameProcessor`.|
|`camera.clearFrameProcessors()`|`-`|Removes all `FrameProcessor`s.|
|`camera.setFrameProcessingMaxWidth(int)`|`-`|Sets the max width for incoming frames.|
|`camera.setFrameProcessingMaxHeight(int)`|`-`|Sets the max height for incoming frames.|
|`camera.getFrameProcessingMaxWidth()`|`int`|Gets the max width for incoming frames.|
|`camera.getFrameProcessingMaxHeight()`|`int`|Gets the max height for incoming frames.|
|`camera.setFrameProcessingFormat(int)`|`-`|Sets the desired format for incoming frames. Should be one of the ImageFormat constants.|
|`camera.getFrameProcessingFormat()`|`int`|Gets the format for incoming frames. One of the ImageFormat constants.|
|`frame.getDataClass()`|`Class<T>`|The class of the data returned by `getData()`. Either `byte[]` or `android.media.Image`.|
|`frame.getData()`|`T`|The current preview frame, in its original orientation.|
|`frame.getTime()`|`long`|The preview timestamp, in `System.currentTimeMillis()` reference.|
|`frame.getRotation()`|`int`|The rotation that should be applied to the frame data in order to see what the user sees.|
|`frame.getSize()`|`Size`|The size of the frame data, before any rotation is applied.|
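For instance, `frame.getRotation()` can be used to rotate a decoded `Bitmap` so that it matches what the user sees. A minimal sketch, assuming `bitmap` was already decoded from the frame data:
```java
Matrix matrix = new Matrix();
matrix.postRotate(frame.getRotation());
Bitmap upright = Bitmap.createBitmap(bitmap, 0, 0,
        bitmap.getWidth(), bitmap.getHeight(), matrix, true);
```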

@@ -13,10 +13,8 @@ Starting from version `2.1.0`, CameraView experimentally supports real-time filters that can modify
the camera frames before they are shown and recorded. Just like [overlays](watermarks-and-overlays.html),
these filters are applied to the preview and to any [picture or video snapshots](capturing-media.html).
Conditions:
- you must set the experimental flag: `app:cameraExperimental="true"`
- you must use `Preview.GL_SURFACE` as a preview
Starting from `2.5.0`, this feature is considered to be stable and you do not need the experimental
flag to use it. The only condition is to use the `Preview.GL_SURFACE` preview.
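For reference, a minimal sketch of selecting the GL surface preview from code, assuming it runs before the camera is opened (the `app:cameraPreview` XML attribute is an alternative):
```java
cameraView.setPreview(Preview.GL_SURFACE);
```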
### Simple usage
