Frame rotation improvements (#745)

* Add Frame.rotationToUser and Frame.rotationToView

* Update docs

* Add missing annotation
Mattia Iavarone authored 5 years ago, committed by GitHub
parent 1bb15dc595
commit 83bb45108a
28 changed files (number of changed lines in parentheses):
  1. cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManagerTest.java (20)
  2. cameraview/src/androidTest/java/com/otaliastudios/cameraview/frame/FrameManagerTest.java (17)
  3. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera1Engine.java (6)
  4. cameraview/src/main/java/com/otaliastudios/cameraview/engine/Camera2Engine.java (8)
  5. cameraview/src/main/java/com/otaliastudios/cameraview/filter/BaseFilter.java (4)
  6. cameraview/src/main/java/com/otaliastudios/cameraview/filter/Filter.java (2)
  7. cameraview/src/main/java/com/otaliastudios/cameraview/filter/MultiFilter.java (2)
  8. cameraview/src/main/java/com/otaliastudios/cameraview/filters/AutoFixFilter.java (2)
  9. cameraview/src/main/java/com/otaliastudios/cameraview/filters/BrightnessFilter.java (2)
  10. cameraview/src/main/java/com/otaliastudios/cameraview/filters/ContrastFilter.java (2)
  11. cameraview/src/main/java/com/otaliastudios/cameraview/filters/DocumentaryFilter.java (2)
  12. cameraview/src/main/java/com/otaliastudios/cameraview/filters/DuotoneFilter.java (2)
  13. cameraview/src/main/java/com/otaliastudios/cameraview/filters/FillLightFilter.java (2)
  14. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GammaFilter.java (2)
  15. cameraview/src/main/java/com/otaliastudios/cameraview/filters/GrainFilter.java (2)
  16. cameraview/src/main/java/com/otaliastudios/cameraview/filters/HueFilter.java (2)
  17. cameraview/src/main/java/com/otaliastudios/cameraview/filters/LomoishFilter.java (2)
  18. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SaturationFilter.java (2)
  19. cameraview/src/main/java/com/otaliastudios/cameraview/filters/SharpnessFilter.java (2)
  20. cameraview/src/main/java/com/otaliastudios/cameraview/filters/TemperatureFilter.java (2)
  21. cameraview/src/main/java/com/otaliastudios/cameraview/filters/TintFilter.java (2)
  22. cameraview/src/main/java/com/otaliastudios/cameraview/filters/VignetteFilter.java (2)
  23. cameraview/src/main/java/com/otaliastudios/cameraview/frame/ByteBufferFrameManager.java (13)
  24. cameraview/src/main/java/com/otaliastudios/cameraview/frame/Frame.java (50)
  25. cameraview/src/main/java/com/otaliastudios/cameraview/frame/FrameManager.java (31)
  26. cameraview/src/main/java/com/otaliastudios/cameraview/picture/Snapshot1PictureRecorder.java (2)
  27. cameraview/src/test/java/com/otaliastudios/cameraview/frame/FrameTest.java (21)
  28. docs/_docs/frame-processing.md (6)

@@ -7,6 +7,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.size.Size;
import org.junit.After;
@@ -26,6 +27,7 @@ import static org.mockito.Mockito.verify;
@SmallTest
public class ByteBufferFrameManagerTest extends BaseTest {
private final Angles angles = new Angles();
private ByteBufferFrameManager.BufferCallback callback;
@Before
@@ -41,22 +43,22 @@ public class ByteBufferFrameManagerTest extends BaseTest {
@Test
public void testAllocate() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
verify(callback, times(1)).onBufferAvailable(any(byte[].class));
reset(callback);
manager = new ByteBufferFrameManager(5, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
verify(callback, times(5)).onBufferAvailable(any(byte[].class));
}
@Test
public void testOnFrameReleased_alreadyFull() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
int length = manager.getFrameBytes();
Frame frame1 = manager.getFrame(new byte[length], 0, 0);
Frame frame1 = manager.getFrame(new byte[length], 0);
assertNotNull(frame1);
// Since frame1 is already taken and poolSize = 1, getFrame() would return null.
// To create a new frame, freeze the first one.
@@ -73,12 +75,12 @@ public class ByteBufferFrameManagerTest extends BaseTest {
@Test
public void testOnFrameReleased_sameLength() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
int length = manager.getFrameBytes();
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
Frame frame = manager.getFrame(picture, 0);
assertNotNull(frame);
// Release the frame and ensure that onBufferAvailable is called.
@@ -90,16 +92,16 @@ public class ByteBufferFrameManagerTest extends BaseTest {
@Test
public void testOnFrameReleased_differentLength() {
ByteBufferFrameManager manager = new ByteBufferFrameManager(1, callback);
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
int length = manager.getFrameBytes();
// A camera preview frame comes. Request a frame.
byte[] picture = new byte[length];
Frame frame = manager.getFrame(picture, 0, 0);
Frame frame = manager.getFrame(picture, 0);
assertNotNull(frame);
// Don't release the frame. Change the allocation size.
manager.setUp(ImageFormat.NV16, new Size(15, 15));
manager.setUp(ImageFormat.NV16, new Size(15, 15), angles);
// Now release the old frame and ensure that onBufferAvailable is NOT called,
// because the released data has wrong length.

@@ -8,6 +8,7 @@ import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import com.otaliastudios.cameraview.BaseTest;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.size.Size;
import org.junit.Test;
@@ -18,14 +19,14 @@ import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class FrameManagerTest extends BaseTest {
private final Angles angles = new Angles();
@Test
public void testFrameRecycling() {
// A 1-pool manager will always recycle the same frame.
@@ -39,12 +40,12 @@ public class FrameManagerTest extends BaseTest {
return data;
}
};
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
Frame first = manager.getFrame("foo", 0, 0);
Frame first = manager.getFrame("foo", 0);
assertNotNull(first);
first.release();
Frame second = manager.getFrame("bar", 0, 0);
Frame second = manager.getFrame("bar", 0);
assertNotNull(second);
second.release();
assertEquals(first, second);
@@ -62,11 +63,11 @@ public class FrameManagerTest extends BaseTest {
return data;
}
};
manager.setUp(ImageFormat.NV21, new Size(50, 50));
manager.setUp(ImageFormat.NV21, new Size(50, 50), angles);
Frame first = manager.getFrame("foo", 0, 0);
Frame first = manager.getFrame("foo", 0);
assertNotNull(first);
Frame second = manager.getFrame("bar", 0, 0);
Frame second = manager.getFrame("bar", 0);
assertNull(second);
}
}

@@ -231,7 +231,7 @@ public class Camera1Engine extends CameraBaseEngine implements
mCamera.setPreviewCallbackWithBuffer(null); // Release anything left
mCamera.setPreviewCallbackWithBuffer(this); // Add ourselves
getFrameManager().setUp(PREVIEW_FORMAT, mPreviewStreamSize);
getFrameManager().setUp(PREVIEW_FORMAT, mPreviewStreamSize, getAngles());
LOG.i("onStartPreview", "Starting preview with startPreview().");
try {
@@ -795,9 +795,7 @@ public class Camera1Engine extends CameraBaseEngine implements
// Seen this happen in logs.
return;
}
int rotation = getAngles().offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
Frame frame = getFrameManager().getFrame(data, System.currentTimeMillis(), rotation);
Frame frame = getFrameManager().getFrame(data, System.currentTimeMillis());
if (frame != null) {
getCallback().dispatchFrame(frame);
}

@@ -70,7 +70,6 @@ import com.otaliastudios.cameraview.size.Size;
import com.otaliastudios.cameraview.video.Full2VideoRecorder;
import com.otaliastudios.cameraview.video.SnapshotVideoRecorder;
import java.io.FileDescriptor;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
@@ -609,7 +608,7 @@ public class Camera2Engine extends CameraBaseEngine implements
mPreview.setStreamSize(previewSizeForView.getWidth(), previewSizeForView.getHeight());
mPreview.setDrawRotation(getAngles().offset(Reference.BASE, Reference.VIEW, Axis.ABSOLUTE));
if (hasFrameProcessors()) {
getFrameManager().setUp(mFrameProcessingFormat, mFrameProcessingSize);
getFrameManager().setUp(mFrameProcessingFormat, mFrameProcessingSize, getAngles());
}
LOG.i("onStartPreview:", "Starting preview.");
@@ -1443,10 +1442,7 @@ public class Camera2Engine extends CameraBaseEngine implements
// After preview, the frame manager is correctly set up
//noinspection unchecked
Frame frame = getFrameManager().getFrame(image,
System.currentTimeMillis(),
getAngles().offset(Reference.SENSOR,
Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR));
System.currentTimeMillis());
if (frame != null) {
LOG.v("onImageAvailable:", "Image acquired, dispatching.");
getCallback().dispatchFrame(frame);

@@ -177,7 +177,7 @@ public abstract class BaseFilter implements Filter {
}
@Override
public void draw(long timestampUs, float[] transformMatrix) {
public void draw(long timestampUs, @NonNull float[] transformMatrix) {
if (programHandle == -1) {
LOG.w("Filter.draw() called after destroying the filter. " +
"This can happen rarely because of threading.");
@@ -188,7 +188,7 @@ public abstract class BaseFilter implements Filter {
}
}
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
// Copy the model / view / projection matrix over.
GLES20.glUniformMatrix4fv(vertexModelViewProjectionMatrixLocation, 1,
false, GlUtils.IDENTITY_MATRIX, 0);

@@ -70,7 +70,7 @@ public interface Filter {
* @param timestampUs timestamp in microseconds
* @param transformMatrix matrix
*/
void draw(long timestampUs, float[] transformMatrix);
void draw(long timestampUs, @NonNull float[] transformMatrix);
/**
* Called anytime the output size changes.

@@ -237,7 +237,7 @@ public class MultiFilter implements Filter, OneParameterFilter, TwoParameterFilter {
}
@Override
public void draw(long timestampUs, float[] transformMatrix) {
public void draw(long timestampUs, @NonNull float[] transformMatrix) {
synchronized (lock) {
for (int i = 0; i < filters.size(); i++) {
boolean isFirst = i == 0;

@@ -117,7 +117,7 @@ public class AutoFixFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");

@@ -86,7 +86,7 @@ public class BrightnessFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(brightnessLocation, brightness);
GlUtils.checkError("glUniform1f");

@@ -88,7 +88,7 @@ public class ContrastFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(contrastLocation, contrast);
GlUtils.checkError("glUniform1f");

@@ -93,7 +93,7 @@ public class DocumentaryFilter extends BaseFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (mWidth > mHeight) {

@@ -137,7 +137,7 @@ public class DuotoneFilter extends BaseFilter implements TwoParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] first = new float[]{
Color.red(mFirstColor) / 255f,

@@ -95,7 +95,7 @@ public class FillLightFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float amount = 1.0f - strength;
float multiplier = 1.0f / (amount * 0.7f + 0.3f);

@@ -83,7 +83,7 @@ public class GammaFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(gammaLocation, gamma);
GlUtils.checkError("glUniform1f");

@@ -138,7 +138,7 @@ public class GrainFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(strengthLocation, strength);
GlUtils.checkError("glUniform1f");

@@ -96,7 +96,7 @@ public class HueFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
// map it on 360 degree circle
float shaderHue = ((hue - 45) / 45f + 0.5f) * -1;

@@ -141,7 +141,7 @@ public class LomoishFilter extends BaseFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (width > height) {

@@ -106,7 +106,7 @@ public class SaturationFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
if (scale > 0.0f) {
GLES20.glUniform1f(scaleLocation, 0F);

@@ -116,7 +116,7 @@ public class SharpnessFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");

@@ -94,7 +94,7 @@ public class TemperatureFilter extends BaseFilter implements OneParameterFilter
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
GLES20.glUniform1f(scaleLocation, scale);
GlUtils.checkError("glUniform1f");

@@ -91,7 +91,7 @@ public class TintFilter extends BaseFilter implements OneParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] channels = new float[]{
Color.red(tint) / 255f,

@@ -145,7 +145,7 @@ public class VignetteFilter extends BaseFilter implements TwoParameterFilter {
}
@Override
protected void onPreDraw(long timestampUs, float[] transformMatrix) {
protected void onPreDraw(long timestampUs, @NonNull float[] transformMatrix) {
super.onPreDraw(timestampUs, transformMatrix);
float[] scale = new float[2];
if (mWidth > mHeight) {

@@ -4,6 +4,7 @@ package com.otaliastudios.cameraview.frame;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.size.Size;
import java.util.concurrent.LinkedBlockingQueue;
@@ -21,14 +22,14 @@ import java.util.concurrent.LinkedBlockingQueue;
*
* 1. {@link #BUFFER_MODE_DISPATCH}: in this mode, as soon as we have a buffer, it is dispatched to
* the {@link BufferCallback}. The callback should then fill the buffer, and finally call
* {@link FrameManager#getFrame(Object, long, int)} to receive a frame.
* {@link FrameManager#getFrame(Object, long)} to receive a frame.
* This is used for Camera1.
*
* 2. {@link #BUFFER_MODE_ENQUEUE}: in this mode, the manager internally keeps a queue of byte
* buffers, instead of handing them to the callback. The users can ask for buffers through
* {@link #getBuffer()}.
* This buffer can be filled with data and used to get a frame
* {@link FrameManager#getFrame(Object, long, int)}, or, in case it was not filled, returned to
* {@link FrameManager#getFrame(Object, long)}, or, in case it was not filled, returned to
* the queue using {@link #onBufferUnused(byte[])}.
* This is used for Camera2.
*/
@@ -60,7 +61,7 @@ public class ByteBufferFrameManager extends FrameManager<byte[]> {
/**
* Construct a new frame manager.
* The construction must be followed by an {@link #setUp(int, Size)} call
* The construction must be followed by an {@link FrameManager#setUp(int, Size, Angles)} call
* as soon as the parameters are known.
*
* @param poolSize the size of the backing pool.
@@ -79,8 +80,8 @@ public class ByteBufferFrameManager extends FrameManager<byte[]> {
@Override
public void setUp(int format, @NonNull Size size) {
super.setUp(format, size);
public void setUp(int format, @NonNull Size size, @NonNull Angles angles) {
super.setUp(format, size, angles);
int bytes = getFrameBytes();
for (int i = 0; i < getPoolSize(); i++) {
if (mBufferMode == BUFFER_MODE_DISPATCH) {
@@ -97,7 +98,7 @@ public class ByteBufferFrameManager extends FrameManager<byte[]> {
* manager also holds a queue of the byte buffers.
*
* If not null, the buffer returned by this method can be filled and used to get
* a new frame through {@link FrameManager#getFrame(Object, long, int)}.
* a new frame through {@link FrameManager#getFrame(Object, long)}.
*
* @return a buffer, or null
*/

@@ -22,7 +22,8 @@ public class Frame {
private Object mData = null;
private long mTime = -1;
private long mLastTime = -1;
private int mRotation = 0;
private int mUserRotation = 0;
private int mViewRotation = 0;
private Size mSize = null;
private int mFormat = -1;
@@ -31,13 +32,15 @@ public class Frame {
mDataClass = manager.getFrameDataClass();
}
void setContent(@NonNull Object data, long time, int rotation, @NonNull Size size, int format) {
this.mData = data;
this.mTime = time;
this.mLastTime = time;
this.mRotation = rotation;
this.mSize = size;
this.mFormat = format;
void setContent(@NonNull Object data, long time, int userRotation, int viewRotation,
@NonNull Size size, int format) {
mData = data;
mTime = time;
mLastTime = time;
mUserRotation = userRotation;
mViewRotation = viewRotation;
mSize = size;
mFormat = format;
}
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
@@ -75,7 +78,7 @@ public class Frame {
Frame other = new Frame(mManager);
//noinspection unchecked
Object data = mManager.cloneFrameData(getData());
other.setContent(data, mTime, mRotation, mSize, mFormat);
other.setContent(data, mTime, mUserRotation, mViewRotation, mSize, mFormat);
return other;
}
@@ -88,7 +91,8 @@ public class Frame {
LOG.v("Frame with time", mTime, "is being released.");
Object data = mData;
mData = null;
mRotation = 0;
mUserRotation = 0;
mViewRotation = 0;
mTime = -1;
mSize = null;
mFormat = -1;
@@ -133,16 +137,36 @@ public class Frame {
return mTime;
}
/**
* @deprecated use {@link #getRotationToUser()} instead
*/
@Deprecated
public int getRotation() {
return getRotationToUser();
}
/**
* Returns the clock-wise rotation that should be applied on the data
* array, such that the resulting frame matches what the user is seeing
* on screen.
* on screen. Knowing this can help in the processing phase.
*
* @return clock-wise rotation
*/
public int getRotation() {
public int getRotationToUser() {
ensureHasContent();
return mUserRotation;
}
/**
* Returns the clock-wise rotation that should be applied on the data
* array, such that the resulting frame matches the View / Activity orientation.
* Knowing this can help in the drawing / rendering phase.
*
* @return clock-wise rotation
*/
public int getRotationToView() {
ensureHasContent();
return mRotation;
return mViewRotation;
}
/**

@@ -4,6 +4,9 @@ package com.otaliastudios.cameraview.frame;
import android.graphics.ImageFormat;
import com.otaliastudios.cameraview.CameraLogger;
import com.otaliastudios.cameraview.engine.offset.Angles;
import com.otaliastudios.cameraview.engine.offset.Axis;
import com.otaliastudios.cameraview.engine.offset.Reference;
import com.otaliastudios.cameraview.size.Size;
import androidx.annotation.NonNull;
@@ -16,9 +19,9 @@ import java.util.concurrent.LinkedBlockingQueue;
* The FrameManager keeps a {@link #mPoolSize} integer that defines the number of instances to keep.
*
* Main methods are:
* - {@link #setUp(int, Size)}: to set up with size and allocate buffers
* - {@link #setUp(int, Size, Angles)}: to set up with size and allocate buffers
* - {@link #release()}: to release. After release, a manager can be setUp again.
* - {@link #getFrame(Object, long, int)}: gets a new {@link Frame}.
* - {@link #getFrame(Object, long)}: gets a new {@link Frame}.
*
* For frames to get back to the FrameManager pool, all you have to do
* is call {@link Frame#release()} when done.
@@ -34,11 +37,12 @@ public abstract class FrameManager<T> {
private int mFrameFormat = -1;
private final Class<T> mFrameDataClass;
private LinkedBlockingQueue<Frame> mFrameQueue;
private Angles mAngles;
/**
* Construct a new frame manager.
* The construction must be followed by an {@link #setUp(int, Size)} call
* The construction must be followed by an {@link #setUp(int, Size, Angles)} call
* as soon as the parameters are known.
*
* @param poolSize the size of the backing pool.
@@ -80,11 +84,11 @@ public abstract class FrameManager<T> {
* the preview size and the image format value are known.
*
* This method can be called again after {@link #release()} has been called.
*
* @param format the image format
* @param format the image format
* @param size the frame size
* @param angles angle object
*/
public void setUp(int format, @NonNull Size size) {
public void setUp(int format, @NonNull Size size, @NonNull Angles angles) {
if (isSetUp()) {
// TODO throw or just reconfigure?
}
@@ -96,10 +100,11 @@ public abstract class FrameManager<T> {
for (int i = 0; i < getPoolSize(); i++) {
mFrameQueue.offer(new Frame(this));
}
mAngles = angles;
}
/**
* Returns true after {@link #setUp(int, Size)}
* Returns true after {@link #setUp(int, Size, Angles)}
* but before {@link #release()}.
* Returns false otherwise.
*
@@ -111,16 +116,15 @@ public abstract class FrameManager<T> {
/**
* Returns a new Frame for the given data. This must be called
* - after {@link #setUp(int, Size)}, which sets the buffer size
* - after {@link #setUp(int, Size, Angles)}, which sets the buffer size
* - after the T data has been filled
*
* @param data data
* @param time timestamp
* @param rotation rotation
* @return a new frame
*/
@Nullable
public Frame getFrame(@NonNull T data, long time, int rotation) {
public Frame getFrame(@NonNull T data, long time) {
if (!isSetUp()) {
throw new IllegalStateException("Can't call getFrame() after releasing " +
"or before setUp.");
@@ -129,7 +133,11 @@ public abstract class FrameManager<T> {
Frame frame = mFrameQueue.poll();
if (frame != null) {
LOG.v("getFrame for time:", time, "RECYCLING.");
frame.setContent(data, time, rotation, mFrameSize, mFrameFormat);
int userRotation = mAngles.offset(Reference.SENSOR, Reference.OUTPUT,
Axis.RELATIVE_TO_SENSOR);
int viewRotation = mAngles.offset(Reference.SENSOR, Reference.VIEW,
Axis.RELATIVE_TO_SENSOR);
frame.setContent(data, time, userRotation, viewRotation, mFrameSize, mFrameFormat);
return frame;
} else {
LOG.i("getFrame for time:", time, "NOT AVAILABLE.");
@@ -183,5 +191,6 @@ public abstract class FrameManager<T> {
mFrameBytes = -1;
mFrameSize = null;
mFrameFormat = -1;
mAngles = null;
}
}

@@ -81,7 +81,7 @@ public class Snapshot1PictureRecorder extends SnapshotPictureRecorder {
// It seems that the buffers are already cleared here, so we need to allocate again.
camera.setPreviewCallbackWithBuffer(null); // Release anything left
camera.setPreviewCallbackWithBuffer(mEngine1); // Add ourselves
mEngine1.getFrameManager().setUp(mFormat, previewStreamSize);
mEngine1.getFrameManager().setUp(mFormat, previewStreamSize, mEngine1.getAngles());
}
});
}

@@ -46,19 +46,19 @@ public class FrameTest {
// Only time should count.
Frame f1 = new Frame(manager);
long time = 1000;
f1.setContent("foo", time, 90, new Size(5, 5), ImageFormat.NV21);
f1.setContent("foo", time, 90, 180, new Size(5, 5), ImageFormat.NV21);
Frame f2 = new Frame(manager);
f2.setContent("bar", time, 0, new Size(10, 10), ImageFormat.NV21);
f2.setContent("bar", time, 0, 90, new Size(10, 10), ImageFormat.NV21);
assertEquals(f1, f2);
f2.setContent("foo", time + 1, 0, new Size(10, 10), ImageFormat.NV21);
f2.setContent("foo", time + 1, 0, 90, new Size(10, 10), ImageFormat.NV21);
assertNotEquals(f1, f2);
}
@Test
public void testReleaseThrows() {
final Frame frame = new Frame(manager);
frame.setContent("foo", 1000, 90, new Size(10, 10), ImageFormat.NV21);
frame.setContent("foo", 1000, 90, 90, new Size(10, 10), ImageFormat.NV21);
frame.release();
verify(manager, times(1)).onFrameReleased(frame, "foo");
@@ -83,22 +83,25 @@ public class FrameTest {
Frame frame = new Frame(manager);
String data = "test data";
long time = 1000;
int rotation = 90;
int userRotation = 90;
int viewRotation = 90;
Size size = new Size(10, 10);
int format = ImageFormat.NV21;
frame.setContent(data, time, rotation, size, format);
frame.setContent(data, time, userRotation, viewRotation, size, format);
Frame frozen = frame.freeze();
assertEquals(data, frozen.getData());
assertEquals(time, frozen.getTime());
assertEquals(rotation, frozen.getRotation());
assertEquals(userRotation, frozen.getRotationToUser());
assertEquals(viewRotation, frozen.getRotationToView());
assertEquals(size, frozen.getSize());
// Mutate the first, ensure that frozen is not affected
frame.setContent("new data", 50, 180, new Size(1, 1), ImageFormat.JPEG);
frame.setContent("new data", 50, 180, 180, new Size(1, 1), ImageFormat.JPEG);
assertEquals(data, frozen.getData());
assertEquals(time, frozen.getTime());
assertEquals(rotation, frozen.getRotation());
assertEquals(userRotation, frozen.getRotationToUser());
assertEquals(viewRotation, frozen.getRotationToView());
assertEquals(size, frozen.getSize());
assertEquals(format, frozen.getFormat());
}

@@ -16,10 +16,11 @@ cameraView.addFrameProcessor(new FrameProcessor() {
@Override
@WorkerThread
public void process(@NonNull Frame frame) {
int rotation = frame.getRotation();
long time = frame.getTime();
Size size = frame.getSize();
int format = frame.getFormat();
int userRotation = frame.getRotationToUser();
int viewRotation = frame.getRotationToView();
if (frame.getDataClass() == byte[].class) {
byte[] data = frame.getData();
// Process byte array...
@@ -173,7 +174,8 @@ cameraView.setFrameProcessingPoolSize(3);
|`frame.getDataClass()`|`Class<T>`|The class of the data returned by `getData()`. Either `byte[]` or `android.media.Image`.|
|`frame.getData()`|`T`|The current preview frame, in its original orientation.|
|`frame.getTime()`|`long`|The preview timestamp, in `System.currentTimeMillis()` reference.|
|`frame.getRotation()`|`int`|The rotation that should be applied to the byte array in order to see what the user sees.|
|`frame.getRotationToUser()`|`int`|The rotation that should be applied to the byte array in order to see what the user sees. Can be useful in the processing phase.|
|`frame.getRotationToView()`|`int`|The rotation that should be applied to the byte array in order to match the View / Activity orientation. Can be useful in the drawing / rendering phase.|
|`frame.getSize()`|`Size`|The frame size, before any rotation is applied, to access data.|
|`frame.getFormat()`|`int`|The frame `ImageFormat`. Defaults to `ImageFormat.NV21` for Camera1 and `ImageFormat.YUV_420_888` for Camera2.|
|`frame.freeze()`|`Frame`|Clones this frame and makes it immutable. Can be expensive because requires copying the byte array.|
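To illustrate when each rotation applies, here is a short, hypothetical processor sketch (not taken from the library docs): the data is analyzed using getRotationToUser(), and the results are drawn over the preview using getRotationToView(). The analyzer and overlay objects are placeholders, not CameraView APIs.
cameraView.addFrameProcessor(new FrameProcessor() {
    @Override
    @WorkerThread
    public void process(@NonNull Frame frame) {
        if (frame.getDataClass() != byte[].class) return;
        byte[] data = frame.getData();
        Size size = frame.getSize();
        // Processing phase: orient the data the way the user sees it.
        // 'analyzer' is a placeholder for any detector that takes NV21 data plus a rotation.
        List<RectF> results = analyzer.detect(data, size.getWidth(), size.getHeight(),
                frame.getRotationToUser());
        // Drawing phase: orient the results to the View / Activity orientation.
        // Read the rotation now; the Frame is recycled after process() returns.
        int rotationToView = frame.getRotationToView();
        // 'overlay' is a placeholder custom View that renders the results.
        overlay.post(() -> overlay.setResults(results, rotationToView));
    }
});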
