Add startVideo(File), refactor FocusMarkerLayout, remove static thread

pull/1/head
Mattia Iavarone 7 years ago
parent 132a3474fd
commit 5bbccb494f
  1. README.md (2 changed lines)
  2. camerakit/src/main/api16/com/flurgle/camerakit/Camera1.java (126 changed lines)
  3. camerakit/src/main/api21/com/flurgle/camerakit/Camera2.java (6 changed lines)
  4. camerakit/src/main/base/com/flurgle/camerakit/CameraImpl.java (5 changed lines)
  5. camerakit/src/main/java/com/flurgle/camerakit/CameraView.java (104 changed lines)
  6. camerakit/src/main/java/com/flurgle/camerakit/FocusMarkerLayout.java (12 changed lines)

@@ -2,8 +2,8 @@
*A fork of [Dylan McIntyre's CameraKit-Android library](https://github.com/gogopop/CameraKit-Android), originally a fork of [Google's CameraView library](https://github.com/google/cameraview). Right now this is like CameraKit-Android, but with *a lot* of serious bugs fixed, new sizing behavior, better orientation and EXIF support, new `setLocation` and `setWhiteBalance` APIs. Feel free to open issues with suggestions or contribute. Roadmap:*
- [x] *delete `captureMethod` and `permissionPolicy`, replace with `sessionType` (either picture or video) such that when `sessionType=video`, pictures are captured with the fast 'frame' method*
- [x] *pass a nullable File to startVideo, so user can choose where to save the file*
- [ ] *test video and 'frame' capture behavior, I expect some bugs there*
- [ ] *pass a nullable File to startVideo, so user can choose where to save the file*
- [ ] *simple APIs to draw grid lines*
- [ ] *rethink `adjustViewBounds`, maybe replace with a `scaleType` flag (center crop or center inside)*
- [ ] *add a `sizingMethod` API to choose the capture size? Could be `max`, `4:3`, `16:9`... Right now it's `max`*
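A hedged usage sketch of the API surface described by this roadmap. Only `setFocus`, `CameraKit.Constants.FOCUS_TAP_WITH_MARKER` and the session-type handling appear in this commit; `setSessionType`, `captureImage` as a public method, and the layout/resource ids are assumptions for illustration.

```java
import android.app.Activity;
import android.os.Bundle;

import com.flurgle.camerakit.CameraKit;
import com.flurgle.camerakit.CameraView;

// Illustrative sketch only; names not present in this diff are assumptions.
public class PictureActivity extends Activity {

    private CameraView camera;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_picture);        // assumed layout containing a CameraView
        camera = (CameraView) findViewById(R.id.camera);  // assumed view id

        // sessionType replaces captureMethod/permissionPolicy: with a video session,
        // still pictures are grabbed with the fast 'frame' method. (setSessionType is assumed.)
        camera.setSessionType(CameraKit.Constants.SESSION_TYPE_VIDEO);

        // Tap-to-focus with the built-in marker overlay (FocusMarkerLayout).
        camera.setFocus(CameraKit.Constants.FOCUS_TAP_WITH_MARKER);
    }

    private void snap() {
        // Picture delivery goes through the CameraListener callbacks (not shown in this diff).
        camera.captureImage();
    }
}
```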

@@ -447,9 +447,13 @@ class Camera1 extends CameraImpl {
}
// -----------------
// Video recording stuff.
@Override
void startVideo() {
void startVideo(@NonNull File videoFile) {
mVideoFile = videoFile;
if (mSessionType == SESSION_TYPE_VIDEO) {
initMediaRecorder();
try {
@@ -488,15 +492,12 @@ class Camera1 extends CameraImpl {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mMediaRecorder.setProfile(getCamcorderProfile(mVideoQuality));
mVideoFile = new File(mPreview.getView().getContext().getExternalFilesDir(null), "video.mp4");
mMediaRecorder.setOutputFile(mVideoFile.getAbsolutePath());
mMediaRecorder.setOrientationHint(computeCameraToDisplayOffset()); // TODO is this correct? Should we use exif orientation? Maybe not.
// Not needed. mMediaRecorder.setPreviewDisplay(mPreview.getSurface());
}
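For reference, a minimal sketch of the MediaRecorder call order that initMediaRecorder relies on: audio/video sources before the profile, then the output file and orientation hint, then prepare(). This is generic Android usage under the stated assumptions, not the library's exact code.

```java
import android.hardware.Camera;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import java.io.File;
import java.io.IOException;

// Generic sketch of the MediaRecorder setup sequence; not the library's exact code.
class RecorderSketch {

    static MediaRecorder prepare(Camera camera, File output, int rotationHint) throws IOException {
        camera.unlock(); // the Camera must be unlocked before MediaRecorder can use it
        MediaRecorder recorder = new MediaRecorder();
        recorder.setCamera(camera);
        recorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH)); // sources must be set first
        recorder.setOutputFile(output.getAbsolutePath());
        recorder.setOrientationHint(rotationHint); // rotation metadata written into the file
        recorder.prepare();
        return recorder; // caller then invokes start(), and stop()/reset()/release() when finished
    }
}
```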
@NonNull
private CamcorderProfile getCamcorderProfile(@VideoQuality int videoQuality) {
switch (videoQuality) {
@@ -542,90 +543,96 @@ class Camera1 extends CameraImpl {
}
// -----------------
// Tap to focus stuff.
void setTapToAutofocusListener(Camera.AutoFocusCallback callback) {
if (this.mFocus != FOCUS_TAP) {
throw new IllegalArgumentException("Please set the camera to FOCUS_TAP.");
}
this.mAutofocusCallback = callback;
}
private int getFocusAreaSize() {
return FOCUS_AREA_SIZE_DEFAULT;
}
private int getFocusMeteringAreaWeight() {
return FOCUS_METERING_AREA_WEIGHT_DEFAULT;
}
private void detachFocusTapListener() {
mPreview.getView().setOnTouchListener(null);
}
private void attachFocusTapListener() {
mPreview.getView().setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
if (mCamera != null) {
Camera.Parameters parameters = mCamera.getParameters();
String focusMode = parameters.getFocusMode();
Rect rect = calculateFocusArea(event.getX(), event.getY());
List<Camera.Area> meteringAreas = new ArrayList<>();
meteringAreas.add(new Camera.Area(rect, getFocusMeteringAreaWeight()));
if (parameters.getMaxNumFocusAreas() != 0 && focusMode != null &&
(focusMode.equals(Camera.Parameters.FOCUS_MODE_AUTO) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_MACRO) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(meteringAreas);
if (parameters.getMaxNumMeteringAreas() > 0) {
parameters.setMeteringAreas(meteringAreas);
}
if(!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
return false; //cannot autoFocus
}
mCamera.setParameters(parameters);
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
resetFocus(success, camera);
}
});
} else if (parameters.getMaxNumMeteringAreas() > 0) {
if(!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
return false; //cannot autoFocus
}
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(meteringAreas);
parameters.setMeteringAreas(meteringAreas);
mCamera.setParameters(parameters);
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
resetFocus(success, camera);
}
});
} else {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (mAutofocusCallback != null) {
mAutofocusCallback.onAutoFocus(success, camera);
}
}
});
if (event.getAction() != MotionEvent.ACTION_UP) return false;
if (mCamera == null) return false;
Camera.Parameters parameters = mCamera.getParameters();
String focusMode = parameters.getFocusMode();
Rect rect = calculateFocusArea(event.getX(), event.getY());
List<Camera.Area> meteringAreas = new ArrayList<>();
meteringAreas.add(new Camera.Area(rect, getFocusMeteringAreaWeight()));
if (parameters.getMaxNumFocusAreas() != 0 && focusMode != null &&
(focusMode.equals(Camera.Parameters.FOCUS_MODE_AUTO) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_MACRO) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE) ||
focusMode.equals(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(meteringAreas);
if (parameters.getMaxNumMeteringAreas() > 0) {
parameters.setMeteringAreas(meteringAreas);
}
if(!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
return false; //cannot autoFocus
}
mCamera.setParameters(parameters);
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
resetFocus(success, camera);
}
});
} else if (parameters.getMaxNumMeteringAreas() > 0) {
if(!parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
return false; //cannot autoFocus
}
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
parameters.setFocusAreas(meteringAreas);
parameters.setMeteringAreas(meteringAreas);
mCamera.setParameters(parameters);
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
resetFocus(success, camera);
}
});
} else {
mCamera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (mAutofocusCallback != null) {
mAutofocusCallback.onAutoFocus(success, camera);
}
}
});
}
return true;
}
});
}
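The listener above converts view-relative touch coordinates into the fixed [-1000, 1000] coordinate space that Camera.Area uses (see calculateFocusArea / calculateCenter below). A standalone sketch of that mapping with the clamping spelled out; class and method names here are illustrative.

```java
import android.graphics.Rect;

// Standalone sketch of the view-to-driver coordinate mapping used for tap-to-focus.
// Camera.Area coordinates always span -1000..1000, regardless of the preview size.
final class FocusAreaMath {

    // Map a touch coordinate (0..dimen) to -1000..1000, keeping the whole
    // focus square inside the valid range.
    static int center(float coord, int dimen, int halfAreaSize) {
        int normalized = (int) ((coord / dimen) * 2000 - 1000);
        if (normalized + halfAreaSize > 1000) return 1000 - halfAreaSize;
        if (normalized - halfAreaSize < -1000) return -1000 + halfAreaSize;
        return normalized;
    }

    // Example: a tap at the exact center of the preview yields Rect(-150, -150, 150, 150)
    // for an area size of 300.
    static Rect focusRect(float x, float y, int viewWidth, int viewHeight, int areaSize) {
        int half = areaSize / 2;
        int cx = center(x, viewWidth, half);
        int cy = center(y, viewHeight, half);
        return new Rect(cx - half, cy - half, cx + half, cy + half);
    }
}
```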
private void resetFocus(final boolean success, final Camera camera) {
mHandler.removeCallbacksAndMessages(null);
mHandler.postDelayed(new Runnable() {
@@ -649,6 +656,7 @@ class Camera1 extends CameraImpl {
}, DELAY_MILLIS_BEFORE_RESETTING_FOCUS);
}
private Rect calculateFocusArea(float x, float y) {
int buffer = getFocusAreaSize() / 2;
int centerX = calculateCenter(x, mPreview.getView().getWidth(), buffer);
@@ -661,6 +669,7 @@ class Camera1 extends CameraImpl {
);
}
private static int calculateCenter(float coord, int dimen, int buffer) {
int normalized = (int) ((coord / dimen) * 2000 - 1000);
if (Math.abs(normalized) + buffer > 1000) {
@@ -675,9 +684,10 @@ class Camera1 extends CameraImpl {
}
// -----------------
// Size static stuff.
/**
* Returns a list of {@link Size} out of Camera.Sizes.
*/

@@ -8,10 +8,12 @@ import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import android.util.SizeF;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -34,7 +36,7 @@ class Camera2 extends CameraImpl {
private ConstantMapper.MapperImpl mMapper = new ConstantMapper.Mapper2();
private final HashMap<String, ExtraProperties> mExtraPropertiesMap = new HashMap<>();
Camera2(CameraListener callback, PreviewImpl preview, Context context) {
Camera2(CameraView.CameraListenerWrapper callback, PreviewImpl preview, Context context) {
super(callback, preview);
preview.setCallback(new PreviewImpl.OnPreviewSurfaceChangedCallback() {
@Override
@@ -177,7 +179,7 @@ class Camera2 extends CameraImpl {
}
@Override
void startVideo() {
void startVideo(@NonNull File videoFile) {
}

@@ -1,7 +1,10 @@
package com.flurgle.camerakit;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.io.File;
abstract class CameraImpl {
protected final CameraView.CameraListenerWrapper mCameraListener;
@@ -28,7 +31,7 @@ abstract class CameraImpl {
abstract void setLocation(double latitude, double longitude);
abstract void captureImage();
abstract void startVideo();
abstract void startVideo(@NonNull File file);
abstract void endVideo();
abstract Size getCaptureSize();

@@ -60,14 +60,16 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private final static String TAG = CameraView.class.getSimpleName();
private static Handler sWorkerHandler;
private static Handler getWorkerHandler() {
if (sWorkerHandler == null) {
HandlerThread workerThread = new HandlerThread("CameraViewWorker");
workerThread.setDaemon(true);
workerThread.start();
sWorkerHandler = new Handler(workerThread.getLooper());
private Handler sWorkerHandler;
private Handler getWorkerHandler() {
synchronized (this) {
if (sWorkerHandler == null) {
HandlerThread workerThread = new HandlerThread("CameraViewWorker");
workerThread.setDaemon(true);
workerThread.start();
sWorkerHandler = new Handler(workerThread.getLooper());
}
}
return sWorkerHandler;
}
@@ -94,6 +96,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
private PreviewImpl mPreviewImpl;
private Lifecycle mLifecycle;
private FocusMarkerLayout mFocusMarkerLayout;
private boolean mIsStarted;
public CameraView(@NonNull Context context) {
@@ -108,29 +111,24 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
@SuppressWarnings("WrongConstant")
private void init(@NonNull Context context, @Nullable AttributeSet attrs) {
if (attrs != null) {
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
try {
mFacing = a.getInteger(R.styleable.CameraView_cameraFacing, CameraKit.Defaults.DEFAULT_FACING);
mFlash = a.getInteger(R.styleable.CameraView_cameraFlash, CameraKit.Defaults.DEFAULT_FLASH);
mFocus = a.getInteger(R.styleable.CameraView_cameraFocus, CameraKit.Defaults.DEFAULT_FOCUS);
// mMethod = a.getInteger(R.styleable.CameraView_cameraCaptureMethod, CameraKit.Defaults.DEFAULT_METHOD);
// mPermissions = a.getInteger(R.styleable.CameraView_cameraPermissionPolicy, CameraKit.Defaults.DEFAULT_PERMISSIONS);
mSessionType = a.getInteger(R.styleable.CameraView_cameraSessionType, CameraKit.Defaults.DEFAULT_SESSION_TYPE);
mZoom = a.getInteger(R.styleable.CameraView_cameraZoomMode, CameraKit.Defaults.DEFAULT_ZOOM);
mWhiteBalance = a.getInteger(R.styleable.CameraView_cameraWhiteBalance, CameraKit.Defaults.DEFAULT_WHITE_BALANCE);
mVideoQuality = a.getInteger(R.styleable.CameraView_cameraVideoQuality, CameraKit.Defaults.DEFAULT_VIDEO_QUALITY);
mJpegQuality = a.getInteger(R.styleable.CameraView_cameraJpegQuality, CameraKit.Defaults.DEFAULT_JPEG_QUALITY);
mCropOutput = a.getBoolean(R.styleable.CameraView_cameraCropOutput, CameraKit.Defaults.DEFAULT_CROP_OUTPUT);
mAdjustViewBounds = a.getBoolean(R.styleable.CameraView_android_adjustViewBounds, CameraKit.Defaults.DEFAULT_ADJUST_VIEW_BOUNDS);
} finally {
a.recycle();
}
}
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.CameraView, 0, 0);
mFacing = a.getInteger(R.styleable.CameraView_cameraFacing, CameraKit.Defaults.DEFAULT_FACING);
mFlash = a.getInteger(R.styleable.CameraView_cameraFlash, CameraKit.Defaults.DEFAULT_FLASH);
mFocus = a.getInteger(R.styleable.CameraView_cameraFocus, CameraKit.Defaults.DEFAULT_FOCUS);
mSessionType = a.getInteger(R.styleable.CameraView_cameraSessionType, CameraKit.Defaults.DEFAULT_SESSION_TYPE);
mZoom = a.getInteger(R.styleable.CameraView_cameraZoomMode, CameraKit.Defaults.DEFAULT_ZOOM);
mWhiteBalance = a.getInteger(R.styleable.CameraView_cameraWhiteBalance, CameraKit.Defaults.DEFAULT_WHITE_BALANCE);
mVideoQuality = a.getInteger(R.styleable.CameraView_cameraVideoQuality, CameraKit.Defaults.DEFAULT_VIDEO_QUALITY);
mJpegQuality = a.getInteger(R.styleable.CameraView_cameraJpegQuality, CameraKit.Defaults.DEFAULT_JPEG_QUALITY);
mCropOutput = a.getBoolean(R.styleable.CameraView_cameraCropOutput, CameraKit.Defaults.DEFAULT_CROP_OUTPUT);
mAdjustViewBounds = a.getBoolean(R.styleable.CameraView_android_adjustViewBounds, CameraKit.Defaults.DEFAULT_ADJUST_VIEW_BOUNDS);
a.recycle();
mCameraListener = new CameraListenerWrapper();
mPreviewImpl = new TextureViewPreview(context, this);
mCameraImpl = new Camera1(mCameraListener, mPreviewImpl);
mFocusMarkerLayout = new FocusMarkerLayout(context);
addView(mFocusMarkerLayout);
mIsStarted = false;
setFacing(mFacing);
@@ -156,9 +154,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
}
};
final FocusMarkerLayout focusMarkerLayout = new FocusMarkerLayout(getContext());
addView(focusMarkerLayout);
focusMarkerLayout.setOnTouchListener(new OnTouchListener() {
/* focusMarkerLayout.setOnTouchListener(new OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent motionEvent) {
int action = motionEvent.getAction();
@@ -169,7 +165,7 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mPreviewImpl.getView().dispatchTouchEvent(motionEvent);
return true;
}
});
}); */
}
mLifecycle = null;
}
@@ -552,13 +548,12 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
* @param focus a Focus value.
*/
public void setFocus(@Focus int focus) {
this.mFocus = focus;
if (this.mFocus == CameraKit.Constants.FOCUS_TAP_WITH_MARKER) {
mCameraImpl.setFocus(CameraKit.Constants.FOCUS_TAP);
return;
mFocus = focus;
mFocusMarkerLayout.setEnabled(focus == CameraKit.Constants.FOCUS_TAP_WITH_MARKER);
if (focus == CameraKit.Constants.FOCUS_TAP_WITH_MARKER) {
focus = CameraKit.Constants.FOCUS_TAP;
}
mCameraImpl.setFocus(mFocus);
mCameraImpl.setFocus(focus);
}
@@ -669,14 +664,43 @@ public class CameraView extends FrameLayout implements LifecycleObserver {
mCameraImpl.captureImage();
}
public void startRecordingVideo() {
mCameraImpl.startVideo();
/**
* Starts recording a video with selected options, saving it to a file named
* "video.mp4" in the app's external files directory.
* This is discouraged; please use {@link #startCapturingVideo(File)} instead.
*
* @deprecated see {@link #startCapturingVideo(File)}
*/
@Deprecated
public void startCapturingVideo() {
startCapturingVideo(null);
}
public void stopRecordingVideo() {
/**
* Starts recording a video with selected options. Video will be written to the given file,
* so callers should ensure they have appropriate permissions to write to the file.
*
* @param file a file where the video will be saved
*/
public void startCapturingVideo(File file) {
if (file == null) {
file = new File(getContext().getExternalFilesDir(null), "video.mp4");
}
mCameraImpl.startVideo(file);
}
/**
* Stops capturing video, if a video recording was in progress.
* This will fire {@link CameraListener#onVideoTaken(File)}.
*/
public void stopCapturingVideo() {
mCameraImpl.endVideo();
}
/**
* Returns the size used for the preview,
* or null if it hasn't been computed (for example if the surface is not ready).

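A short usage sketch for the new video API. startCapturingVideo(File), stopCapturingVideo() and CameraListener.onVideoTaken(File) come from this commit; setCameraListener, the output location and the assumption that CameraListener's other callbacks have empty defaults are illustrative.

```java
import java.io.File;

import com.flurgle.camerakit.CameraListener;
import com.flurgle.camerakit.CameraView;

// Sketch: record into a caller-chosen file and react when it has been written.
class VideoCaptureExample {

    void startRecording(final CameraView cameraView) {
        // setCameraListener is assumed; only the onVideoTaken callback appears in this diff.
        cameraView.setCameraListener(new CameraListener() {
            @Override
            public void onVideoTaken(File video) {
                // Fired after stopCapturingVideo(); 'video' is the file passed below.
            }
        });

        File output = new File(cameraView.getContext().getExternalFilesDir(null), "clip.mp4");
        cameraView.startCapturingVideo(output); // pass null to fall back to the default "video.mp4"
    }

    void stopRecording(CameraView cameraView) {
        cameraView.stopCapturingVideo();
    }
}
```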
@@ -7,6 +7,8 @@ import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
@@ -25,10 +27,18 @@ public class FocusMarkerLayout extends FrameLayout {
mFocusMarkerContainer = (FrameLayout) findViewById(R.id.focusMarkerContainer);
mFill = (ImageView) findViewById(R.id.fill);
mFocusMarkerContainer.setAlpha(0);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
int action = event.getAction();
if (action == MotionEvent.ACTION_UP && isEnabled()) {
focus(event.getX(), event.getY());
}
return false; // We didn't consume, pass to parent
}
public void focus(float mx, float my) {
int x = (int) (mx - mFocusMarkerContainer.getWidth() / 2);
int y = (int) (my - mFocusMarkerContainer.getWidth() / 2);

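The overlay change above boils down to a simple pattern: react to ACTION_UP when enabled, but return false so the gesture is never consumed and, as the in-code comment puts it, is passed along to the parent. A generic, self-contained sketch of that pattern; the class and callback names are illustrative.

```java
import android.content.Context;
import android.view.MotionEvent;
import android.widget.FrameLayout;

// Generic sketch of a non-consuming overlay: it reacts to taps but never claims
// the gesture, so the views it covers (here, the camera preview) still see it.
class TapOverlay extends FrameLayout {

    TapOverlay(Context context) {
        super(context);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_UP && isEnabled()) {
            onTap(event.getX(), event.getY()); // e.g. show a focus marker at (x, y)
        }
        return false; // not consumed; dispatch continues to other views / the parent
    }

    void onTap(float x, float y) {
        // Marker drawing/animation would go here.
    }
}
```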