Basic working video

pull/1/head
Dylan McIntyre 8 years ago
parent 52ca32f130
commit 5d934abacb
  1. 11
      README.md
  2. 6
      camerakit/src/main/AndroidManifest.xml
  3. 28
      camerakit/src/main/java/com/flurgle/camerakit/Camera1.java
  4. 613
      camerakit/src/main/java/com/flurgle/camerakit/Camera2.java
  5. 21
      camerakit/src/main/java/com/flurgle/camerakit/CameraView.java
  6. 10
      camerakit/src/main/java/com/flurgle/camerakit/CameraViewImpl.java
  7. 81
      camerakit/src/main/java/com/flurgle/camerakit/PictureCaptureCallback.java
  8. 82
      camerakit/src/main/java/com/flurgle/camerakit/encoding/Encoder.java
  9. 12
      camerakit/src/main/java/com/flurgle/camerakit/encoding/Frame.java
  10. 157
      camerakit/src/main/java/com/flurgle/camerakit/encoding/VideoEncoder.java
  11. 2
      camerakit/src/main/res/values/attrs.xml
  12. 16
      demo/src/main/java/com/flurgle/camerakit/demo/MainActivity.java
  13. 6
      demo/src/main/res/layout/activity_main.xml

@ -22,6 +22,7 @@ Try out all the unique features using the CameraKit Demo from the Google Play st
- [`ckPictureMode`](#ckpicturemode)
- [`ckTapToFocus`](#cktaptofocus)
- [`ckAutoFocus`](#ckautofocus)
- [`ckCaptureSize`](#ckcapturesize)
- [Capturing Images](#capturing-images)
- [Capturing Video](#capturing-video)
- [Automatic Permissions Behavior](#automatic-permissions-behavior)
@ -126,7 +127,8 @@ camera.setCameraListener(new CameraListener() {
camerakit:ckFlash="off"
camerakit:ckPictureMode="quality"
camerakit:ckTapToFocus="on"
camerakit:ckAutoFocus="true" />
camerakit:ckAutoFocus="true"
camerakit:ckCaptureSize="8" />
```
#### `ckCropOutput`
@ -175,6 +177,13 @@ camera.setCameraListener(new CameraListener() {
| `true` | Continuously allow the `CameraView` preview to adjust focus automatically. |
| `false` | Never adjust focus during preview. |
#### `ckCaptureSize`
| Value | Description |
| --------------| -------------|
| `n <= 0` | Capture at the highest quality possible. |
| `n > 0` | Capture at a size of approximately `n` megapixels. |
### Capturing Images
To capture an image just call `CameraView.capturePicture()`. Make sure you setup a `CameraListener` to handle the image callback.

@ -8,6 +8,12 @@
<uses-feature
android:name="android.hardware.camera.autofocus"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.front"
android:required="false" />
<uses-feature
android:name="android.hardware.microphone"
android:required="false" />
<application />

@ -27,6 +27,8 @@ public class Camera1 extends CameraViewImpl {
FLASH_MODES.put(CameraKit.Constants.FLASH_AUTO, Camera.Parameters.FLASH_MODE_AUTO);
}
private static File VIDEO_FILE;
private int mCameraId;
Camera mCamera;
@ -36,13 +38,9 @@ public class Camera1 extends CameraViewImpl {
private final Camera.CameraInfo mCameraInfo = new Camera.CameraInfo();
private boolean mShowingPreview;
private boolean mAutoFocus;
private int mFacing;
private int mFlash;
private int mDisplayOrientation;
private SortedSet<Size> mPreviewSizes;
@ -62,9 +60,10 @@ public class Camera1 extends CameraViewImpl {
}
});
mPreviewSizes = new TreeSet<>();
mCaptureSizes = new TreeSet<>();
VIDEO_FILE = new File(getView().getContext().getExternalFilesDir(null), "video.mp4");;
}
@Override
@ -168,6 +167,8 @@ public class Camera1 extends CameraViewImpl {
@Override
void startVideo() {
if (!canRecordAudio()) return;
try {
prepareMediaRecorder();
} catch (IOException e) {
@ -187,10 +188,10 @@ public class Camera1 extends CameraViewImpl {
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setProfile(CamcorderProfile.get(mFacing == CameraKit.Constants.FACING_BACK ? CamcorderProfile.QUALITY_HIGH : CamcorderProfile.QUALITY_LOW));
mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
mMediaRecorder.setOutputFile(new File(getView().getContext().getExternalFilesDir(null), "video.mp4").getAbsolutePath());
mMediaRecorder.setOutputFile(VIDEO_FILE.getAbsolutePath());
mMediaRecorder.setOrientationHint(mCameraInfo.orientation);
mMediaRecorder.prepare();
mMediaRecorder.start();
@ -200,9 +201,11 @@ public class Camera1 extends CameraViewImpl {
void endVideo() {
if (mMediaRecorder != null) {
mMediaRecorder.stop();
mMediaRecorder = null;
}
getCameraListener().onVideoTaken(new File(getView().getContext().getExternalFilesDir(null), "video.mp4"));
getCameraListener().onVideoTaken(VIDEO_FILE);
}
@Override
@ -229,7 +232,7 @@ public class Camera1 extends CameraViewImpl {
private int calcCameraRotation(int rotation) {
if (mCameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
return (360 - (mCameraInfo.orientation + rotation) % 360) % 360;
} else { // back-facing
} else {
return (mCameraInfo.orientation - rotation + 360) % 360;
}
}
@ -267,7 +270,10 @@ public class Camera1 extends CameraViewImpl {
mPreview.setTruePreviewSize(previewSize.getWidth(), previewSize.getHeight());
mCameraParameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
mCameraParameters.setRotation(calcCameraRotation(mDisplayOrientation));
// TODO: fix this
mCameraParameters.setRotation(calcCameraRotation(mDisplayOrientation) + (mFacing == CameraKit.Constants.FACING_FRONT ? 180 : 0));
setAutoFocusInternal(mAutoFocus);
setFlashInternal(mFlash);
mCamera.setParameters(mCameraParameters);

@ -1,613 +0,0 @@
package com.flurgle.camerakit;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.Log;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.View;
import com.flurgle.camerakit.encoding.VideoEncoder;
import com.flurgle.camerakit.utils.AspectRatio;
import com.flurgle.camerakit.utils.Size;
import com.flurgle.camerakit.utils.YuvUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import static android.content.ContentValues.TAG;
@TargetApi(21)
public class Camera2 extends CameraViewImpl {
private static final SparseIntArray INTERNAL_FACINGS = new SparseIntArray();
static {
INTERNAL_FACINGS.put(CameraKit.Constants.FACING_BACK, CameraCharacteristics.LENS_FACING_BACK);
INTERNAL_FACINGS.put(CameraKit.Constants.FACING_FRONT, CameraCharacteristics.LENS_FACING_FRONT);
}
private CameraManager mCameraManager;
private CameraDevice mCamera;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private ImageReader mImageReader;
private VideoEncoder mVideoEncoder;
private Semaphore mCameraOpenCloseLock;
private int mFacing;
private int mFlash;
private int mDisplayOrientation;
private String mCameraId;
private CameraCharacteristics mCameraCharacteristics;
private SortedSet<Size> mPreviewSizes;
private SortedSet<Size> mCaptureSizes;
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private boolean mCropOutput;
private boolean mIsRecording;
Camera2(Context context, CameraListener cameraListener, PreviewImpl preview) {
super(cameraListener, preview);
mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
mPreview.setCallback(new PreviewImpl.Callback() {
@Override
public void onSurfaceChanged() {
startCaptureSession();
}
});
mCameraOpenCloseLock = new Semaphore(1);
mPreviewSizes = new TreeSet<>();
mCaptureSizes = new TreeSet<>();
}
@Override
View getView() {
return mPreview.getView();
}
@Override
void start() {
if (chooseCameraIdByFacing()) {
startBackgroundThread();
collectCameraInfo();
prepareImageReader();
startOpeningCamera();
}
}
@Override
void stop() {
try {
mCameraOpenCloseLock.acquire();
if (mCaptureSession != null) {
mCaptureSession.close();
mCaptureSession = null;
}
if (mCamera != null) {
mCamera.close();
mCamera = null;
}
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
if (mBackgroundThread != null) {
stopBackgroundThread();
}
}
}
@Override
boolean isCameraOpened() {
return mCamera != null;
}
@Override
void setFacing(int facing) {
int internalFacing = INTERNAL_FACINGS.get(facing);
if (mFacing == internalFacing) return;
this.mFacing = internalFacing;
if (isCameraOpened()) {
stop();
start();
}
}
@Override
int getFacing() {
return mFacing;
}
@Override
void setFlash(int flash) {
if (mFlash == flash) return;
int fallback = flash;
mFlash = flash;
if (mPreviewRequestBuilder != null) {
updateFlash(mPreviewRequestBuilder);
if (mCaptureSession != null) {
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
mFlash = fallback;
}
}
}
}
@Override
int getFlash() {
return mFlash;
}
@Override
boolean getAutoFocus() {
return true;
}
@Override
void capturePicture() {
if (mFacing == INTERNAL_FACINGS.get(CameraKit.Constants.FACING_BACK)) {
lockFocus();
} else {
captureStillPicture();
}
}
@Override
void captureStill() {
if (mOnImageAvailableListener != null) {
mOnImageAvailableListener.allowCallback();
}
}
@Override
void startVideo() {
if (mCamera == null) {
return;
}
mIsRecording = true;
}
@Override
void endVideo() {
mIsRecording = false;
if (mVideoEncoder != null) {
mVideoEncoder.stopEncoder();
mVideoEncoder = null;
}
}
@Override
void setDisplayOrientation(int displayOrientation) {
mDisplayOrientation = displayOrientation;
mPreview.setDisplayOrientation(mDisplayOrientation);
}
void updateFlash(CaptureRequest.Builder builder) {
switch (mFlash) {
case CameraKit.Constants.FLASH_OFF:
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
break;
case CameraKit.Constants.FLASH_ON:
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
break;
case CameraKit.Constants.FLASH_AUTO:
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
break;
}
}
void setCropOutput(boolean cropOutput) {
this.mCropOutput = cropOutput;
}
private boolean chooseCameraIdByFacing() {
try {
int internalFacing = mFacing;
final String[] ids = mCameraManager.getCameraIdList();
if (ids.length == 0) { // No camera
throw new RuntimeException("No camera available.");
}
for (String id : ids) {
CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id);
Integer level = characteristics.get(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (level == null ||
level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
continue;
}
Integer internal = characteristics.get(CameraCharacteristics.LENS_FACING);
if (internal == null) {
throw new NullPointerException("Unexpected state: LENS_FACING null");
}
if (internal == internalFacing) {
mCameraId = id;
mCameraCharacteristics = characteristics;
return true;
}
}
// Not found
mCameraId = ids[0];
mCameraCharacteristics = mCameraManager.getCameraCharacteristics(mCameraId);
Integer level = mCameraCharacteristics.get(
CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (level == null ||
level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return false;
}
Integer internal = mCameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
if (internal == null) {
throw new NullPointerException("Unexpected state: LENS_FACING null");
}
for (int i = 0, count = INTERNAL_FACINGS.size(); i < count; i++) {
if (INTERNAL_FACINGS.valueAt(i) == internal) {
mFacing = INTERNAL_FACINGS.keyAt(i);
return true;
}
}
// The operation can reach here when the only camera device is an external one.
// We treat it as facing back.
mFacing = CameraKit.Constants.FACING_BACK;
return true;
} catch (CameraAccessException e) {
throw new RuntimeException("Failed to get a list of camera devices", e);
}
}
private void collectCameraInfo() {
StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
throw new IllegalStateException("Failed to get configuration map: " + mCameraId);
}
mPreviewSizes.clear();
for (android.util.Size size : map.getOutputSizes(mPreview.getOutputClass())) {
mPreviewSizes.add(new Size(size.getWidth(), size.getHeight()));
}
mCaptureSizes.clear();
for (android.util.Size size : map.getOutputSizes(ImageFormat.JPEG)) {
mCaptureSizes.add(new Size(size.getWidth(), size.getHeight()));
}
}
private Size getOptimalPreviewSize() {
int surfaceLonger, surfaceShorter;
final int surfaceWidth = mPreview.getWidth();
final int surfaceHeight = mPreview.getHeight();
if (surfaceWidth < surfaceHeight) {
surfaceLonger = surfaceHeight;
surfaceShorter = surfaceWidth;
} else {
surfaceLonger = surfaceWidth;
surfaceShorter = surfaceHeight;
}
// Pick the smallest of those big enough.
for (Size size : mPreviewSizes) {
if (size.getWidth() >= surfaceLonger && size.getHeight() >= surfaceShorter) {
return size;
}
}
// If no size is big enough, pick the largest one.
return mPreviewSizes.last();
}
private void prepareImageReader() {
Size previewSize = getOptimalPreviewSize();
AspectRatio aspectRatio = AspectRatio.of(previewSize.getWidth(), previewSize.getHeight());
Size bestSize = findSizeClosestTo(1500000, aspectRatio, mCaptureSizes);
mImageReader = ImageReader.newInstance(bestSize.getWidth(), bestSize.getHeight(), ImageFormat.YUV_420_888, 3);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
}
private Size findSizeClosestTo(int targetLength, AspectRatio targetAspectRatio, SortedSet<Size> sizes) {
int closestDistance = Integer.MAX_VALUE;
Size closestSize = null;
for (Size size : sizes) {
if (targetAspectRatio.matches(size)) {
int length = size.getWidth() * size.getHeight();
int distance = Math.abs(targetLength - length);
if (closestSize == null) {
closestSize = size;
} else {
if (distance < closestDistance) {
closestSize = size;
closestDistance = length;
}
}
}
}
return closestSize;
}
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
@SuppressWarnings("MissingPermission")
private void startOpeningCamera() {
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
mCameraManager.openCamera(mCameraId, mCameraDeviceCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
throw new RuntimeException("Failed to open camera: " + mCameraId, e);
} catch (InterruptedException e) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
}
void startCaptureSession() {
if (!isCameraOpened() || !mPreview.isReady() || mImageReader == null) {
return;
}
Size previewSize = getOptimalPreviewSize();
mPreview.setTruePreviewSize(previewSize.getWidth(), previewSize.getHeight());
Surface surface = mPreview.getSurface();
try {
mPreviewRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
mCamera.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), mSessionCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
throw new RuntimeException("Failed to start camera session");
} catch (IllegalStateException e) {
startOpeningCamera();
}
}
private void lockFocus() {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
try {
mCaptureCallback.setState(PictureCaptureCallback.STATE_LOCKING);
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to lock focus.", e);
}
}
private void captureStillPicture() {
try {
CaptureRequest.Builder captureRequestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureRequestBuilder.addTarget(mImageReader.getSurface());
captureRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AF_MODE));
updateFlash(captureRequestBuilder);
// Calculate JPEG orientation.
@SuppressWarnings("ConstantConditions")
int sensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
captureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION,
(sensorOrientation +
mDisplayOrientation * (mFacing == CameraKit.Constants.FACING_FRONT ? 1 : -1) +
360) % 360);
// Stop preview and capture a still picture.
mCaptureSession.stopRepeating();
mOnImageAvailableListener.allowCallback();
mCaptureSession.capture(captureRequestBuilder.build(), new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
unlockFocus();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "Cannot capture a still picture.", e);
}
}
void unlockFocus() {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
try {
mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, null);
//updateAutoFocus();
updateFlash(mPreviewRequestBuilder);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
mCaptureCallback.setState(PictureCaptureCallback.STATE_PREVIEW);
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to restart camera preview.", e);
}
}
private final CameraDevice.StateCallback mCameraDeviceCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice camera) {
mCameraOpenCloseLock.release();
mCamera = camera;
getCameraListener().onCameraOpened();
startCaptureSession();
}
@Override
public void onClosed(@NonNull CameraDevice camera) {
getCameraListener().onCameraClosed();
}
@Override
public void onDisconnected(@NonNull CameraDevice camera) {
mCameraOpenCloseLock.release();
mCamera.close();
mCamera = null;
}
@Override
public void onError(@NonNull CameraDevice camera, int error) {
mCameraOpenCloseLock.release();
Log.e(TAG, "onError: " + camera.getId() + " (" + error + ")");
mCamera = null;
}
};
private final CameraCaptureSession.StateCallback mSessionCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession session) {
if (mCamera == null) {
return;
}
mCaptureSession = session;
updateFlash(mPreviewRequestBuilder);
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to start camera preview because it couldn't access camera", e);
} catch (IllegalStateException e) {
Log.e(TAG, "Failed to start camera preview.", e);
}
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession session) {
Log.e(TAG, "Failed to configure capture session.");
}
@Override
public void onClosed(@NonNull CameraCaptureSession session) {
if (mCaptureSession != null && mCaptureSession.equals(session)) {
mCaptureSession = null;
}
}
};
private PictureCaptureCallback mCaptureCallback = new PictureCaptureCallback() {
@Override
public void onPrecaptureRequired() {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
setState(STATE_PRECAPTURE);
try {
mCaptureSession.capture(mPreviewRequestBuilder.build(), this, mBackgroundHandler);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
} catch (CameraAccessException e) {
Log.e(TAG, "Failed to run precapture sequence.", e);
}
}
@Override
public void onReady() {
captureStillPicture();
}
};
private abstract static class VariableCallbackOnImageAvailableListener implements ImageReader.OnImageAvailableListener {
protected boolean mAllowOneCallback = false;
@Override
public abstract void onImageAvailable(ImageReader reader);
public void allowCallback() {
mAllowOneCallback = true;
}
}
private VariableCallbackOnImageAvailableListener mOnImageAvailableListener = new VariableCallbackOnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
if (!mAllowOneCallback && !mIsRecording) {
image.close();
return;
}
Rect crop = null;
if (mCropOutput) {
}
if (mAllowOneCallback) {
mAllowOneCallback = false;
byte[] out = YuvUtils.createRGB(image, crop);
getCameraListener().onPictureTaken(out);
}
if (mIsRecording) {
if (mVideoEncoder == null) {
try {
mVideoEncoder = new VideoEncoder(getView().getContext(), mFacing, image.getWidth(), image.getHeight());
} catch (IOException e) {
}
}
if (mVideoEncoder != null) {
byte[] out = YuvUtils.getYUVData(image);
try {
mVideoEncoder.encode(out);
} catch (Exception e) {
}
}
}
image.close();
}
};
}

@ -42,6 +42,8 @@ public class CameraView extends FrameLayout {
private static final int PERMISSION_REQUEST_CAMERA = 16;
private static final int DEFAULT_CAPTURE_SIZE = -1;
@IntDef({FACING_BACK, FACING_FRONT})
@Retention(RetentionPolicy.SOURCE)
@interface Facing {
@ -75,6 +77,8 @@ public class CameraView extends FrameLayout {
private int mTapToFocus;
private boolean mAutoFocus;
private float mCaptureSize;
private boolean mAdjustViewBounds;
private boolean mWaitingForPermission;
@ -122,6 +126,10 @@ public class CameraView extends FrameLayout {
mAutoFocus = a.getBoolean(R.styleable.CameraView_ckAutoFocus, true);
}
if (attr == R.styleable.CameraView_ckCaptureSize) {
mCaptureSize = a.getFloat(R.styleable.CameraView_ckCaptureSize, DEFAULT_CAPTURE_SIZE);
}
if (attr == R.styleable.CameraView_android_adjustViewBounds) {
mAdjustViewBounds = a.getBoolean(R.styleable.CameraView_android_adjustViewBounds, false);
}
@ -184,6 +192,7 @@ public class CameraView extends FrameLayout {
int permissionCheck = ContextCompat.checkSelfPermission(getContext(), Manifest.permission.CAMERA);
if (permissionCheck == PackageManager.PERMISSION_GRANTED) {
mWaitingForPermission = false;
mCameraImpl.setCanRecordAudio(true);
mCameraImpl.start();
} else {
requestCameraPermission();
@ -254,6 +263,10 @@ public class CameraView extends FrameLayout {
this.mAutoFocus = autoFocus;
}
public void setCaptureSize(float captureSize) {
this.mCaptureSize = captureSize;
}
public void setCameraListener(CameraListener cameraListener) {
this.mCameraListener = new CameraListenerMiddleWare(cameraListener);
mCameraImpl.setCameraListener(mCameraListener);
@ -276,12 +289,6 @@ public class CameraView extends FrameLayout {
public void stopRecordingVideo() {
mCameraImpl.endVideo();
postDelayed(new Runnable() {
@Override
public void run() {
mCameraListener.onVideoTaken(new File(getContext().getExternalFilesDir(null), "video.mp4"));
}
}, 1000);
}
private void requestCameraPermission() {
@ -295,7 +302,7 @@ public class CameraView extends FrameLayout {
}
if (activity != null) {
ActivityCompat.requestPermissions(activity, new String[]{ Manifest.permission.CAMERA }, PERMISSION_REQUEST_CAMERA);
ActivityCompat.requestPermissions(activity, new String[]{ Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO }, PERMISSION_REQUEST_CAMERA);
mWaitingForPermission = true;
}
}

@ -8,6 +8,8 @@ public abstract class CameraViewImpl {
protected final PreviewImpl mPreview;
private boolean mCanRecordAudio;
CameraViewImpl(CameraListener callback, PreviewImpl preview) {
mCameraListener = callback;
mPreview = preview;
@ -51,4 +53,12 @@ public abstract class CameraViewImpl {
return mCameraListener != null ? mCameraListener : new CameraListener() {};
}
/**
 * Records whether audio capture is permitted; set by CameraView after the
 * RECORD_AUDIO permission check, and read back via canRecordAudio().
 *
 * @param canRecordAudio true when audio recording is permitted
 */
public void setCanRecordAudio(boolean canRecordAudio) {
this.mCanRecordAudio = canRecordAudio;
}
/**
 * @return true when audio recording has been permitted via
 *         setCanRecordAudio(boolean); defaults to false
 */
protected boolean canRecordAudio() {
return mCanRecordAudio;
}
}

@ -1,81 +0,0 @@
package com.flurgle.camerakit;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.support.annotation.NonNull;
/**
 * Capture-session callback implementing the focus/exposure state machine for
 * still capture: LOCKING -> (LOCKED + precapture) -> PRECAPTURE -> WAITING ->
 * CAPTURING. Subclasses are notified via {@link #onReady()} when the pipeline
 * is converged and via {@link #onPrecaptureRequired()} when AE needs a
 * precapture metering sequence first.
 */
public abstract class PictureCaptureCallback extends CameraCaptureSession.CaptureCallback {

    /** Normal preview; no capture in progress. */
    static final int STATE_PREVIEW = 0;
    /** AF trigger sent; waiting for focus to lock. */
    static final int STATE_LOCKING = 1;
    /** Focus locked but exposure not yet converged. */
    static final int STATE_LOCKED = 2;
    /** AE precapture trigger sent. */
    static final int STATE_PRECAPTURE = 3;
    /** Waiting for the precapture sequence to finish. */
    static final int STATE_WAITING = 4;
    /** Still capture request in flight. */
    static final int STATE_CAPTURING = 5;

    private int mState;

    PictureCaptureCallback() {
    }

    void setState(int state) {
        mState = state;
    }

    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
        process(partialResult);
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
        process(result);
    }

    /** Advances the state machine based on the latest AF/AE state. */
    private void process(@NonNull CaptureResult result) {
        switch (mState) {
            case STATE_LOCKING: {
                Integer af = result.get(CaptureResult.CONTROL_AF_STATE);
                if (af == null) {
                    break;
                }
                if (af == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ||
                        af == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                    Integer ae = result.get(CaptureResult.CONTROL_AE_STATE);
                    // A null AE state means the device does not report it; treat as converged.
                    if (ae == null || ae == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                        setState(STATE_CAPTURING);
                        onReady();
                    } else {
                        setState(STATE_LOCKED);
                        onPrecaptureRequired();
                    }
                }
                break;
            }
            case STATE_PRECAPTURE: {
                Integer ae = result.get(CaptureResult.CONTROL_AE_STATE);
                // CONSISTENCY FIX: was CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED —
                // the same inherited constant, but CaptureResult is the correct class
                // for result-side metadata, matching the surrounding comparisons.
                if (ae == null || ae == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
                        ae == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
                        ae == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                    setState(STATE_WAITING);
                }
                break;
            }
            case STATE_WAITING: {
                Integer ae = result.get(CaptureResult.CONTROL_AE_STATE);
                if (ae == null || ae != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    setState(STATE_CAPTURING);
                    onReady();
                }
                break;
            }
        }
    }

    /** Called when focus and exposure are ready for the still capture. */
    public abstract void onReady();

    /** Called when an AE precapture metering sequence must run first. */
    public abstract void onPrecaptureRequired();
}

@ -1,82 +0,0 @@
package com.flurgle.camerakit.encoding;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
public abstract class Encoder {
protected BlockingQueue<Frame> queue = new ArrayBlockingQueue<Frame>(100);
int width;
int height;
int cameraFacing;
long startMS = 0;
byte[] rotatedFrameData = null;
byte[] planeManagedData = null;
boolean encoderStarted = false;
public Encoder(int cameraFacing, int width, int height) {
this.cameraFacing = cameraFacing;
this.height = height;
this.width = width;
}
abstract public void encode(byte[] rawData);
public abstract void stopEncoder();
public boolean hasEncoderStarted() {
return encoderStarted;
}
public void setStartMS(long ms) {
this.startMS = ms;
}
public static void YV12toYUV420PackedSemiPlanar(final byte[] input, byte[] out, final int width, final int height) {
final int frameSize = width * height;
final int qFrameSize = frameSize / 4;
for (int i = 0; i < input.length; i++) {
if (i < frameSize)
out[i] = input[i];
if (i < (qFrameSize)) {
out[frameSize + i * 2] = input[frameSize + i + qFrameSize]; // Cb (U)
out[frameSize + i * 2 + 1] = input[frameSize + i]; // Cr (V)
}
}
}
static byte[] NV21toYUV420p(byte[] data, int width, int height) {
int len_target = (width * height * 3) / 2;
byte[] buf_target = new byte[len_target];
System.arraycopy(data, 0, buf_target, 0, width * height);
for (int i = 0; i < (width * height / 4); i++) {
buf_target[(width * height) + i] = data[(width * height) + 2 * i + 1];
buf_target[(width * height) + (width * height / 4) + i] = data[(width * height) + 2 * i];
}
return buf_target;
}
public void rotateYUV420Degree90(byte[] data, byte[] output, int imageWidth, int imageHeight) {
int i = 0;
for (int x = 0; x < imageWidth; x++) {
for (int y = imageHeight - 1; y >= 0; y--) {
output[i] = data[y * imageWidth + x];
i++;
}
}
i = imageWidth * imageHeight * 3 / 2 - 1;
for (int x = imageWidth - 1; x > 0; x = x - 2) {
for (int y = 0; y < imageHeight / 2; y++) {
output[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
i--;
output[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
i--;
}
}
}
}

@ -1,12 +0,0 @@
package com.flurgle.camerakit.encoding;
/**
 * A single encoded video frame: a sequence id plus its encoded payload.
 */
public class Frame {

    /** Sequence number assigned by the encoder. */
    public int id;

    /** Encoded frame bytes; null until populated by the encoder. */
    public byte[] frameData;

    /**
     * Creates a frame with the given sequence id and no payload yet.
     *
     * @param id frame sequence number
     */
    public Frame(int id) {
        this.id = id;
    }
}

@ -1,157 +0,0 @@
package com.flurgle.camerakit.encoding;
import android.content.Context;
import android.graphics.ImageFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;
/**
 * H.264 ("video/avc") encoder built on the legacy MediaCodec buffer-array API.
 *
 * Raw YUV frames pushed through encode(byte[]) are fed to the codec and the
 * encoded output is appended to "video.mp4" in the app's external files dir.
 * NOTE(review): the output is a raw H.264 elementary stream written straight
 * to a file with an .mp4 name — there is no MP4 muxer here; confirm the
 * consumer can play an un-containerized stream.
 */
public class VideoEncoder extends Encoder {

    // Destination file and its open stream (truncated on construction).
    File mVideoFile;
    FileOutputStream mFOS;

    MediaCodec mMediaCodec;
    // Legacy (pre-API-21-style) buffer arrays; refreshed on each encode() call.
    ByteBuffer[] inputBuffers;
    ByteBuffer[] outputBuffers;
    MediaFormat mediaFormat = null;

    // Global frame counter; reset to 0 each time an encoder is configured.
    public static int frameID = 0;

    /**
     * Configures and starts an AVC encoder for the given frame size.
     *
     * @param context used to resolve the external files directory
     * @param cameraFacing facing constant of the source camera
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @throws IOException if the codec cannot be created
     */
    public VideoEncoder(Context context, int cameraFacing, int width, int height) throws IOException {
        super(cameraFacing, width, height);
        try {
            mVideoFile = new File(context.getExternalFilesDir(null), "video.mp4");
            // false => truncate any previous recording rather than append.
            mFOS = new FileOutputStream(mVideoFile, false);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        queue = new ArrayBlockingQueue<>(100);
        mMediaCodec = MediaCodec.createEncoderByType("video/avc");
        mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        // NOTE(review): 125 kbps is very low for video at typical capture sizes;
        // confirm this bit rate is intentional.
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        // NOTE(review): sample rate / channel count are audio keys on a video
        // format — presumably ignored by the codec; verify they are needed.
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        try {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            frameID = 0;
            // Scratch buffers sized for one YUV420 frame (12 bits per pixel).
            rotatedFrameData = new byte[width * height * (ImageFormat.getBitsPerPixel(ImageFormat.YV12)) / 8];
            planeManagedData = new byte[width * height * (ImageFormat.getBitsPerPixel(ImageFormat.YV12)) / 8];
            encoderStarted = true;
            mMediaCodec.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Stops and releases the codec and closes the output stream. */
    public void stopEncoder() {
        encoderStarted = false;
        if (mMediaCodec != null) {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }
        if (mFOS != null) {
            try {
                mFOS.close();
            } catch (IOException e) {
                // Best-effort close; nothing useful to do on failure here.
            }
        }
    }

    /**
     * Feeds one raw YUV frame to the codec and drains all currently available
     * encoded output, writing it to the file and enqueueing it for consumers.
     * Presentation timestamps are derived from wall-clock time since startMS.
     */
    @Override
    public void encode(byte[] rawData) {
        inputBuffers = mMediaCodec.getInputBuffers();
        outputBuffers = mMediaCodec.getOutputBuffers();
        // Non-blocking dequeue: if no input buffer is free, the frame is dropped.
        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            // NOTE(review): size is the buffer's limit, not rawData.length —
            // presumably equal for full frames; confirm for the last frame.
            int size = inputBuffer.limit();
            inputBuffer.put(rawData);
            mMediaCodec.queueInputBuffer(inputBufferIndex, 0, size, (System.currentTimeMillis() - startMS) * 1000, 0);
        } else {
            return;
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        // Drain every encoded buffer the codec has ready.
        do {
            if (outputBufferIndex >= 0) {
                Frame frame = new Frame(frameID);
                ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                int dataLength = 0;
                outBuffer.get(outData);
                // The queued Frame drops the first two bytes of each buffer.
                dataLength = outData.length - 2;
                frame.frameData = new byte[dataLength];
                // skipping 0x00 0x80 while copying
                System.arraycopy(outData, 2, frame.frameData, 0, dataLength);
                try {
                    // The file receives the full buffer (headers included),
                    // honoring any non-zero offset reported by the codec.
                    if (bufferInfo.offset != 0) {
                        mFOS.write(outData, bufferInfo.offset, outData.length - bufferInfo.offset);
                    } else {
                        mFOS.write(outData, 0, outData.length);
                    }
                    mFOS.flush();
                } catch (IOException e) {
                    Log.e("Encoding", e.toString());
                }
                try {
                    // May block briefly when the queue is full (back-pressure).
                    queue.put(frame);
                } catch (InterruptedException e) {
                    Log.e("EncodeDecode", "interrupted while waiting");
                    e.printStackTrace();
                } catch (NullPointerException e) {
                    Log.e("EncodeDecode", "frame is null");
                    e.printStackTrace();
                } catch (IllegalArgumentException e) {
                    Log.e("EncodeDecode", "problem inserting in the queue");
                    e.printStackTrace();
                }
                Log.d("EncodeDecode", "H263 frame enqueued. queue size now: " + queue.size());
                frameID++;
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Buffer array invalidated by the codec; re-fetch it.
                outputBuffers = mMediaCodec.getOutputBuffers();
                Log.e("EncodeDecode", "output buffer of encoder : info changed");
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                Log.e("EncodeDecode", "output buffer of encoder : format changed");
            } else {
                Log.e("EncodeDecode", "unknown value of outputBufferIndex : " + outputBufferIndex);
            }
        } while (outputBufferIndex >= 0);
    }
}

@ -30,6 +30,8 @@
<attr name="ckAutoFocus" format="boolean" />
<attr name="ckCaptureSize" format="float" />
<attr name="android:adjustViewBounds" />
</declare-styleable>

@ -112,14 +112,6 @@ public class MainActivity extends AppCompatActivity {
@OnClick(R.id.captureVideo)
void captureVideo() {
camera.startRecordingVideo();
camera.postDelayed(new Runnable() {
@Override
public void run() {
camera.stopRecordingVideo();
}
}, 3000);
camera.setCameraListener(new CameraListener() {
@Override
public void onVideoTaken(File video) {
@ -128,6 +120,14 @@ public class MainActivity extends AppCompatActivity {
previewDialog.show();
}
});
camera.startRecordingVideo();
camera.postDelayed(new Runnable() {
@Override
public void run() {
camera.stopRecordingVideo();
}
}, 3000);
}
@OnClick(R.id.toggleCamera)

@ -16,7 +16,7 @@
<com.flurgle.camerakit.CameraView xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/camera"
android:layout_width="match_parent"
android:layout_height="160dp"
android:layout_height="500dp"
android:layout_gravity="center_horizontal"
app:ckCropOutput="true"
app:ckFacing="back"
@ -235,14 +235,14 @@
android:id="@+id/widthWrapContent"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="WRAP_CONTENT" />
android:text="wrap_content" />
<android.support.v7.widget.AppCompatCheckBox
android:id="@+id/widthMatchParent"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="16dp"
android:text="MATCH_PARENT" />
android:text="match_parent" />
</LinearLayout>

Loading…
Cancel
Save