Feature: change separate y/u/v planes to a single yuv buffer

pull/209/head
xufuji456 3 years ago
parent 2ca0bd97a5
commit f9f146245b
  1. 18
      Live/src/main/cpp/RtmpPusher.cpp
  2. 23
      Live/src/main/cpp/VideoStream.cpp
  3. 2
      Live/src/main/cpp/VideoStream.h
  4. 17
      Live/src/main/java/com/frank/live/LivePusherNew.java
  5. 35
      Live/src/main/java/com/frank/live/camera/Camera2Helper.java
  6. 2
      Live/src/main/java/com/frank/live/camera/Camera2Listener.java
  7. 2
      Live/src/main/java/com/frank/live/listener/OnFrameDataCallback.java
  8. 2
      Live/src/main/java/com/frank/live/stream/VideoStream.java
  9. 8
      Live/src/main/java/com/frank/live/stream/VideoStreamNew.java

@ -173,23 +173,13 @@ RTMP_PUSHER_FUNC(void, native_1start, jstring path_) {
env->ReleaseStringUTFChars(path_, path);
}
// JNI bridge: push one video frame from Java to the native encoder.
// NOTE(review): this span is a diff rendering — the OLD signature (separate
// y/u/v plane arrays) and the NEW signature (single yuv buffer + camera_type)
// both appear below; only the NEW one survives this change.
// OLD signature (removed):
RTMP_PUSHER_FUNC(void, native_1pushVideo, jbyteArray yuv, jbyteArray y, jbyteArray u, jbyteArray v) {
// NEW signature: camera_type tells the native side which buffer layout it got.
RTMP_PUSHER_FUNC(void, native_1pushVideo, jbyteArray yuv, jint camera_type) {
// Drop frames until the stream exists and pushing has actually started.
if (!videoStream || !readyPushing) {
return;
}
// OLD body (removed): dispatched on which arrays were non-null.
if (yuv) {
jbyte *yuv_plane = env->GetByteArrayElements(yuv, JNI_FALSE);
videoStream->encodeVideo(yuv_plane, nullptr, nullptr, nullptr);
env->ReleaseByteArrayElements(yuv, yuv_plane, 0);
} else if (y && u && v) {
jbyte *y_plane = env->GetByteArrayElements(y, JNI_FALSE);
jbyte *u_plane = env->GetByteArrayElements(u, JNI_FALSE);
jbyte *v_plane = env->GetByteArrayElements(v, JNI_FALSE);
videoStream->encodeVideo(nullptr, y_plane, u_plane, v_plane);
env->ReleaseByteArrayElements(y, y_plane, 0);
env->ReleaseByteArrayElements(u, u_plane, 0);
env->ReleaseByteArrayElements(v, v_plane, 0);
}
// NEW body: pin the Java byte[], hand it to the encoder, then release.
// NOTE(review): unlike the OLD body, `yuv` is no longer null-checked before
// GetByteArrayElements — confirm no Java caller can pass null here.
// NOTE(review): the second argument of GetByteArrayElements is a jboolean*
// (isCopy); JNI_FALSE happens to act as a null pointer but nullptr would
// state the intent — confirm project convention.
jbyte *yuv_plane = env->GetByteArrayElements(yuv, JNI_FALSE);
videoStream->encodeVideo(yuv_plane, camera_type);
// Release mode 0: copy back (no-op if not a copy) and free the pinned array.
env->ReleaseByteArrayElements(yuv, yuv_plane, 0);
}
RTMP_PUSHER_FUNC(void, native_1setAudioCodecInfo, jint sampleRateInHz, jint channels) {

@ -82,21 +82,22 @@ void VideoStream::setVideoCallback(VideoCallback callback) {
this->videoCallback = callback;
}
// Copy one camera frame into the encoder input picture (pic_in), splitting
// it into planar Y/U/V. yLen is the Y-plane byte count; chroma planes are
// yLen/4 each (4:2:0 subsampling implied by the sizes used below).
// NOTE(review): diff rendering — OLD signature (separate plane pointers) and
// NEW signature (single buffer + camera_type) both appear; NEW survives.
// OLD signature (removed):
void VideoStream::encodeVideo(int8_t *data, int8_t *y_plane, int8_t *u_plane, int8_t *v_plane) {
// NEW signature: camera_type == 1 -> Camera1 buffer (interleaved chroma),
//                camera_type == 2 -> Camera2 buffer (already planar).
void VideoStream::encodeVideo(int8_t *data, int8_t camera_type) {
// Serialize frame encoding against state changes guarded by the same mutex.
pthread_mutex_lock(&mutex);
// OLD body (removed): the `data` branch did the same interleaved split.
if (data) {
//y
memcpy(pic_in->img.plane[0], data, yLen);
//uv
// NEW: Camera1 path — Y plane then interleaved chroma; de-interleave into
// the picture's U and V planes. The +1/+0 offsets assume V comes first in
// each chroma pair (NV21-style) — TODO(review): confirm against the
// preview format configured on the Java side.
if (camera_type == 1) {
memcpy(pic_in->img.plane[0], data, yLen); // y
for (int i = 0; i < yLen/4; ++i) {
// OLD stores (removed) — identical to the NEW ones minus the comments:
*(pic_in->img.plane[1] + i) = *(data + yLen + i * 2 + 1);
*(pic_in->img.plane[2] + i) = *(data + yLen + i * 2);
// NEW stores:
*(pic_in->img.plane[1] + i) = *(data + yLen + i * 2 + 1); // u
*(pic_in->img.plane[2] + i) = *(data + yLen + i * 2); // v
}
// OLD branch (removed): separate plane pointers copied straight across.
} else if (y_plane && u_plane && v_plane) {
memcpy(pic_in->img.plane[0], y_plane, (size_t) yLen);
memcpy(pic_in->img.plane[1], u_plane, (size_t) yLen / 4);
memcpy(pic_in->img.plane[2], v_plane, (size_t) yLen / 4);
// NEW: Camera2 path — buffer is already planar (Y, then U, then V).
} else if (camera_type == 2) {
int offset = 0;
memcpy(pic_in->img.plane[0], data, (size_t) yLen); // y
offset += yLen;
memcpy(pic_in->img.plane[1], data + offset, (size_t) yLen / 4); // u
offset += yLen / 4;
memcpy(pic_in->img.plane[2], data + offset, (size_t) yLen / 4); // v
} else {
// NOTE(review): this early return comes after pthread_mutex_lock and no
// unlock is visible in this hunk — if the full function does not unlock
// before returning here, the mutex stays held and the next frame
// deadlocks. Verify against the complete file.
return;
}

@ -17,7 +17,7 @@ public:
void setVideoEncInfo(int width, int height, int fps, int bitrate);
void encodeVideo(int8_t *data, int8_t *y_plane, int8_t *u_plane, int8_t *v_plane);
void encodeVideo(int8_t *data, int8_t camera_type);
void setVideoCallback(VideoCallback videoCallback);

@ -148,12 +148,8 @@ public class LivePusherNew implements OnFrameDataCallback {
native_pushAudio(data);
}
// Forwarders from the Java layer to the native push entry point.
// NOTE(review): diff rendering — the two OLD overloads and the NEW unified
// method appear together; only pushVideo(byte[], int) survives this change.
// OLD overload (removed): single buffer, plane arguments passed as null.
private void pushVideo(byte[] data) {
native_pushVideo(data, null, null, null);
}
// OLD overload (removed): separate y/u/v planes, buffer passed as null.
private void pushVideo(byte[] y, byte[] u, byte[] v) {
native_pushVideo(null, y, u, v);
// NEW: single yuv buffer plus a camera-type tag (callers in this change
// pass 1 for Camera1 and 2 for Camera2).
private void pushVideo(byte[] data, int cameraType) {
native_pushVideo(data, cameraType);
}
@Override
@ -179,11 +175,9 @@ public class LivePusherNew implements OnFrameDataCallback {
}
@Override
// Frame callback from the camera streams; forwards to the native pusher.
// NOTE(review): diff rendering — OLD signature (separate planes) and NEW
// signature (yuv + cameraType) both appear; only the NEW one survives.
public void onVideoFrame(byte[] yuv, byte[] y, byte[] u, byte[] v) {
public void onVideoFrame(byte[] yuv, int cameraType) {
// Only push non-null frames; cameraType tags the buffer layout for native.
if (yuv != null) {
// OLD dispatch (removed):
pushVideo(yuv);
} else if (y != null && u != null && v != null) {
pushVideo(y, u, v);
// NEW dispatch:
pushVideo(yuv, cameraType);
}
}
@ -199,8 +193,7 @@ public class LivePusherNew implements OnFrameDataCallback {
private native void native_pushAudio(byte[] data);
// compat for Camera1 and Camera2
private native void native_pushVideo(byte[] yuv, byte[] y, byte[] u, byte[] v);
private native void native_pushVideo(byte[] yuv, int cameraType);
private native void native_stop();

@ -525,10 +525,8 @@ public class Camera2Helper {
}
private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
private byte[] data = null;
private byte[] yPlane;
private byte[] uPlane;
private byte[] vPlane;
private byte[] temp = null;
private byte[] yuvData = null;
private final ReentrantLock lock = new ReentrantLock();
@Override
@ -538,32 +536,28 @@ public class Camera2Helper {
Image.Plane[] planes = image.getPlanes();
lock.lock();
int width = image.getWidth();
int offset = 0;
int width = image.getWidth();
int height = image.getHeight();
if (yPlane == null) {
yPlane = new byte[width * height];
uPlane = new byte[width * height / 4];
vPlane = new byte[width * height / 4];
int len = width * height;
if (yuvData == null) {
yuvData = new byte[len * 3 / 2];
}
planes[0].getBuffer().get(yPlane);
planes[0].getBuffer().get(yuvData, offset, len);
offset += len;
for (int i = 1; i < planes.length; i++) {
int srcIndex = 0, dstIndex = 0;
int rowStride = planes[i].getRowStride();
int pixelsStride = planes[i].getPixelStride();
ByteBuffer buffer = planes[i].getBuffer();
if (data == null || data.length != buffer.capacity()) {
data = new byte[buffer.capacity()];
if (temp == null || temp.length != buffer.capacity()) {
temp = new byte[buffer.capacity()];
}
buffer.get(data);
buffer.get(temp);
for (int j = 0; j < height / 2; j++) {
for (int k = 0; k < width / 2; k++) {
if (i == 1) {
uPlane[dstIndex++] = data[srcIndex];
} else if (i == 2) {
vPlane[dstIndex++] = data[srcIndex];
}
yuvData[offset + dstIndex++] = temp[srcIndex];
srcIndex += pixelsStride;
}
if (pixelsStride == 2) {
@ -572,10 +566,11 @@ public class Camera2Helper {
srcIndex += rowStride - width / 2;
}
}
offset += len / 4;
}
if (camera2Listener != null) {
camera2Listener.onPreviewFrame(yPlane, uPlane, vPlane);
camera2Listener.onPreviewFrame(yuvData);
}
lock.unlock();
}

@ -7,7 +7,7 @@ public interface Camera2Listener {
void onCameraOpened(Size previewSize, int displayOrientation);
void onPreviewFrame(byte[] y, byte[] u, byte[] v);
void onPreviewFrame(byte[] yuvData);
void onCameraClosed();

@ -13,7 +13,7 @@ public interface OnFrameDataCallback {
void onAudioCodecInfo(int sampleRate, int channelCount);
void onVideoFrame(byte[] yuv, byte[] y, byte[] u, byte[] v);
void onVideoFrame(byte[] yuv, int cameraType);
void onVideoCodecInfo(int width, int height, int frameRate, int bitrate);
}

@ -62,7 +62,7 @@ public class VideoStream extends VideoStreamBase implements Camera.PreviewCallba
// Camera1 preview callback: forward each raw frame while live.
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (isLiving && mCallback != null) {
// OLD call (removed): plane arguments passed as nulls.
mCallback.onVideoFrame(data, null, null, null);
// NEW call: cameraType 1 marks the Camera1 buffer layout; the native
// encoder in this change de-interleaves its chroma accordingly.
mCallback.onVideoFrame(data, 1);
}
}

@ -128,14 +128,12 @@ public class VideoStreamNew extends VideoStreamBase
/**
* Camera2 preview frame data.
*
* NOTE(review): diff rendering — the OLD y/u/v-plane signature and the NEW
* single-buffer signature both appear below; only the NEW one survives.
*
* @param y plane of y (OLD, removed)
* @param u plane of u (OLD, removed)
* @param v plane of v (OLD, removed)
* @param yuvData data of yuv — packed planar Y, then U, then V per the
*                Camera2Helper packing in this change (confirm)
*/
@Override
public void onPreviewFrame(byte[] y, byte[] u, byte[] v) {
public void onPreviewFrame(byte[] yuvData) {
if (isLiving && mCallback != null) {
// OLD call (removed):
mCallback.onVideoFrame(null, y, u, v);
// NEW call: cameraType 2 marks the Camera2 planar layout.
mCallback.onVideoFrame(yuvData, 2);
}
}

Loading…
Cancel
Save