compat for Camera1 and Camera2

Branch: pull/209/head
Author: xufuji456, 3 years ago
Parent: 30110d16ad
Commit: c459d0ab5e
Changed files:

  1. Live/src/main/cpp/RtmpPusher.cpp (27 changed lines)
  2. Live/src/main/cpp/VideoStream.cpp (45 changed lines)
  3. Live/src/main/cpp/VideoStream.h (13 changed lines)
  4. Live/src/main/java/com/frank/live/LivePusherNew.java (9 changed lines)

Live/src/main/cpp/RtmpPusher.cpp

```diff
@@ -70,7 +70,7 @@ void releasePackets(RTMPPacket *&packet) {
     if (packet) {
         RTMPPacket_Free(packet);
         delete packet;
-        packet = 0;
+        packet = nullptr;
     }
 }
```
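For orientation, `releasePackets` is the cleanup half of a pattern in which each outgoing frame is wrapped in a heap-allocated `RTMPPacket`, sent, then freed. A minimal sketch of that pairing using standard librtmp calls (`RTMPPacket_Alloc`, `RTMP_SendPacket`); the helper name `sendBody`, the channel number, and the include path are illustrative assumptions, not code from this commit:

```cpp
#include <cstring>
#include "rtmp.h"   // librtmp header; actual include path depends on the project setup

// Hypothetical helper: wrap a raw payload in an RTMPPacket, send it, then
// release it with the same delete/nullptr pattern used by releasePackets above.
bool sendBody(RTMP *rtmp, const uint8_t *body, int len, uint32_t timestamp) {
    auto *packet = new RTMPPacket();
    RTMPPacket_Alloc(packet, len);             // allocates m_body of 'len' bytes
    memcpy(packet->m_body, body, (size_t) len);
    packet->m_nBodySize   = (uint32_t) len;
    packet->m_packetType  = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nChannel    = 0x04;              // arbitrary channel chosen for video
    packet->m_nTimeStamp  = timestamp;
    packet->m_headerType  = RTMP_PACKET_SIZE_LARGE;
    packet->m_nInfoField2 = rtmp->m_stream_id;
    int ok = RTMP_SendPacket(rtmp, packet, 1); // 1 = let librtmp queue the packet
    RTMPPacket_Free(packet);
    delete packet;
    packet = nullptr;
    return ok != 0;
}
```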
```diff
@@ -173,27 +173,24 @@ RTMP_PUSHER_FUNC(void, native_1start, jstring path_) {
     env->ReleaseStringUTFChars(path_, path);
 }
-RTMP_PUSHER_FUNC(void, native_1pushVideo, jbyteArray data_) {
+RTMP_PUSHER_FUNC(void, native_1pushVideo, jbyteArray yuv, jbyteArray y, jbyteArray u, jbyteArray v) {
     if (!videoStream || !readyPushing) {
         return;
     }
-    jbyte *data = env->GetByteArrayElements(data_, nullptr);
-    videoStream->encodeData(data);
-    env->ReleaseByteArrayElements(data_, data, 0);
-}
-RTMP_PUSHER_FUNC(void, native_1pushVideoNew, jbyteArray y, jbyteArray u, jbyteArray v) {
-    if (!videoStream || !readyPushing) {
-        return;
-    }
-    jbyte *y_plane = env->GetByteArrayElements(y, nullptr);
-    jbyte *u_plane = env->GetByteArrayElements(u, nullptr);
-    jbyte *v_plane = env->GetByteArrayElements(v, nullptr);
-    videoStream->encodeDataNew(y_plane, u_plane, v_plane);
+    if (yuv) {
+        jbyte *yuv_plane = env->GetByteArrayElements(yuv, JNI_FALSE);
+        videoStream->encodeVideo(yuv_plane, nullptr, nullptr, nullptr);
+        env->ReleaseByteArrayElements(yuv, yuv_plane, 0);
+    } else if (y && u && v) {
+        jbyte *y_plane = env->GetByteArrayElements(y, JNI_FALSE);
+        jbyte *u_plane = env->GetByteArrayElements(u, JNI_FALSE);
+        jbyte *v_plane = env->GetByteArrayElements(v, JNI_FALSE);
+        videoStream->encodeVideo(nullptr, y_plane, u_plane, v_plane);
+        env->ReleaseByteArrayElements(y, y_plane, 0);
+        env->ReleaseByteArrayElements(u, u_plane, 0);
+        env->ReleaseByteArrayElements(v, v_plane, 0);
+    }
 }
 RTMP_PUSHER_FUNC(void, native_1setAudioCodecInfo, jint sampleRateInHz, jint channels) {
     if (audioStream) {
```
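The `RTMP_PUSHER_FUNC` macro is defined elsewhere in the project and is not part of this diff. Since the hunk uses `env` without declaring it, the macro presumably expands to a JNI export for `com.frank.live.LivePusherNew`, where the `_1` in `native_1pushVideo` is JNI's escape sequence for the underscore in `native_pushVideo`. A plausible definition, offered only as an assumption so the hunk reads on its own:

```cpp
#include <jni.h>

// Assumed definition (not shown in this commit): binds FUNC_NAME to the Java
// native method of com.frank.live.LivePusherNew and supplies the implicit
// 'env' and 'instance' parameters used in the bodies above.
#define RTMP_PUSHER_FUNC(RETURN_TYPE, FUNC_NAME, ...)                         \
    extern "C" JNIEXPORT RETURN_TYPE JNICALL                                  \
    Java_com_frank_live_LivePusherNew_##FUNC_NAME(JNIEnv *env, jobject instance, ##__VA_ARGS__)
```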

Live/src/main/cpp/VideoStream.cpp

```diff
@@ -4,7 +4,11 @@
 #include "VideoStream.h"
 #include "PushInterface.h"
-VideoStream::VideoStream() {
+VideoStream::VideoStream():yLen(0),
+                           mBitrate(0),
+                           videoCodec(nullptr),
+                           pic_in(nullptr),
+                           videoCallback(nullptr) {
     pthread_mutex_init(&mutex, nullptr);
 }
```
```diff
@@ -22,9 +26,6 @@ VideoStream::~VideoStream() {
 void VideoStream::setVideoEncInfo(int width, int height, int fps, int bitrate) {
     pthread_mutex_lock(&mutex);
-    mWidth = width;
-    mHeight = height;
-    mFps = fps;
     mBitrate = bitrate;
     yLen = width * height;
     if (videoCodec) {
```
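This hunk is cut off at the encoder re-initialization branch. For context, a sketch of a typical x264 setup for a low-latency pusher, consistent with the I420 planes and explicit SPS/PPS handling visible in the next hunk; using the `width`/`height`/`fps` parameters directly is what the removal of `mWidth`/`mHeight`/`mFps` suggests, but the preset, tune, and profile choices below are assumptions rather than this repository's values:

```cpp
#include <x264.h>

// Sketch only: open an x264 encoder for live streaming and allocate the
// I420 input picture that encodeVideo fills.
static x264_t *openEncoder(int width, int height, int fps, int bitrate,
                           x264_picture_t *pic_in) {
    x264_param_t param;
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    param.i_csp          = X264_CSP_I420;   // matches the three planes copied in encodeVideo
    param.i_width        = width;
    param.i_height       = height;
    param.i_fps_num      = (uint32_t) fps;
    param.i_fps_den      = 1;
    param.rc.i_rc_method = X264_RC_ABR;
    param.rc.i_bitrate   = bitrate / 1000;  // x264 takes kbit/s
    param.b_repeat_headers = 1;             // emit SPS/PPS with keyframes so the NAL loop can forward them
    x264_param_apply_profile(&param, "baseline");

    x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
    return x264_encoder_open(&param);
}
```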
```diff
@@ -81,44 +82,24 @@ void VideoStream::setVideoCallback(VideoCallback callback) {
     this->videoCallback = callback;
 }
-void VideoStream::encodeData(int8_t *data) {
+void VideoStream::encodeVideo(int8_t *data, int8_t *y_plane, int8_t *u_plane, int8_t *v_plane) {
     pthread_mutex_lock(&mutex);
+    if (data) {
+        //y
         memcpy(pic_in->img.plane[0], data, yLen);
-    for (int i = 0; i < yLen/4; ++i) {
+        //uv
+        for (int i = 0; i < yLen/4; ++i) {
             *(pic_in->img.plane[1] + i) = *(data + yLen + i * 2 + 1);
             *(pic_in->img.plane[2] + i) = *(data + yLen + i * 2);
         }
-    x264_nal_t *pp_nal;
-    int pi_nal;
-    x264_picture_t pic_out;
-    x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
-    int pps_len, sps_len = 0;
-    uint8_t sps[100];
-    uint8_t pps[100];
-    for (int i = 0; i < pi_nal; ++i) {
-        if (pp_nal[i].i_type == NAL_SPS) {
-            sps_len = pp_nal[i].i_payload - 4;
-            memcpy(sps, pp_nal[i].p_payload + 4, static_cast<size_t>(sps_len));
-        } else if (pp_nal[i].i_type == NAL_PPS) {
-            pps_len = pp_nal[i].i_payload - 4;
-            memcpy(pps, pp_nal[i].p_payload + 4, static_cast<size_t>(pps_len));
-            sendSpsPps(sps, pps, sps_len, pps_len);
-        } else {
-            sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
-        }
-    }
-    pthread_mutex_unlock(&mutex);
-}
-void VideoStream::encodeDataNew(int8_t *y_plane, int8_t *u_plane, int8_t *v_plane) {
-    pthread_mutex_lock(&mutex);
+    } else if (y_plane && u_plane && v_plane) {
         memcpy(pic_in->img.plane[0], y_plane, (size_t) yLen);
         memcpy(pic_in->img.plane[1], u_plane, (size_t) yLen / 4);
         memcpy(pic_in->img.plane[2], v_plane, (size_t) yLen / 4);
+    } else {
+        return;
+    }
     x264_nal_t *pp_nal;
     int pi_nal;
```
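The single-buffer branch assumes Camera1-style NV21 input: after the Y plane, chroma is interleaved as V,U pairs, so the loop writes the odd byte into `plane[1]` (U) and the even byte into `plane[2]` (V) to build the I420 frame x264 expects; the three-plane branch serves Camera2, whose planes arrive already separated. Note that the new `else { return; }` path exits while the mutex is still held. A standalone sketch of the same de-interleaving, using the hypothetical names `nv21ToI420` and `I420Frame`, to make the index arithmetic explicit:

```cpp
#include <cstdint>
#include <cstring>

// Hypothetical illustration of the NV21 -> I420 copy done in encodeVideo's
// single-buffer branch: Y is copied as-is, and the interleaved VU bytes are
// split into separate U and V planes.
struct I420Frame {
    uint8_t *y;
    uint8_t *u;
    uint8_t *v;
};

void nv21ToI420(const uint8_t *nv21, int width, int height, I420Frame &out) {
    const int yLen = width * height;
    memcpy(out.y, nv21, (size_t) yLen);
    const uint8_t *vu = nv21 + yLen;  // interleaved: V0 U0 V1 U1 ...
    for (int i = 0; i < yLen / 4; ++i) {
        out.u[i] = vu[i * 2 + 1];     // U sits at the odd offset
        out.v[i] = vu[i * 2];         // V sits at the even offset
    }
}
```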

Live/src/main/cpp/VideoStream.h

```diff
@@ -17,22 +17,17 @@ public:
     void setVideoEncInfo(int width, int height, int fps, int bitrate);
-    void encodeData(int8_t *data);
-    void encodeDataNew(int8_t *y_plane, int8_t *u_plane, int8_t *v_plane);
+    void encodeVideo(int8_t *data, int8_t *y_plane, int8_t *u_plane, int8_t *v_plane);
     void setVideoCallback(VideoCallback videoCallback);
 private:
-    pthread_mutex_t mutex;
-    int mWidth;
-    int mHeight;
-    int mFps;
+    pthread_mutex_t mutex{};
+    int yLen;
     int mBitrate;
     x264_t *videoCodec = 0;
     x264_picture_t *pic_in = 0;
-    int yLen;
     VideoCallback videoCallback;
     void sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len);
```
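`sendSpsPps` and `sendFrame`, referenced from the encode hunk above, are unchanged by this commit. For readers following the NAL handling, a hedged sketch of the FLV/RTMP AVC sequence header that a `sendSpsPps` of this shape typically assembles from the start-code-stripped SPS/PPS payloads; the byte layout follows the FLV AVCDecoderConfigurationRecord convention, but the buffer handling is an assumption rather than this repository's code:

```cpp
#include <cstdint>
#include <cstring>

// Sketch of an FLV "AVC sequence header" body built from the SPS/PPS that
// encodeVideo extracts (4-byte start codes already stripped).
// Returns the number of bytes written into 'body'.
int buildAvcSequenceHeader(const uint8_t *sps, int sps_len,
                           const uint8_t *pps, int pps_len, uint8_t *body) {
    int i = 0;
    body[i++] = 0x17;                       // keyframe (1) + AVC codec id (7)
    body[i++] = 0x00;                       // AVCPacketType = 0: sequence header
    body[i++] = 0x00;                       // composition time = 0
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x01;                       // configurationVersion
    body[i++] = sps[1];                     // AVCProfileIndication
    body[i++] = sps[2];                     // profile_compatibility
    body[i++] = sps[3];                     // AVCLevelIndication
    body[i++] = 0xFF;                       // 4-byte NALU length field
    body[i++] = 0xE1;                       // one SPS follows
    body[i++] = (uint8_t) (sps_len >> 8);
    body[i++] = (uint8_t) (sps_len & 0xFF);
    memcpy(body + i, sps, (size_t) sps_len);
    i += sps_len;
    body[i++] = 0x01;                       // one PPS follows
    body[i++] = (uint8_t) (pps_len >> 8);
    body[i++] = (uint8_t) (pps_len & 0xFF);
    memcpy(body + i, pps, (size_t) pps_len);
    i += pps_len;
    return i;
}
```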

Live/src/main/java/com/frank/live/LivePusherNew.java

```diff
@@ -150,11 +150,11 @@ public class LivePusherNew {
     }
     public void pushVideo(byte[] data) {
-        native_pushVideo(data);
+        native_pushVideo(data, null, null, null);
     }
     public void pushVideo(byte[] y, byte[] u, byte[] v) {
-        native_pushVideoNew(y, u, v);
+        native_pushVideo(null, y, u, v);
     }
     private native void native_init();
@@ -169,9 +169,8 @@ public class LivePusherNew {
     private native void native_pushAudio(byte[] data);
-    private native void native_pushVideo(byte[] data);
-    private native void native_pushVideoNew(byte[] y, byte[] u, byte[] v);
+    // compat for Camera1 and Camera2
+    private native void native_pushVideo(byte[] yuv, byte[] y, byte[] u, byte[] v);
     private native void native_stop();
```
