add frame data callback

pull/209/head
xufuji456 3 years ago
parent c459d0ab5e
commit 35ad5b2763
  1. 2
      Live/src/main/cpp/RtmpPusher.cpp
  2. 60
      Live/src/main/java/com/frank/live/LivePusherNew.java
  3. 19
      Live/src/main/java/com/frank/live/listener/OnFrameDataCallback.java
  4. 24
      Live/src/main/java/com/frank/live/stream/AudioStream.java
  5. 32
      Live/src/main/java/com/frank/live/stream/VideoStream.java
  6. 40
      Live/src/main/java/com/frank/live/stream/VideoStreamNew.java

@ -198,7 +198,7 @@ RTMP_PUSHER_FUNC(void, native_1setAudioCodecInfo, jint sampleRateInHz, jint chan
}
}
RTMP_PUSHER_FUNC(jint, getInputSamples) {
RTMP_PUSHER_FUNC(jint, native_1getInputSamples) {
if (audioStream) {
return audioStream->getInputSamples();
}

@ -5,13 +5,14 @@ import android.view.SurfaceHolder;
import android.view.TextureView;
import com.frank.live.listener.LiveStateChangeListener;
import com.frank.live.listener.OnFrameDataCallback;
import com.frank.live.param.AudioParam;
import com.frank.live.param.VideoParam;
import com.frank.live.stream.AudioStream;
import com.frank.live.stream.VideoStream;
import com.frank.live.stream.VideoStreamNew;
public class LivePusherNew {
public class LivePusherNew implements OnFrameDataCallback {
//error of opening video encoder
private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01;
@ -32,7 +33,7 @@ public class LivePusherNew {
System.loadLibrary("live");
}
private AudioStream audioStream;
private final AudioStream audioStream;
private VideoStream videoStream;
// private VideoStreamNew videoStream;
@ -129,34 +130,65 @@ public class LivePusherNew {
}
}
public void setVideoCodecInfo(int width, int height, int fps, int bitrate) {
native_setVideoCodecInfo(width, height, fps, bitrate);
public void start(String path) {
native_start(path);
}
public void setAudioCodecInfo(int sampleRateInHz, int channels) {
native_setAudioCodecInfo(sampleRateInHz, channels);
private int getInputSamplesFromNative() {
return native_getInputSamples();
}
public void start(String path) {
native_start(path);
public void setVideoCodecInfo(int width, int height, int frameRate, int bitrate) {
native_setVideoCodecInfo(width, height, frameRate, bitrate);
}
public int getInputSample() {
return getInputSamples();
private void setAudioCodecInfo(int sampleRateInHz, int channels) {
native_setAudioCodecInfo(sampleRateInHz, channels);
}
public void pushAudio(byte[] data) {
private void pushAudio(byte[] data) {
native_pushAudio(data);
}
public void pushVideo(byte[] data) {
private void pushVideo(byte[] data) {
native_pushVideo(data, null, null, null);
}
public void pushVideo(byte[] y, byte[] u, byte[] v) {
private void pushVideo(byte[] y, byte[] u, byte[] v) {
native_pushVideo(null, y, u, v);
}
@Override
public int getInputSamples() {
    // OnFrameDataCallback bridge: forwards to native_getInputSamples(),
    // which asks the native audio encoder how many samples it expects
    // per input frame (AudioStream sizes its read buffer from this).
    return getInputSamplesFromNative();
}
@Override
public void onAudioCodecInfo(int sampleRate, int channelCount) {
    // OnFrameDataCallback bridge: configure the native audio encoder
    // (native_setAudioCodecInfo) with the capture parameters chosen by
    // AudioStream before recording starts.
    setAudioCodecInfo(sampleRate, channelCount);
}
@Override
public void onAudioFrame(byte[] pcm) {
    // OnFrameDataCallback bridge: hand one PCM buffer (read from
    // AudioRecord by AudioStream) to the native pusher. Null frames are
    // silently dropped rather than crashing the JNI call.
    if (pcm != null) {
        pushAudio(pcm);
    }
}
@Override
public void onVideoCodecInfo(int width, int height, int frameRate, int bitrate) {
    // OnFrameDataCallback bridge: configure the native video encoder
    // (native_setVideoCodecInfo) with the actual preview size reported
    // by the camera layer, plus the requested frame rate and bitrate.
    setVideoCodecInfo(width, height, frameRate, bitrate);
}
@Override
public void onVideoFrame(byte[] yuv, byte[] y, byte[] u, byte[] v) {
    // OnFrameDataCallback bridge: a packed buffer (Camera1 preview path)
    // takes priority; otherwise fall back to the three separate planes
    // delivered by the Camera2 path. Anything else is dropped.
    if (yuv != null) {
        pushVideo(yuv);
        return;
    }
    boolean hasAllPlanes = (y != null) && (u != null) && (v != null);
    if (hasAllPlanes) {
        pushVideo(y, u, v);
    }
}
private native void native_init();
private native void native_start(String path);
@ -165,7 +197,7 @@ public class LivePusherNew {
private native void native_setAudioCodecInfo(int sampleRateInHz, int channels);
private native int getInputSamples();
private native int native_getInputSamples();
private native void native_pushAudio(byte[] data);

@ -0,0 +1,19 @@
package com.frank.live.listener;

/**
 * Video/Audio frame callback.
 * <p>
 * Decouples the capture classes (AudioStream, VideoStream, VideoStreamNew)
 * from LivePusherNew: the streams report codec parameters and raw frames
 * through this interface instead of holding a LivePusherNew reference.
 * <p>
 * Created by frank on 2022/01/25.
 */
public interface OnFrameDataCallback {

    /**
     * @return the number of samples the audio encoder expects per input
     *         frame; AudioStream multiplies this by 2 (bytes per 16-bit
     *         sample) to size its AudioRecord read buffer.
     */
    int getInputSamples();

    /**
     * Delivers one captured PCM buffer read from AudioRecord.
     *
     * @param pcm raw PCM bytes; length is the buffer size chosen by the
     *            capture side
     */
    void onAudioFrame(byte[] pcm);

    /**
     * Reports the audio capture configuration before streaming starts.
     *
     * @param sampleRate   sample rate in Hz
     * @param channelCount number of channels (1 = mono, 2 = stereo)
     */
    void onAudioCodecInfo(int sampleRate, int channelCount);

    /**
     * Delivers one captured video frame. Exactly one representation is
     * expected to be non-null: either the packed buffer {@code yuv}
     * (Camera1 preview path) or all three planes {@code y}/{@code u}/{@code v}
     * (Camera2 path).
     *
     * @param yuv packed frame from Camera.PreviewCallback, or null
     * @param y   Y plane from Camera2, or null
     * @param u   U plane from Camera2, or null
     * @param v   V plane from Camera2, or null
     */
    void onVideoFrame(byte[] yuv, byte[] y, byte[] u, byte[] v);

    /**
     * Reports the video configuration once the actual preview size is
     * known (it may differ from the requested size).
     *
     * @param width     preview width in pixels
     * @param height    preview height in pixels
     * @param frameRate target frame rate in fps
     * @param bitrate   target bitrate in bits per second
     */
    void onVideoCodecInfo(int width, int height, int frameRate, int bitrate);
}

@ -4,7 +4,7 @@ import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import com.frank.live.LivePusherNew;
import com.frank.live.listener.OnFrameDataCallback;
import com.frank.live.param.AudioParam;
import java.util.concurrent.ExecutorService;
@ -12,15 +12,15 @@ import java.util.concurrent.Executors;
public class AudioStream {
private int inputSamples;
private ExecutorService executor;
private AudioRecord audioRecord;
private LivePusherNew mLivePusher;
private boolean isLiving;
private boolean isMute;
private boolean isLiving;
private final int inputSamples;
private final ExecutorService executor;
private final AudioRecord audioRecord;
private final OnFrameDataCallback mCallback;
public AudioStream(LivePusherNew livePusher, AudioParam audioParam) {
mLivePusher = livePusher;
public AudioStream(OnFrameDataCallback callback, AudioParam audioParam) {
mCallback = callback;
executor = Executors.newSingleThreadExecutor();
int channelConfig;
if (audioParam.getNumChannels() == 2) {
@ -29,12 +29,12 @@ public class AudioStream {
channelConfig = AudioFormat.CHANNEL_IN_MONO;
}
mLivePusher.setAudioCodecInfo(audioParam.getSampleRate(), audioParam.getNumChannels());
inputSamples = mLivePusher.getInputSample() * 2;
mCallback.onAudioCodecInfo(audioParam.getSampleRate(), audioParam.getNumChannels());
inputSamples = mCallback.getInputSamples() * 2;
int minBufferSize = AudioRecord.getMinBufferSize(audioParam.getSampleRate(),
channelConfig, audioParam.getAudioFormat()) * 2;
int bufferSizeInBytes = minBufferSize > inputSamples ? minBufferSize : inputSamples;
int bufferSizeInBytes = Math.max(minBufferSize, inputSamples);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, audioParam.getSampleRate(),
channelConfig, audioParam.getAudioFormat(), bufferSizeInBytes);
}
@ -65,7 +65,7 @@ public class AudioStream {
if (!isMute) {
int len = audioRecord.read(bytes, 0, bytes.length);
if (len > 0) {
mLivePusher.pushAudio(bytes);
mCallback.onAudioFrame(bytes);
}
}
}

@ -4,22 +4,28 @@ import android.app.Activity;
import android.hardware.Camera;
import android.view.SurfaceHolder;
import com.frank.live.LivePusherNew;
import com.frank.live.listener.OnFrameDataCallback;
public class VideoStream implements Camera.PreviewCallback, CameraHelper.OnChangedSizeListener {
private LivePusherNew mLivePusher;
private CameraHelper cameraHelper;
private int mBitrate;
private int mFps;
private final OnFrameDataCallback mCallback;
private final CameraHelper cameraHelper;
private final int mBitrate;
private final int mFrameRate;
private boolean isLiving;
public VideoStream(LivePusherNew livePusher, Activity activity, int width, int height, int bitrate, int fps, int cameraId) {
mLivePusher = livePusher;
mBitrate = bitrate;
mFps = fps;
public VideoStream(OnFrameDataCallback callback,
Activity activity,
int width,
int height,
int bitrate,
int frameRate,
int cameraId) {
mCallback = callback;
mBitrate = bitrate;
mFrameRate = frameRate;
cameraHelper = new CameraHelper(activity, cameraId, width, height);
cameraHelper.setPreviewCallback(this);
cameraHelper.setOnChangedSizeListener(this);
@ -38,8 +44,8 @@ public class VideoStream implements Camera.PreviewCallback, CameraHelper.OnChang
*/
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (isLiving) {
mLivePusher.pushVideo(data);
if (isLiving && mCallback != null) {
mCallback.onVideoFrame(data, null, null, null);
}
}
@ -49,7 +55,9 @@ public class VideoStream implements Camera.PreviewCallback, CameraHelper.OnChang
@Override
public void onChanged(int w, int h) {
mLivePusher.setVideoCodecInfo(w, h, mFps, mBitrate);
if (mCallback != null) {
mCallback.onVideoCodecInfo(w, h, mFrameRate, mBitrate);
}
}
public void startLive() {

@ -9,9 +9,9 @@ import android.util.Size;
import android.view.SurfaceHolder;
import android.view.TextureView;
import com.frank.live.LivePusherNew;
import com.frank.live.camera2.Camera2Helper;
import com.frank.live.camera2.Camera2Listener;
import com.frank.live.listener.OnFrameDataCallback;
import com.frank.live.param.VideoParam;
/**
@ -22,15 +22,18 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
private static final String TAG = VideoStreamNew.class.getSimpleName();
private LivePusherNew mLivePusher;
private Camera2Helper camera2Helper;
private boolean isLiving;
private TextureView mTextureView;
private Context mContext;
private VideoParam mVideoParam;
public VideoStreamNew(LivePusherNew livePusher, TextureView textureView, VideoParam videoParam, Context context) {
this.mLivePusher = livePusher;
private final Context mContext;
private Camera2Helper camera2Helper;
private final VideoParam mVideoParam;
private final TextureView mTextureView;
private final OnFrameDataCallback mCallback;
public VideoStreamNew(OnFrameDataCallback callback,
TextureView textureView,
VideoParam videoParam,
Context context) {
this.mCallback = callback;
this.mTextureView = textureView;
this.mVideoParam = videoParam;
this.mContext = context;
@ -55,7 +58,6 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
.specificCameraId(Camera2Helper.CAMERA_ID_BACK)
.context(mContext.getApplicationContext())
.previewOn(mTextureView)
// .previewViewSize(new Point(mTextureView.getWidth(), mTextureView.getHeight()))
.previewViewSize(new Point(mVideoParam.getWidth(), mVideoParam.getHeight()))
.rotation(rotateDegree)
.build();
@ -95,7 +97,7 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Log.e(TAG, "onSurfaceTextureAvailable...");
Log.i(TAG, "onSurfaceTextureAvailable...");
startPreview();
}
@ -106,7 +108,7 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.e(TAG, "onSurfaceTextureDestroyed...");
Log.i(TAG, "onSurfaceTextureDestroyed...");
stopPreview();
return false;
}
@ -117,7 +119,7 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
}
/**
* camere preview frame data
* Camera2 preview frame data
*
* @param y plane of y
* @param u plane of u
@ -125,23 +127,23 @@ public class VideoStreamNew implements TextureView.SurfaceTextureListener, Camer
*/
@Override
public void onPreviewFrame(byte[] y, byte[] u, byte[] v) {
if (isLiving && mLivePusher != null) {
mLivePusher.pushVideo(y, u, v);
if (isLiving && mCallback != null) {
mCallback.onVideoFrame(null, y, u, v);
}
}
@Override
public void onCameraOpened(Size previewSize, int displayOrientation) {
Log.e(TAG, "onCameraOpened previewSize=" + previewSize.toString());
if (mLivePusher != null && mVideoParam != null) {
mLivePusher.setVideoCodecInfo(previewSize.getWidth(), previewSize.getHeight(),
Log.i(TAG, "onCameraOpened previewSize=" + previewSize.toString());
if (mCallback != null && mVideoParam != null) {
mCallback.onVideoCodecInfo(previewSize.getWidth(), previewSize.getHeight(),
mVideoParam.getFrameRate(), mVideoParam.getBitRate());
}
}
@Override
public void onCameraClosed() {
Log.e(TAG, "onCameraClosed");
Log.i(TAG, "onCameraClosed");
}
@Override

Loading…
Cancel
Save