@ -0,0 +1 @@ |
||||
/build |
@ -0,0 +1,45 @@ |
||||
# For more information about using CMake with Android Studio, read the |
||||
# documentation: https://d.android.com/studio/projects/add-native-code.html |
||||
|
||||
# Sets the minimum version of CMake required to build the native library. |
||||
|
||||
cmake_minimum_required(VERSION 3.4.1) |
||||
set(distribution_DIR ${CMAKE_SOURCE_DIR}/../../../../libs) |
||||
|
||||
add_library(faac |
||||
STATIC |
||||
IMPORTED) |
||||
set_target_properties(faac |
||||
PROPERTIES IMPORTED_LOCATION |
||||
../../../../libs/armeabi-v7a/libfaac.a) |
||||
|
||||
add_library(rtmp |
||||
STATIC |
||||
IMPORTED) |
||||
set_target_properties(rtmp |
||||
PROPERTIES IMPORTED_LOCATION |
||||
../../../../libs/armeabi-v7a/librtmp.a) |
||||
|
||||
add_library(x264 |
||||
STATIC |
||||
IMPORTED) |
||||
set_target_properties(x264 |
||||
PROPERTIES IMPORTED_LOCATION |
||||
../../../../libs/armeabi-v7a/libx264.a) |
||||
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11") |
||||
|
||||
include_directories(main/cpp/include) |
||||
|
||||
add_library(live |
||||
SHARED |
||||
src/main/cpp/live.c |
||||
src/main/cpp/queue.c) |
||||
|
||||
find_library( log-lib |
||||
log ) |
||||
|
||||
target_link_libraries(live x264 faac rtmp |
||||
-landroid |
||||
-ljnigraphics |
||||
${log-lib} ) |
@ -0,0 +1,43 @@ |
||||
apply plugin: 'com.android.library' |
||||
|
||||
android { |
||||
compileSdkVersion 25 |
||||
buildToolsVersion "25.0.2" |
||||
defaultConfig { |
||||
// applicationId "com.frank.live" |
||||
minSdkVersion 15 |
||||
targetSdkVersion 25 |
||||
versionCode 1 |
||||
versionName "1.0" |
||||
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" |
||||
externalNativeBuild { |
||||
cmake { |
||||
cppFlags "" |
||||
} |
||||
} |
||||
ndk{ |
||||
abiFilters "armeabi-v7a" |
||||
} |
||||
} |
||||
buildTypes { |
||||
release { |
||||
minifyEnabled false |
||||
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' |
||||
} |
||||
} |
||||
externalNativeBuild { |
||||
cmake { |
||||
path "CMakeLists.txt" |
||||
} |
||||
} |
||||
} |
||||
|
||||
dependencies { |
||||
compile fileTree(dir: 'libs', include: ['*.jar']) |
||||
androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', { |
||||
exclude group: 'com.android.support', module: 'support-annotations' |
||||
}) |
||||
compile 'com.android.support:appcompat-v7:25.3.1' |
||||
compile 'com.android.support.constraint:constraint-layout:1.0.2' |
||||
testCompile 'junit:junit:4.12' |
||||
} |
@ -0,0 +1,25 @@ |
||||
# Add project specific ProGuard rules here. |
||||
# By default, the flags in this file are appended to flags specified |
||||
# in C:\Users\frank\AppData\Local\Android\Sdk/tools/proguard/proguard-android.txt |
||||
# You can edit the include path and order by changing the proguardFiles |
||||
# directive in build.gradle. |
||||
# |
||||
# For more details, see |
||||
# http://developer.android.com/guide/developing/tools/proguard.html |
||||
|
||||
# Add any project specific keep options here: |
||||
|
||||
# If your project uses WebView with JS, uncomment the following |
||||
# and specify the fully qualified class name to the JavaScript interface |
||||
# class: |
||||
#-keepclassmembers class fqcn.of.javascript.interface.for.webview { |
||||
# public *; |
||||
#} |
||||
|
||||
# Uncomment this to preserve the line number information for |
||||
# debugging stack traces. |
||||
#-keepattributes SourceFile,LineNumberTable |
||||
|
||||
# If you keep the line number information, uncomment this to |
||||
# hide the original source file name. |
||||
#-renamesourcefileattribute SourceFile |
@ -0,0 +1,26 @@ |
||||
package com.frank.live; |
||||
|
||||
import android.content.Context; |
||||
import android.support.test.InstrumentationRegistry; |
||||
import android.support.test.runner.AndroidJUnit4; |
||||
|
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
|
||||
import static org.junit.Assert.*; |
||||
|
||||
/** |
||||
* Instrumentation test, which will execute on an Android device. |
||||
* |
||||
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a> |
||||
*/ |
||||
@RunWith(AndroidJUnit4.class) |
||||
public class ExampleInstrumentedTest { |
||||
@Test |
||||
public void useAppContext() throws Exception { |
||||
// Context of the app under test.
|
||||
Context appContext = InstrumentationRegistry.getTargetContext(); |
||||
|
||||
assertEquals("com.frank.pusher", appContext.getPackageName()); |
||||
} |
||||
} |
@ -0,0 +1,27 @@ |
||||
<?xml version="1.0" encoding="utf-8"?> |
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android" |
||||
package="com.frank.live" > |
||||
|
||||
<uses-permission android:name="android.permission.INTERNET"/> |
||||
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> |
||||
<uses-permission android:name="android.permission.CAMERA"/> |
||||
<uses-permission android:name="android.permission.RECORD_AUDIO"/> |
||||
|
||||
<application |
||||
android:allowBackup="true" |
||||
android:icon="@mipmap/ic_launcher" |
||||
android:label="@string/app_name" |
||||
android:roundIcon="@mipmap/ic_launcher_round" |
||||
android:supportsRtl="true" |
||||
android:theme="@style/AppTheme" > |
||||
<!--<activity android:name="com.frank.live.LiveActivity"--> |
||||
<!--android:screenOrientation="landscape">--> |
||||
<!--<intent-filter>--> |
||||
<!--<action android:name="android.intent.action.MAIN" />--> |
||||
|
||||
<!--<category android:name="android.intent.category.LAUNCHER" />--> |
||||
<!--</intent-filter>--> |
||||
<!--</activity>--> |
||||
</application> |
||||
|
||||
</manifest> |
@ -0,0 +1,99 @@ |
||||
/*
|
||||
* FAAC - Freeware Advanced Audio Coder |
||||
* Copyright (C) 2001 Menno Bakker |
||||
* |
||||
* This library is free software; you can redistribute it and/or |
||||
* modify it under the terms of the GNU Lesser General Public |
||||
* License as published by the Free Software Foundation; either |
||||
* version 2.1 of the License, or (at your option) any later version. |
||||
* |
||||
* This library is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
||||
* Lesser General Public License for more details. |
||||
|
||||
* You should have received a copy of the GNU Lesser General Public |
||||
* License along with this library; if not, write to the Free Software |
||||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
||||
* |
||||
* $Id: faac.h,v 1.36 2009/01/25 18:50:32 menno Exp $ |
||||
*/ |
||||
|
||||
#ifndef _FAAC_H_ |
||||
#define _FAAC_H_ |
||||
|
||||
#ifdef __cplusplus |
||||
extern "C" { |
||||
#endif /* __cplusplus */ |
||||
|
||||
#if defined(_WIN32) && !defined(__MINGW32__) |
||||
# ifndef FAACAPI |
||||
# define FAACAPI __stdcall |
||||
# endif |
||||
#else |
||||
# ifndef FAACAPI |
||||
# define FAACAPI |
||||
# endif |
||||
#endif |
||||
|
||||
#pragma pack(push, 1) |
||||
|
||||
typedef struct { |
||||
void *ptr; |
||||
char *name; |
||||
} |
||||
psymodellist_t; |
||||
|
||||
#include "faaccfg.h" |
||||
|
||||
|
||||
typedef void *faacEncHandle; |
||||
|
||||
#ifndef HAVE_INT32_T |
||||
typedef signed int int32_t; |
||||
#endif |
||||
|
||||
/*
|
||||
Allows an application to get FAAC version info. This is intended |
||||
purely for informative purposes. |
||||
|
||||
Returns FAAC_CFG_VERSION. |
||||
*/ |
||||
int FAACAPI faacEncGetVersion(char **faac_id_string, |
||||
char **faac_copyright_string); |
||||
|
||||
|
||||
faacEncConfigurationPtr FAACAPI |
||||
faacEncGetCurrentConfiguration(faacEncHandle hEncoder); |
||||
|
||||
|
||||
int FAACAPI faacEncSetConfiguration(faacEncHandle hEncoder, |
||||
faacEncConfigurationPtr config); |
||||
|
||||
|
||||
faacEncHandle FAACAPI faacEncOpen(unsigned long sampleRate, |
||||
unsigned int numChannels, |
||||
unsigned long *inputSamples, |
||||
unsigned long *maxOutputBytes); |
||||
|
||||
|
||||
int FAACAPI faacEncGetDecoderSpecificInfo(faacEncHandle hEncoder, unsigned char **ppBuffer, |
||||
unsigned long *pSizeOfDecoderSpecificInfo); |
||||
|
||||
|
||||
int FAACAPI faacEncEncode(faacEncHandle hEncoder, int32_t * inputBuffer, unsigned int samplesInput, |
||||
unsigned char *outputBuffer, |
||||
unsigned int bufferSize); |
||||
|
||||
|
||||
int FAACAPI faacEncClose(faacEncHandle hEncoder); |
||||
|
||||
|
||||
|
||||
#pragma pack(pop) |
||||
|
||||
#ifdef __cplusplus |
||||
} |
||||
#endif /* __cplusplus */ |
||||
|
||||
#endif /* _FAAC_H_ */ |
@ -0,0 +1,122 @@ |
||||
/*
|
||||
* FAAC - Freeware Advanced Audio Coder |
||||
* Copyright (C) 2001 Menno Bakker |
||||
* |
||||
* This library is free software; you can redistribute it and/or |
||||
* modify it under the terms of the GNU Lesser General Public |
||||
* License as published by the Free Software Foundation; either |
||||
* version 2.1 of the License, or (at your option) any later version. |
||||
* |
||||
* This library is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
||||
* Lesser General Public License for more details. |
||||
|
||||
* You should have received a copy of the GNU Lesser General Public |
||||
* License along with this library; if not, write to the Free Software |
||||
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
||||
* |
||||
* $Id: faaccfg.h,v 1.3 2004/07/04 12:12:05 corrados Exp $ |
||||
*/ |
||||
|
||||
#ifndef _FAACCFG_H_ |
||||
#define _FAACCFG_H_ |
||||
|
||||
#define FAAC_CFG_VERSION 104 |
||||
|
||||
/* MPEG ID's */ |
||||
#define MPEG2 1 |
||||
#define MPEG4 0 |
||||
|
||||
/* AAC object types */ |
||||
#define MAIN 1 |
||||
#define LOW 2 |
||||
#define SSR 3 |
||||
#define LTP 4 |
||||
|
||||
/* Input Formats */ |
||||
#define FAAC_INPUT_NULL 0 |
||||
#define FAAC_INPUT_16BIT 1 |
||||
#define FAAC_INPUT_24BIT 2 |
||||
#define FAAC_INPUT_32BIT 3 |
||||
#define FAAC_INPUT_FLOAT 4 |
||||
|
||||
#define SHORTCTL_NORMAL 0 |
||||
#define SHORTCTL_NOSHORT 1 |
||||
#define SHORTCTL_NOLONG 2 |
||||
|
||||
#pragma pack(push, 1) |
||||
typedef struct faacEncConfiguration |
||||
{ |
||||
/* config version */ |
||||
int version; |
||||
|
||||
/* library version */ |
||||
char *name; |
||||
|
||||
/* copyright string */ |
||||
char *copyright; |
||||
|
||||
/* MPEG version, 2 or 4 */ |
||||
unsigned int mpegVersion; |
||||
|
||||
/* AAC object type */ |
||||
unsigned int aacObjectType; |
||||
|
||||
/* Allow mid/side coding */ |
||||
unsigned int allowMidside; |
||||
|
||||
/* Use one of the channels as LFE channel */ |
||||
unsigned int useLfe; |
||||
|
||||
/* Use Temporal Noise Shaping */ |
||||
unsigned int useTns; |
||||
|
||||
/* bitrate / channel of AAC file */ |
||||
unsigned long bitRate; |
||||
|
||||
/* AAC file frequency bandwidth */ |
||||
unsigned int bandWidth; |
||||
|
||||
/* Quantizer quality */ |
||||
unsigned long quantqual; |
||||
|
||||
/* Bitstream output format (0 = Raw; 1 = ADTS) */ |
||||
unsigned int outputFormat; |
||||
|
||||
/* psychoacoustic model list */ |
||||
psymodellist_t *psymodellist; |
||||
|
||||
/* selected index in psymodellist */ |
||||
unsigned int psymodelidx; |
||||
|
||||
/*
|
||||
PCM Sample Input Format |
||||
0 FAAC_INPUT_NULL invalid, signifies a misconfigured config |
||||
1 FAAC_INPUT_16BIT native endian 16bit |
||||
2 FAAC_INPUT_24BIT native endian 24bit in 24 bits (not implemented) |
||||
3 FAAC_INPUT_32BIT native endian 24bit in 32 bits (DEFAULT) |
||||
4 FAAC_INPUT_FLOAT 32bit floating point |
||||
*/ |
||||
unsigned int inputFormat; |
||||
|
||||
/* block type enforcing (SHORTCTL_NORMAL/SHORTCTL_NOSHORT/SHORTCTL_NOLONG) */ |
||||
int shortctl; |
||||
|
||||
/*
|
||||
Channel Remapping |
||||
|
||||
Default 0, 1, 2, 3 ... 63 (64 is MAX_CHANNELS in coder.h) |
||||
|
||||
WAVE 4.0 2, 0, 1, 3 |
||||
WAVE 5.0 2, 0, 1, 3, 4 |
||||
WAVE 5.1 2, 0, 1, 4, 5, 3 |
||||
AIFF 5.1 2, 0, 3, 1, 4, 5
|
||||
*/ |
||||
int channel_map[64];
|
||||
|
||||
} faacEncConfiguration, *faacEncConfigurationPtr; |
||||
|
||||
#pragma pack(pop) |
||||
|
||||
#endif /* _FAACCFG_H_ */ |
@ -0,0 +1,161 @@ |
||||
#ifndef __AMF_H__ |
||||
#define __AMF_H__ |
||||
/*
|
||||
* Copyright (C) 2005-2008 Team XBMC |
||||
* http://www.xbmc.org
|
||||
* Copyright (C) 2008-2009 Andrej Stepanchuk |
||||
* Copyright (C) 2009-2010 Howard Chu |
||||
* |
||||
* This file is part of librtmp. |
||||
* |
||||
* librtmp is free software; you can redistribute it and/or modify |
||||
* it under the terms of the GNU Lesser General Public License as |
||||
* published by the Free Software Foundation; either version 2.1, |
||||
* or (at your option) any later version. |
||||
* |
||||
* librtmp is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Lesser General Public License |
||||
* along with librtmp see the file COPYING. If not, write to |
||||
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
||||
* Boston, MA 02110-1301, USA. |
||||
* http://www.gnu.org/copyleft/lgpl.html
|
||||
*/ |
||||
|
||||
#include <stdint.h> |
||||
|
||||
#ifndef TRUE |
||||
#define TRUE 1 |
||||
#define FALSE 0 |
||||
#endif |
||||
|
||||
#ifdef __cplusplus |
||||
extern "C" |
||||
{ |
||||
#endif |
||||
|
||||
typedef enum |
||||
{ AMF_NUMBER = 0, AMF_BOOLEAN, AMF_STRING, AMF_OBJECT, |
||||
AMF_MOVIECLIP, /* reserved, not used */ |
||||
AMF_NULL, AMF_UNDEFINED, AMF_REFERENCE, AMF_ECMA_ARRAY, AMF_OBJECT_END, |
||||
AMF_STRICT_ARRAY, AMF_DATE, AMF_LONG_STRING, AMF_UNSUPPORTED, |
||||
AMF_RECORDSET, /* reserved, not used */ |
||||
AMF_XML_DOC, AMF_TYPED_OBJECT, |
||||
AMF_AVMPLUS, /* switch to AMF3 */ |
||||
AMF_INVALID = 0xff |
||||
} AMFDataType; |
||||
|
||||
typedef enum |
||||
{ AMF3_UNDEFINED = 0, AMF3_NULL, AMF3_FALSE, AMF3_TRUE, |
||||
AMF3_INTEGER, AMF3_DOUBLE, AMF3_STRING, AMF3_XML_DOC, AMF3_DATE, |
||||
AMF3_ARRAY, AMF3_OBJECT, AMF3_XML, AMF3_BYTE_ARRAY |
||||
} AMF3DataType; |
||||
|
||||
typedef struct AVal |
||||
{ |
||||
char *av_val; |
||||
int av_len; |
||||
} AVal; |
||||
#define AVC(str) {str,sizeof(str)-1} |
||||
#define AVMATCH(a1,a2) ((a1)->av_len == (a2)->av_len && !memcmp((a1)->av_val,(a2)->av_val,(a1)->av_len)) |
||||
|
||||
struct AMFObjectProperty; |
||||
|
||||
typedef struct AMFObject |
||||
{ |
||||
int o_num; |
||||
struct AMFObjectProperty *o_props; |
||||
} AMFObject; |
||||
|
||||
typedef struct AMFObjectProperty |
||||
{ |
||||
AVal p_name; |
||||
AMFDataType p_type; |
||||
union |
||||
{ |
||||
double p_number; |
||||
AVal p_aval; |
||||
AMFObject p_object; |
||||
} p_vu; |
||||
int16_t p_UTCoffset; |
||||
} AMFObjectProperty; |
||||
|
||||
char *AMF_EncodeString(char *output, char *outend, const AVal * str); |
||||
char *AMF_EncodeNumber(char *output, char *outend, double dVal); |
||||
char *AMF_EncodeInt16(char *output, char *outend, short nVal); |
||||
char *AMF_EncodeInt24(char *output, char *outend, int nVal); |
||||
char *AMF_EncodeInt32(char *output, char *outend, int nVal); |
||||
char *AMF_EncodeBoolean(char *output, char *outend, int bVal); |
||||
|
||||
/* Shortcuts for AMFProp_Encode */ |
||||
char *AMF_EncodeNamedString(char *output, char *outend, const AVal * name, const AVal * value); |
||||
char *AMF_EncodeNamedNumber(char *output, char *outend, const AVal * name, double dVal); |
||||
char *AMF_EncodeNamedBoolean(char *output, char *outend, const AVal * name, int bVal); |
||||
|
||||
unsigned short AMF_DecodeInt16(const char *data); |
||||
unsigned int AMF_DecodeInt24(const char *data); |
||||
unsigned int AMF_DecodeInt32(const char *data); |
||||
void AMF_DecodeString(const char *data, AVal * str); |
||||
void AMF_DecodeLongString(const char *data, AVal * str); |
||||
int AMF_DecodeBoolean(const char *data); |
||||
double AMF_DecodeNumber(const char *data); |
||||
|
||||
char *AMF_Encode(AMFObject * obj, char *pBuffer, char *pBufEnd); |
||||
int AMF_Decode(AMFObject * obj, const char *pBuffer, int nSize, |
||||
int bDecodeName); |
||||
int AMF_DecodeArray(AMFObject * obj, const char *pBuffer, int nSize, |
||||
int nArrayLen, int bDecodeName); |
||||
int AMF3_Decode(AMFObject * obj, const char *pBuffer, int nSize, |
||||
int bDecodeName); |
||||
void AMF_Dump(AMFObject * obj); |
||||
void AMF_Reset(AMFObject * obj); |
||||
|
||||
void AMF_AddProp(AMFObject * obj, const AMFObjectProperty * prop); |
||||
int AMF_CountProp(AMFObject * obj); |
||||
AMFObjectProperty *AMF_GetProp(AMFObject * obj, const AVal * name, |
||||
int nIndex); |
||||
|
||||
AMFDataType AMFProp_GetType(AMFObjectProperty * prop); |
||||
void AMFProp_SetNumber(AMFObjectProperty * prop, double dval); |
||||
void AMFProp_SetBoolean(AMFObjectProperty * prop, int bflag); |
||||
void AMFProp_SetString(AMFObjectProperty * prop, AVal * str); |
||||
void AMFProp_SetObject(AMFObjectProperty * prop, AMFObject * obj); |
||||
|
||||
void AMFProp_GetName(AMFObjectProperty * prop, AVal * name); |
||||
void AMFProp_SetName(AMFObjectProperty * prop, AVal * name); |
||||
double AMFProp_GetNumber(AMFObjectProperty * prop); |
||||
int AMFProp_GetBoolean(AMFObjectProperty * prop); |
||||
void AMFProp_GetString(AMFObjectProperty * prop, AVal * str); |
||||
void AMFProp_GetObject(AMFObjectProperty * prop, AMFObject * obj); |
||||
|
||||
int AMFProp_IsValid(AMFObjectProperty * prop); |
||||
|
||||
char *AMFProp_Encode(AMFObjectProperty * prop, char *pBuffer, char *pBufEnd); |
||||
int AMF3Prop_Decode(AMFObjectProperty * prop, const char *pBuffer, |
||||
int nSize, int bDecodeName); |
||||
int AMFProp_Decode(AMFObjectProperty * prop, const char *pBuffer, |
||||
int nSize, int bDecodeName); |
||||
|
||||
void AMFProp_Dump(AMFObjectProperty * prop); |
||||
void AMFProp_Reset(AMFObjectProperty * prop); |
||||
|
||||
typedef struct AMF3ClassDef |
||||
{ |
||||
AVal cd_name; |
||||
char cd_externalizable; |
||||
char cd_dynamic; |
||||
int cd_num; |
||||
AVal *cd_props; |
||||
} AMF3ClassDef; |
||||
|
||||
void AMF3CD_AddProp(AMF3ClassDef * cd, AVal * prop); |
||||
AVal *AMF3CD_GetProp(AMF3ClassDef * cd, int idx); |
||||
|
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
|
||||
#endif /* __AMF_H__ */ |
@ -0,0 +1,47 @@ |
||||
#ifndef __RTMP_HTTP_H__ |
||||
#define __RTMP_HTTP_H__ |
||||
/*
|
||||
* Copyright (C) 2010 Howard Chu |
||||
* Copyright (C) 2010 Antti Ajanki |
||||
* |
||||
* This file is part of librtmp. |
||||
* |
||||
* librtmp is free software; you can redistribute it and/or modify |
||||
* it under the terms of the GNU Lesser General Public License as |
||||
* published by the Free Software Foundation; either version 2.1, |
||||
* or (at your option) any later version. |
||||
* |
||||
* librtmp is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Lesser General Public License |
||||
* along with librtmp see the file COPYING. If not, write to |
||||
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
||||
* Boston, MA 02110-1301, USA. |
||||
* http://www.gnu.org/copyleft/lgpl.html
|
||||
*/ |
||||
|
||||
typedef enum { |
||||
HTTPRES_OK, /* result OK */ |
||||
HTTPRES_OK_NOT_MODIFIED, /* not modified since last request */ |
||||
HTTPRES_NOT_FOUND, /* not found */ |
||||
HTTPRES_BAD_REQUEST, /* client error */ |
||||
HTTPRES_SERVER_ERROR, /* server reported an error */ |
||||
HTTPRES_REDIRECTED, /* resource has been moved */ |
||||
HTTPRES_LOST_CONNECTION /* connection lost while waiting for data */ |
||||
} HTTPResult; |
||||
|
||||
struct HTTP_ctx { |
||||
char *date; |
||||
int size; |
||||
int status; |
||||
void *data; |
||||
}; |
||||
|
||||
typedef size_t (HTTP_read_callback)(void *ptr, size_t size, size_t nmemb, void *stream); |
||||
|
||||
HTTPResult HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb); |
||||
|
||||
#endif |
@ -0,0 +1,63 @@ |
||||
/*
|
||||
* Copyright (C) 2008-2009 Andrej Stepanchuk |
||||
* Copyright (C) 2009-2010 Howard Chu |
||||
* |
||||
* This file is part of librtmp. |
||||
* |
||||
* librtmp is free software; you can redistribute it and/or modify |
||||
* it under the terms of the GNU Lesser General Public License as |
||||
* published by the Free Software Foundation; either version 2.1, |
||||
* or (at your option) any later version. |
||||
* |
||||
* librtmp is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Lesser General Public License |
||||
* along with librtmp see the file COPYING. If not, write to |
||||
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
||||
* Boston, MA 02110-1301, USA. |
||||
* http://www.gnu.org/copyleft/lgpl.html
|
||||
*/ |
||||
|
||||
#ifndef __RTMP_LOG_H__ |
||||
#define __RTMP_LOG_H__ |
||||
|
||||
#include <stdio.h> |
||||
#include <stdarg.h> |
||||
#include <stdint.h> |
||||
|
||||
#ifdef __cplusplus |
||||
extern "C" { |
||||
#endif |
||||
/* Enable this to get full debugging output */ |
||||
/* #define _DEBUG */ |
||||
|
||||
#ifdef _DEBUG |
||||
#undef NODEBUG |
||||
#endif |
||||
|
||||
typedef enum |
||||
{ RTMP_LOGCRIT=0, RTMP_LOGERROR, RTMP_LOGWARNING, RTMP_LOGINFO, |
||||
RTMP_LOGDEBUG, RTMP_LOGDEBUG2, RTMP_LOGALL |
||||
} RTMP_LogLevel; |
||||
|
||||
extern RTMP_LogLevel RTMP_debuglevel; |
||||
|
||||
typedef void (RTMP_LogCallback)(int level, const char *fmt, va_list); |
||||
void RTMP_LogSetCallback(RTMP_LogCallback *cb); |
||||
void RTMP_LogSetOutput(FILE *file); |
||||
void RTMP_LogPrintf(const char *format, ...); |
||||
void RTMP_LogStatus(const char *format, ...); |
||||
void RTMP_Log(int level, const char *format, ...); |
||||
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len); |
||||
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len); |
||||
void RTMP_LogSetLevel(RTMP_LogLevel lvl); |
||||
RTMP_LogLevel RTMP_LogGetLevel(void); |
||||
|
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
|
||||
#endif |
@ -0,0 +1,345 @@ |
||||
#ifndef __RTMP_H__ |
||||
#define __RTMP_H__ |
||||
/*
|
||||
* Copyright (C) 2005-2008 Team XBMC |
||||
* http://www.xbmc.org
|
||||
* Copyright (C) 2008-2009 Andrej Stepanchuk |
||||
* Copyright (C) 2009-2010 Howard Chu |
||||
* |
||||
* This file is part of librtmp. |
||||
* |
||||
* librtmp is free software; you can redistribute it and/or modify |
||||
* it under the terms of the GNU Lesser General Public License as |
||||
* published by the Free Software Foundation; either version 2.1, |
||||
* or (at your option) any later version. |
||||
* |
||||
* librtmp is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU Lesser General Public License |
||||
* along with librtmp see the file COPYING. If not, write to |
||||
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, |
||||
* Boston, MA 02110-1301, USA. |
||||
* http://www.gnu.org/copyleft/lgpl.html
|
||||
*/ |
||||
|
||||
#if !defined(NO_CRYPTO) && !defined(CRYPTO) |
||||
#define CRYPTO |
||||
#endif |
||||
|
||||
#include <errno.h> |
||||
#include <stdint.h> |
||||
#include <stddef.h> |
||||
|
||||
#include "amf.h" |
||||
|
||||
#ifdef __cplusplus |
||||
extern "C" |
||||
{ |
||||
#endif |
||||
|
||||
#define RTMP_LIB_VERSION 0x020300 /* 2.3 */ |
||||
|
||||
#define RTMP_FEATURE_HTTP 0x01 |
||||
#define RTMP_FEATURE_ENC 0x02 |
||||
#define RTMP_FEATURE_SSL 0x04 |
||||
#define RTMP_FEATURE_MFP 0x08 /* not yet supported */ |
||||
#define RTMP_FEATURE_WRITE 0x10 /* publish, not play */ |
||||
#define RTMP_FEATURE_HTTP2 0x20 /* server-side rtmpt */ |
||||
|
||||
#define RTMP_PROTOCOL_UNDEFINED -1 |
||||
#define RTMP_PROTOCOL_RTMP 0 |
||||
#define RTMP_PROTOCOL_RTMPE RTMP_FEATURE_ENC |
||||
#define RTMP_PROTOCOL_RTMPT RTMP_FEATURE_HTTP |
||||
#define RTMP_PROTOCOL_RTMPS RTMP_FEATURE_SSL |
||||
#define RTMP_PROTOCOL_RTMPTE (RTMP_FEATURE_HTTP|RTMP_FEATURE_ENC) |
||||
#define RTMP_PROTOCOL_RTMPTS (RTMP_FEATURE_HTTP|RTMP_FEATURE_SSL) |
||||
#define RTMP_PROTOCOL_RTMFP RTMP_FEATURE_MFP |
||||
|
||||
#define RTMP_DEFAULT_CHUNKSIZE 128 |
||||
|
||||
/* needs to fit largest number of bytes recv() may return */ |
||||
#define RTMP_BUFFER_CACHE_SIZE (16*1024) |
||||
|
||||
#define RTMP_CHANNELS 65600 |
||||
|
||||
extern const char RTMPProtocolStringsLower[][7]; |
||||
extern const AVal RTMP_DefaultFlashVer; |
||||
extern int RTMP_ctrlC; |
||||
|
||||
uint32_t RTMP_GetTime(void); |
||||
|
||||
#define RTMP_PACKET_TYPE_AUDIO 0x08 |
||||
#define RTMP_PACKET_TYPE_VIDEO 0x09 |
||||
#define RTMP_PACKET_TYPE_INFO 0x12 |
||||
|
||||
#define RTMP_MAX_HEADER_SIZE 18 |
||||
|
||||
#define RTMP_PACKET_SIZE_LARGE 0 |
||||
#define RTMP_PACKET_SIZE_MEDIUM 1 |
||||
#define RTMP_PACKET_SIZE_SMALL 2 |
||||
#define RTMP_PACKET_SIZE_MINIMUM 3 |
||||
|
||||
typedef struct RTMPChunk |
||||
{ |
||||
int c_headerSize; |
||||
int c_chunkSize; |
||||
char *c_chunk; |
||||
char c_header[RTMP_MAX_HEADER_SIZE]; |
||||
} RTMPChunk; |
||||
|
||||
typedef struct RTMPPacket |
||||
{ |
||||
uint8_t m_headerType; |
||||
uint8_t m_packetType; |
||||
uint8_t m_hasAbsTimestamp; /* timestamp absolute or relative? */ |
||||
int m_nChannel; |
||||
uint32_t m_nTimeStamp; /* timestamp */ |
||||
int32_t m_nInfoField2; /* last 4 bytes in a long header */ |
||||
uint32_t m_nBodySize; |
||||
uint32_t m_nBytesRead; |
||||
RTMPChunk *m_chunk; |
||||
char *m_body; |
||||
} RTMPPacket; |
||||
|
||||
typedef struct RTMPSockBuf |
||||
{ |
||||
int sb_socket; |
||||
int sb_size; /* number of unprocessed bytes in buffer */ |
||||
char *sb_start; /* pointer into sb_pBuffer of next byte to process */ |
||||
char sb_buf[RTMP_BUFFER_CACHE_SIZE]; /* data read from socket */ |
||||
int sb_timedout; |
||||
void *sb_ssl; |
||||
} RTMPSockBuf; |
||||
|
||||
void RTMPPacket_Reset(RTMPPacket *p); |
||||
void RTMPPacket_Dump(RTMPPacket *p); |
||||
int RTMPPacket_Alloc(RTMPPacket *p, int nSize); |
||||
void RTMPPacket_Free(RTMPPacket *p); |
||||
|
||||
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize) |
||||
|
||||
typedef struct RTMP_LNK |
||||
{ |
||||
AVal hostname; |
||||
AVal sockshost; |
||||
|
||||
AVal playpath0; /* parsed from URL */ |
||||
AVal playpath; /* passed in explicitly */ |
||||
AVal tcUrl; |
||||
AVal swfUrl; |
||||
AVal pageUrl; |
||||
AVal app; |
||||
AVal auth; |
||||
AVal flashVer; |
||||
AVal subscribepath; |
||||
AVal token; |
||||
AMFObject extras; |
||||
int edepth; |
||||
|
||||
int seekTime; |
||||
int stopTime; |
||||
|
||||
#define RTMP_LF_AUTH 0x0001 /* using auth param */ |
||||
#define RTMP_LF_LIVE 0x0002 /* stream is live */ |
||||
#define RTMP_LF_SWFV 0x0004 /* do SWF verification */ |
||||
#define RTMP_LF_PLST 0x0008 /* send playlist before play */ |
||||
#define RTMP_LF_BUFX 0x0010 /* toggle stream on BufferEmpty msg */ |
||||
#define RTMP_LF_FTCU 0x0020 /* free tcUrl on close */ |
||||
int lFlags; |
||||
|
||||
int swfAge; |
||||
|
||||
int protocol; |
||||
int timeout; /* connection timeout in seconds */ |
||||
|
||||
unsigned short socksport; |
||||
unsigned short port; |
||||
|
||||
#ifdef CRYPTO |
||||
#define RTMP_SWF_HASHLEN 32 |
||||
void *dh; /* for encryption */ |
||||
void *rc4keyIn; |
||||
void *rc4keyOut; |
||||
|
||||
uint32_t SWFSize; |
||||
uint8_t SWFHash[RTMP_SWF_HASHLEN]; |
||||
char SWFVerificationResponse[RTMP_SWF_HASHLEN+10]; |
||||
#endif |
||||
} RTMP_LNK; |
||||
|
||||
/* state for read() wrapper */ |
||||
typedef struct RTMP_READ |
||||
{ |
||||
char *buf; |
||||
char *bufpos; |
||||
unsigned int buflen; |
||||
uint32_t timestamp; |
||||
uint8_t dataType; |
||||
uint8_t flags; |
||||
#define RTMP_READ_HEADER 0x01 |
||||
#define RTMP_READ_RESUME 0x02 |
||||
#define RTMP_READ_NO_IGNORE 0x04 |
||||
#define RTMP_READ_GOTKF 0x08 |
||||
#define RTMP_READ_GOTFLVK 0x10 |
||||
#define RTMP_READ_SEEKING 0x20 |
||||
int8_t status; |
||||
#define RTMP_READ_COMPLETE -3 |
||||
#define RTMP_READ_ERROR -2 |
||||
#define RTMP_READ_EOF -1 |
||||
#define RTMP_READ_IGNORE 0 |
||||
|
||||
/* if bResume == TRUE */ |
||||
uint8_t initialFrameType; |
||||
uint32_t nResumeTS; |
||||
char *metaHeader; |
||||
char *initialFrame; |
||||
uint32_t nMetaHeaderSize; |
||||
uint32_t nInitialFrameSize; |
||||
uint32_t nIgnoredFrameCounter; |
||||
uint32_t nIgnoredFlvFrameCounter; |
||||
} RTMP_READ; |
||||
|
||||
typedef struct RTMP_METHOD |
||||
{ |
||||
AVal name; |
||||
int num; |
||||
} RTMP_METHOD; |
||||
|
||||
typedef struct RTMP |
||||
{ |
||||
int m_inChunkSize; |
||||
int m_outChunkSize; |
||||
int m_nBWCheckCounter; |
||||
int m_nBytesIn; |
||||
int m_nBytesInSent; |
||||
int m_nBufferMS; |
||||
int m_stream_id; /* returned in _result from createStream */ |
||||
int m_mediaChannel; |
||||
uint32_t m_mediaStamp; |
||||
uint32_t m_pauseStamp; |
||||
int m_pausing; |
||||
int m_nServerBW; |
||||
int m_nClientBW; |
||||
uint8_t m_nClientBW2; |
||||
uint8_t m_bPlaying; |
||||
uint8_t m_bSendEncoding; |
||||
uint8_t m_bSendCounter; |
||||
|
||||
int m_numInvokes; |
||||
int m_numCalls; |
||||
RTMP_METHOD *m_methodCalls; /* remote method calls queue */ |
||||
|
||||
RTMPPacket *m_vecChannelsIn[RTMP_CHANNELS]; |
||||
RTMPPacket *m_vecChannelsOut[RTMP_CHANNELS]; |
||||
int m_channelTimestamp[RTMP_CHANNELS]; /* abs timestamp of last packet */ |
||||
|
||||
double m_fAudioCodecs; /* audioCodecs for the connect packet */ |
||||
double m_fVideoCodecs; /* videoCodecs for the connect packet */ |
||||
double m_fEncoding; /* AMF0 or AMF3 */ |
||||
|
||||
double m_fDuration; /* duration of stream in seconds */ |
||||
|
||||
int m_msgCounter; /* RTMPT stuff */ |
||||
int m_polling; |
||||
int m_resplen; |
||||
int m_unackd; |
||||
AVal m_clientID; |
||||
|
||||
RTMP_READ m_read; |
||||
RTMPPacket m_write; |
||||
RTMPSockBuf m_sb; |
||||
RTMP_LNK Link; |
||||
} RTMP; |
||||
|
||||
int RTMP_ParseURL(const char *url, int *protocol, AVal *host, |
||||
unsigned int *port, AVal *playpath, AVal *app); |
||||
|
||||
void RTMP_ParsePlaypath(AVal *in, AVal *out); |
||||
void RTMP_SetBufferMS(RTMP *r, int size); |
||||
void RTMP_UpdateBufferMS(RTMP *r); |
||||
|
||||
int RTMP_SetOpt(RTMP *r, const AVal *opt, AVal *arg); |
||||
int RTMP_SetupURL(RTMP *r, char *url); |
||||
void RTMP_SetupStream(RTMP *r, int protocol, |
||||
AVal *hostname, |
||||
unsigned int port, |
||||
AVal *sockshost, |
||||
AVal *playpath, |
||||
AVal *tcUrl, |
||||
AVal *swfUrl, |
||||
AVal *pageUrl, |
||||
AVal *app, |
||||
AVal *auth, |
||||
AVal *swfSHA256Hash, |
||||
uint32_t swfSize, |
||||
AVal *flashVer, |
||||
AVal *subscribepath, |
||||
int dStart, |
||||
int dStop, int bLiveStream, long int timeout); |
||||
|
||||
int RTMP_Connect(RTMP *r, RTMPPacket *cp); |
||||
struct sockaddr; |
||||
int RTMP_Connect0(RTMP *r, struct sockaddr *svc); |
||||
int RTMP_Connect1(RTMP *r, RTMPPacket *cp); |
||||
int RTMP_Serve(RTMP *r); |
||||
|
||||
int RTMP_ReadPacket(RTMP *r, RTMPPacket *packet); |
||||
int RTMP_SendPacket(RTMP *r, RTMPPacket *packet, int queue); |
||||
int RTMP_SendChunk(RTMP *r, RTMPChunk *chunk); |
||||
int RTMP_IsConnected(RTMP *r); |
||||
int RTMP_Socket(RTMP *r); |
||||
int RTMP_IsTimedout(RTMP *r); |
||||
double RTMP_GetDuration(RTMP *r); |
||||
int RTMP_ToggleStream(RTMP *r); |
||||
|
||||
int RTMP_ConnectStream(RTMP *r, int seekTime); |
||||
int RTMP_ReconnectStream(RTMP *r, int seekTime); |
||||
void RTMP_DeleteStream(RTMP *r); |
||||
int RTMP_GetNextMediaPacket(RTMP *r, RTMPPacket *packet); |
||||
int RTMP_ClientPacket(RTMP *r, RTMPPacket *packet); |
||||
|
||||
void RTMP_Init(RTMP *r); |
||||
void RTMP_Close(RTMP *r); |
||||
RTMP *RTMP_Alloc(void); |
||||
void RTMP_Free(RTMP *r); |
||||
void RTMP_EnableWrite(RTMP *r); |
||||
|
||||
int RTMP_LibVersion(void); |
||||
void RTMP_UserInterrupt(void); /* user typed Ctrl-C */ |
||||
|
||||
int RTMP_SendCtrl(RTMP *r, short nType, unsigned int nObject, |
||||
unsigned int nTime); |
||||
|
||||
/* caller probably doesn't know current timestamp, should
|
||||
* just use RTMP_Pause instead |
||||
*/ |
||||
int RTMP_SendPause(RTMP *r, int DoPause, int dTime); |
||||
int RTMP_Pause(RTMP *r, int DoPause); |
||||
|
||||
int RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name, |
||||
AMFObjectProperty * p); |
||||
|
||||
int RTMPSockBuf_Fill(RTMPSockBuf *sb); |
||||
int RTMPSockBuf_Send(RTMPSockBuf *sb, const char *buf, int len); |
||||
int RTMPSockBuf_Close(RTMPSockBuf *sb); |
||||
|
||||
int RTMP_SendCreateStream(RTMP *r); |
||||
int RTMP_SendSeek(RTMP *r, int dTime); |
||||
int RTMP_SendServerBW(RTMP *r); |
||||
int RTMP_SendClientBW(RTMP *r); |
||||
void RTMP_DropRequest(RTMP *r, int i, int freeit); |
||||
int RTMP_Read(RTMP *r, char *buf, int size); |
||||
int RTMP_Write(RTMP *r, const char *buf, int size); |
||||
|
||||
/* hashswf.c */ |
||||
int RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash, |
||||
int age); |
||||
|
||||
#ifdef __cplusplus |
||||
}; |
||||
#endif |
||||
|
||||
#endif |
@ -0,0 +1,962 @@ |
||||
/*****************************************************************************
|
||||
* include.x264.h: include.x264 public header |
||||
***************************************************************************** |
||||
* Copyright (C) 2003-2016 include.x264 project |
||||
* |
||||
* Authors: Laurent Aimar <fenrir@via.ecp.fr> |
||||
* Loren Merritt <lorenm@u.washington.edu> |
||||
* Fiona Glaser <fiona@include.x264.com> |
||||
* |
||||
* This program is free software; you can redistribute it and/or modify |
||||
* it under the terms of the GNU General Public License as published by |
||||
* the Free Software Foundation; either version 2 of the License, or |
||||
* (at your option) any later version. |
||||
* |
||||
* This program is distributed in the hope that it will be useful, |
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
||||
* GNU General Public License for more details. |
||||
* |
||||
* You should have received a copy of the GNU General Public License |
||||
* along with this program; if not, write to the Free Software |
||||
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02111, USA. |
||||
* |
||||
* This program is also available under a commercial proprietary license. |
||||
* For more information, contact us at licensing@include.x264.com. |
||||
*****************************************************************************/ |
||||
|
||||
#ifndef X264_X264_H |
||||
#define X264_X264_H |
||||
|
||||
#ifdef __cplusplus |
||||
extern "C" { |
||||
#endif |
||||
|
||||
#if !defined(_STDINT_H) && !defined(_STDINT_H_) && !defined(_STDINT_H_INCLUDED) && !defined(_STDINT) &&\ |
||||
!defined(_SYS_STDINT_H_) && !defined(_INTTYPES_H) && !defined(_INTTYPES_H_) && !defined(_INTTYPES) |
||||
# ifdef _MSC_VER |
||||
# pragma message("You must include stdint.h or inttypes.h before include.x264.h") |
||||
# else |
||||
# warning You must include stdint.h or inttypes.h before x264.h |
||||
# endif |
||||
#endif |
||||
|
||||
#include <stdarg.h> |
||||
#include <stdint.h> |
||||
|
||||
#include "x264_config.h" |
||||
|
||||
#define X264_BUILD 148 |
||||
|
||||
/* Application developers planning to link against a shared library version of
|
||||
* libx264 from a Microsoft Visual Studio or similar development environment |
||||
* will need to define X264_API_IMPORTS before including this header. |
||||
* This clause does not apply to MinGW, similar development environments, or non |
||||
* Windows platforms. */ |
||||
#ifdef X264_API_IMPORTS |
||||
#define X264_API __declspec(dllimport) |
||||
#else |
||||
#define X264_API |
||||
#endif |
||||
|
||||
/* x264_t:
|
||||
* opaque handler for encoder */ |
||||
typedef struct x264_t x264_t; |
||||
|
||||
/****************************************************************************
|
||||
* NAL structure and functions |
||||
****************************************************************************/ |
||||
|
||||
enum nal_unit_type_e |
||||
{ |
||||
NAL_UNKNOWN = 0, |
||||
NAL_SLICE = 1, |
||||
NAL_SLICE_DPA = 2, |
||||
NAL_SLICE_DPB = 3, |
||||
NAL_SLICE_DPC = 4, |
||||
NAL_SLICE_IDR = 5, /* ref_idc != 0 */ |
||||
NAL_SEI = 6, /* ref_idc == 0 */ |
||||
NAL_SPS = 7, |
||||
NAL_PPS = 8, |
||||
NAL_AUD = 9, |
||||
NAL_FILLER = 12, |
||||
/* ref_idc == 0 for 6,9,10,11,12 */ |
||||
}; |
||||
enum nal_priority_e |
||||
{ |
||||
NAL_PRIORITY_DISPOSABLE = 0, |
||||
NAL_PRIORITY_LOW = 1, |
||||
NAL_PRIORITY_HIGH = 2, |
||||
NAL_PRIORITY_HIGHEST = 3, |
||||
}; |
||||
|
||||
/* The data within the payload is already NAL-encapsulated; the ref_idc and type
|
||||
* are merely in the struct for easy access by the calling application. |
||||
* All data returned in an x264_nal_t, including the data in p_payload, is no longer |
||||
* valid after the next call to x264_encoder_encode. Thus it must be used or copied |
||||
* before calling x264_encoder_encode or x264_encoder_headers again. */ |
||||
typedef struct x264_nal_t |
||||
{ |
||||
int i_ref_idc; /* nal_priority_e */ |
||||
int i_type; /* nal_unit_type_e */ |
||||
int b_long_startcode; |
||||
int i_first_mb; /* If this NAL is a slice, the index of the first MB in the slice. */ |
||||
int i_last_mb; /* If this NAL is a slice, the index of the last MB in the slice. */ |
||||
|
||||
/* Size of payload (including any padding) in bytes. */ |
||||
int i_payload; |
||||
/* If param->b_annexb is set, Annex-B bytestream with startcode.
|
||||
* Otherwise, startcode is replaced with a 4-byte size. |
||||
* This size is the size used in mp4/similar muxing; it is equal to i_payload-4 */ |
||||
uint8_t *p_payload; |
||||
|
||||
/* Size of padding in bytes. */ |
||||
int i_padding; |
||||
} x264_nal_t; |
||||
|
||||
/****************************************************************************
|
||||
* Encoder parameters |
||||
****************************************************************************/ |
||||
/* CPU flags */ |
||||
|
||||
/* x86 */ |
||||
#define X264_CPU_CMOV 0x0000001 |
||||
#define X264_CPU_MMX 0x0000002 |
||||
#define X264_CPU_MMX2 0x0000004 /* MMX2 aka MMXEXT aka ISSE */ |
||||
#define X264_CPU_MMXEXT X264_CPU_MMX2 |
||||
#define X264_CPU_SSE 0x0000008 |
||||
#define X264_CPU_SSE2 0x0000010 |
||||
#define X264_CPU_SSE3 0x0000020 |
||||
#define X264_CPU_SSSE3 0x0000040 |
||||
#define X264_CPU_SSE4 0x0000080 /* SSE4.1 */ |
||||
#define X264_CPU_SSE42 0x0000100 /* SSE4.2 */ |
||||
#define X264_CPU_LZCNT 0x0000200 /* Phenom support for "leading zero count" instruction. */ |
||||
#define X264_CPU_AVX 0x0000400 /* AVX support: requires OS support even if YMM registers aren't used. */ |
||||
#define X264_CPU_XOP 0x0000800 /* AMD XOP */ |
||||
#define X264_CPU_FMA4 0x0001000 /* AMD FMA4 */ |
||||
#define X264_CPU_FMA3 0x0002000 /* FMA3 */ |
||||
#define X264_CPU_AVX2 0x0004000 /* AVX2 */ |
||||
#define X264_CPU_BMI1 0x0008000 /* BMI1 */ |
||||
#define X264_CPU_BMI2 0x0010000 /* BMI2 */ |
||||
/* x86 modifiers */ |
||||
#define X264_CPU_CACHELINE_32 0x0020000 /* avoid memory loads that span the border between two cachelines */ |
||||
#define X264_CPU_CACHELINE_64 0x0040000 /* 32/64 is the size of a cacheline in bytes */ |
||||
#define X264_CPU_SSE2_IS_SLOW 0x0080000 /* avoid most SSE2 functions on Athlon64 */ |
||||
#define X264_CPU_SSE2_IS_FAST 0x0100000 /* a few functions are only faster on Core2 and Phenom */ |
||||
#define X264_CPU_SLOW_SHUFFLE 0x0200000 /* The Conroe has a slow shuffle unit (relative to overall SSE performance) */ |
||||
#define X264_CPU_STACK_MOD4 0x0400000 /* if stack is only mod4 and not mod16 */ |
||||
#define X264_CPU_SLOW_CTZ 0x0800000 /* BSR/BSF x86 instructions are really slow on some CPUs */ |
||||
#define X264_CPU_SLOW_ATOM 0x1000000 /* The Atom is terrible: slow SSE unaligned loads, slow |
||||
* SIMD multiplies, slow SIMD variable shifts, slow pshufb, |
||||
* cacheline split penalties -- gather everything here that |
||||
* isn't shared by other CPUs to avoid making half a dozen |
||||
* new SLOW flags. */ |
||||
#define X264_CPU_SLOW_PSHUFB 0x2000000 /* such as on the Intel Atom */ |
||||
#define X264_CPU_SLOW_PALIGNR 0x4000000 /* such as on the AMD Bobcat */ |
||||
|
||||
/* PowerPC */ |
||||
#define X264_CPU_ALTIVEC 0x0000001 |
||||
|
||||
/* ARM and AArch64 */ |
||||
#define X264_CPU_ARMV6 0x0000001 |
||||
#define X264_CPU_NEON 0x0000002 /* ARM NEON */ |
||||
#define X264_CPU_FAST_NEON_MRC 0x0000004 /* Transfer from NEON to ARM register is fast (Cortex-A9) */ |
||||
#define X264_CPU_ARMV8 0x0000008 |
||||
|
||||
/* MIPS */ |
||||
#define X264_CPU_MSA 0x0000001 /* MIPS MSA */ |
||||
|
||||
/* Analyse flags */ |
||||
#define X264_ANALYSE_I4x4 0x0001 /* Analyse i4x4 */ |
||||
#define X264_ANALYSE_I8x8 0x0002 /* Analyse i8x8 (requires 8x8 transform) */ |
||||
#define X264_ANALYSE_PSUB16x16 0x0010 /* Analyse p16x8, p8x16 and p8x8 */ |
||||
#define X264_ANALYSE_PSUB8x8 0x0020 /* Analyse p8x4, p4x8, p4x4 */ |
||||
#define X264_ANALYSE_BSUB16x16 0x0100 /* Analyse b16x8, b8x16 and b8x8 */ |
||||
#define X264_DIRECT_PRED_NONE 0 |
||||
#define X264_DIRECT_PRED_SPATIAL 1 |
||||
#define X264_DIRECT_PRED_TEMPORAL 2 |
||||
#define X264_DIRECT_PRED_AUTO 3 |
||||
#define X264_ME_DIA 0 |
||||
#define X264_ME_HEX 1 |
||||
#define X264_ME_UMH 2 |
||||
#define X264_ME_ESA 3 |
||||
#define X264_ME_TESA 4 |
||||
#define X264_CQM_FLAT 0 |
||||
#define X264_CQM_JVT 1 |
||||
#define X264_CQM_CUSTOM 2 |
||||
#define X264_RC_CQP 0 |
||||
#define X264_RC_CRF 1 |
||||
#define X264_RC_ABR 2 |
||||
#define X264_QP_AUTO 0 |
||||
#define X264_AQ_NONE 0 |
||||
#define X264_AQ_VARIANCE 1 |
||||
#define X264_AQ_AUTOVARIANCE 2 |
||||
#define X264_AQ_AUTOVARIANCE_BIASED 3 |
||||
#define X264_B_ADAPT_NONE 0 |
||||
#define X264_B_ADAPT_FAST 1 |
||||
#define X264_B_ADAPT_TRELLIS 2 |
||||
#define X264_WEIGHTP_NONE 0 |
||||
#define X264_WEIGHTP_SIMPLE 1 |
||||
#define X264_WEIGHTP_SMART 2 |
||||
#define X264_B_PYRAMID_NONE 0 |
||||
#define X264_B_PYRAMID_STRICT 1 |
||||
#define X264_B_PYRAMID_NORMAL 2 |
||||
#define X264_KEYINT_MIN_AUTO 0 |
||||
#define X264_KEYINT_MAX_INFINITE (1<<30) |
||||
|
||||
static const char * const x264_direct_pred_names[] = { "none", "spatial", "temporal", "auto", 0 }; |
||||
static const char * const x264_motion_est_names[] = { "dia", "hex", "umh", "esa", "tesa", 0 }; |
||||
static const char * const x264_b_pyramid_names[] = { "none", "strict", "normal", 0 }; |
||||
static const char * const x264_overscan_names[] = { "undef", "show", "crop", 0 }; |
||||
static const char * const x264_vidformat_names[] = { "component", "pal", "ntsc", "secam", "mac", "undef", 0 }; |
||||
static const char * const x264_fullrange_names[] = { "off", "on", 0 }; |
||||
static const char * const x264_colorprim_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "film", "bt2020", "smpte428", |
||||
"smpte431", "smpte432", 0 }; |
||||
static const char * const x264_transfer_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "linear", "log100", "log316", |
||||
"iec61966-2-4", "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12", "smpte2084", "smpte428", 0 }; |
||||
static const char * const x264_colmatrix_names[] = { "GBR", "bt709", "undef", "", "fcc", "bt470bg", "smpte170m", "smpte240m", "YCgCo", "bt2020nc", "bt2020c", |
||||
"smpte2085", 0 }; |
||||
static const char * const x264_nal_hrd_names[] = { "none", "vbr", "cbr", 0 }; |
||||
|
||||
/* Colorspace type */ |
||||
#define X264_CSP_MASK 0x00ff /* */ |
||||
#define X264_CSP_NONE 0x0000 /* Invalid mode */ |
||||
#define X264_CSP_I420 0x0001 /* yuv 4:2:0 planar */ |
||||
#define X264_CSP_YV12 0x0002 /* yvu 4:2:0 planar */ |
||||
#define X264_CSP_NV12 0x0003 /* yuv 4:2:0, with one y plane and one packed u+v */ |
||||
#define X264_CSP_NV21 0x0004 /* yuv 4:2:0, with one y plane and one packed v+u */ |
||||
#define X264_CSP_I422 0x0005 /* yuv 4:2:2 planar */ |
||||
#define X264_CSP_YV16 0x0006 /* yvu 4:2:2 planar */ |
||||
#define X264_CSP_NV16 0x0007 /* yuv 4:2:2, with one y plane and one packed u+v */ |
||||
#define X264_CSP_V210 0x0008 /* 10-bit yuv 4:2:2 packed in 32 */ |
||||
#define X264_CSP_I444 0x0009 /* yuv 4:4:4 planar */ |
||||
#define X264_CSP_YV24 0x000a /* yvu 4:4:4 planar */ |
||||
#define X264_CSP_BGR 0x000b /* packed bgr 24bits */ |
||||
#define X264_CSP_BGRA 0x000c /* packed bgr 32bits */ |
||||
#define X264_CSP_RGB 0x000d /* packed rgb 24bits */ |
||||
#define X264_CSP_MAX 0x000e /* end of list */ |
||||
#define X264_CSP_VFLIP 0x1000 /* the csp is vertically flipped */ |
||||
#define X264_CSP_HIGH_DEPTH 0x2000 /* the csp has a depth of 16 bits per pixel component */ |
||||
|
||||
/* Slice type */ |
||||
#define X264_TYPE_AUTO 0x0000 /* Let include.x264 choose the right type */ |
||||
#define X264_TYPE_IDR 0x0001 |
||||
#define X264_TYPE_I 0x0002 |
||||
#define X264_TYPE_P 0x0003 |
||||
#define X264_TYPE_BREF 0x0004 /* Non-disposable B-frame */ |
||||
#define X264_TYPE_B 0x0005 |
||||
#define X264_TYPE_KEYFRAME 0x0006 /* IDR or I depending on b_open_gop option */ |
||||
#define IS_X264_TYPE_I(x) ((x)==X264_TYPE_I || (x)==X264_TYPE_IDR || (x)==X264_TYPE_KEYFRAME) |
||||
#define IS_X264_TYPE_B(x) ((x)==X264_TYPE_B || (x)==X264_TYPE_BREF) |
||||
|
||||
/* Log level */ |
||||
#define X264_LOG_NONE (-1) |
||||
#define X264_LOG_ERROR 0 |
||||
#define X264_LOG_WARNING 1 |
||||
#define X264_LOG_INFO 2 |
||||
#define X264_LOG_DEBUG 3 |
||||
|
||||
/* Threading */ |
||||
#define X264_THREADS_AUTO 0 /* Automatically select optimal number of threads */ |
||||
#define X264_SYNC_LOOKAHEAD_AUTO (-1) /* Automatically select optimal lookahead thread buffer size */ |
||||
|
||||
/* HRD */ |
||||
#define X264_NAL_HRD_NONE 0 |
||||
#define X264_NAL_HRD_VBR 1 |
||||
#define X264_NAL_HRD_CBR 2 |
||||
|
||||
/* Zones: override ratecontrol or other options for specific sections of the video.
|
||||
* See x264_encoder_reconfig() for which options can be changed. |
||||
* If zones overlap, whichever comes later in the list takes precedence. */ |
||||
typedef struct x264_zone_t |
||||
{ |
||||
int i_start, i_end; /* range of frame numbers */ |
||||
int b_force_qp; /* whether to use qp vs bitrate factor */ |
||||
int i_qp; |
||||
float f_bitrate_factor; |
||||
struct x264_param_t *param; |
||||
} x264_zone_t; |
||||
|
||||
typedef struct x264_param_t |
||||
{ |
||||
/* CPU flags */ |
||||
unsigned int cpu; |
||||
int i_threads; /* encode multiple frames in parallel */ |
||||
int i_lookahead_threads; /* multiple threads for lookahead analysis */ |
||||
int b_sliced_threads; /* Whether to use slice-based threading. */ |
||||
int b_deterministic; /* whether to allow non-deterministic optimizations when threaded */ |
||||
int b_cpu_independent; /* force canonical behavior rather than cpu-dependent optimal algorithms */ |
||||
int i_sync_lookahead; /* threaded lookahead buffer */ |
||||
|
||||
/* Video Properties */ |
||||
int i_width; |
||||
int i_height; |
||||
int i_csp; /* CSP of encoded bitstream */ |
||||
int i_level_idc; |
||||
int i_frame_total; /* number of frames to encode if known, else 0 */ |
||||
|
||||
/* NAL HRD
|
||||
* Uses Buffering and Picture Timing SEIs to signal HRD |
||||
* The HRD in H.264 was not designed with VFR in mind. |
||||
* It is therefore not recommendeded to use NAL HRD with VFR. |
||||
* Furthermore, reconfiguring the VBV (via x264_encoder_reconfig) |
||||
* will currently generate invalid HRD. */ |
||||
int i_nal_hrd; |
||||
|
||||
struct |
||||
{ |
||||
/* they will be reduced to be 0 < x <= 65535 and prime */ |
||||
int i_sar_height; |
||||
int i_sar_width; |
||||
|
||||
int i_overscan; /* 0=undef, 1=no overscan, 2=overscan */ |
||||
|
||||
/* see h264 annex E for the values of the following */ |
||||
int i_vidformat; |
||||
int b_fullrange; |
||||
int i_colorprim; |
||||
int i_transfer; |
||||
int i_colmatrix; |
||||
int i_chroma_loc; /* both top & bottom */ |
||||
} vui; |
||||
|
||||
/* Bitstream parameters */ |
||||
int i_frame_reference; /* Maximum number of reference frames */ |
||||
int i_dpb_size; /* Force a DPB size larger than that implied by B-frames and reference frames.
|
||||
* Useful in combination with interactive error resilience. */ |
||||
int i_keyint_max; /* Force an IDR keyframe at this interval */ |
||||
int i_keyint_min; /* Scenecuts closer together than this are coded as I, not IDR. */ |
||||
int i_scenecut_threshold; /* how aggressively to insert extra I frames */ |
||||
int b_intra_refresh; /* Whether or not to use periodic intra refresh instead of IDR frames. */ |
||||
|
||||
int i_bframe; /* how many b-frame between 2 references pictures */ |
||||
int i_bframe_adaptive; |
||||
int i_bframe_bias; |
||||
int i_bframe_pyramid; /* Keep some B-frames as references: 0=off, 1=strict hierarchical, 2=normal */ |
||||
int b_open_gop; |
||||
int b_bluray_compat; |
||||
int i_avcintra_class; |
||||
|
||||
int b_deblocking_filter; |
||||
int i_deblocking_filter_alphac0; /* [-6, 6] -6 light filter, 6 strong */ |
||||
int i_deblocking_filter_beta; /* [-6, 6] idem */ |
||||
|
||||
int b_cabac; |
||||
int i_cabac_init_idc; |
||||
|
||||
int b_interlaced; |
||||
int b_constrained_intra; |
||||
|
||||
int i_cqm_preset; |
||||
char *psz_cqm_file; /* filename (in UTF-8) of CQM file, JM format */ |
||||
uint8_t cqm_4iy[16]; /* used only if i_cqm_preset == X264_CQM_CUSTOM */ |
||||
uint8_t cqm_4py[16]; |
||||
uint8_t cqm_4ic[16]; |
||||
uint8_t cqm_4pc[16]; |
||||
uint8_t cqm_8iy[64]; |
||||
uint8_t cqm_8py[64]; |
||||
uint8_t cqm_8ic[64]; |
||||
uint8_t cqm_8pc[64]; |
||||
|
||||
/* Log */ |
||||
void (*pf_log)( void *, int i_level, const char *psz, va_list ); |
||||
void *p_log_private; |
||||
int i_log_level; |
||||
int b_full_recon; /* fully reconstruct frames, even when not necessary for encoding. Implied by psz_dump_yuv */ |
||||
char *psz_dump_yuv; /* filename (in UTF-8) for reconstructed frames */ |
||||
|
||||
/* Encoder analyser parameters */ |
||||
struct |
||||
{ |
||||
unsigned int intra; /* intra partitions */ |
||||
unsigned int inter; /* inter partitions */ |
||||
|
||||
int b_transform_8x8; |
||||
int i_weighted_pred; /* weighting for P-frames */ |
||||
int b_weighted_bipred; /* implicit weighting for B-frames */ |
||||
int i_direct_mv_pred; /* spatial vs temporal mv prediction */ |
||||
int i_chroma_qp_offset; |
||||
|
||||
int i_me_method; /* motion estimation algorithm to use (X264_ME_*) */ |
||||
int i_me_range; /* integer pixel motion estimation search range (from predicted mv) */ |
||||
int i_mv_range; /* maximum length of a mv (in pixels). -1 = auto, based on level */ |
||||
int i_mv_range_thread; /* minimum space between threads. -1 = auto, based on number of threads. */ |
||||
int i_subpel_refine; /* subpixel motion estimation quality */ |
||||
int b_chroma_me; /* chroma ME for subpel and mode decision in P-frames */ |
||||
int b_mixed_references; /* allow each mb partition to have its own reference number */ |
||||
int i_trellis; /* trellis RD quantization */ |
||||
int b_fast_pskip; /* early SKIP detection on P-frames */ |
||||
int b_dct_decimate; /* transform coefficient thresholding on P-frames */ |
||||
int i_noise_reduction; /* adaptive pseudo-deadzone */ |
||||
float f_psy_rd; /* Psy RD strength */ |
||||
float f_psy_trellis; /* Psy trellis strength */ |
||||
int b_psy; /* Toggle all psy optimizations */ |
||||
|
||||
int b_mb_info; /* Use input mb_info data in x264_picture_t */ |
||||
int b_mb_info_update; /* Update the values in mb_info according to the results of encoding. */ |
||||
|
||||
/* the deadzone size that will be used in luma quantization */ |
||||
int i_luma_deadzone[2]; /* {inter, intra} */ |
||||
|
||||
int b_psnr; /* compute and print PSNR stats */ |
||||
int b_ssim; /* compute and print SSIM stats */ |
||||
} analyse; |
||||
|
||||
/* Rate control parameters */ |
||||
struct |
||||
{ |
||||
int i_rc_method; /* X264_RC_* */ |
||||
|
||||
int i_qp_constant; /* 0 to (51 + 6*(x264_bit_depth-8)). 0=lossless */ |
||||
int i_qp_min; /* min allowed QP value */ |
||||
int i_qp_max; /* max allowed QP value */ |
||||
int i_qp_step; /* max QP step between frames */ |
||||
|
||||
int i_bitrate; |
||||
float f_rf_constant; /* 1pass VBR, nominal QP */ |
||||
float f_rf_constant_max; /* In CRF mode, maximum CRF as caused by VBV */ |
||||
float f_rate_tolerance; |
||||
int i_vbv_max_bitrate; |
||||
int i_vbv_buffer_size; |
||||
float f_vbv_buffer_init; /* <=1: fraction of buffer_size. >1: kbit */ |
||||
float f_ip_factor; |
||||
float f_pb_factor; |
||||
|
||||
/* VBV filler: force CBR VBV and use filler bytes to ensure hard-CBR.
|
||||
* Implied by NAL-HRD CBR. */ |
||||
int b_filler; |
||||
|
||||
int i_aq_mode; /* psy adaptive QP. (X264_AQ_*) */ |
||||
float f_aq_strength; |
||||
int b_mb_tree; /* Macroblock-tree ratecontrol. */ |
||||
int i_lookahead; |
||||
|
||||
/* 2pass */ |
||||
int b_stat_write; /* Enable stat writing in psz_stat_out */ |
||||
char *psz_stat_out; /* output filename (in UTF-8) of the 2pass stats file */ |
||||
int b_stat_read; /* Read stat from psz_stat_in and use it */ |
||||
char *psz_stat_in; /* input filename (in UTF-8) of the 2pass stats file */ |
||||
|
||||
/* 2pass params (same as ffmpeg ones) */ |
||||
float f_qcompress; /* 0.0 => cbr, 1.0 => constant qp */ |
||||
float f_qblur; /* temporally blur quants */ |
||||
float f_complexity_blur; /* temporally blur complexity */ |
||||
x264_zone_t *zones; /* ratecontrol overrides */ |
||||
int i_zones; /* number of zone_t's */ |
||||
char *psz_zones; /* alternate method of specifying zones */ |
||||
} rc; |
||||
|
||||
/* Cropping Rectangle parameters: added to those implicitly defined by
|
||||
non-mod16 video resolutions. */ |
||||
struct |
||||
{ |
||||
unsigned int i_left; |
||||
unsigned int i_top; |
||||
unsigned int i_right; |
||||
unsigned int i_bottom; |
||||
} crop_rect; |
||||
|
||||
/* frame packing arrangement flag */ |
||||
int i_frame_packing; |
||||
|
||||
/* Muxing parameters */ |
||||
int b_aud; /* generate access unit delimiters */ |
||||
int b_repeat_headers; /* put SPS/PPS before each keyframe */ |
||||
int b_annexb; /* if set, place start codes (4 bytes) before NAL units,
|
||||
* otherwise place size (4 bytes) before NAL units. */ |
||||
int i_sps_id; /* SPS and PPS id number */ |
||||
int b_vfr_input; /* VFR input. If 1, use timebase and timestamps for ratecontrol purposes.
|
||||
* If 0, use fps only. */ |
||||
int b_pulldown; /* use explicity set timebase for CFR */ |
||||
uint32_t i_fps_num; |
||||
uint32_t i_fps_den; |
||||
uint32_t i_timebase_num; /* Timebase numerator */ |
||||
uint32_t i_timebase_den; /* Timebase denominator */ |
||||
|
||||
int b_tff; |
||||
|
||||
/* Pulldown:
|
||||
* The correct pic_struct must be passed with each input frame. |
||||
* The input timebase should be the timebase corresponding to the output framerate. This should be constant. |
||||
* e.g. for 3:2 pulldown timebase should be 1001/30000 |
||||
* The PTS passed with each frame must be the PTS of the frame after pulldown is applied. |
||||
* Frame doubling and tripling require b_vfr_input set to zero (see H.264 Table D-1) |
||||
* |
||||
* Pulldown changes are not clearly defined in H.264. Therefore, it is the calling app's responsibility to manage this. |
||||
*/ |
||||
|
||||
int b_pic_struct; |
||||
|
||||
/* Fake Interlaced.
|
||||
* |
||||
* Used only when b_interlaced=0. Setting this flag makes it possible to flag the stream as PAFF interlaced yet |
||||
* encode all frames progessively. It is useful for encoding 25p and 30p Blu-Ray streams. |
||||
*/ |
||||
|
||||
int b_fake_interlaced; |
||||
|
||||
/* Don't optimize header parameters based on video content, e.g. ensure that splitting an input video, compressing
|
||||
* each part, and stitching them back together will result in identical SPS/PPS. This is necessary for stitching |
||||
* with container formats that don't allow multiple SPS/PPS. */ |
||||
int b_stitchable; |
||||
|
||||
int b_opencl; /* use OpenCL when available */ |
||||
int i_opencl_device; /* specify count of GPU devices to skip, for CLI users */ |
||||
void *opencl_device_id; /* pass explicit cl_device_id as void*, for API users */ |
||||
char *psz_clbin_file; /* filename (in UTF-8) of the compiled OpenCL kernel cache file */ |
||||
|
||||
/* Slicing parameters */ |
||||
int i_slice_max_size; /* Max size per slice in bytes; includes estimated NAL overhead. */ |
||||
int i_slice_max_mbs; /* Max number of MBs per slice; overrides i_slice_count. */ |
||||
int i_slice_min_mbs; /* Min number of MBs per slice */ |
||||
int i_slice_count; /* Number of slices per frame: forces rectangular slices. */ |
||||
int i_slice_count_max; /* Absolute cap on slices per frame; stops applying slice-max-size
|
||||
* and slice-max-mbs if this is reached. */ |
||||
|
||||
/* Optional callback for freeing this x264_param_t when it is done being used.
|
||||
* Only used when the x264_param_t sits in memory for an indefinite period of time, |
||||
* i.e. when an x264_param_t is passed to x264_t in an x264_picture_t or in zones. |
||||
* Not used when x264_encoder_reconfig is called directly. */ |
||||
void (*param_free)( void* ); |
||||
|
||||
/* Optional low-level callback for low-latency encoding. Called for each output NAL unit
|
||||
* immediately after the NAL unit is finished encoding. This allows the calling application |
||||
* to begin processing video data (e.g. by sending packets over a network) before the frame |
||||
* is done encoding. |
||||
* |
||||
* This callback MUST do the following in order to work correctly: |
||||
* 1) Have available an output buffer of at least size nal->i_payload*3/2 + 5 + 64. |
||||
* 2) Call x264_nal_encode( h, dst, nal ), where dst is the output buffer. |
||||
* After these steps, the content of nal is valid and can be used in the same way as if |
||||
* the NAL unit were output by x264_encoder_encode. |
||||
* |
||||
* This does not need to be synchronous with the encoding process: the data pointed to |
||||
* by nal (both before and after x264_nal_encode) will remain valid until the next |
||||
* x264_encoder_encode call. The callback must be re-entrant. |
||||
* |
||||
* This callback does not work with frame-based threads; threads must be disabled |
||||
* or sliced-threads enabled. This callback also does not work as one would expect |
||||
* with HRD -- since the buffering period SEI cannot be calculated until the frame |
||||
* is finished encoding, it will not be sent via this callback. |
||||
* |
||||
* Note also that the NALs are not necessarily returned in order when sliced threads is |
||||
* enabled. Accordingly, the variables i_first_mb and i_last_mb are available in
||||
* x264_nal_t to help the calling application reorder the slices if necessary. |
||||
* |
||||
* When this callback is enabled, x264_encoder_encode does not return valid NALs; |
||||
* the calling application is expected to acquire all output NALs through the callback. |
||||
* |
||||
* It is generally sensible to combine this callback with a use of slice-max-mbs or |
||||
* slice-max-size. |
||||
* |
||||
* The opaque pointer is the opaque pointer from the input frame associated with this |
||||
* NAL unit. This helps distinguish between nalu_process calls from different sources, |
||||
* e.g. if doing multiple encodes in one process. |
||||
*/ |
||||
void (*nalu_process) ( x264_t *h, x264_nal_t *nal, void *opaque ); |
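/* Illustrative sketch (not part of the original header): a minimal nalu_process
 * callback under the rules above, assuming the scratch buffer is always at least
 * nal->i_payload*3/2 + 5 + 64 bytes and send_to_network is a hypothetical sender.
 *
 *   static void on_nal( x264_t *h, x264_nal_t *nal, void *opaque )
 *   {
 *       static uint8_t scratch[1024 * 1024];
 *       x264_nal_encode( h, scratch, nal );                 // after this, nal's payload is valid
 *       send_to_network( scratch, nal->i_payload, opaque );
 *   }
 */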
||||
} x264_param_t; |
||||
|
||||
void x264_nal_encode( x264_t *h, uint8_t *dst, x264_nal_t *nal ); |
||||
|
||||
/****************************************************************************
|
||||
* H.264 level restriction information |
||||
****************************************************************************/ |
||||
|
||||
typedef struct x264_level_t |
||||
{ |
||||
int level_idc; |
||||
int mbps; /* max macroblock processing rate (macroblocks/sec) */ |
||||
int frame_size; /* max frame size (macroblocks) */ |
||||
int dpb; /* max decoded picture buffer (mbs) */ |
||||
int bitrate; /* max bitrate (kbit/sec) */ |
||||
int cpb; /* max vbv buffer (kbit) */ |
||||
int mv_range; /* max vertical mv component range (pixels) */ |
||||
int mvs_per_2mb; /* max mvs per 2 consecutive mbs. */ |
||||
int slice_rate; /* ?? */ |
||||
int mincr; /* min compression ratio */ |
||||
int bipred8x8; /* limit bipred to >=8x8 */ |
||||
int direct8x8; /* limit b_direct to >=8x8 */ |
||||
int frame_only; /* forbid interlacing */ |
||||
} x264_level_t; |
||||
|
||||
/* all of the levels defined in the standard, terminated by .level_idc=0 */ |
||||
X264_API extern const x264_level_t x264_levels[]; |
||||
|
||||
/****************************************************************************
|
||||
* Basic parameter handling functions |
||||
****************************************************************************/ |
||||
|
||||
/* x264_param_default:
|
||||
* fill x264_param_t with default values and do CPU detection */ |
||||
void x264_param_default( x264_param_t * ); |
||||
|
||||
/* x264_param_parse:
|
||||
* set one parameter by name. |
||||
* returns 0 on success, or returns one of the following errors. |
||||
* note: BAD_VALUE occurs only if it can't even parse the value, |
||||
* numerical range is not checked until x264_encoder_open() or |
||||
* x264_encoder_reconfig(). |
||||
* value=NULL means "true" for boolean options, but is a BAD_VALUE for non-booleans. */ |
||||
#define X264_PARAM_BAD_NAME (-1) |
||||
#define X264_PARAM_BAD_VALUE (-2) |
||||
int x264_param_parse( x264_param_t *, const char *name, const char *value ); |
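/* Illustrative example (not part of the original header):
 *   x264_param_parse( &p, "keyint", "250" );       returns 0 on success
 *   x264_param_parse( &p, "not-an-option", "1" );  returns X264_PARAM_BAD_NAME */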
||||
|
||||
/****************************************************************************
|
||||
* Advanced parameter handling functions |
||||
****************************************************************************/ |
||||
|
||||
/* These functions expose the full power of x264's preset-tune-profile system for
||||
* easy adjustment of large numbers of internal parameters. |
||||
* |
||||
* In order to replicate x264CLI's option handling, these functions MUST be called |
||||
* in the following order: |
||||
* 1) x264_param_default_preset |
||||
* 2) Custom user options (via param_parse or directly assigned variables) |
||||
* 3) x264_param_apply_fastfirstpass |
||||
* 4) x264_param_apply_profile |
||||
* |
||||
* Additionally, x264CLI does not apply step 3 if the preset chosen is "placebo" |
||||
* or --slow-firstpass is set. */ |
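/* Illustrative sketch (not part of the original header) of the call order above,
 * for a hypothetical low-latency 1280x720 configuration:
 *
 *   x264_param_t p;
 *   x264_param_default_preset( &p, "veryfast", "zerolatency" );  // step 1
 *   p.i_width = 1280; p.i_height = 720;                          // step 2: custom options
 *   x264_param_apply_fastfirstpass( &p );                        // step 3
 *   x264_param_apply_profile( &p, "high" );                      // step 4
 */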
||||
|
||||
/* x264_param_default_preset:
|
||||
* The same as x264_param_default, but also use the passed preset and tune |
||||
* to modify the default settings. |
||||
* (either can be NULL, which implies no preset or no tune, respectively) |
||||
* |
||||
* Currently available presets are, ordered from fastest to slowest: */ |
||||
static const char * const x264_preset_names[] = { "ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo", 0 }; |
||||
|
||||
/* The presets can also be indexed numerically, as in:
|
||||
* x264_param_default_preset( &param, "3", ... )
||||
* with ultrafast mapping to "0" and placebo mapping to "9". This mapping may |
||||
* of course change if new presets are added in between, but will always be |
||||
* ordered from fastest to slowest. |
||||
* |
||||
* Warning: the speed of these presets scales dramatically. Ultrafast is a full |
||||
* 100 times faster than placebo! |
||||
* |
||||
* Currently available tunings are: */ |
||||
static const char * const x264_tune_names[] = { "film", "animation", "grain", "stillimage", "psnr", "ssim", "fastdecode", "zerolatency", 0 }; |
||||
|
||||
/* Multiple tunings can be used if separated by a delimiter in ",./-+",
|
||||
* however multiple psy tunings cannot be used. |
||||
* film, animation, grain, stillimage, psnr, and ssim are psy tunings. |
||||
* |
||||
* returns 0 on success, negative on failure (e.g. invalid preset/tune name). */ |
||||
int x264_param_default_preset( x264_param_t *, const char *preset, const char *tune ); |
||||
|
||||
/* x264_param_apply_fastfirstpass:
|
||||
* If first-pass mode is set (rc.b_stat_read == 0, rc.b_stat_write == 1), |
||||
* modify the encoder settings to disable options generally not useful on |
||||
* the first pass. */ |
||||
void x264_param_apply_fastfirstpass( x264_param_t * ); |
||||
|
||||
/* x264_param_apply_profile:
|
||||
* Applies the restrictions of the given profile. |
||||
* Currently available profiles are, from most to least restrictive: */ |
||||
static const char * const x264_profile_names[] = { "baseline", "main", "high", "high10", "high422", "high444", 0 }; |
||||
|
||||
/* (can be NULL, in which case the function will do nothing)
|
||||
* |
||||
* Does NOT guarantee that the given profile will be used: if the restrictions |
||||
* of "High" are applied to settings that are already Baseline-compatible, the |
||||
* stream will remain baseline. In short, it does not increase settings, only |
||||
* decrease them. |
||||
* |
||||
* returns 0 on success, negative on failure (e.g. invalid profile name). */ |
||||
int x264_param_apply_profile( x264_param_t *, const char *profile ); |
||||
|
||||
/****************************************************************************
|
||||
* Picture structures and functions |
||||
****************************************************************************/ |
||||
|
||||
/* x264_bit_depth:
|
||||
* Specifies the number of bits per pixel that x264 uses. This is also the
* bit depth that x264 encodes in. If this value is > 8, x264 will read
||||
* two bytes of input data for each pixel sample, and expect the upper |
||||
* (16-x264_bit_depth) bits to be zero. |
||||
* Note: The flag X264_CSP_HIGH_DEPTH must be used to specify the |
||||
* colorspace depth as well. */ |
||||
X264_API extern const int x264_bit_depth; |
||||
|
||||
/* x264_chroma_format:
|
||||
* Specifies the chroma formats that x264 supports encoding. When this
* value is non-zero, then it represents a X264_CSP_* that is the only
* chroma format that x264 supports encoding. If the value is 0 then
||||
* there are no restrictions. */ |
||||
X264_API extern const int x264_chroma_format; |
||||
|
||||
enum pic_struct_e |
||||
{ |
||||
PIC_STRUCT_AUTO = 0, // automatically decide (default)
|
||||
PIC_STRUCT_PROGRESSIVE = 1, // progressive frame
|
||||
// "TOP" and "BOTTOM" are not supported in x264 (PAFF only)
||||
PIC_STRUCT_TOP_BOTTOM = 4, // top field followed by bottom
|
||||
PIC_STRUCT_BOTTOM_TOP = 5, // bottom field followed by top
|
||||
PIC_STRUCT_TOP_BOTTOM_TOP = 6, // top field, bottom field, top field repeated
|
||||
PIC_STRUCT_BOTTOM_TOP_BOTTOM = 7, // bottom field, top field, bottom field repeated
|
||||
PIC_STRUCT_DOUBLE = 8, // double frame
|
||||
PIC_STRUCT_TRIPLE = 9, // triple frame
|
||||
}; |
||||
|
||||
typedef struct x264_hrd_t |
||||
{ |
||||
double cpb_initial_arrival_time; |
||||
double cpb_final_arrival_time; |
||||
double cpb_removal_time; |
||||
|
||||
double dpb_output_time; |
||||
} x264_hrd_t; |
||||
|
||||
/* Arbitrary user SEI:
|
||||
* Payload size is in bytes and the payload pointer must be valid. |
||||
* Payload types and syntax can be found in Annex D of the H.264 Specification. |
||||
* SEI payload alignment bits as described in Annex D must be included at the |
||||
* end of the payload if needed. |
||||
* The payload should not be NAL-encapsulated. |
||||
* Payloads are written first in order of input, apart from in the case when HRD |
||||
* is enabled where payloads are written after the Buffering Period SEI. */ |
||||
|
||||
typedef struct x264_sei_payload_t |
||||
{ |
||||
int payload_size; |
||||
int payload_type; |
||||
uint8_t *payload; |
||||
} x264_sei_payload_t; |
||||
|
||||
typedef struct x264_sei_t |
||||
{ |
||||
int num_payloads; |
||||
x264_sei_payload_t *payloads; |
||||
/* In: optional callback to free each payload AND x264_sei_payload_t when used. */ |
||||
void (*sei_free)( void* ); |
||||
} x264_sei_t; |
||||
|
||||
typedef struct x264_image_t |
||||
{ |
||||
int i_csp; /* Colorspace */ |
||||
int i_plane; /* Number of image planes */ |
||||
int i_stride[4]; /* Strides for each plane */ |
||||
uint8_t *plane[4]; /* Pointers to each plane */ |
||||
} x264_image_t; |
||||
|
||||
typedef struct x264_image_properties_t |
||||
{ |
||||
/* All arrays of data here are ordered as follows:
|
||||
* each array contains one offset per macroblock, in raster scan order. In interlaced |
||||
* mode, top-field MBs and bottom-field MBs are interleaved at the row level. |
||||
* Macroblocks are 16x16 blocks of pixels (with respect to the luma plane). For the |
||||
* purposes of calculating the number of macroblocks, width and height are rounded up to |
||||
* the nearest 16. If in interlaced mode, height is rounded up to the nearest 32 instead. */ |
||||
|
||||
/* In: an array of quantizer offsets to be applied to this image during encoding.
|
||||
* These are added on top of the decisions made by x264.
||||
* Offsets can be fractional; they are added before QPs are rounded to integer. |
||||
* Adaptive quantization must be enabled to use this feature. Behavior if quant |
||||
* offsets differ between encoding passes is undefined. */ |
||||
float *quant_offsets; |
||||
/* In: optional callback to free quant_offsets when used.
|
||||
* Useful if one wants to use a different quant_offset array for each frame. */ |
||||
void (*quant_offsets_free)( void* ); |
||||
|
||||
/* In: optional array of flags for each macroblock.
|
||||
* Allows specifying additional information for the encoder such as which macroblocks |
||||
* remain unchanged. Usable flags are listed below. |
||||
* x264_param_t.analyse.b_mb_info must be set to use this, since x264 needs to track
||||
* extra data internally to make full use of this information. |
||||
* |
||||
* Out: if b_mb_info_update is set, x264 will update this array as a result of encoding.
||||
* |
||||
* For "MBINFO_CONSTANT", it will remove this flag on any macroblock whose decoded |
||||
* pixels have changed. This can be useful for e.g. noting which areas of the |
||||
* frame need to actually be blitted. Note: this intentionally ignores the effects |
||||
* of deblocking for the current frame, which should be fine unless one needs exact |
||||
* pixel-perfect accuracy. |
||||
* |
||||
* Results for MBINFO_CONSTANT are currently only set for P-frames, and are not |
||||
* guaranteed to enumerate all blocks which haven't changed. (There may be false |
||||
* negatives, but no false positives.) |
||||
*/ |
||||
uint8_t *mb_info; |
||||
/* In: optional callback to free mb_info when used. */ |
||||
void (*mb_info_free)( void* ); |
||||
|
||||
/* The macroblock is constant and remains unchanged from the previous frame. */ |
||||
#define X264_MBINFO_CONSTANT (1<<0) |
||||
/* More flags may be added in the future. */ |
||||
|
||||
/* Out: SSIM of the frame luma (if x264_param_t.b_ssim is set) */
||||
double f_ssim; |
||||
/* Out: Average PSNR of the frame (if x264_param_t.b_psnr is set) */ |
||||
double f_psnr_avg; |
||||
/* Out: PSNR of Y, U, and V (if x264_param_t.b_psnr is set) */ |
||||
double f_psnr[3]; |
||||
|
||||
/* Out: Average effective CRF of the encoded frame */ |
||||
double f_crf_avg; |
||||
} x264_image_properties_t; |
||||
|
||||
typedef struct x264_picture_t |
||||
{ |
||||
/* In: force picture type (if not auto)
|
||||
* If x264 encoding parameters are violated in the forcing of picture types,
* x264 will correct the input picture type and log a warning.
||||
* Out: type of the picture encoded */ |
||||
int i_type; |
||||
/* In: force quantizer for != X264_QP_AUTO */ |
||||
int i_qpplus1; |
||||
/* In: pic_struct, for pulldown/doubling/etc...used only if b_pic_struct=1.
|
||||
* use pic_struct_e for pic_struct inputs |
||||
* Out: pic_struct element associated with frame */ |
||||
int i_pic_struct; |
||||
/* Out: whether this frame is a keyframe. Important when using modes that result in
|
||||
* SEI recovery points being used instead of IDR frames. */ |
||||
int b_keyframe; |
||||
/* In: user pts, Out: pts of encoded picture (user)*/ |
||||
int64_t i_pts; |
||||
/* Out: frame dts. When the pts of the first frame is close to zero,
|
||||
* initial frames may have a negative dts which must be dealt with by any muxer */ |
||||
int64_t i_dts; |
||||
/* In: custom encoding parameters to be set from this frame forwards
|
||||
(in coded order, not display order). If NULL, continue using |
||||
parameters from the previous frame. Some parameters, such as |
||||
aspect ratio, can only be changed per-GOP due to the limitations |
||||
of H.264 itself; in this case, the caller must force an IDR frame |
||||
if it needs the changed parameter to apply immediately. */ |
||||
x264_param_t *param; |
||||
/* In: raw image data */ |
||||
/* Out: reconstructed image data. x264 may skip part of the reconstruction process,
||||
e.g. deblocking, in frames where it isn't necessary. To force complete |
||||
reconstruction, at a small speed cost, set b_full_recon. */ |
||||
x264_image_t img; |
||||
/* In: optional information to modify encoder decisions for this frame
|
||||
* Out: information about the encoded frame */ |
||||
x264_image_properties_t prop; |
||||
/* Out: HRD timing information. Output only when i_nal_hrd is set. */ |
||||
x264_hrd_t hrd_timing; |
||||
/* In: arbitrary user SEI (e.g subtitles, AFDs) */ |
||||
x264_sei_t extra_sei; |
||||
/* private user data. copied from input to output frames. */ |
||||
void *opaque; |
||||
} x264_picture_t; |
||||
|
||||
/* x264_picture_init:
|
||||
* initialize an x264_picture_t. Needs to be done if the calling application |
||||
* allocates its own x264_picture_t as opposed to using x264_picture_alloc. */ |
||||
void x264_picture_init( x264_picture_t *pic ); |
||||
|
||||
/* x264_picture_alloc:
|
||||
* alloc data for a picture. You must call x264_picture_clean on it. |
||||
* returns 0 on success, or -1 on malloc failure or invalid colorspace. */ |
||||
int x264_picture_alloc( x264_picture_t *pic, int i_csp, int i_width, int i_height ); |
||||
|
||||
/* x264_picture_clean:
|
||||
* free associated resource for a x264_picture_t allocated with |
||||
* x264_picture_alloc ONLY */ |
||||
void x264_picture_clean( x264_picture_t *pic ); |
||||
|
||||
/****************************************************************************
|
||||
* Encoder functions |
||||
****************************************************************************/ |
||||
|
||||
/* Force a link error in the case of linking against an incompatible API version.
|
||||
* Glue #defines exist to force correct macro expansion; the final output of the macro |
||||
* is x264_encoder_open_##X264_BUILD (for purposes of dlopen). */ |
||||
#define x264_encoder_glue1(x,y) x##y |
||||
#define x264_encoder_glue2(x,y) x264_encoder_glue1(x,y) |
||||
#define x264_encoder_open x264_encoder_glue2(x264_encoder_open_,X264_BUILD) |
||||
|
||||
/* x264_encoder_open:
|
||||
* create a new encoder handler, all parameters from x264_param_t are copied */ |
||||
x264_t *x264_encoder_open( x264_param_t * ); |
||||
|
||||
/* x264_encoder_reconfig:
|
||||
* various parameters from x264_param_t are copied. |
||||
* this takes effect immediately, on whichever frame is encoded next; |
||||
* due to delay, this may not be the next frame passed to encoder_encode. |
||||
* if the change should apply to some particular frame, use x264_picture_t->param instead. |
||||
* returns 0 on success, negative on parameter validation error. |
||||
* not all parameters can be changed; see the actual function for a detailed breakdown. |
||||
* |
||||
* since not all parameters can be changed, moving from preset to preset may not always |
||||
* fully copy all relevant parameters, but should still work usably in practice. however, |
||||
* more so than for other presets, many of the speed shortcuts used in ultrafast cannot be |
||||
* switched out of; using reconfig to switch between ultrafast and other presets is not |
||||
* recommended without a more fine-grained breakdown of parameters to take this into account. */ |
||||
int x264_encoder_reconfig( x264_t *, x264_param_t * ); |
||||
/* x264_encoder_parameters:
|
||||
* copies the current internal set of parameters to the pointer provided |
||||
* by the caller. useful when the calling application needs to know |
||||
* how x264_encoder_open has changed the parameters, or the current state |
||||
* of the encoder after multiple x264_encoder_reconfig calls. |
||||
* note that the data accessible through pointers in the returned param struct |
||||
* (e.g. filenames) should not be modified by the calling application. */ |
||||
void x264_encoder_parameters( x264_t *, x264_param_t * ); |
||||
/* x264_encoder_headers:
|
||||
* return the SPS and PPS that will be used for the whole stream. |
||||
* *pi_nal is the number of NAL units outputted in pp_nal. |
||||
* returns the number of bytes in the returned NALs. |
||||
* returns negative on error. |
||||
* the payloads of all output NALs are guaranteed to be sequential in memory. */ |
||||
int x264_encoder_headers( x264_t *, x264_nal_t **pp_nal, int *pi_nal ); |
||||
/* x264_encoder_encode:
|
||||
* encode one picture. |
||||
* *pi_nal is the number of NAL units outputted in pp_nal. |
||||
* returns the number of bytes in the returned NALs. |
||||
* returns negative on error and zero if no NAL units returned. |
||||
* the payloads of all output NALs are guaranteed to be sequential in memory. */ |
||||
int x264_encoder_encode( x264_t *, x264_nal_t **pp_nal, int *pi_nal, x264_picture_t *pic_in, x264_picture_t *pic_out ); |
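/* Illustrative sketch (not part of the original header): a single encode call,
 * assuming h was opened with x264_encoder_open, pic_in holds the input frame,
 * and write_nal is a hypothetical output function.
 *
 *   x264_nal_t *nal; int i_nal;
 *   int size = x264_encoder_encode( h, &nal, &i_nal, &pic_in, &pic_out );
 *   if( size > 0 )
 *       for( int i = 0; i < i_nal; i++ )
 *           write_nal( nal[i].p_payload, nal[i].i_payload );
 */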
||||
/* x264_encoder_close:
|
||||
* close an encoder handler */ |
||||
void x264_encoder_close ( x264_t * ); |
||||
/* x264_encoder_delayed_frames:
|
||||
* return the number of currently delayed (buffered) frames |
||||
* this should be used at the end of the stream, to know when you have all the encoded frames. */ |
||||
int x264_encoder_delayed_frames( x264_t * ); |
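/* Illustrative flush sketch (not part of the original header): drain buffered
 * frames at end of stream by passing NULL as pic_in, reusing nal/i_nal/pic_out
 * from the sketch above.
 *
 *   while( x264_encoder_delayed_frames( h ) > 0 )
 *   {
 *       int size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out );
 *       if( size <= 0 ) break;
 *       write_nal( nal[0].p_payload, size );   // hypothetical writer; payloads are sequential
 *   }
 */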
||||
/* x264_encoder_maximum_delayed_frames( x264_t *h ):
|
||||
* return the maximum number of delayed (buffered) frames that can occur with the current |
||||
* parameters. */ |
||||
int x264_encoder_maximum_delayed_frames( x264_t *h ); |
||||
/* x264_encoder_intra_refresh:
|
||||
* If an intra refresh is not in progress, begin one with the next P-frame. |
||||
* If an intra refresh is in progress, begin one as soon as the current one finishes. |
||||
* Requires that b_intra_refresh be set. |
||||
* |
||||
* Useful for interactive streaming where the client can tell the server that packet loss has |
||||
* occurred. In this case, keyint can be set to an extremely high value so that intra refreshes |
||||
* only occur when calling x264_encoder_intra_refresh. |
||||
* |
||||
* In multi-pass encoding, if x264_encoder_intra_refresh is called differently in each pass, |
||||
* behavior is undefined. |
||||
* |
||||
* Should not be called during an x264_encoder_encode. */ |
||||
void x264_encoder_intra_refresh( x264_t * ); |
||||
/* x264_encoder_invalidate_reference:
|
||||
* An interactive error resilience tool, designed for use in a low-latency one-encoder-few-clients |
||||
* system. When the client has packet loss or otherwise incorrectly decodes a frame, the encoder |
||||
* can be told with this command to "forget" the frame and all frames that depend on it, referencing |
||||
* only frames that occurred before the loss. This will force a keyframe if no frames are left to |
||||
* reference after the aforementioned "forgetting". |
||||
* |
||||
* It is strongly recommended to use a large i_dpb_size in this case, which allows the encoder to |
||||
* keep around extra, older frames to fall back on in case more recent frames are all invalidated. |
||||
* Unlike increasing i_frame_reference, this does not increase the number of frames used for motion |
||||
* estimation and thus has no speed impact. It is also recommended to set a very large keyframe |
||||
* interval, so that keyframes are not used except as necessary for error recovery. |
||||
* |
||||
* x264_encoder_invalidate_reference is not currently compatible with the use of B-frames or intra |
||||
* refresh. |
||||
* |
||||
* In multi-pass encoding, if x264_encoder_invalidate_reference is called differently in each pass, |
||||
* behavior is undefined. |
||||
* |
||||
* Should not be called during an x264_encoder_encode, but multiple calls can be made simultaneously. |
||||
* |
||||
* Returns 0 on success, negative on failure. */ |
||||
int x264_encoder_invalidate_reference( x264_t *, int64_t pts ); |
||||
|
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
|
||||
#endif |
@ -0,0 +1,6 @@ |
||||
#define X264_BIT_DEPTH 8 |
||||
#define X264_GPL 1 |
||||
#define X264_INTERLACED 1 |
||||
#define X264_CHROMA_FORMAT 0 |
||||
#define X264_VERSION "" |
||||
#define X264_POINTVER "0.148.x" |
@ -0,0 +1,481 @@ |
||||
#include <jni.h> |
||||
#include <string.h> |
||||
#include "include/x264/x264.h" |
||||
#include <android/log.h> |
||||
#include "include/rtmp/rtmp.h" |
||||
#include "include/faac/faac.h" |
||||
#include <pthread.h> |
||||
#include "queue.h" |
||||
#include <stdint.h> |
||||
|
||||
#define TAG "FrankLive" |
||||
#define LOGI(format, ...) __android_log_print(ANDROID_LOG_INFO, TAG, format, ##__VA_ARGS__) |
||||
#define LOGE(format, ...) __android_log_print(ANDROID_LOG_ERROR, TAG, format, ##__VA_ARGS__) |
||||
|
||||
x264_picture_t picture_in; |
||||
x264_picture_t picture_out; |
||||
int y_len, uv_len; |
||||
x264_t *video_encode_handle; |
||||
faacEncHandle *audio_encode_handle; |
||||
uint32_t start_time; |
||||
pthread_cond_t cond; |
||||
pthread_mutex_t mutex; |
||||
char *url_path; |
||||
int is_pushing = FALSE; |
||||
unsigned long inputSamples; |
||||
unsigned long maxOutputBytes; |
||||
// JavaVM handle, needed to call back into Java from worker threads
JavaVM* javaVM;
// global reference to the Java object that receives error callbacks
jobject jobject_error;
||||
|
||||
/*************** must match the constants in the Java layer **************/
// failed to open the video encoder
const int ERROR_VIDEO_ENCODER_OPEN = 0x01;
// failed to encode a video frame
const int ERROR_VIDEO_ENCODE = 0x02;
// failed to open the audio encoder
const int ERROR_AUDIO_ENCODER_OPEN = 0x03;
// failed to encode an audio frame
const int ERROR_AUDIO_ENCODE = 0x04;
// RTMP connection failed
const int ERROR_RTMP_CONNECT = 0x05;
// RTMP stream connection failed
const int ERROR_RTMP_CONNECT_STREAM = 0x06;
// RTMP packet send failed
const int ERROR_RTMP_SEND_PACKAT = 0x07;
/*************** must match the constants in the Java layer **************/
||||
|
||||
void add_rtmp_packet(RTMPPacket *pPacket); |
||||
void add_x264_body(uint8_t *buf, int len); |
||||
void add_x264_key_header(unsigned char sps[100], unsigned char pps[100], int sps_len, int pps_len);
||||
void add_aac_body(unsigned char *buf, int len); |
||||
void add_aac_header(); |
||||
|
||||
// called automatically when System.loadLibrary loads this library
||||
jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved){ |
||||
javaVM = vm; |
||||
return JNI_VERSION_1_6; |
||||
} |
||||
|
||||
// report an error code back to the Java layer
||||
void throw_error_to_java(int error_code){ |
||||
JNIEnv* env; |
||||
(*javaVM)->AttachCurrentThread(javaVM, &env, NULL); |
||||
jclass jclazz = (*env)->GetObjectClass(env, jobject_error); |
||||
jmethodID jmethod = (*env)->GetMethodID(env, jclazz, "errorFromNative", "(I)V"); |
||||
(*env)->CallVoidMethod(env, jobject_error, jmethod, error_code); |
||||
(*javaVM)->DetachCurrentThread(javaVM); |
||||
} |
||||
|
||||
// streaming thread: consumes the packet queue and sends it over RTMP
||||
void *push_thread(void * args){ |
||||
// set up the RTMP connection
||||
RTMP* rtmp = RTMP_Alloc(); |
||||
if(!rtmp){ |
||||
LOGE("RTMP_Alloc fail..."); |
||||
goto end; |
||||
} |
||||
RTMP_Init(rtmp); |
||||
RTMP_SetupURL(rtmp, url_path); |
||||
LOGI("url_path=%s", url_path); |
||||
RTMP_EnableWrite(rtmp); |
||||
rtmp->Link.timeout = 10; |
||||
if(!RTMP_Connect(rtmp, NULL)){ |
||||
LOGE("RTMP_Connect fail..."); |
||||
throw_error_to_java(ERROR_RTMP_CONNECT); |
||||
goto end; |
||||
} |
||||
LOGI("RTMP_Connect success..."); |
||||
if(!RTMP_ConnectStream(rtmp, 0)){ |
||||
LOGE("RTMP_ConnectStream fail..."); |
||||
throw_error_to_java(ERROR_RTMP_CONNECT_STREAM); |
||||
goto end; |
||||
} |
||||
LOGI("RTMP_ConnectStream success..."); |
||||
|
||||
// start the timestamp clock
start_time = RTMP_GetTime();
is_pushing = TRUE;
// send the AAC sequence header first
add_aac_header();
// push packets in a loop
||||
while(is_pushing) { |
||||
pthread_mutex_lock(&mutex); |
||||
pthread_cond_wait(&cond, &mutex); |
||||
// take an RTMP packet from the head of the queue
RTMPPacket *packet = queue_get_first();
if(packet){
queue_delete_first();
// send the RTMP packet; TRUE means librtmp may queue it internally
||||
int ret = RTMP_SendPacket(rtmp, packet, TRUE); |
||||
if(!ret){ |
||||
LOGE("RTMP_SendPacket fail..."); |
||||
RTMPPacket_Free(packet); |
||||
pthread_mutex_unlock(&mutex); |
||||
throw_error_to_java(ERROR_RTMP_SEND_PACKAT); |
||||
goto end; |
||||
} |
||||
RTMPPacket_Free(packet); |
||||
LOGI("RTMP_SendPacket success..."); |
||||
} |
||||
pthread_mutex_unlock(&mutex); |
||||
} |
||||
end: |
||||
LOGI("free all the thing about rtmp..."); |
||||
RTMP_Close(rtmp); |
||||
free(rtmp); |
||||
free(url_path); |
||||
return 0; |
||||
} |
||||
|
||||
JNIEXPORT jint JNICALL |
||||
Java_com_frank_live_LiveUtil_native_1start(JNIEnv *env, jobject instance, jstring url_) { |
||||
const char *url = (*env)->GetStringUTFChars(env, url_, 0); |
||||
url_path = malloc(strlen(url) + 1); |
||||
memset(url_path, 0, strlen(url) + 1); |
||||
memcpy(url_path, url, strlen(url)); |
||||
// create the packet queue
create_queue();
// initialize the mutex and condition variable
pthread_mutex_init(&mutex, NULL);
pthread_cond_init(&cond, NULL);
pthread_t push_thread_id;
// create the consumer thread that pushes packets
||||
pthread_create(&push_thread_id, NULL, push_thread, NULL); |
||||
(*env)->ReleaseStringUTFChars(env, url_, url); |
||||
|
||||
jobject_error= (*env)->NewGlobalRef(env, instance); |
||||
return 0; |
||||
} |
||||
|
||||
// configure the x264 video encoder
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_setVideoParam(JNIEnv *env, jobject instance, jint width, jint height, |
||||
jint bitRate, jint frameRate) { |
||||
y_len = width * height; |
||||
uv_len = y_len/4; |
||||
|
||||
x264_param_t param; |
||||
// start from defaults for the "ultrafast" preset and "zerolatency" tune
x264_param_default_preset(&param, "ultrafast", "zerolatency");
param.i_csp = X264_CSP_I420;// planar YUV 4:2:0 (I420)
param.i_width = width;
param.i_height = height;
param.b_vfr_input = 0;// rate control uses fps only, not timebase/timestamps
param.b_repeat_headers = 1;// repeat SPS/PPS before each keyframe for better error resilience
param.i_level_idc = 51;// maximum level (resolution @ frame rate)
param.rc.i_rc_method = X264_RC_CRF;// rate control method: CRF = constant rate factor, CQP = constant QP, ABR = average bitrate
param.rc.i_bitrate = bitRate;// target bitrate (kbit/s)
param.rc.i_vbv_max_bitrate = (int) (bitRate * 1.2);// instantaneous maximum bitrate
param.i_fps_num = (uint32_t) frameRate;// frame rate numerator
param.i_fps_den = 1;// frame rate denominator
param.i_timebase_num = param.i_fps_den;// timebase numerator
param.i_timebase_den = param.i_fps_num;// timebase denominator
param.i_threads = 1;// number of encoding threads
// apply the "baseline" profile, which disallows B-frames
x264_param_apply_profile(&param, "baseline");
// allocate the input picture buffer
x264_picture_alloc(&picture_in, param.i_csp, param.i_width, param.i_height);
// open the encoder
video_encode_handle = x264_encoder_open(&param);
||||
if(video_encode_handle){ |
||||
LOGI("x264_encoder_open success..."); |
||||
} else{ |
||||
LOGE("x264_encoder_open fail..."); |
||||
throw_error_to_java(ERROR_VIDEO_ENCODER_OPEN); |
||||
} |
||||
} |
||||
|
||||
// configure the FAAC audio encoder
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_setAudioParam(JNIEnv *env, jobject instance, jint sampleRate, jint numChannels) { |
||||
||||
audio_encode_handle = faacEncOpen((unsigned long) sampleRate, |
||||
(unsigned int) numChannels, &inputSamples, &maxOutputBytes); |
||||
if(!audio_encode_handle){ |
||||
LOGE("faacEncOpen fail..."); |
||||
throw_error_to_java(ERROR_AUDIO_ENCODER_OPEN); |
||||
return; |
||||
} |
||||
LOGI("faacEncOpen success..."); |
||||
faacEncConfigurationPtr configPtr = faacEncGetCurrentConfiguration(audio_encode_handle); |
||||
configPtr->bandWidth = 0; |
||||
configPtr->mpegVersion = MPEG4;// MPEG-4
configPtr->outputFormat = 0;// raw AAC output (0 = raw, 1 = ADTS)
configPtr->useTns = 1;// enable temporal noise shaping
configPtr->useLfe = 0;
configPtr->allowMidside = 1;
configPtr->aacObjectType = LOW;
configPtr->quantqual = 100;// quantizer quality
||||
configPtr->shortctl = SHORTCTL_NORMAL; |
||||
int result = faacEncSetConfiguration(audio_encode_handle, configPtr); |
||||
if(result){ |
||||
LOGI("faacEncSetConfiguration success..."); |
||||
} else{ |
||||
LOGE("faacEncSetConfiguration fail..."); |
||||
throw_error_to_java(ERROR_AUDIO_ENCODER_OPEN); |
||||
} |
||||
} |
||||
|
||||
// append an RTMPPacket to the send queue
||||
void add_rtmp_packet(RTMPPacket *pPacket) { |
||||
pthread_mutex_lock(&mutex); |
||||
if(is_pushing){ |
||||
queue_append_last(pPacket); |
||||
} |
||||
pthread_cond_signal(&cond); |
||||
pthread_mutex_unlock(&mutex); |
||||
} |
||||
|
||||
// send the AVC sequence header (SPS and PPS)
||||
void add_x264_key_header(unsigned char sps[100], unsigned char pps[100], int sps_len, int pps_len) { |
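/* Body layout (body_size = 16 + sps_len + pps_len), noted here for clarity:
 * a 5-byte FLV video tag header (frame/codec type, AVCPacketType, 24-bit composition time)
 * followed by an AVCDecoderConfigurationRecord: version, profile, compat, level,
 * NALU length size, SPS count + 2-byte length + SPS data, PPS count + 2-byte length + PPS data. */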
||||
int body_size = 16 + sps_len + pps_len; |
||||
RTMPPacket *packet = malloc(sizeof(RTMPPacket)); |
||||
RTMPPacket_Alloc(packet, body_size); |
||||
RTMPPacket_Reset(packet); |
||||
unsigned char* body = (unsigned char *) packet->m_body; |
||||
int i = 0; |
||||
body[i++] = 0x17;// FrameType (high 4 bits) = 1 keyframe, CodecID (low 4 bits) = 7 AVC
body[i++] = 0x00;// AVCPacketType = 0 (sequence header)

body[i++] = 0x00;// composition time, 24-bit, always 0 here
body[i++] = 0x00;
body[i++] = 0x00;

body[i++] = 0x01;// AVCDecoderConfigurationRecord: configurationVersion = 1
||||
body[i++] = sps[1]; |
||||
body[i++] = sps[2]; |
||||
body[i++] = sps[3]; |
||||
|
||||
body[i++] = 0xFF; |
||||
|
||||
body[i++] = 0xE1;// number of SPS NAL units (lower 5 bits = 1)
||||
body[i++] = (unsigned char) ((sps_len >> 8) & 0xFF); |
||||
body[i++] = (unsigned char) (sps_len & 0xFF); |
||||
memcpy(&body[i], sps, (size_t) sps_len); |
||||
i += sps_len; |
||||
|
||||
body[i++] = 0x01;// number of PPS NAL units = 1
||||
body[i++] = (unsigned char) ((pps_len >> 8) & 0xFF); |
||||
body[i++] = (unsigned char) (pps_len & 0xFF); |
||||
memcpy(&body[i], pps, (size_t) pps_len); |
||||
i += pps_len; |
||||
|
||||
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO; |
||||
packet->m_nBodySize = (uint32_t) body_size; |
||||
packet->m_nTimeStamp = 0; |
||||
packet->m_hasAbsTimestamp = 0; |
||||
packet->m_nChannel = 0x04; |
||||
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM; |
||||
|
||||
add_rtmp_packet(packet); |
||||
} |
||||
|
||||
// send an encoded H.264 NAL unit as an RTMP video packet
||||
void add_x264_body(uint8_t *buf, int len) { |
||||
if(buf[2] == 0x01){// 3-byte start code 00 00 01
||||
buf += 3; |
||||
len -= 3; |
||||
} else if (buf[3] == 0x01){// 4-byte start code 00 00 00 01
||||
buf += 4; |
||||
len -= 4; |
||||
} |
||||
int body_size = len + 9;// 5-byte FLV video tag header + 4-byte NALU length
||||
RTMPPacket *packet = malloc(sizeof(RTMPPacket)); |
||||
RTMPPacket_Alloc(packet, body_size); |
||||
RTMPPacket_Reset(packet); |
||||
unsigned char *body = (unsigned char *) packet->m_body; |
||||
int type = buf[0] & 0x1F; |
||||
if(type == NAL_SLICE_IDR){// keyframe (IDR slice)
||||
body[0] = 0x17; |
||||
} else{ |
||||
body[0] = 0x27; |
||||
} |
||||
body[1] = 0x01; |
||||
body[2] = 0x00; |
||||
body[3] = 0x00; |
||||
body[4] = 0x00; |
||||
|
||||
body[5] = (unsigned char) ((len >> 24) & 0xFF); |
||||
body[6] = (unsigned char) ((len >> 16) & 0xFF); |
||||
body[7] = (unsigned char) ((len >> 8) & 0xFF); |
||||
body[8] = (unsigned char) (len & 0xFF); |
||||
|
||||
memcpy(&body[9], buf, (size_t) len); |
||||
packet->m_headerType = RTMP_PACKET_SIZE_LARGE; |
||||
packet->m_hasAbsTimestamp = 0; |
||||
packet->m_nBodySize = (uint32_t) body_size; |
||||
packet->m_nChannel = 0x04; |
||||
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO; |
||||
packet->m_nTimeStamp = RTMP_GetTime() - start_time; |
||||
|
||||
add_rtmp_packet(packet); |
||||
} |
||||
|
||||
// push one video frame
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_pushVideo(JNIEnv *env, jobject instance, jbyteArray data_) { |
||||
// convert NV21 to YUV420P (I420)
||||
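// NV21 layout: a full-size Y plane (y_len bytes) followed by interleaved V/U pairs
// (uv_len = y_len/4 pairs), which is why U and V are split and swapped below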
jbyte *nv21_buffer = (*env)->GetByteArrayElements(env, data_, NULL); |
||||
// the Y plane is identical, copy it directly
||||
memcpy(picture_in.img.plane[0], nv21_buffer, (size_t) y_len); |
||||
jbyte *u_buffer = (jbyte *) picture_in.img.plane[1]; |
||||
jbyte *v_buffer = (jbyte *) picture_in.img.plane[2]; |
||||
int i; |
||||
// de-interleave the VU samples and swap them into separate U and V planes
||||
for(i=0; i<uv_len; i++){ |
||||
*(u_buffer+i) = *(nv21_buffer + y_len + 2*i + 1); |
||||
*(v_buffer+i) = *(nv21_buffer + y_len + 2*i); |
||||
} |
||||
x264_nal_t *nal = NULL; |
||||
int nal_num = -1;// number of NAL units produced
||||
// run H.264 encoding
if(x264_encoder_encode(video_encode_handle, &nal, &nal_num, &picture_in, &picture_out) < 0){
||||
LOGE("x264_encoder_encode fail"); |
||||
throw_error_to_java(ERROR_VIDEO_ENCODE); |
||||
goto end; |
||||
} |
||||
if(nal_num <= 0){ |
||||
LOGE("nal_num <= 0"); |
||||
goto end; |
||||
} |
||||
// push over RTMP
// for keyframes (I-frames), send SPS and PPS first
||||
int sps_len = 0, pps_len = 0; |
||||
unsigned char sps[100]; |
||||
unsigned char pps[100]; |
||||
memset(sps, 0, 100); |
||||
memset(pps, 0, 100); |
||||
for (i = 0; i < nal_num; ++i) { |
||||
if(nal[i].i_type == NAL_SPS){//sps
|
||||
sps_len = nal[i].i_payload - 4; |
||||
memcpy(sps, nal[i].p_payload + 4, (size_t) sps_len); |
||||
} else if(nal[i].i_type == NAL_PPS){//pps
|
||||
pps_len = nal[i].i_payload - 4; |
||||
memcpy(pps, nal[i].p_payload + 4, (size_t) pps_len); |
||||
add_x264_key_header(sps, pps, sps_len, pps_len); |
||||
} else{ |
||||
add_x264_body(nal[i].p_payload, nal[i].i_payload); |
||||
} |
||||
} |
||||
end: |
||||
(*env)->ReleaseByteArrayElements(env, data_, nv21_buffer, 0); |
||||
} |
||||
|
||||
// send the AAC sequence header
||||
void add_aac_header() { |
||||
unsigned char *ppBuffer; |
||||
unsigned long pSize; |
||||
faacEncGetDecoderSpecificInfo(audio_encode_handle, &ppBuffer, &pSize); |
||||
int body_size = (int) (2 + pSize); |
||||
RTMPPacket* packet = malloc(sizeof(RTMPPacket)); |
||||
RTMPPacket_Alloc(packet, body_size); |
||||
RTMPPacket_Reset(packet); |
||||
unsigned char* body = (unsigned char *) packet->m_body; |
||||
// two-byte FLV audio tag header
// soundFormat(4 bits)=10 AAC; soundRate(2 bits)=3 44kHz; soundSize(1 bit)=1 16-bit; soundType(1 bit)=1 stereo
body[0] = 0xAF;
// AACPacketType = 0: AAC sequence header
||||
body[1] = 0x00; |
||||
|
||||
memcpy(&body[2], ppBuffer, (size_t) pSize); |
||||
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO; |
||||
packet->m_nBodySize = (uint32_t) body_size; |
||||
packet->m_nChannel = 4; |
||||
packet->m_nTimeStamp = 0; |
||||
packet->m_hasAbsTimestamp = 0; |
||||
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM; |
||||
add_rtmp_packet(packet); |
||||
free(ppBuffer); |
||||
} |
||||
|
||||
// send an encoded AAC frame as an RTMP audio packet
||||
void add_aac_body(unsigned char *buf, int len) { |
||||
int body_size = 2 + len; |
||||
RTMPPacket* packet = malloc(sizeof(RTMPPacket)); |
||||
RTMPPacket_Alloc(packet, body_size); |
||||
RTMPPacket_Reset(packet); |
||||
unsigned char* body = (unsigned char *) packet->m_body; |
||||
// two-byte FLV audio tag header
// soundFormat(4 bits)=10 AAC; soundRate(2 bits)=3 44kHz; soundSize(1 bit)=1 16-bit; soundType(1 bit)=1 stereo
body[0] = 0xAF;
// AACPacketType = 1: raw AAC frame data (0 would be the sequence header)
||||
body[1] = 0x01; |
||||
|
||||
memcpy(&body[2], buf, (size_t) len); |
||||
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO; |
||||
packet->m_nBodySize = (uint32_t) body_size; |
||||
packet->m_nChannel = 4; |
||||
packet->m_nTimeStamp = RTMP_GetTime() - start_time; |
||||
packet->m_hasAbsTimestamp = 0; |
||||
packet->m_headerType = RTMP_PACKET_SIZE_LARGE; |
||||
add_rtmp_packet(packet); |
||||
// note: the caller owns buf (it reuses the same encode buffer across frames), so it is not freed here
||||
} |
||||
|
||||
// push captured audio data
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_pushAudio(JNIEnv *env, jobject instance, jbyteArray data_, jint length) { |
||||
jbyte *data = (*env)->GetByteArrayElements(env, data_, NULL); |
||||
int* pcm_buf; |
||||
unsigned char* aac_buf; |
||||
pcm_buf = malloc(inputSamples * sizeof(int)); |
||||
aac_buf = malloc(maxOutputBytes * sizeof(unsigned char)); |
||||
int count = 0; |
||||
unsigned int buffer_size = (unsigned int) (length / 2); |
||||
unsigned short* buf = (unsigned short*) data; |
||||
while (count < buffer_size){ |
||||
int audio_length = (int) inputSamples; |
||||
if((count + audio_length) >= buffer_size){ |
||||
audio_length = buffer_size - count; |
||||
} |
||||
int i; |
||||
for(i=0; i<audio_length; i++){ |
||||
// read one 16-bit PCM sample from the capture buffer
int sample_byte = ((int16_t *)buf + count)[i];
// shift the 16-bit sample left by 8 bits before handing it to the encoder
pcm_buf[i] = sample_byte << 8;
||||
} |
||||
count += inputSamples; |
||||
// encode with FAAC; returns the number of encoded bytes
||||
int bytes_len = faacEncEncode(audio_encode_handle, pcm_buf, (unsigned int) audio_length, aac_buf, maxOutputBytes); |
||||
if(bytes_len <= 0){ |
||||
// throw_error_to_java(ERROR_AUDIO_ENCODE);
|
||||
LOGE("audio frame encode failed...");
||||
continue; |
||||
} |
||||
add_aac_body(aac_buf, bytes_len); |
||||
} |
||||
(*env)->ReleaseByteArrayElements(env, data_, data, 0); |
||||
if(pcm_buf != NULL){
free(pcm_buf);
}
if(aac_buf != NULL){
free(aac_buf);
}
||||
} |
||||
|
||||
// stop pushing
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_native_1stop(JNIEnv *env, jobject instance) { |
||||
is_pushing = FALSE; |
||||
LOGI("native_1stop"); |
||||
} |
||||
|
||||
// release all resources
||||
JNIEXPORT void JNICALL |
||||
Java_com_frank_live_LiveUtil_native_1release(JNIEnv *env, jobject instance) { |
||||
// free the x264 input picture; picture_out is filled by the encoder and was not allocated by us
x264_picture_clean(&picture_in);
// close the audio and video encoders
x264_encoder_close(video_encode_handle);
faacEncClose(audio_encode_handle);
// delete the global reference
(*env)->DeleteGlobalRef(env, jobject_error);
(*javaVM)->DestroyJavaVM(javaVM);
// destroy the mutex and condition variable
pthread_cond_destroy(&cond);
pthread_mutex_destroy(&mutex);
// exit the current thread
||||
pthread_exit(0); |
||||
} |
@ -0,0 +1,218 @@ |
||||
#include <stdio.h> |
||||
#include <malloc.h> |
||||
|
||||
typedef struct queue_node { |
||||
struct queue_node* prev; |
||||
struct queue_node* next; |
||||
void *p;// the node's value
||||
} node; |
||||
|
||||
// list head (sentinel node)
||||
static node *phead = NULL; |
||||
static int count = 0; |
||||
|
||||
static node* create_node(void *pval) { |
||||
node *pnode = NULL; |
||||
pnode = (node *) malloc(sizeof(node)); |
||||
if (pnode) { |
||||
// by default, the node's prev and next both point to itself
pnode->prev = pnode->next = pnode;
// store the value
pnode->p = pval;
||||
} |
||||
return pnode; |
||||
} |
||||
|
||||
/**
 * Create the doubly linked list.
 * @return 0 on success, -1 otherwise.
 */
||||
int create_queue() { |
||||
phead = create_node(NULL); |
||||
if (!phead) { |
||||
return -1; |
||||
} |
||||
count = 0; |
||||
return 0; |
||||
} |
||||
|
||||
/**
 * Check whether the list is empty.
 * @return 1 if empty, 0 otherwise
 */
||||
int queue_is_empty() { |
||||
return count == 0; |
||||
} |
||||
|
||||
/**
 * Get the number of elements in the list.
 * @return size
 */
||||
int queue_size() { |
||||
return count; |
||||
} |
||||
|
||||
/**
 * Get the node at the given index.
 */
||||
|
||||
static node* get_node(int index) { |
||||
if (index < 0 || index >= count) { |
||||
return NULL; |
||||
} |
||||
if (index <= (count / 2)) { |
||||
int i = 0; |
||||
node *pnode = phead->next; |
||||
while ((i++) < index) |
||||
pnode = pnode->next; |
||||
return pnode; |
||||
} |
||||
int j = 0; |
||||
int rindex = count - index - 1; |
||||
node *rnode = phead->prev; |
||||
while ((j++) < rindex) |
||||
rnode = rnode->prev; |
||||
return rnode; |
||||
} |
||||
|
||||
/**
 * Get the element at the given index.
 * @param index index
 * @return the element at that index, or NULL
 */
||||
void* queue_get(int index) { |
||||
node *pindex = get_node(index); |
||||
if (!pindex) { |
||||
return NULL; |
||||
} |
||||
return pindex->p; |
||||
} |
||||
|
||||
/**
 * Get the value of the first element.
 * @return the first element's value
 */
||||
void* queue_get_first() { |
||||
return queue_get(0); |
||||
} |
||||
|
||||
/**
 * Get the value of the last element.
 * @return the last element's value
 */
||||
void* queue_get_last() { |
||||
return queue_get(count - 1); |
||||
} |
||||
|
||||
/**
 * Insert a value at the given index.
 * @param index index
 * @param pval pval
 * @return 0 on success, -1 otherwise
 */
||||
int queue_insert(int index, void* pval) { |
||||
// index 0 means insert at the head
||||
if (index == 0) |
||||
return queue_insert_first(pval); |
||||
// get the node currently at the target index
||||
node *pindex = get_node(index); |
||||
if (!pindex) |
||||
return -1; |
||||
// create the new node
||||
node *pnode = create_node(pval); |
||||
if (!pnode) |
||||
return -1; |
||||
pnode->prev = pindex->prev; |
||||
pnode->next = pindex; |
||||
pindex->prev->next = pnode; |
||||
pindex->prev = pnode; |
||||
// increment the node count
||||
count++; |
||||
return 0; |
||||
} |
||||
|
||||
/**
 * Insert a value at the head of the list.
 * @param pval value
 * @return 0 on success, -1 otherwise
 */
||||
int queue_insert_first(void *pval) { |
||||
node *pnode = create_node(pval); |
||||
if (!pnode) |
||||
return -1; |
||||
pnode->prev = phead; |
||||
pnode->next = phead->next; |
||||
phead->next->prev = pnode; |
||||
phead->next = pnode; |
||||
count++; |
||||
return 0; |
||||
} |
||||
|
||||
/**
 * Append a value at the tail of the list.
 * @param pval value
 * @return 0 on success, -1 otherwise
 */
||||
int queue_append_last(void *pval) { |
||||
node *pnode = create_node(pval); |
||||
if (!pnode) |
||||
return -1; |
||||
pnode->next = phead; |
||||
pnode->prev = phead->prev; |
||||
phead->prev->next = pnode; |
||||
phead->prev = pnode; |
||||
count++; |
||||
return 0; |
||||
} |
||||
|
||||
/**
 * Delete the node at the given index.
 * @param index index
 * @return 0 on success, -1 otherwise
 */
||||
int queue_delete(int index) { |
||||
node *pindex = get_node(index); |
||||
if (!pindex) { |
||||
return -1; |
||||
} |
||||
pindex->next->prev = pindex->prev; |
||||
pindex->prev->next = pindex->next; |
||||
free(pindex); |
||||
count--; |
||||
return 0; |
||||
} |
||||
|
||||
/**
 * Delete the first node.
 */
||||
int queue_delete_first() { |
||||
return queue_delete(0); |
||||
} |
||||
|
||||
/*
 * Delete the last node.
 */
||||
int queue_delete_last() { |
||||
return queue_delete(count - 1); |
||||
} |
||||
|
||||
/**
 * Destroy the list and free all nodes.
 * @return 0 on success, -1 otherwise.
 */
||||
int destroy_queue() { |
||||
if (!phead) { |
||||
return -1; |
||||
} |
||||
node *pnode = phead->next; |
||||
node *ptmp = NULL; |
||||
while (pnode != phead) { |
||||
ptmp = pnode; |
||||
pnode = pnode->next; |
||||
free(ptmp); |
||||
} |
||||
free(phead); |
||||
phead = NULL; |
||||
count = 0; |
||||
return 0; |
||||
} |
@ -0,0 +1,29 @@ |
||||
#ifndef _QUQEUE_H |
||||
#define _QUQEUE_H |
||||
// Create the doubly linked list. Returns 0 on success, -1 otherwise.
extern int create_queue();
// Destroy the doubly linked list. Returns 0 on success, -1 otherwise.
extern int destroy_queue();
// Check whether the list is empty. Returns 1 if empty, 0 otherwise.
extern int queue_is_empty();
// Return the number of elements in the list.
extern int queue_size();
// Get the element at the given index. Returns the value pointer on success, NULL otherwise.
extern void* queue_get(int index);
// Get the first element. Returns the value pointer on success, NULL otherwise.
extern void* queue_get_first();
// Get the last element. Returns the value pointer on success, NULL otherwise.
extern void* queue_get_last();
// Insert a value at the given index. Returns 0 on success, -1 otherwise.
extern int queue_insert(int index, void *pval);
// Insert a value at the head. Returns 0 on success, -1 otherwise.
extern int queue_insert_first(void *pval);
// Append a value at the tail. Returns 0 on success, -1 otherwise.
extern int queue_append_last(void *pval);
// Delete the node at the given index. Returns 0 on success, -1 otherwise.
extern int queue_delete(int index);
// Delete the first node. Returns 0 on success, -1 otherwise.
extern int queue_delete_first();
// Delete the last node. Returns 0 on success, -1 otherwise.
extern int queue_delete_last();
||||
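/* Illustrative usage sketch (not part of the original header):
 *
 *   create_queue();
 *   queue_append_last(packet);        // producer side
 *   void *p = queue_get_first();      // consumer side
 *   if (p) queue_delete_first();
 *   destroy_queue();
 */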
#endif//_QUQEUE_H
|
@ -0,0 +1,129 @@ |
||||
package com.frank.live; |
||||
|
||||
import android.Manifest; |
||||
import android.annotation.SuppressLint; |
||||
import android.annotation.TargetApi; |
||||
import android.hardware.Camera; |
||||
import android.media.AudioFormat; |
||||
import android.os.Build; |
||||
import android.os.Handler; |
||||
import android.os.Message; |
||||
import android.support.annotation.NonNull; |
||||
import android.support.v7.app.AppCompatActivity; |
||||
import android.os.Bundle; |
||||
import android.text.TextUtils; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
import android.view.View; |
||||
import android.widget.CompoundButton; |
||||
import android.widget.Toast; |
||||
import android.widget.ToggleButton; |
||||
import com.frank.live.Push.LivePusher; |
||||
import com.frank.live.listener.LiveStateChangeListener; |
||||
import com.frank.live.param.AudioParam; |
||||
import com.frank.live.param.VideoParam; |
||||
|
||||
/** |
||||
* Live streaming with H.264 encoding and RTMP push.
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class LiveActivity extends AppCompatActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, LiveStateChangeListener { |
||||
|
||||
private final static String TAG = LiveActivity.class.getSimpleName(); |
||||
private final static int CODE_CAMERA_RECORD = 0x0001; |
||||
private final static String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}; |
||||
private final static String LIVE_URL = "rtmp://192.168.8.115/live/stream"; |
||||
private final static int MSG_ERROR = 100; |
||||
private SurfaceHolder surfaceHolder; |
||||
private LivePusher livePusher; |
||||
@SuppressLint("HandlerLeak") |
||||
private Handler mHandler = new Handler(){ |
||||
@Override |
||||
public void handleMessage(Message msg) { |
||||
super.handleMessage(msg); |
||||
if(msg.what == MSG_ERROR){ |
||||
String errMsg = (String)msg.obj; |
||||
if(!TextUtils.isEmpty(errMsg)){ |
||||
Toast.makeText(LiveActivity.this, errMsg, Toast.LENGTH_SHORT).show(); |
||||
} |
||||
} |
||||
} |
||||
}; |
||||
|
||||
@Override |
||||
protected void onCreate(Bundle savedInstanceState) { |
||||
super.onCreate(savedInstanceState); |
||||
setContentView(R.layout.activity_live); |
||||
|
||||
initView(); |
||||
requirePermission(); |
||||
initPusher(); |
||||
} |
||||
|
||||
private void initView(){ |
||||
findViewById(R.id.btn_swap).setOnClickListener(this); |
||||
((ToggleButton)findViewById(R.id.btn_live)).setOnCheckedChangeListener(this); |
||||
SurfaceView surface_camera = (SurfaceView) findViewById(R.id.surface_camera); |
||||
surfaceHolder = surface_camera.getHolder(); |
||||
} |
||||
|
||||
private void initPusher() { |
||||
int width = 640;// the chosen resolution matters a lot for encoder performance
||||
int height = 480; |
||||
int videoBitRate = 400;// video bitrate in kbit/s (reference value: 480)
||||
int videoFrameRate = 25;//fps
|
||||
VideoParam videoParam = new VideoParam(width, height, |
||||
Camera.CameraInfo.CAMERA_FACING_BACK, videoBitRate, videoFrameRate); |
||||
int sampleRate = 44100;// sample rate in Hz
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;// stereo channel configuration
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;// 16-bit PCM
int numChannels = 2;// number of channels
||||
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels); |
||||
livePusher = new LivePusher(surfaceHolder, videoParam, audioParam); |
||||
} |
||||
|
||||
@Override |
||||
public void onClick(View v) { |
||||
if(v.getId() == R.id.btn_swap){ |
||||
livePusher.switchCamera(); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { |
||||
if(isChecked){ |
||||
livePusher.startPush(LIVE_URL, this); |
||||
}else { |
||||
livePusher.stopPush(); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void onError(String msg) { |
||||
Log.e(TAG, "errMsg=" + msg); |
||||
mHandler.obtainMessage(MSG_ERROR, msg).sendToTarget(); |
||||
} |
||||
|
||||
@TargetApi(Build.VERSION_CODES.M) |
||||
private void requirePermission(){ |
||||
requestPermissions(permissions, CODE_CAMERA_RECORD); |
||||
} |
||||
|
||||
@Override |
||||
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { |
||||
super.onRequestPermissionsResult(requestCode, permissions, grantResults); |
||||
if(permissions.length > 0 && grantResults.length == permissions.length){ |
||||
for(int i=0; i<permissions.length; i++){ |
||||
Log.i(TAG, permissions[i] + ":grantResult=" + grantResults[i]); |
||||
} |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
protected void onDestroy() { |
||||
super.onDestroy(); |
||||
livePusher.release(); |
||||
} |
||||
} |
@ -0,0 +1,112 @@ |
||||
package com.frank.live; |
||||
|
||||
import com.frank.live.listener.LiveStateChangeListener; |
||||
|
||||
/** |
||||
* Core live-streaming class that wraps the native library.
||||
* Created by frank on 2017/8/15. |
||||
*/ |
||||
|
||||
public class LiveUtil { |
||||
|
||||
static { |
||||
System.loadLibrary("live"); |
||||
} |
||||
|
||||
private native int native_start(String url); |
||||
private native void setVideoParam(int width, int height, int bitRate, int frameRate); |
||||
private native void setAudioParam(int sampleRate, int numChannels); |
||||
private native void pushVideo(byte data[]); |
||||
private native void pushAudio(byte data[], int length); |
||||
private native void native_stop(); |
||||
private native void native_release(); |
||||
|
||||
// failed to open the video encoder
private final static int ERROR_VIDEO_ENCODER_OPEN = 0x01;
// failed to encode a video frame
private final static int ERROR_VIDEO_ENCODE = 0x02;
// failed to open the audio encoder
private final static int ERROR_AUDIO_ENCODER_OPEN = 0x03;
// failed to encode an audio frame
private final static int ERROR_AUDIO_ENCODE = 0x04;
// RTMP connection failed
private final static int ERROR_RTMP_CONNECT = 0x05;
// RTMP stream connection failed
private final static int ERROR_RTMP_CONNECT_STREAM = 0x06;
// RTMP packet send failed
private final static int ERROR_RTMP_SEND_PACKAT = 0x07;
||||
|
||||
private LiveStateChangeListener liveStateChangeListener; |
||||
|
||||
public LiveUtil(){} |
||||
|
||||
public int startPush(String url){ |
||||
return native_start(url); |
||||
} |
||||
|
||||
public void setVideoParams(int width, int height, int bitRate, int frameRate){ |
||||
setVideoParam(width, height, bitRate, frameRate); |
||||
} |
||||
|
||||
public void setAudioParams(int sampleRate, int numChannels){ |
||||
setAudioParam(sampleRate, numChannels); |
||||
} |
||||
|
||||
public void pushVideoData(byte[] data){ |
||||
pushVideo(data); |
||||
} |
||||
|
||||
public void pushAudioData(byte[] data, int length){ |
||||
pushAudio(data, length); |
||||
} |
||||
|
||||
public void stopPush(){ |
||||
native_stop(); |
||||
} |
||||
|
||||
public void release(){ |
||||
native_release(); |
||||
} |
||||
|
||||
public void setOnLiveStateChangeListener(LiveStateChangeListener liveStateChangeListener){ |
||||
this.liveStateChangeListener = liveStateChangeListener; |
||||
} |
||||
|
||||
/** |
||||
* Called back from native code when an error occurs.
||||
* @param errCode errCode |
||||
*/ |
||||
public void errorFromNative(int errCode){ |
||||
// an error occurred, so stop pushing
||||
stopPush(); |
||||
if(liveStateChangeListener != null){ |
||||
String msg = ""; |
||||
switch (errCode){ |
||||
case ERROR_VIDEO_ENCODER_OPEN:
msg = "Failed to open the video encoder...";
break;
case ERROR_VIDEO_ENCODE:
msg = "Failed to encode a video frame...";
break;
case ERROR_AUDIO_ENCODER_OPEN:
msg = "Failed to open the audio encoder...";
break;
case ERROR_AUDIO_ENCODE:
msg = "Failed to encode an audio frame...";
break;
case ERROR_RTMP_CONNECT:
msg = "RTMP connection failed...";
break;
case ERROR_RTMP_CONNECT_STREAM:
msg = "RTMP stream connection failed...";
break;
case ERROR_RTMP_SEND_PACKAT:
msg = "RTMP packet send failed...";
break;
||||
default: |
||||
break; |
||||
} |
||||
liveStateChangeListener.onError(msg); |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,80 @@ |
||||
package com.frank.live.Push; |
||||
|
||||
import android.media.AudioRecord; |
||||
import android.media.MediaRecorder; |
||||
import android.util.Log; |
||||
import com.frank.live.LiveUtil; |
||||
import com.frank.live.param.AudioParam; |
||||
|
||||
/** |
||||
* Audio capture and push.
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class AudioPusher extends Pusher { |
||||
|
||||
private AudioRecord audioRecord; |
||||
private boolean isPushing; |
||||
private int minBufferSize; |
||||
private LiveUtil liveUtil; |
||||
|
||||
AudioPusher(AudioParam audioParam, LiveUtil liveUtil){ |
||||
this.liveUtil = liveUtil; |
||||
initAudioRecord(audioParam); |
||||
liveUtil.setAudioParams(audioParam.getSampleRate(), audioParam.getNumChannels()); |
||||
} |
||||
|
||||
@Override |
||||
public void startPush() { |
||||
isPushing = true; |
||||
new AudioRecordThread(audioRecord).start(); |
||||
} |
||||
|
||||
@Override |
||||
public void stopPush() { |
||||
isPushing = false; |
||||
} |
||||
|
||||
@Override |
||||
public void release() { |
||||
stopPush(); |
||||
if(audioRecord != null){ |
||||
audioRecord.release(); |
||||
audioRecord = null; |
||||
} |
||||
} |
||||
|
||||
private void initAudioRecord(AudioParam audioParam){ |
||||
minBufferSize = AudioRecord.getMinBufferSize(audioParam.getSampleRate(), |
||||
audioParam.getChannelConfig(), audioParam.getAudioFormat()); |
||||
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, audioParam.getSampleRate(), |
||||
audioParam.getChannelConfig(), audioParam.getAudioFormat(), minBufferSize); |
||||
} |
||||
|
||||
/** |
||||
* Recording thread: reads audio data in a loop and sends it to the native layer for encoding |
||||
*/ |
||||
class AudioRecordThread extends Thread{ |
||||
private AudioRecord audioRecord; |
||||
|
||||
AudioRecordThread(AudioRecord audioRecord){ |
||||
this.audioRecord = audioRecord; |
||||
} |
||||
|
||||
@Override |
||||
public void run() { |
||||
super.run(); |
||||
audioRecord.startRecording(); |
||||
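// Read PCM chunks of minBufferSize bytes and push each one down for encoding |
||||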
while (isPushing){ |
||||
byte[] audioBuffer = new byte[minBufferSize]; |
||||
int length = audioRecord.read(audioBuffer, 0, audioBuffer.length); |
||||
if(length > 0){ |
||||
// Log.i("AudioPusher", "is recording...");
|
||||
liveUtil.pushAudioData(audioBuffer, length); |
||||
} |
||||
} |
||||
audioRecord.stop(); |
||||
} |
||||
} |
||||
|
||||
} |
@ -0,0 +1,62 @@ |
||||
package com.frank.live.Push; |
||||
|
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import com.frank.live.LiveUtil; |
||||
import com.frank.live.listener.LiveStateChangeListener; |
||||
import com.frank.live.param.AudioParam; |
||||
import com.frank.live.param.VideoParam; |
||||
|
||||
/** |
||||
* Audio/video pusher |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class LivePusher { |
||||
|
||||
private VideoPusher videoPusher; |
||||
private AudioPusher audioPusher; |
||||
private LiveUtil liveUtil; |
||||
|
||||
public LivePusher(SurfaceHolder surfaceHolder, VideoParam videoParam, AudioParam audioParam){ |
||||
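// A single LiveUtil instance is shared so audio and video go through the same native pipeline |
||||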
liveUtil = new LiveUtil(); |
||||
videoPusher = new VideoPusher(surfaceHolder, videoParam, liveUtil); |
||||
audioPusher = new AudioPusher(audioParam, liveUtil); |
||||
} |
||||
|
||||
/** |
||||
* Start pushing the stream |
||||
*/ |
||||
public void startPush(String liveUrl, LiveStateChangeListener liveStateChangeListener){ |
||||
videoPusher.startPush(); |
||||
audioPusher.startPush(); |
||||
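// Register the error listener before connecting so connection failures are reported |
||||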
liveUtil.setOnLiveStateChangeListener(liveStateChangeListener); |
||||
int result = liveUtil.startPush(liveUrl); |
||||
Log.i("LivePusher", "startPush=" + (result == 0 ? "success" : "fail")); |
||||
} |
||||
|
||||
/** |
||||
* Stop pushing the stream |
||||
*/ |
||||
public void stopPush(){ |
||||
videoPusher.stopPush(); |
||||
audioPusher.stopPush(); |
||||
liveUtil.stopPush(); |
||||
} |
||||
|
||||
/** |
||||
* Switch camera |
||||
*/ |
||||
public void switchCamera(){ |
||||
videoPusher.switchCamera(); |
||||
} |
||||
|
||||
/** |
||||
* Release resources |
||||
*/ |
||||
public void release(){ |
||||
videoPusher.release(); |
||||
audioPusher.release(); |
||||
liveUtil.release(); |
||||
} |
||||
} |
@ -0,0 +1,12 @@ |
||||
package com.frank.live.Push; |
||||
|
||||
/** |
||||
* Base class for the audio and video pushers |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public abstract class Pusher { |
||||
public abstract void startPush(); |
||||
public abstract void stopPush(); |
||||
public abstract void release(); |
||||
} |
@ -0,0 +1,124 @@ |
||||
package com.frank.live.Push; |
||||
|
||||
import android.graphics.ImageFormat; |
||||
import android.hardware.Camera; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
|
||||
import com.frank.live.LiveUtil; |
||||
import com.frank.live.param.VideoParam; |
||||
import java.io.IOException; |
||||
|
||||
/** |
||||
* Video pusher |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class VideoPusher extends Pusher implements SurfaceHolder.Callback, Camera.PreviewCallback { |
||||
|
||||
private SurfaceHolder surfaceHolder; |
||||
private VideoParam videoParam; |
||||
private Camera camera; |
||||
private boolean isPushing; |
||||
private byte[] previewBuffer; |
||||
private LiveUtil liveUtil; |
||||
|
||||
VideoPusher(SurfaceHolder surfaceHolder, VideoParam videoParam, LiveUtil liveUtil){ |
||||
this.surfaceHolder = surfaceHolder; |
||||
this.videoParam = videoParam; |
||||
this.liveUtil = liveUtil; |
||||
surfaceHolder.addCallback(this); |
||||
liveUtil.setVideoParams(videoParam.getWidth(), videoParam.getHeight(), |
||||
videoParam.getBitRate(), videoParam.getFrameRate()); |
||||
} |
||||
|
||||
|
||||
@Override |
||||
public void startPush() { |
||||
isPushing = true; |
||||
} |
||||
|
||||
@Override |
||||
public void stopPush() { |
||||
isPushing = false; |
||||
} |
||||
|
||||
@Override |
||||
public void release() { |
||||
stopPush(); |
||||
stopPreview(); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
startPreview(); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { |
||||
|
||||
} |
||||
|
||||
@Override |
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
stopPreview(); |
||||
} |
||||
|
||||
/** |
||||
* Start camera preview |
||||
*/ |
||||
private void startPreview() { |
||||
try { |
||||
camera = Camera.open(videoParam.getCameraId()); |
||||
Camera.Parameters parameters = camera.getParameters(); |
||||
parameters.setPreviewFormat(ImageFormat.NV21); |
||||
parameters.setPreviewSize(videoParam.getWidth(), videoParam.getHeight());// preview size, not picture size: the pushed frames come from the preview callback |
||||
camera.setParameters(parameters); |
||||
camera.setDisplayOrientation(0);// for portrait this would be 90° |
||||
camera.setPreviewDisplay(surfaceHolder); |
||||
camera.startPreview(); |
||||
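// An NV21 frame only needs width * height * 3 / 2 bytes; 4 bytes per pixel over-allocates the callback buffer |
||||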
previewBuffer = new byte[videoParam.getWidth() * videoParam.getHeight() * 4]; |
||||
camera.addCallbackBuffer(previewBuffer); |
||||
camera.setPreviewCallbackWithBuffer(this); |
||||
} catch (IOException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Stop camera preview |
||||
*/ |
||||
private void stopPreview() { |
||||
if(camera != null){ |
||||
camera.stopPreview(); |
||||
camera.setPreviewCallback(null); |
||||
camera.release(); |
||||
camera = null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Switch between front and back cameras |
||||
*/ |
||||
void switchCamera(){ |
||||
|
||||
if(videoParam.getCameraId() == Camera.CameraInfo.CAMERA_FACING_BACK){ |
||||
videoParam.setCameraId(Camera.CameraInfo.CAMERA_FACING_FRONT); |
||||
}else { |
||||
videoParam.setCameraId(Camera.CameraInfo.CAMERA_FACING_BACK); |
||||
} |
||||
// restart the preview so frames come from the newly selected camera |
||||
stopPreview(); |
||||
startPreview(); |
||||
} |
||||
|
||||
@Override |
||||
public void onPreviewFrame(byte[] data, Camera camera) { |
||||
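// Re-queue the buffer so the camera keeps delivering frames, then hand the NV21 data to the native encoder |
||||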
camera.addCallbackBuffer(previewBuffer); |
||||
if(isPushing){ |
||||
// Log.i("VideoPusher", "isPushing...");
|
||||
liveUtil.pushVideoData(data); |
||||
} |
||||
} |
||||
|
||||
} |
@ -0,0 +1,10 @@ |
||||
package com.frank.live.listener; |
||||
|
||||
/** |
||||
* Live streaming state change listener |
||||
* Created by frank on 2018/1/29. |
||||
*/ |
||||
|
||||
public interface LiveStateChangeListener { |
||||
void onError(String msg); |
||||
} |
@ -0,0 +1,53 @@ |
||||
package com.frank.live.param; |
||||
|
||||
|
||||
/** |
||||
* Audio parameters |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class AudioParam { |
||||
private int channelConfig; |
||||
private int sampleRate; |
||||
private int audioFormat; |
||||
private int numChannels; |
||||
|
||||
public AudioParam(int sampleRate, int channelConfig, int audioFormat, int numChannels) { |
||||
this.sampleRate = sampleRate; |
||||
this.channelConfig = channelConfig; |
||||
this.audioFormat = audioFormat; |
||||
this.numChannels = numChannels; |
||||
} |
||||
|
||||
public int getChannelConfig() { |
||||
return channelConfig; |
||||
} |
||||
|
||||
public void setChannelConfig(int channelConfig) { |
||||
this.channelConfig = channelConfig; |
||||
} |
||||
|
||||
public int getSampleRate() { |
||||
return sampleRate; |
||||
} |
||||
|
||||
public void setSampleRate(int sampleRate) { |
||||
this.sampleRate = sampleRate; |
||||
} |
||||
|
||||
public int getAudioFormat() { |
||||
return audioFormat; |
||||
} |
||||
|
||||
public void setAudioFormat(int audioFormat) { |
||||
this.audioFormat = audioFormat; |
||||
} |
||||
|
||||
public int getNumChannels() { |
||||
return numChannels; |
||||
} |
||||
|
||||
public void setNumChannels(int numChannels) { |
||||
this.numChannels = numChannels; |
||||
} |
||||
} |
@ -0,0 +1,62 @@ |
||||
package com.frank.live.param; |
||||
|
||||
/** |
||||
* Video parameters |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class VideoParam { |
||||
private int width; |
||||
private int height; |
||||
private int cameraId; |
||||
private int bitRate; |
||||
private int frameRate; |
||||
|
||||
public VideoParam(int width, int height, int cameraId, int bitRate, int frameRate) { |
||||
this.width = width; |
||||
this.height = height; |
||||
this.cameraId = cameraId; |
||||
this.bitRate = bitRate; |
||||
this.frameRate = frameRate; |
||||
} |
||||
|
||||
public int getWidth() { |
||||
return width; |
||||
} |
||||
|
||||
public void setWidth(int width) { |
||||
this.width = width; |
||||
} |
||||
|
||||
public int getHeight() { |
||||
return height; |
||||
} |
||||
|
||||
public void setHeight(int height) { |
||||
this.height = height; |
||||
} |
||||
|
||||
public int getCameraId() { |
||||
return cameraId; |
||||
} |
||||
|
||||
public void setCameraId(int cameraId) { |
||||
this.cameraId = cameraId; |
||||
} |
||||
|
||||
public int getBitRate() { |
||||
return bitRate; |
||||
} |
||||
|
||||
public void setBitRate(int bitRate) { |
||||
this.bitRate = bitRate; |
||||
} |
||||
|
||||
public int getFrameRate() { |
||||
return frameRate; |
||||
} |
||||
|
||||
public void setFrameRate(int frameRate) { |
||||
this.frameRate = frameRate; |
||||
} |
||||
} |
@ -0,0 +1,28 @@ |
||||
<?xml version="1.0" encoding="utf-8"?> |
||||
<RelativeLayout |
||||
xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" |
||||
tools:context="com.frank.live.LiveActivity"> |
||||
|
||||
<SurfaceView |
||||
android:id="@+id/surface_camera" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" /> |
||||
|
||||
<Button |
||||
android:id="@+id/btn_swap" |
||||
android:layout_width="wrap_content" |
||||
android:layout_height="wrap_content" |
||||
android:text="@string/swap"/> |
||||
|
||||
<ToggleButton |
||||
android:id="@+id/btn_live" |
||||
android:layout_width="wrap_content" |
||||
android:layout_height="wrap_content" |
||||
android:textOn="@string/stop" |
||||
android:textOff="@string/start" |
||||
android:layout_centerVertical="true"/> |
||||
|
||||
</RelativeLayout> |
@ -0,0 +1,6 @@ |
||||
<?xml version="1.0" encoding="utf-8"?> |
||||
<resources> |
||||
<color name="colorPrimary">#3F51B5</color> |
||||
<color name="colorPrimaryDark">#303F9F</color> |
||||
<color name="colorAccent">#FF4081</color> |
||||
</resources> |
@ -0,0 +1,6 @@ |
||||
<resources> |
||||
<string name="app_name">live</string> |
||||
<string name="swap">切换</string> |
||||
<string name="start">开始</string> |
||||
<string name="stop">停止</string> |
||||
</resources> |
@ -0,0 +1,11 @@ |
||||
<resources> |
||||
|
||||
<!-- Base application theme. --> |
||||
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar"> |
||||
<!-- Customize your theme here. --> |
||||
<item name="colorPrimary">@color/colorPrimary</item> |
||||
<item name="colorPrimaryDark">@color/colorPrimaryDark</item> |
||||
<item name="colorAccent">@color/colorAccent</item> |
||||
</style> |
||||
|
||||
</resources> |
@ -0,0 +1,17 @@ |
||||
package com.frank.live; |
||||
|
||||
import org.junit.Test; |
||||
|
||||
import static org.junit.Assert.*; |
||||
|
||||
/** |
||||
* Example local unit test, which will execute on the development machine (host). |
||||
* |
||||
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a> |
||||
*/ |
||||
public class ExampleUnitTest { |
||||
@Test |
||||
public void addition_isCorrect() throws Exception { |
||||
assertEquals(4, 2 + 2); |
||||
} |
||||
} |
@ -0,0 +1,130 @@ |
||||
package com.frank.ffmpeg.activity; |
||||
|
||||
import android.Manifest; |
||||
import android.annotation.SuppressLint; |
||||
import android.annotation.TargetApi; |
||||
import android.hardware.Camera; |
||||
import android.media.AudioFormat; |
||||
import android.os.Build; |
||||
import android.os.Bundle; |
||||
import android.os.Handler; |
||||
import android.os.Message; |
||||
import android.support.annotation.NonNull; |
||||
import android.support.v7.app.AppCompatActivity; |
||||
import android.text.TextUtils; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
import android.view.View; |
||||
import android.widget.CompoundButton; |
||||
import android.widget.Toast; |
||||
import android.widget.ToggleButton; |
||||
import com.frank.ffmpeg.R; |
||||
import com.frank.live.Push.LivePusher; |
||||
import com.frank.live.listener.LiveStateChangeListener; |
||||
import com.frank.live.param.AudioParam; |
||||
import com.frank.live.param.VideoParam; |
||||
|
||||
/** |
||||
* Real-time H.264 live streaming over RTMP |
||||
* Created by frank on 2018/1/28. |
||||
*/ |
||||
|
||||
public class LiveActivity extends AppCompatActivity implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, LiveStateChangeListener { |
||||
|
||||
private final static String TAG = LiveActivity.class.getSimpleName(); |
||||
private final static int CODE_CAMERA_RECORD = 0x0001; |
||||
private final static String[] permissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}; |
||||
private final static String LIVE_URL = "rtmp://192.168.8.115/live/stream"; |
||||
private final static int MSG_ERROR = 100; |
||||
private SurfaceHolder surfaceHolder; |
||||
private LivePusher livePusher; |
||||
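// onError() is invoked from a background thread, so errors are posted to this handler for the UI-thread Toast |
||||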
@SuppressLint("HandlerLeak") |
||||
private Handler mHandler = new Handler(){ |
||||
@Override |
||||
public void handleMessage(Message msg) { |
||||
super.handleMessage(msg); |
||||
if(msg.what == MSG_ERROR){ |
||||
String errMsg = (String)msg.obj; |
||||
if(!TextUtils.isEmpty(errMsg)){ |
||||
Toast.makeText(LiveActivity.this, errMsg, Toast.LENGTH_SHORT).show(); |
||||
} |
||||
} |
||||
} |
||||
}; |
||||
|
||||
@Override |
||||
protected void onCreate(Bundle savedInstanceState) { |
||||
super.onCreate(savedInstanceState); |
||||
setContentView(R.layout.activity_live); |
||||
|
||||
initView(); |
||||
requirePermission(); |
||||
initPusher(); |
||||
} |
||||
|
||||
private void initView(){ |
||||
findViewById(R.id.btn_swap).setOnClickListener(this); |
||||
((ToggleButton)findViewById(R.id.btn_live)).setOnCheckedChangeListener(this); |
||||
SurfaceView surface_camera = (SurfaceView) findViewById(R.id.surface_camera); |
||||
surfaceHolder = surface_camera.getHolder(); |
||||
} |
||||
|
||||
private void initPusher() { |
||||
int width = 640;// the resolution setting is very important |
||||
int height = 480; |
||||
int videoBitRate = 400;// bit rate in kb/s (480 kb/s also suggested) |
||||
int videoFrameRate = 25;// fps |
||||
VideoParam videoParam = new VideoParam(width, height, |
||||
Camera.CameraInfo.CAMERA_FACING_BACK, videoBitRate, videoFrameRate); |
||||
int sampleRate = 44100;// sample rate in Hz |
||||
int channelConfig = AudioFormat.CHANNEL_IN_STEREO;// stereo channel configuration |
||||
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;// 16-bit PCM samples |
||||
int numChannels = 2;// number of audio channels |
||||
AudioParam audioParam = new AudioParam(sampleRate, channelConfig, audioFormat, numChannels); |
||||
livePusher = new LivePusher(surfaceHolder, videoParam, audioParam); |
||||
} |
||||
|
||||
@Override |
||||
public void onClick(View v) { |
||||
if(v.getId() == R.id.btn_swap){ |
||||
livePusher.switchCamera(); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { |
||||
if(isChecked){ |
||||
livePusher.startPush(LIVE_URL, this); |
||||
}else { |
||||
livePusher.stopPush(); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void onError(String msg) { |
||||
Log.e(TAG, "errMsg=" + msg); |
||||
mHandler.obtainMessage(MSG_ERROR, msg).sendToTarget(); |
||||
} |
||||
|
||||
@TargetApi(Build.VERSION_CODES.M) |
||||
private void requirePermission(){ |
||||
// requestPermissions() only exists on Android M (API 23) and above |
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { |
||||
requestPermissions(permissions, CODE_CAMERA_RECORD); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) { |
||||
super.onRequestPermissionsResult(requestCode, permissions, grantResults); |
||||
if(permissions.length > 0 && grantResults.length == permissions.length){ |
||||
for(int i=0; i<permissions.length; i++){ |
||||
Log.i(TAG, permissions[i] + ":grantResult=" + grantResults[i]); |
||||
} |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
protected void onDestroy() { |
||||
super.onDestroy(); |
||||
livePusher.release(); |
||||
} |
||||
} |
@ -0,0 +1,28 @@ |
||||
<?xml version="1.0" encoding="utf-8"?> |
||||
<RelativeLayout |
||||
xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" |
||||
tools:context="com.frank.ffmpeg.activity.LiveActivity"> |
||||
|
||||
<SurfaceView |
||||
android:id="@+id/surface_camera" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" /> |
||||
|
||||
<Button |
||||
android:id="@+id/btn_swap" |
||||
android:layout_width="wrap_content" |
||||
android:layout_height="wrap_content" |
||||
android:text="@string/swap"/> |
||||
|
||||
<ToggleButton |
||||
android:id="@+id/btn_live" |
||||
android:layout_width="wrap_content" |
||||
android:layout_height="wrap_content" |
||||
android:textOn="@string/stop" |
||||
android:textOff="@string/start" |
||||
android:layout_centerVertical="true"/> |
||||
|
||||
</RelativeLayout> |
@ -1 +1 @@ |
||||
include ':app' |
||||
include ':app', ':Live' |
||||
|