Add files via upload

main
wenchao1024 3 years ago committed by GitHub
parent 782eda4e45
commit b0a1fcccff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. BIN
      ffplay源码和书籍/ffdoc.pdf
  2. BIN
      ffplay源码和书籍/ffplay/CLOCKTXT_320.avi
  3. BIN
      ffplay源码和书籍/ffplay/Debug/SDL.dll
  4. BIN
      ffplay源码和书籍/ffplay/Debug/allcodecs.obj
  5. BIN
      ffplay源码和书籍/ffplay/Debug/allformats.obj
  6. BIN
      ffplay源码和书籍/ffplay/Debug/avidec.obj
  7. BIN
      ffplay源码和书籍/ffplay/Debug/avio.obj
  8. BIN
      ffplay源码和书籍/ffplay/Debug/aviobuf.obj
  9. BIN
      ffplay源码和书籍/ffplay/Debug/cutils.obj
  10. BIN
      ffplay源码和书籍/ffplay/Debug/dsputil.obj
  11. 26
      ffplay源码和书籍/ffplay/Debug/ffplay.Build.CppClean.log
  12. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.exe
  13. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.ilk
  14. 68
      ffplay源码和书籍/ffplay/Debug/ffplay.log
  15. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.obj
  16. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.pdb
  17. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/CL.read.1.tlog
  18. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/CL.write.1.tlog
  19. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/cl.command.1.tlog
  20. 2
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/ffplay.lastbuildstate
  21. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/link.command.1.tlog
  22. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/link.read.1.tlog
  23. BIN
      ffplay源码和书籍/ffplay/Debug/ffplay.tlog/link.write.1.tlog
  24. BIN
      ffplay源码和书籍/ffplay/Debug/file.obj
  25. BIN
      ffplay源码和书籍/ffplay/Debug/imgconvert.obj
  26. BIN
      ffplay源码和书籍/ffplay/Debug/msrle.obj
  27. BIN
      ffplay源码和书籍/ffplay/Debug/truespeech.obj
  28. BIN
      ffplay源码和书籍/ffplay/Debug/utils_codec.obj
  29. BIN
      ffplay源码和书籍/ffplay/Debug/utils_format.obj
  30. BIN
      ffplay源码和书籍/ffplay/Debug/vc120.idb
  31. BIN
      ffplay源码和书籍/ffplay/Debug/vc120.pdb
  32. 40
      ffplay源码和书籍/ffplay/berrno.h
  33. 801
      ffplay源码和书籍/ffplay/ffplay.c
  34. 201
      ffplay源码和书籍/ffplay/ffplay.dsp
  35. 157
      ffplay源码和书籍/ffplay/ffplay.vcxproj
  36. 92
      ffplay源码和书籍/ffplay/ffplay.vcxproj.filters
  37. 25
      ffplay源码和书籍/ffplay/libavcodec/allcodecs.c
  38. 201
      ffplay源码和书籍/ffplay/libavcodec/avcodec.h
  39. 23
      ffplay源码和书籍/ffplay/libavcodec/dsputil.c
  40. 13
      ffplay源码和书籍/ffplay/libavcodec/dsputil.h
  41. 1707
      ffplay源码和书籍/ffplay/libavcodec/imgconvert.c
  42. 887
      ffplay源码和书籍/ffplay/libavcodec/imgconvert_template.h
  43. 305
      ffplay源码和书籍/ffplay/libavcodec/msrle.c
  44. 380
      ffplay源码和书籍/ffplay/libavcodec/truespeech.c
  45. 139
      ffplay源码和书籍/ffplay/libavcodec/truespeech_data.h
  46. 412
      ffplay源码和书籍/ffplay/libavcodec/utils_codec.c
  47. 31
      ffplay源码和书籍/ffplay/libavformat/allformats.c
  48. 271
      ffplay源码和书籍/ffplay/libavformat/avformat.h
  49. 792
      ffplay源码和书籍/ffplay/libavformat/avidec.c
  50. 134
      ffplay源码和书籍/ffplay/libavformat/avio.c
  51. 119
      ffplay源码和书籍/ffplay/libavformat/avio.h
  52. 324
      ffplay源码和书籍/ffplay/libavformat/aviobuf.c
  53. 46
      ffplay源码和书籍/ffplay/libavformat/cutils.c
  54. 89
      ffplay源码和书籍/ffplay/libavformat/file.c
  55. 345
      ffplay源码和书籍/ffplay/libavformat/utils_format.c
  56. 57
      ffplay源码和书籍/ffplay/libavutil/avutil.h
  57. 29
      ffplay源码和书籍/ffplay/libavutil/bswap.h
  58. 69
      ffplay源码和书籍/ffplay/libavutil/common.h
  59. 10
      ffplay源码和书籍/ffplay/libavutil/mathematics.h
  60. 20
      ffplay源码和书籍/ffplay/libavutil/rational.h
  61. 2
      ffplay源码和书籍/ffplay/update.txt

Binary file not shown.

@ -0,0 +1,26 @@
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\vc120.pdb
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\vc120.idb
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\allcodecs.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\dsputil.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\imgconvert.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\msrle.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\truespeech.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\utils_codec.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\allformats.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\avidec.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\avio.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\aviobuf.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\cutils.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\file.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\utils_format.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.obj
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.ilk
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.exe
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.pdb
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\.\debug\ffplay.exe
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\cl.command.1.tlog
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\cl.read.1.tlog
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\cl.write.1.tlog
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\link.command.1.tlog
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\link.read.1.tlog
e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\debug\ffplay.tlog\link.write.1.tlog

@ -0,0 +1,68 @@
生成启动时间为 2016/11/18 15:28:25。
1>项目“E:\Work\研究生工作\COStream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\ffplay.vcxproj”在节点 2 上(Rebuild 个目标)。
1>ClCompile:
D:\Program Files\Microsoft Visual Studio 12.0\VC\bin\CL.exe /c /ZI /nologo /W3 /WX- /Od /Oy- /D WIN32 /D _DEBUG /D _CONSOLE /D _VC80_UPGRADE=0x0600 /D _MBCS /Gm /EHsc /RTC1 /MTd /GS /Gy- /fp:precise /Zc:wchar_t /Zc:forScope /Fo".\Debug\\" /Fd".\Debug\vc120.pdb" /Gd /TC /analyze- /errorReport:prompt ffplay.c libavcodec\allcodecs.c libavcodec\dsputil.c libavcodec\imgconvert.c libavcodec\msrle.c libavcodec\truespeech.c libavcodec\utils_codec.c libavformat\allformats.c libavformat\avidec.c libavformat\avio.c libavformat\aviobuf.c libavformat\cutils.c libavformat\file.c libavformat\utils_format.c
utils_format.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\utils_format.c(5): warning C4005: “UINT_MAX”: 宏重定义
d:\program files\microsoft visual studio 12.0\vc\include\limits.h(41) : 参见“UINT_MAX”的前一个定义
file.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(31): warning C4996: '_open': This function or variable may be unsafe. Consider using _sopen_s instead. To disable deprecation, use _CRT_SECURE_NO_WARNINGS. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\io.h(237) : 参见“_open”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(41): warning C4996: 'read': The POSIX name for this item is deprecated. Instead, use the ISO C++ conformant name: _read. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\io.h(337) : 参见“read”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(47): warning C4996: 'write': The POSIX name for this item is deprecated. Instead, use the ISO C++ conformant name: _write. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\io.h(342) : 参见“write”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(53): warning C4244: “函数”: 从“offset_t”转换到“long”,可能丢失数据
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(53): warning C4996: 'lseek': The POSIX name for this item is deprecated. Instead, use the ISO C++ conformant name: _lseek. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\io.h(334) : 参见“lseek”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\file.c(59): warning C4996: 'close': The POSIX name for this item is deprecated. Instead, use the ISO C++ conformant name: _close. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\io.h(326) : 参见“close”的声明
cutils.c
aviobuf.c
avio.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avio.c(41): warning C4996: 'strcpy': This function or variable may be unsafe. Consider using strcpy_s instead. To disable deprecation, use _CRT_SECURE_NO_WARNINGS. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\string.h(112) : 参见“strcpy”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avio.c(64): warning C4996: 'strcpy': This function or variable may be unsafe. Consider using strcpy_s instead. To disable deprecation, use _CRT_SECURE_NO_WARNINGS. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\string.h(112) : 参见“strcpy”的声明
avidec.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avidec.c(121): warning C4244: “函数”: 从“int64_t”转换到“int”,可能丢失数据
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avidec.c(325): warning C4018: “>”: 有符号/无符号不匹配
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avidec.c(642): warning C4244: “+=”: 从“int64_t”转换到“unsigned int”,可能丢失数据
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavformat\avidec.c(646): warning C4018: “>=”: 有符号/无符号不匹配
allformats.c
utils_codec.c
truespeech.c
msrle.c
imgconvert.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavcodec\imgconvert.c(1091): warning C4146: 一元负运算符应用于无符号类型,结果仍为无符号类型
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavcodec\imgconvert_template.h(465): warning C4146: 一元负运算符应用于无符号类型,结果仍为无符号类型
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\libavcodec\imgconvert_template.h(859): warning C4146: 一元负运算符应用于无符号类型,结果仍为无符号类型
dsputil.c
allcodecs.c
ffplay.c
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\ffplay.c(93): warning C4996: '_ftime64': This function or variable may be unsafe. Consider using _ftime64_s instead. To disable deprecation, use _CRT_SECURE_NO_WARNINGS. See online help for details.
d:\program files\microsoft visual studio 12.0\vc\include\sys\timeb.h(131) : 参见“_ftime64”的声明
1>e:\work\研究生工作\costream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\ffplay.c(376): warning C4018: “>=”: 有符号/无符号不匹配
正在生成代码...
Link:
D:\Program Files\Microsoft Visual Studio 12.0\VC\bin\link.exe /ERRORREPORT:PROMPT /OUT:".\Debug\ffplay.exe" /INCREMENTAL /NOLOGO /LIBPATH:"E:\Work\SDL-1.2.15\lib\x86" odbc32.lib odbccp32.lib kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /MANIFEST /MANIFESTUAC:"level='asInvoker' uiAccess='false'" /manifest:embed /DEBUG /PDB:".\Debug\ffplay.pdb" /SUBSYSTEM:CONSOLE /TLBID:1 /DYNAMICBASE /NXCOMPAT /IMPLIB:".\Debug\ffplay.lib" /MACHINE:X86 /SAFESEH .\Debug\ffplay.obj
.\Debug\allcodecs.obj
.\Debug\dsputil.obj
.\Debug\imgconvert.obj
.\Debug\msrle.obj
.\Debug\truespeech.obj
.\Debug\utils_codec.obj
.\Debug\allformats.obj
.\Debug\avidec.obj
.\Debug\avio.obj
.\Debug\aviobuf.obj
.\Debug\cutils.obj
.\Debug\file.obj
.\Debug\utils_format.obj
1>ffplay.obj : warning LNK4075: 忽略“/EDITANDCONTINUE”(由于“/SAFESEH”规范)
ffplay.vcxproj -> E:\Work\研究生工作\COStream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\.\Debug\ffplay.exe
1>已完成生成项目“E:\Work\研究生工作\COStream以及视频编码工程\音视频编解码的书籍\ffplay源码和书籍\ffplay\ffplay.vcxproj”(Rebuild 个目标)的操作。
生成成功。
已用时间 00:00:01.76

@ -0,0 +1,2 @@
#TargetFrameworkVersion=v4.0:PlatformToolSet=v120:EnableManagedIncrementalBuild=false:VCToolArchitecture=Native32Bit
Debug|Win32|E:\Work\研究生工作\COStream以及视频编码工程\HomerHEVC\HomerHEVC\xHEVC\|

@ -0,0 +1,40 @@
#ifndef BERRNO_H
#define BERRNO_H
/* Common error codes (also used for interrupt errors, signals, etc.).
 * Each code is forcibly redefined so its value matches the classic
 * POSIX errno numbering regardless of what the platform defines. */
#ifdef ENOENT
#undef ENOENT
#endif
#define ENOENT 2
#ifdef EINTR
#undef EINTR
#endif
#define EINTR 4
#ifdef EIO
#undef EIO
#endif
#define EIO 5
#ifdef EAGAIN
#undef EAGAIN
#endif
#define EAGAIN 11
#ifdef ENOMEM
#undef ENOMEM
#endif
#define ENOMEM 12
#ifdef EINVAL
#undef EINVAL
#endif
#define EINVAL 22
#ifdef EPIPE
#undef EPIPE
#endif
#define EPIPE 32
#endif

@ -0,0 +1,801 @@
#include "./libavformat/avformat.h"
#if defined(CONFIG_WIN32)
#include <sys/types.h>
#include <sys/timeb.h>
#include <windows.h>
#else
#include <fcntl.h>
#include <sys/time.h>
#endif
#include <time.h>
#include <math.h>
#include <SDL.h>
#include <SDL_thread.h>
#ifdef CONFIG_WIN32
#undef main // We don't want SDL to override our main()
#endif
#pragma comment(lib, "SDL.lib")
/* 退出事件 */
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
/* 视频队列的最长长度 */
#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)
/* 音频队列的最大长度 */
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
/* 图像队列的长度 */
#define VIDEO_PICTURE_QUEUE_SIZE 1
/* FIFO queue of demuxed AVPackets, shared between the demux thread
 * (producer) and the audio/video decode consumers. */
typedef struct PacketQueue
{
// head and tail of the singly linked packet list
AVPacketList *first_pkt, *last_pkt;
// total bytes of packet payload currently queued
int size;
// set to 1 to abort: wakes waiters and makes gets return -1
int abort_request;
// protects every field of the queue
SDL_mutex *mutex;
// signalled when a packet is added or abort is requested
SDL_cond *cond;
} PacketQueue;
/* A displayable video frame backed by an SDL YUV overlay. */
typedef struct VideoPicture
{
// SDL overlay that receives the converted YV12 image (may be NULL)
SDL_Overlay *bmp;
// dimensions recorded when the overlay was allocated
int width, height; // source height & width
} VideoPicture;
/* Complete player state for one open media file. */
typedef struct VideoState
{
// demux/parse thread (runs decode_thread)
SDL_Thread *parse_tid;
// video decode/display thread (runs video_thread)
SDL_Thread *video_tid;
// set to 1 to ask the threads to shut down
int abort_request;
// demuxer context for the open input file
AVFormatContext *ic;
// indices into ic->streams, -1 while absent
int audio_stream;
int video_stream;
// audio stream
AVStream *audio_st;
// video stream
AVStream *video_st;
// queue of demuxed audio packets
PacketQueue audioq;
// queue of demuxed video packets
PacketQueue videoq;
// display queue (a single picture in this simplified player)
VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
// inter-frame delay (seconds) used by video_display for pacing
double frame_last_delay;
// staging buffer of decoded PCM for the SDL audio callback
uint8_t audio_buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE *3) / 2];
// valid bytes in audio_buf / bytes already copied out
unsigned int audio_buf_size;
int audio_buf_index;
// audio packet currently being decoded
AVPacket audio_pkt;
// read cursor and bytes left inside the current audio packet
uint8_t *audio_pkt_data;
int audio_pkt_size;
// serializes access to the video decoder
SDL_mutex *video_decoder_mutex;
// serializes access to the audio decoder
SDL_mutex *audio_decoder_mutex;
// name of the file being played
char filename[240];
} VideoState;
// forced input format (never assigned here; passed through to stream_open)
static AVInputFormat *file_iformat;
// name of the file to play (hard-coded in main)
static const char *input_filename;
// the single global player instance
static VideoState *cur_stream;
// SDL display surface created by SDL_SetVideoMode in decode_thread
static SDL_Surface *screen;
/* Return the current wall-clock time in microseconds (platform specific). */
int64_t av_gettime(void)
{
#if defined(CONFIG_WINCE)
/* WinCE: timeGetTime() is milliseconds -> scale to microseconds */
return timeGetTime() *int64_t_C(1000);
#elif defined(CONFIG_WIN32)
/* Win32: _ftime() yields seconds + milliseconds */
struct _timeb tb;
_ftime(&tb);
return ((int64_t)tb.time *int64_t_C(1000) + (int64_t)tb.millitm) *int64_t_C(1000);
#else
/* POSIX: gettimeofday() already has microsecond resolution */
struct timeval tv;
gettimeofday(&tv, NULL);
return (int64_t)tv.tv_sec *1000000+tv.tv_usec;
#endif
}
/* Zero a PacketQueue and create its synchronization primitives. */
static void packet_queue_init(PacketQueue *q) // packet queue handling
{
    memset(q, 0, sizeof *q);
    q->mutex = SDL_CreateMutex(); /* guards every queue field */
    q->cond = SDL_CreateCond();   /* signalled on put/abort */
}
/* Discard every queued packet and reset the queue to empty.
 * Holds the queue mutex for the whole traversal. */
static void packet_queue_flush(PacketQueue *q)
{
    AVPacketList *node, *next;

    SDL_LockMutex(q->mutex);
    node = q->first_pkt;
    while (node != NULL)
    {
        next = node->next;
        av_free_packet(&node->pkt); /* release the packet payload */
        av_freep(&node);            /* release the list node itself */
        node = next;
    }
    q->first_pkt = NULL;
    q->last_pkt = NULL;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}
/* Destroy a queue: drop any remaining packets, then release its
 * mutex and condition variable. The queue is unusable afterwards. */
static void packet_queue_end(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyCond(q->cond);
    SDL_DestroyMutex(q->mutex);
}
/* Append *pkt to the tail of the queue (the queue takes ownership of
 * the packet's payload). Wakes one blocked consumer.
 * Returns 0 on success, -1 if the list node cannot be allocated. */
static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *node = av_malloc(sizeof(AVPacketList));

    if (node == NULL)
        return -1;
    node->pkt = *pkt;
    node->next = NULL;

    SDL_LockMutex(q->mutex);
    if (q->last_pkt == NULL)
        q->first_pkt = node; /* queue was empty */
    else
        q->last_pkt->next = node;
    q->last_pkt = node;
    q->size += node->pkt.size;
    SDL_CondSignal(q->cond); /* wake a consumer blocked in packet_queue_get */
    SDL_UnlockMutex(q->mutex);
    return 0;
}
/* Request shutdown: set the abort flag and wake any blocked waiter so
 * pending and future packet_queue_get calls return -1. */
static void packet_queue_abort(PacketQueue *q)
{
SDL_LockMutex(q->mutex);
q->abort_request = 1;
SDL_CondSignal(q->cond);
SDL_UnlockMutex(q->mutex);
}
/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
/* Pop one packet from the queue into *pkt.
 * block != 0: wait until a packet arrives or the queue is aborted.
 * block == 0: return 0 immediately when the queue is empty. */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
AVPacketList *pkt1;
int ret;
SDL_LockMutex(q->mutex);
for (;;)
{
if (q->abort_request)
{
ret = - 1; // queue aborted: caller must stop consuming
break;
}
pkt1 = q->first_pkt;
if (pkt1)
{
// unlink the head node and hand its packet to the caller
q->first_pkt = pkt1->next;
if (!q->first_pkt)
q->last_pkt = NULL;
q->size -= pkt1->pkt.size;
*pkt = pkt1->pkt;
av_free(pkt1);
ret = 1;
break;
}
else if (!block)// non-blocking mode: empty queue just returns 0
{
ret = 0; // nothing queued and caller asked not to wait
break;
}
else
{
// blocking mode: sleep until packet_queue_put/abort signals us
SDL_CondWait(q->cond, q->mutex);
}
}
SDL_UnlockMutex(q->mutex);
return ret;
}
/* (Re)create the single VideoPicture's SDL overlay sized to the
 * decoder's current frame dimensions. Called from the video thread. */
static void alloc_picture(void *opaque)
{
VideoState *is = opaque;
VideoPicture *vp;
vp = &is->pictq[0];
// drop any previously created overlay before making a new one
if (vp->bmp)
SDL_FreeYUVOverlay(vp->bmp);
// NOTE(review): SDL_CreateYUVOverlay can return NULL; video_display
// guards on vp->bmp, so a failure silently disables display
vp->bmp = SDL_CreateYUVOverlay(is->video_st->actx->width,
is->video_st->actx->height,
SDL_YV12_OVERLAY,
screen);
vp->width = is->video_st->actx->width;
vp->height = is->video_st->actx->height;
}
/* Convert a decoded frame to YV12 and blit it through the SDL overlay.
 * `pts` (seconds) only gates a crude fixed sleep used for pacing.
 * Returns -1 if the video queue was aborted, 0 otherwise. */
static int video_display(VideoState *is, AVFrame *src_frame, double pts)
{
VideoPicture *vp;
int dst_pix_fmt;
AVPicture pict;
if (is->videoq.abort_request)
return - 1;
vp = &is->pictq[0];
/* if the frame is not skipped, then display it */
if (vp->bmp)
{
SDL_Rect rect;
// crude pacing: sleep one inter-frame delay (Win32 Sleep, milliseconds)
if (pts)
Sleep((int)(is->frame_last_delay *1000));
#if 1
/* get a pointer on the bitmap */
SDL_LockYUVOverlay(vp->bmp);
dst_pix_fmt = PIX_FMT_YUV420P;
// planes 1 and 2 are swapped: the YV12 overlay stores V before U,
// while AVPicture uses Y,U,V order -- presumably why pixels[2]/[1]
// are crossed here; confirm against SDL overlay docs
pict.data[0] = vp->bmp->pixels[0];
pict.data[1] = vp->bmp->pixels[2];
pict.data[2] = vp->bmp->pixels[1];
pict.linesize[0] = vp->bmp->pitches[0];
pict.linesize[1] = vp->bmp->pitches[2];
pict.linesize[2] = vp->bmp->pitches[1];
img_convert(&pict,
dst_pix_fmt,
(AVPicture*)src_frame,
is->video_st->actx->pix_fmt,
is->video_st->actx->width,
is->video_st->actx->height);
SDL_UnlockYUVOverlay(vp->bmp); /* update the bitmap content */
rect.x = 0;
rect.y = 0;
rect.w = is->video_st->actx->width;
rect.h = is->video_st->actx->height;
SDL_DisplayYUVOverlay(vp->bmp, &rect);
#endif
}
return 0;
}
/* Video decoding thread: pulls compressed packets off is->videoq,
 * decodes them and hands finished frames to video_display().
 * Runs until the queue is aborted or the display reports an error.
 * Always returns 0 (the thread's exit status is unused). */
static int video_thread(void *arg)
{
    VideoState *is = arg;
    AVPacket pkt1, *pkt = &pkt1;
    int len1, got_picture;
    double pts = 0;
    AVFrame *frame;

    /* allocate one reusable decode frame; bail out on OOM instead of
     * dereferencing NULL as the original code did */
    frame = av_malloc(sizeof(AVFrame));
    if (!frame)
        return 0;
    memset(frame, 0, sizeof(AVFrame));
    /* create the SDL overlay the decoded frames will be blitted into */
    alloc_picture(is);
    for (;;)
    {
        /* blocking get; returns < 0 once packet_queue_abort() was called */
        if (packet_queue_get(&is->videoq, pkt, 1) < 0)
            break;
        /* decoder state is shared with stream teardown, so serialize */
        SDL_LockMutex(is->video_decoder_mutex);
        len1 = avcodec_decode_video(is->video_st->actx, frame, &got_picture, pkt->data, pkt->size);
        SDL_UnlockMutex(is->video_decoder_mutex);
        if (len1 < 0)
        {
            /* decode error: drop this packet rather than acting on a
             * possibly stale got_picture from the previous iteration */
            av_free_packet(pkt);
            continue;
        }
        /* derive a presentation time (seconds) from the packet's dts */
        if (pkt->dts != AV_NOPTS_VALUE)
            pts = av_q2d(is->video_st->time_base) * pkt->dts;
        if (got_picture)
        {
            if (video_display(is, frame, pts) < 0)
            {
                av_free_packet(pkt); /* don't leak the packet on early exit */
                goto the_end;
            }
        }
        av_free_packet(pkt);
    }
the_end:
    av_free(frame);
    return 0;
}
/* decode one audio frame and returns its uncompressed size */
/* Decode audio from is->audio_pkt into audio_buf, pulling new packets
 * off is->audioq as needed. One packet may contain several frames, so
 * a partially consumed packet is resumed on the next call via
 * is->audio_pkt_data / is->audio_pkt_size.
 * Returns the decoded byte count, or -1 once the queue is aborted.
 * NOTE(review): *pts_ptr is never written by this function. */
static int audio_decode_frame(VideoState *is, uint8_t *audio_buf, double *pts_ptr)
{
AVPacket *pkt = &is->audio_pkt;
int len1, data_size;
for (;;)
{
/* NOTE: the audio packet can contain several frames */
while (is->audio_pkt_size > 0)
{
// decoder access is serialized against stream teardown
SDL_LockMutex(is->audio_decoder_mutex);
len1 = avcodec_decode_audio(is->audio_st->actx, (int16_t*)audio_buf,
&data_size, is->audio_pkt_data, is->audio_pkt_size);
SDL_UnlockMutex(is->audio_decoder_mutex);
if (len1 < 0)
{
/* if error, we skip the frame */
is->audio_pkt_size = 0;
break;
}
// advance past the bytes the decoder consumed
is->audio_pkt_data += len1;
is->audio_pkt_size -= len1;
if (data_size <= 0)
continue;
return data_size;
}
/* free the current packet */
if (pkt->data)
av_free_packet(pkt);
/* read next packet */
if (packet_queue_get(&is->audioq, pkt, 1) < 0)
return - 1;
is->audio_pkt_data = pkt->data;
is->audio_pkt_size = pkt->size;
}
}
/* prepare a new audio buffer */
/* SDL audio callback: fill `stream` with `len` bytes of decoded PCM.
 * Refills is->audio_buf via audio_decode_frame() whenever the staging
 * buffer is exhausted; on decode failure it emits silence so playback
 * never stalls. Runs on SDL's audio thread. */
void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
VideoState *is = opaque;
int audio_size, len1;
double pts = 0;
while (len > 0)
{
// staging buffer exhausted: decode the next chunk
if (is->audio_buf_index >= is->audio_buf_size)
{
audio_size = audio_decode_frame(is, is->audio_buf, &pts);
if (audio_size < 0)
{
/* if error, just output silence */
is->audio_buf_size = 1024;
memset(is->audio_buf, 0, is->audio_buf_size);
}
else
{
// audio_size = synchronize_audio(is, (int16_t*)is->audio_buf, audio_size, pts);
is->audio_buf_size = audio_size;
}
is->audio_buf_index = 0;
}
// copy as much as the device asked for (or as much as we have)
len1 = is->audio_buf_size - is->audio_buf_index;
if (len1 > len)
len1 = len;
memcpy(stream, (uint8_t*)is->audio_buf + is->audio_buf_index, len1);
len -= len1;
stream += len1;
is->audio_buf_index += len1;
}
}
/* open a given stream. Return 0 if OK */
/* Open the audio or video stream at `stream_index`: for audio, open
 * the SDL audio device and start the callback; for video, spawn the
 * decode thread. Returns 0 on success, -1 on any failure. */
static int stream_component_open(VideoState *is, int stream_index)
{
AVFormatContext *ic = is->ic;
AVCodecContext *enc;
AVCodec *codec;
SDL_AudioSpec wanted_spec, spec;
if (stream_index < 0 || stream_index >= ic->nb_streams)
return - 1;
enc = ic->streams[stream_index]->actx;
/* prepare audio output */
/* audio: configure and open the SDL audio device before the codec */
if (enc->codec_type == CODEC_TYPE_AUDIO)
{
wanted_spec.freq = enc->sample_rate;
wanted_spec.format = AUDIO_S16SYS;
/* hack for AC3. XXX: suppress that */
if (enc->channels > 2)
enc->channels = 2;
wanted_spec.channels = enc->channels;
wanted_spec.silence = 0;
wanted_spec.samples = 1024; //SDL_AUDIO_BUFFER_SIZE;
wanted_spec.callback = sdl_audio_callback;
wanted_spec.userdata = is;
if (SDL_OpenAudio(&wanted_spec, &spec) < 0)
{
fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
return - 1;
}
}
// find a decoder for this codec id
codec = avcodec_find_decoder(enc->codec_id);
// open the codec; fail if none was found or open fails
if (!codec || avcodec_open(enc, codec) < 0)
return - 1;
switch (enc->codec_type)
{
case CODEC_TYPE_AUDIO: // audio: record state and unpause SDL playback
is->audio_stream = stream_index;
is->audio_st = ic->streams[stream_index];
is->audio_buf_size = 0;
is->audio_buf_index = 0;
memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
packet_queue_init(&is->audioq);
SDL_PauseAudio(0);
break;
case CODEC_TYPE_VIDEO:
is->video_stream = stream_index;
is->video_st = ic->streams[stream_index];
is->frame_last_delay = is->video_st->frame_last_delay;
packet_queue_init(&is->videoq); // init the packet queue
is->video_tid = SDL_CreateThread(video_thread, is); // spawn the video decode thread
break;
default:
break;
}
return 0;
}
/* Tear down one open stream component: stop its consumer, release its
 * packet queue, then close the codec. Inverse of stream_component_open. */
static void stream_component_close(VideoState *is, int stream_index)
{
    AVFormatContext *ic = is->ic;
    AVCodecContext *enc;

    if (stream_index < 0 || stream_index >= ic->nb_streams)
        return;
    enc = ic->streams[stream_index]->actx;
    if (enc->codec_type == CODEC_TYPE_AUDIO)
    {
        /* unblock the audio callback, silence SDL, then free the queue */
        packet_queue_abort(&is->audioq);
        SDL_CloseAudio();
        packet_queue_end(&is->audioq);
    }
    else if (enc->codec_type == CODEC_TYPE_VIDEO)
    {
        /* wake the video thread, join it, then free the queue */
        packet_queue_abort(&is->videoq);
        SDL_WaitThread(is->video_tid, NULL);
        packet_queue_end(&is->videoq);
    }
    avcodec_close(enc);
}
/* Demux thread: open the input file, pick the first audio and video
 * streams, open their components, then loop reading packets and
 * routing them to the audio/video queues until EOF or abort.
 * Pushes FF_QUIT_EVENT on failure so the GUI loop can exit. */
static int decode_thread(void *arg)
{
VideoState *is = arg;
AVFormatContext *ic;
int err, i, ret, video_index, audio_index;
AVPacket pkt1, *pkt = &pkt1;
AVFormatParameters params, *ap = &params;
int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL | SDL_RESIZABLE;
video_index = - 1;
audio_index = - 1;
is->video_stream = - 1;
is->audio_stream = - 1;
memset(ap, 0, sizeof(*ap));
// open the input file and fill the demuxer (file) context
err = av_open_input_file(&ic, is->filename, NULL, 0, ap);
if (err < 0)
{
ret = - 1;
goto fail;
}
is->ic = ic;
// scan the container's streams for the first audio and video stream
for (i = 0; i < ic->nb_streams; i++)
{
// codec context of this stream
AVCodecContext *enc = ic->streams[i]->actx;
// dispatch on the codec type
switch (enc->codec_type)
{
case CODEC_TYPE_AUDIO: // audio
if (audio_index < 0)
audio_index = i;
break;
case CODEC_TYPE_VIDEO: // video
if (video_index < 0)
video_index = i;
// create the display window sized to the video frame
screen = SDL_SetVideoMode(enc->width, enc->height, 0, flags);
SDL_WM_SetCaption("FFplay", "FFplay"); // window sized to fit the video
// schedule_refresh(is, 40);
break;
default:
break;
}
}
// open the audio component and start its consumer
if (audio_index >= 0)
stream_component_open(is, audio_index);
// open the video component and start its decode thread
if (video_index >= 0)
stream_component_open(is, video_index);
if (is->video_stream < 0 && is->audio_stream < 0)
{
fprintf(stderr, "%s: could not open codecs\n", is->filename);
ret = - 1;
goto fail;
}
// main demux loop
for (;;)
{
if (is->abort_request)
break;
// back off while the queues are full (or we reached end of file)
if (is->audioq.size > MAX_AUDIOQ_SIZE || is->videoq.size > MAX_VIDEOQ_SIZE || url_feof(&ic->pb))
{
SDL_Delay(10); // if the queue are full, no need to read more,wait 10 ms
continue;
}
// read one packet (one frame's worth of compressed data)
ret = av_read_packet(ic, pkt); //av_read_frame(ic, pkt);
if (ret < 0)
{
if (url_ferror(&ic->pb) == 0)
{
SDL_Delay(100); // wait for user event
continue;
}
else
break;
}
// route the packet to the matching queue, or drop it
if (pkt->stream_index == is->audio_stream)
{
packet_queue_put(&is->audioq, pkt);
}
else if (pkt->stream_index == is->video_stream)
{
packet_queue_put(&is->videoq, pkt);
}
else
{
av_free_packet(pkt);
}
}
while (!is->abort_request) // wait until the end
{
SDL_Delay(100);
}
ret = 0;
fail:
// close whatever was opened, in all exit paths
if (is->audio_stream >= 0)
stream_component_close(is, is->audio_stream);
if (is->video_stream >= 0)
stream_component_close(is, is->video_stream);
if (is->ic)
{
av_close_input_file(is->ic);
is->ic = NULL;
}
// on error, tell the GUI loop to quit
if (ret != 0)
{
SDL_Event event;
event.type = FF_QUIT_EVENT;
event.user.data1 = is;
SDL_PushEvent(&event);
}
return 0;
}
/* Create a VideoState for `filename` and spawn the demux thread that
 * does all further work. `iformat` is accepted for interface
 * compatibility but is not used here.
 * Returns the new state, or NULL on allocation/thread failure. */
static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
{
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    pstrcpy(is->filename, sizeof(is->filename), filename);
    /* per-codec locks shared with the decode threads */
    is->audio_decoder_mutex = SDL_CreateMutex();
    is->video_decoder_mutex = SDL_CreateMutex();
    /* the parse thread opens the file and drives everything else */
    is->parse_tid = SDL_CreateThread(decode_thread, is);
    if (!is->parse_tid)
    {
        /* fix: the two mutexes leaked on this path in the original */
        SDL_DestroyMutex(is->audio_decoder_mutex);
        SDL_DestroyMutex(is->video_decoder_mutex);
        av_free(is);
        return NULL;
    }
    return is;
}
/* Stop the demux thread and release the stream's SDL resources.
 * NOTE(review): the VideoState itself is not freed here and the only
 * caller (do_exit) just drops the pointer, so the struct leaks --
 * confirm whether an av_free(is) belongs at the end. */
static void stream_close(VideoState *is)
{
VideoPicture *vp;
int i;
// ask decode_thread to stop (it closes the components itself), then join
is->abort_request = 1;
SDL_WaitThread(is->parse_tid, NULL);
// free any SDL overlays still held by the picture queue
for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++)
{
vp = &is->pictq[i];
if (vp->bmp)
{
SDL_FreeYUVOverlay(vp->bmp);
vp->bmp = NULL;
}
}
SDL_DestroyMutex(is->audio_decoder_mutex);
SDL_DestroyMutex(is->video_decoder_mutex);
}
/* Close the current stream, shut SDL down and terminate the process. */
void do_exit(void)
{
if (cur_stream)
{
stream_close(cur_stream);
cur_stream = NULL;
}
SDL_Quit();
exit(0);
}
/* Main-thread event loop: service SDL/GUI events until a quit request
 * (ESC or 'q' key, window close, or FF_QUIT_EVENT from decode_thread). */
void event_loop(void) // handle an event sent by the GUI
{
    SDL_Event event;

    for (;;)
    {
        SDL_WaitEvent(&event);
        if (event.type == SDL_KEYDOWN)
        {
            int key = event.key.keysym.sym;
            /* only ESC and 'q' are handled; other keys are ignored */
            if (key == SDLK_ESCAPE || key == SDLK_q)
                do_exit();
        }
        else if (event.type == SDL_QUIT || event.type == FF_QUIT_EVENT)
        {
            do_exit();
        }
        /* all other event types fall through and are ignored */
    }
}
/* Program entry point: initialize SDL, open the hard-coded input file
 * and run the GUI event loop until the user quits. */
int main(int argc, char **argv)
{
    int flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;

    /* register all supported container and codec handlers */
    av_register_all();
    /* input file name (fixed; command-line parsing is not implemented) */
    input_filename = "clocktxt_320.avi";
    // input_filename = "d:/yuv/clocktxt.avi";
    /* SDL (a cross-platform multimedia library): we need the video,
     * audio and timer subsystems; report the failure before exiting
     * instead of dying silently as the original did */
    if (SDL_Init(flags))
    {
        fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
        exit(1);
    }
    /* drop event classes we never handle: activation, mouse motion,
     * system window messages and plain user events */
    SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
    SDL_EventState(SDL_MOUSEMOTION, SDL_IGNORE);
    SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
    SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
    /* open the input stream; fix: on failure the original left
     * cur_stream NULL and spun in event_loop forever */
    cur_stream = stream_open(input_filename, file_iformat);
    if (!cur_stream)
    {
        fprintf(stderr, "%s: could not open input stream\n", input_filename);
        SDL_Quit();
        exit(1);
    }
    event_loop();
    return 0;
}

@ -0,0 +1,201 @@
# Microsoft Developer Studio Project File - Name="ffplay" - Package Owner=<4>
# Microsoft Developer Studio Generated Build File, Format Version 6.00
# ** DO NOT EDIT **
# TARGTYPE "Win32 (x86) Console Application" 0x0103
CFG=ffplay - Win32 Debug
!MESSAGE This is not a valid makefile. To build this project using NMAKE,
!MESSAGE use the Export Makefile command and run
!MESSAGE
!MESSAGE NMAKE /f "ffplay.mak".
!MESSAGE
!MESSAGE You can specify a configuration when running NMAKE
!MESSAGE by defining the macro CFG on the command line. For example:
!MESSAGE
!MESSAGE NMAKE /f "ffplay.mak" CFG="ffplay - Win32 Debug"
!MESSAGE
!MESSAGE Possible choices for configuration are:
!MESSAGE
!MESSAGE "ffplay - Win32 Release" (based on "Win32 (x86) Console Application")
!MESSAGE "ffplay - Win32 Debug" (based on "Win32 (x86) Console Application")
!MESSAGE
# Begin Project
# PROP AllowPerConfigDependencies 0
# PROP Scc_ProjName ""
# PROP Scc_LocalPath ""
CPP=cl.exe
RSC=rc.exe
!IF "$(CFG)" == "ffplay - Win32 Release"
# PROP BASE Use_MFC 0
# PROP BASE Use_Debug_Libraries 0
# PROP BASE Output_Dir "Release"
# PROP BASE Intermediate_Dir "Release"
# PROP BASE Target_Dir ""
# PROP Use_MFC 0
# PROP Use_Debug_Libraries 0
# PROP Output_Dir "Release"
# PROP Intermediate_Dir "Release"
# PROP Target_Dir ""
# ADD BASE CPP /nologo /W3 /GX /O2 /D "WIN32" /D "NDEBUG" /D "_CONSOLE" /D "_MBCS" /YX /FD /c
# ADD CPP /nologo /W3 /GX /D "WIN32" /D "NDEBUG" /D "_CONSOLE" /D "_MBCS" /YX /FD /c
# ADD BASE RSC /l 0x804 /d "NDEBUG"
# ADD RSC /l 0x804 /d "NDEBUG"
BSC32=bscmake.exe
# ADD BASE BSC32 /nologo
# ADD BSC32 /nologo
LINK32=link.exe
# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /machine:I386
# ADD LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /machine:I386
!ELSEIF "$(CFG)" == "ffplay - Win32 Debug"
# PROP BASE Use_MFC 0
# PROP BASE Use_Debug_Libraries 1
# PROP BASE Output_Dir "Debug"
# PROP BASE Intermediate_Dir "Debug"
# PROP BASE Target_Dir ""
# PROP Use_MFC 0
# PROP Use_Debug_Libraries 1
# PROP Output_Dir "Debug"
# PROP Intermediate_Dir "Debug"
# PROP Ignore_Export_Lib 0
# PROP Target_Dir ""
# ADD BASE CPP /nologo /W3 /Gm /GX /ZI /Od /D "WIN32" /D "_DEBUG" /D "_CONSOLE" /D "_MBCS" /YX /FD /GZ /c
# ADD CPP /nologo /W3 /Gm /GX /ZI /Od /D "WIN32" /D "_DEBUG" /D "_CONSOLE" /D "_MBCS" /YX /FD /GZ /c
# ADD BASE RSC /l 0x804 /d "_DEBUG"
# ADD RSC /l 0x804 /d "_DEBUG"
BSC32=bscmake.exe
# ADD BASE BSC32 /nologo
# ADD BSC32 /nologo
LINK32=link.exe
# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /debug /machine:I386 /pdbtype:sept
# ADD LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /subsystem:console /debug /machine:I386 /pdbtype:sept
!ENDIF
# Begin Target
# Name "ffplay - Win32 Release"
# Name "ffplay - Win32 Debug"
# Begin Group "libavcodec"
# PROP Default_Filter ""
# Begin Source File
SOURCE=.\libavcodec\allcodecs.c
# End Source File
# Begin Source File
SOURCE=.\libavcodec\avcodec.h
# End Source File
# Begin Source File
SOURCE=.\libavcodec\dsputil.c
# End Source File
# Begin Source File
SOURCE=.\libavcodec\dsputil.h
# End Source File
# Begin Source File
SOURCE=.\libavcodec\imgconvert.c
# End Source File
# Begin Source File
SOURCE=.\libavcodec\imgconvert_template.h
# End Source File
# Begin Source File
SOURCE=.\libavcodec\msrle.c
# End Source File
# Begin Source File
SOURCE=.\libavcodec\truespeech.c
# End Source File
# Begin Source File
SOURCE=.\libavcodec\truespeech_data.h
# End Source File
# Begin Source File
SOURCE=.\libavcodec\utils_codec.c
# End Source File
# End Group
# Begin Group "libavformat"
# PROP Default_Filter ""
# Begin Source File
SOURCE=.\libavformat\allformats.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\avformat.h
# End Source File
# Begin Source File
SOURCE=.\libavformat\avidec.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\avio.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\avio.h
# End Source File
# Begin Source File
SOURCE=.\libavformat\aviobuf.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\cutils.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\file.c
# End Source File
# Begin Source File
SOURCE=.\libavformat\utils_format.c
# End Source File
# End Group
# Begin Group "libavutil"
# PROP Default_Filter ""
# Begin Source File
SOURCE=.\libavutil\avutil.h
# End Source File
# Begin Source File
SOURCE=.\libavutil\bswap.h
# End Source File
# Begin Source File
SOURCE=.\libavutil\common.h
# End Source File
# Begin Source File
SOURCE=.\libavutil\mathematics.h
# End Source File
# Begin Source File
SOURCE=.\libavutil\rational.h
# End Source File
# End Group
# Begin Source File
SOURCE=.\berrno.h
# End Source File
# Begin Source File
SOURCE=.\ffplay.c
# End Source File
# End Target
# End Project

@ -0,0 +1,157 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<SccProjectName />
<SccLocalPath />
<ProjectGuid>{F914CB69-0432-4937-87FD-D02757DD4B64}</ProjectGuid>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
<UseOfMfc>false</UseOfMfc>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<PlatformToolset>v120</PlatformToolset>
<UseOfMfc>false</UseOfMfc>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(VCTargetsPath)Microsoft.Cpp.UpgradeFromVC60.props" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="$(VCTargetsPath)Microsoft.Cpp.UpgradeFromVC60.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<OutDir>.\Release\</OutDir>
<IntDir>.\Release\</IntDir>
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<OutDir>.\Debug\</OutDir>
<IntDir>.\Debug\</IntDir>
<LinkIncremental>true</LinkIncremental>
<IncludePath>E:\Work\SDL-1.2.15\include;$(IncludePath)</IncludePath>
<LibraryPath>E:\Work\SDL-1.2.15\lib\x86;$(LibraryPath)</LibraryPath>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<InlineFunctionExpansion>Default</InlineFunctionExpansion>
<FunctionLevelLinking>false</FunctionLevelLinking>
<Optimization>Disabled</Optimization>
<SuppressStartupBanner>true</SuppressStartupBanner>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AssemblerListingLocation>.\Release\</AssemblerListingLocation>
<PrecompiledHeaderOutputFile>.\Release\ffplay.pch</PrecompiledHeaderOutputFile>
<ObjectFileName>.\Release\</ObjectFileName>
<ProgramDataBaseFileName>.\Release\</ProgramDataBaseFileName>
</ClCompile>
<Midl>
<TypeLibraryName>.\Release\ffplay.tlb</TypeLibraryName>
</Midl>
<ResourceCompile>
<Culture>0x0804</Culture>
<PreprocessorDefinitions>NDEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ResourceCompile>
<Bscmake>
<SuppressStartupBanner>true</SuppressStartupBanner>
<OutputFile>.\Release\ffplay.bsc</OutputFile>
</Bscmake>
<Link>
<SuppressStartupBanner>true</SuppressStartupBanner>
<SubSystem>Console</SubSystem>
<OutputFile>.\Release\ffplay.exe</OutputFile>
<AdditionalDependencies>odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<InlineFunctionExpansion>Default</InlineFunctionExpansion>
<FunctionLevelLinking>false</FunctionLevelLinking>
<Optimization>Disabled</Optimization>
<SuppressStartupBanner>true</SuppressStartupBanner>
<WarningLevel>Level3</WarningLevel>
<MinimalRebuild>true</MinimalRebuild>
<DebugInformationFormat>EditAndContinue</DebugInformationFormat>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AssemblerListingLocation>.\Debug\</AssemblerListingLocation>
<PrecompiledHeaderOutputFile>.\Debug\ffplay.pch</PrecompiledHeaderOutputFile>
<ObjectFileName>.\Debug\</ObjectFileName>
<ProgramDataBaseFileName>.\Debug\</ProgramDataBaseFileName>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
</ClCompile>
<Midl>
<TypeLibraryName>.\Debug\ffplay.tlb</TypeLibraryName>
</Midl>
<ResourceCompile>
<Culture>0x0804</Culture>
<PreprocessorDefinitions>_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ResourceCompile>
<Bscmake>
<SuppressStartupBanner>true</SuppressStartupBanner>
<OutputFile>.\Debug\ffplay.bsc</OutputFile>
</Bscmake>
<Link>
<SuppressStartupBanner>true</SuppressStartupBanner>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<OutputFile>.\Debug\ffplay.exe</OutputFile>
<AdditionalDependencies>odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalLibraryDirectories>E:\Work\SDL-1.2.15\lib\x86;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="ffplay.c" />
<ClCompile Include="libavcodec\allcodecs.c" />
<ClCompile Include="libavcodec\dsputil.c" />
<ClCompile Include="libavcodec\imgconvert.c" />
<ClCompile Include="libavcodec\msrle.c" />
<ClCompile Include="libavcodec\truespeech.c" />
<ClCompile Include="libavcodec\utils_codec.c" />
<ClCompile Include="libavformat\allformats.c" />
<ClCompile Include="libavformat\avidec.c" />
<ClCompile Include="libavformat\avio.c" />
<ClCompile Include="libavformat\aviobuf.c" />
<ClCompile Include="libavformat\cutils.c" />
<ClCompile Include="libavformat\file.c" />
<ClCompile Include="libavformat\utils_format.c" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="berrno.h" />
<ClInclude Include="libavcodec\avcodec.h" />
<ClInclude Include="libavcodec\dsputil.h" />
<ClInclude Include="libavcodec\imgconvert_template.h" />
<ClInclude Include="libavcodec\truespeech_data.h" />
<ClInclude Include="libavformat\avformat.h" />
<ClInclude Include="libavformat\avio.h" />
<ClInclude Include="libavutil\avutil.h" />
<ClInclude Include="libavutil\bswap.h" />
<ClInclude Include="libavutil\common.h" />
<ClInclude Include="libavutil\mathematics.h" />
<ClInclude Include="libavutil\rational.h" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

@ -0,0 +1,92 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="libavcodec">
<UniqueIdentifier>{94bac1c7-3a5d-4c5c-9ada-d7f42de7259f}</UniqueIdentifier>
</Filter>
<Filter Include="libavformat">
<UniqueIdentifier>{d17f4d25-f28a-4175-92be-4834f6481e36}</UniqueIdentifier>
</Filter>
<Filter Include="libavutil">
<UniqueIdentifier>{e95c5c62-22fc-4e57-8867-b02483bd60f4}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="libavcodec\allcodecs.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavcodec\dsputil.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavcodec\imgconvert.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavcodec\msrle.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavcodec\truespeech.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavcodec\utils_codec.c">
<Filter>libavcodec</Filter>
</ClCompile>
<ClCompile Include="libavformat\allformats.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\avidec.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\avio.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\aviobuf.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\cutils.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\file.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="libavformat\utils_format.c">
<Filter>libavformat</Filter>
</ClCompile>
<ClCompile Include="ffplay.c" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="libavcodec\avcodec.h">
<Filter>libavcodec</Filter>
</ClInclude>
<ClInclude Include="libavcodec\dsputil.h">
<Filter>libavcodec</Filter>
</ClInclude>
<ClInclude Include="libavcodec\imgconvert_template.h">
<Filter>libavcodec</Filter>
</ClInclude>
<ClInclude Include="libavcodec\truespeech_data.h">
<Filter>libavcodec</Filter>
</ClInclude>
<ClInclude Include="libavformat\avformat.h">
<Filter>libavformat</Filter>
</ClInclude>
<ClInclude Include="libavformat\avio.h">
<Filter>libavformat</Filter>
</ClInclude>
<ClInclude Include="libavutil\avutil.h">
<Filter>libavutil</Filter>
</ClInclude>
<ClInclude Include="libavutil\bswap.h">
<Filter>libavutil</Filter>
</ClInclude>
<ClInclude Include="libavutil\common.h">
<Filter>libavutil</Filter>
</ClInclude>
<ClInclude Include="libavutil\mathematics.h">
<Filter>libavutil</Filter>
</ClInclude>
<ClInclude Include="libavutil\rational.h">
<Filter>libavutil</Filter>
</ClInclude>
<ClInclude Include="berrno.h" />
</ItemGroup>
</Project>

@ -0,0 +1,25 @@
#include "avcodec.h"
/************************************************************************/
/* 简单的注册/初始化函数,把编解码器用相应的链表串起来便于查找识别 */
/************************************************************************/
/* 两个全局的编解码器 */
extern AVCodec truespeech_decoder;
extern AVCodec msrle_decoder;
/*
 * Register every codec supported by this stripped-down build.
 * Safe to call more than once; only the first call has any effect.
 */
void avcodec_register_all(void)
{
    static int inited = 0;

    if (inited)
        return;
    inited = 1;

    /* This simplified ffmpeg supports exactly two codecs: MSRLE and TrueSpeech. */
    register_avcodec(&msrle_decoder);
    register_avcodec(&truespeech_decoder);
}

@ -0,0 +1,201 @@
#ifndef AVCODEC_H
#define AVCODEC_H
/*
** Public header of the simplified libavcodec: codec ids, core data
** structures, and the decode API used by this stripped-down ffplay build.
*/
#ifdef __cplusplus
extern "C"
{
#endif
#include "../libavutil/avutil.h"
#define FFMPEG_VERSION_INT 0x000409
#define FFMPEG_VERSION "CVS"
#define AV_STRINGIFY(s) AV_TOSTRING(s)
#define AV_TOSTRING(s) #s
#define LIBAVCODEC_VERSION_INT ((51<<16)+(8<<8)+0)
#define LIBAVCODEC_VERSION 51.8.0
#define LIBAVCODEC_BUILD LIBAVCODEC_VERSION_INT
#define LIBAVCODEC_IDENT "Lavc" AV_STRINGIFY(LIBAVCODEC_VERSION)
#define AV_NOPTS_VALUE int64_t_C(0x8000000000000000)
#define AV_TIME_BASE 1000000
/* Codec IDs (this simplified build supports only TrueSpeech and MSRLE). */
enum CodecID
{
    CODEC_ID_TRUESPEECH,  // TrueSpeech audio decoder
    CODEC_ID_MSRLE,       // Microsoft RLE video decoder
    CODEC_ID_NONE         // sentinel: no/unknown codec
};
/* Media type of a codec/stream. */
enum CodecType
{
    CODEC_TYPE_UNKNOWN = - 1,  // type not yet identified
    // video stream
    CODEC_TYPE_VIDEO,
    // audio stream
    CODEC_TYPE_AUDIO,
    CODEC_TYPE_DATA            // generic data stream
};
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio
#define FF_INPUT_BUFFER_PADDING_SIZE 8
/*
 * AVPicture and AVFrame hold raw image data.  For planar YUV formats the
 * Y, U and V planes go in separate data[] entries; packed RGB uses data[0]
 * only.  linesize[] is the byte stride of each plane.
 */
/* picture: up to 4 planes of pixel data */
typedef struct AVPicture
{
    uint8_t *data[4];   // plane pointers
    int linesize[4];    // byte stride of each plane
} AVPicture;
/* decoded frame: like AVPicture, plus the original allocation base pointers */
typedef struct AVFrame
{
    uint8_t *data[4];   // plane pointers (may be offset into base[])
    int linesize[4];    // byte stride of each plane
    uint8_t *base[4];   // start of each allocated plane buffer
} AVFrame;
/*
 * Per-stream codec context.  Holds the parameters a (de)coder needs plus a
 * priv_data pointer for codec-private state, so one generic context serves
 * whichever codec is attached to it.
 */
typedef struct AVCodecContext
{
    int bit_rate;
    int frame_number; // audio or video frame number
    unsigned char *extradata; // codec private data: for audio, the bytes after WAVEFORMATEX;
    int extradata_size; // for video, the bytes after BITMAPINFOHEADER
    int width, height; // video dimensions in pixels
    enum PixelFormat pix_fmt; // video pixel format
    int sample_rate; // samples per sec // audio only
    int channels; // audio channel count
    int bits_per_sample; // bits per audio sample
    int block_align; // audio block alignment in bytes
    // attached codec and its private state
    struct AVCodec *codec;
    void *priv_data;
    enum CodecType codec_type; // see CODEC_TYPE_xxx
    enum CodecID codec_id; // see CODEC_ID_xxx
    int(*get_buffer)(struct AVCodecContext *c, AVFrame *pic); // allocate a frame buffer
    void(*release_buffer)(struct AVCodecContext *c, AVFrame *pic); // release a frame buffer
    int(*reget_buffer)(struct AVCodecContext *c, AVFrame *pic); // re-acquire a buffer, keeping contents
    int internal_buffer_count; // buffers handed out by the default allocator
    void *internal_buffer; // default allocator's bookkeeping storage
    // palette control (for palettized video such as MSRLE)
    struct AVPaletteControl *palctrl;
}AVCodecContext;
/* A codec (can represent a whole encoder and/or decoder). */
/* Describes one audio/video codec, focused on its function pointers; each
   supported media format has its own AVCodec so it can be found by id. */
typedef struct AVCodec
{
    const char *name; // human-readable codec name
    enum CodecType type; // CODEC_TYPE_VIDEO / CODEC_TYPE_AUDIO / CODEC_TYPE_DATA
    enum CodecID id; // codec id used for lookup
    int priv_data_size; // size of the codec-private context
    int(*init)(AVCodecContext*); // open/initialize the codec
    int(*encode)(AVCodecContext *, uint8_t *buf, int buf_size, void *data); // encode one frame
    int(*close)(AVCodecContext*); // close and release the codec
    int(*decode)(AVCodecContext *, void *outdata, int *outdata_size, uint8_t *buf, int buf_size); // decode one packet
    int capabilities; // codec capability flags
    struct AVCodec *next; // links all registered codecs into a list for lookup
}AVCodec;
#define AVPALETTE_SIZE 1024   // palette size in bytes (256 entries * 4 bytes)
#define AVPALETTE_COUNT 256   // number of palette entries
/* Each palette entry is four bytes (A,R,G,B).  Videos with few distinct
   colors store one palette index per pixel instead of full RGB (4:1 saving). */
/* Palette control: carries palette data from the demuxer to the decoder. */
typedef struct AVPaletteControl
{
    // demuxer sets this to 1 to indicate the palette has changed; decoder resets to 0
    int palette_changed;
    /* 4-byte ARGB palette entries, stored in native byte order; note that
     * the individual palette components should be on a 8-bit scale; if
     * the palette data comes from a IBM VGA native format, the component
     * data is probably 6 bits in size and needs to be scaled */
    unsigned int palette[AVPALETTE_COUNT];
} AVPaletteControl;
/* picture buffer helpers */
int avpicture_alloc(AVPicture *picture, int pix_fmt, int width, int height);
void avpicture_free(AVPicture *picture);
int avpicture_fill(AVPicture *picture, uint8_t *ptr, int pix_fmt, int width, int height);
int avpicture_get_size(int pix_fmt, int width, int height);
void avcodec_get_chroma_sub_sample(int pix_fmt, int *h_shift, int *v_shift);
/* color-space conversion */
int img_convert(AVPicture *dst, int dst_pix_fmt, const AVPicture *src, int pix_fmt,
    int width, int height);
/* codec registration and lookup */
void avcodec_init(void);
void register_avcodec(AVCodec *format);
AVCodec *avcodec_find_decoder(enum CodecID id);
AVCodecContext *avcodec_alloc_context(void);
/* default frame-buffer management callbacks */
int avcodec_default_get_buffer(AVCodecContext *s, AVFrame *pic);
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic);
int avcodec_default_reget_buffer(AVCodecContext *s, AVFrame *pic);
void avcodec_align_dimensions(AVCodecContext *s, int *width, int *height);
int avcodec_check_dimensions(void *av_log_ctx, unsigned int w, unsigned int h);
/* open / decode / close */
int avcodec_open(AVCodecContext *avctx, AVCodec *codec);
int avcodec_decode_audio(AVCodecContext *avctx, int16_t *samples, int *frame_size_ptr,
    uint8_t *buf, int buf_size);
int avcodec_decode_video(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr,
    uint8_t *buf, int buf_size);
int avcodec_close(AVCodecContext *avctx);
void avcodec_register_all(void);
void avcodec_default_free_buffers(AVCodecContext *s);
/* memory helpers */
void *av_malloc(unsigned int size);
void *av_mallocz(unsigned int size);
void *av_realloc(void *ptr, unsigned int size);
void av_free(void *ptr);
void av_freep(void *ptr);
void *av_fast_realloc(void *ptr, unsigned int *size, unsigned int min_size);
void img_copy(AVPicture *dst, const AVPicture *src, int pix_fmt, int width, int height);
#ifdef __cplusplus
}
#endif
#endif

@ -0,0 +1,23 @@
/************************************************************************/
/* 定义 dsp 优化限幅运算使用的查找表,实现其初始化函数 */
/************************************************************************/
#include "avcodec.h"
#include "dsputil.h"
uint8_t cropTbl[256+2 * MAX_NEG_CROP] = {0, };
/*
 * One-time setup of cropTbl: after this runs, cropTbl[i + MAX_NEG_CROP]
 * equals i clamped to [0, 255], so saturated arithmetic becomes a lookup.
 */
void dsputil_static_init(void)
{
    int idx;

    /* guard regions: indices below 0 clamp to 0, above 255 clamp to 255 */
    for (idx = 0; idx < MAX_NEG_CROP; idx++)
    {
        cropTbl[idx] = 0;
        cropTbl[idx + MAX_NEG_CROP + 256] = 255;
    }
    /* identity mapping for the in-range values 0..255 */
    for (idx = 0; idx < 256; idx++)
        cropTbl[idx + MAX_NEG_CROP] = idx;
}

@ -0,0 +1,13 @@
#ifndef DSPUTIL_H
#define DSPUTIL_H
/************************************************************************/
/* Clipping lookup table for saturated arithmetic and its init function. */
/* cropTbl[i + MAX_NEG_CROP] == clamp(i, 0, 255) once                    */
/* dsputil_static_init() has run.                                        */
/************************************************************************/
#define MAX_NEG_CROP 1024
extern uint8_t cropTbl[256+2 * MAX_NEG_CROP];
void dsputil_static_init(void);
#endif

File diff suppressed because it is too large Load Diff

@ -0,0 +1,887 @@
#ifndef RGB_OUT
#define RGB_OUT(d, r, g, b) RGBA_OUT(d, r, g, b, 0xff)
#endif
/************************************************************************/
/* 定义并实现图像颜色空间转换使用的函数和宏 */
/************************************************************************/
#pragma warning (disable:4305 4244)
/*
 * Planar YUV 4:2:0 (video range, CCIR 601 coefficients) -> RGB_NAME.
 * Chroma is subsampled 2x2, so two source rows are converted per pass and
 * each (Cb,Cr) pair colors a 2x2 block of luma samples; odd trailing
 * column/row are handled by the fix-up branches.
 */
static void glue(yuv420p_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *y1_ptr, *y2_ptr, *cb_ptr, *cr_ptr;
    uint8_t *d, *d1, *d2;
    /* y, cb, cr and *_add are scratch variables used by the YUV_TO_RGB macros */
    int w, y, cb, cr, r_add, g_add, b_add, width2;
    uint8_t *cm = cropTbl + MAX_NEG_CROP; /* clipping table used by the macros */
    unsigned int r, g, b;
    d = dst->data[0];
    y1_ptr = src->data[0];
    cb_ptr = src->data[1];
    cr_ptr = src->data[2];
    width2 = (width + 1) >> 1; /* chroma plane width (rounded up) */
    for (; height >= 2; height -= 2)
    {
        d1 = d;
        d2 = d + dst->linesize[0];
        y2_ptr = y1_ptr + src->linesize[0];
        for (w = width; w >= 2; w -= 2)
        {
            /* one chroma pair drives a 2x2 block of output pixels */
            YUV_TO_RGB1_CCIR(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[0]); /* output 4 pixels */
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[1]);
            RGB_OUT(d1 + BPP, r, g, b);
            YUV_TO_RGB2_CCIR(r, g, b, y2_ptr[0]);
            RGB_OUT(d2, r, g, b);
            YUV_TO_RGB2_CCIR(r, g, b, y2_ptr[1]);
            RGB_OUT(d2 + BPP, r, g, b);
            d1 += 2 * BPP;
            d2 += 2 * BPP;
            y1_ptr += 2;
            y2_ptr += 2;
            cb_ptr++;
            cr_ptr++;
        }
        if (w) /* handle odd width */
        {
            YUV_TO_RGB1_CCIR(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[0]);
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2_CCIR(r, g, b, y2_ptr[0]);
            RGB_OUT(d2, r, g, b);
            d1 += BPP;
            d2 += BPP;
            y1_ptr++;
            y2_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        d += 2 * dst->linesize[0];
        /* skip the second row already consumed, minus the inner-loop advance */
        y1_ptr += 2 * src->linesize[0] - width;
        cb_ptr += src->linesize[1] - width2;
        cr_ptr += src->linesize[2] - width2;
    }
    if (height) /* handle odd height */
    {
        d1 = d;
        for (w = width; w >= 2; w -= 2)
        {
            YUV_TO_RGB1_CCIR(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[0]); /* output 2 pixels */
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[1]);
            RGB_OUT(d1 + BPP, r, g, b);
            d1 += 2 * BPP;
            y1_ptr += 2;
            cb_ptr++;
            cr_ptr++;
        }
        if (w) /* handle width */
        {
            YUV_TO_RGB1_CCIR(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[0]); /* output 2 pixels */
            RGB_OUT(d1, r, g, b);
            d1 += BPP;
            y1_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
    }
}
/*
 * Planar YUV 4:2:0 full range ("J" / JPEG levels) -> RGB_NAME.
 * Identical traversal to yuv420p_to_*, but uses the full-range
 * YUV_TO_RGB1/2 macros instead of the CCIR (video-range) ones.
 */
static void glue(yuvj420p_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *y1_ptr, *y2_ptr, *cb_ptr, *cr_ptr;
    uint8_t *d, *d1, *d2;
    /* y, cb, cr and *_add are scratch variables used by the YUV_TO_RGB macros */
    int w, y, cb, cr, r_add, g_add, b_add, width2;
    uint8_t *cm = cropTbl + MAX_NEG_CROP; /* clipping table used by the macros */
    unsigned int r, g, b;
    d = dst->data[0];
    y1_ptr = src->data[0];
    cb_ptr = src->data[1];
    cr_ptr = src->data[2];
    width2 = (width + 1) >> 1; /* chroma plane width (rounded up) */
    for (; height >= 2; height -= 2)
    {
        d1 = d;
        d2 = d + dst->linesize[0];
        y2_ptr = y1_ptr + src->linesize[0];
        for (w = width; w >= 2; w -= 2)
        {
            /* one chroma pair drives a 2x2 block of output pixels */
            YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2(r, g, b, y1_ptr[0]); /* output 4 pixels */
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2(r, g, b, y1_ptr[1]);
            RGB_OUT(d1 + BPP, r, g, b);
            YUV_TO_RGB2(r, g, b, y2_ptr[0]);
            RGB_OUT(d2, r, g, b);
            YUV_TO_RGB2(r, g, b, y2_ptr[1]);
            RGB_OUT(d2 + BPP, r, g, b);
            d1 += 2 * BPP;
            d2 += 2 * BPP;
            y1_ptr += 2;
            y2_ptr += 2;
            cb_ptr++;
            cr_ptr++;
        }
        if (w) /* handle odd width */
        {
            YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2(r, g, b, y1_ptr[0]);
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2(r, g, b, y2_ptr[0]);
            RGB_OUT(d2, r, g, b);
            d1 += BPP;
            d2 += BPP;
            y1_ptr++;
            y2_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        d += 2 * dst->linesize[0];
        /* skip the second row already consumed, minus the inner-loop advance */
        y1_ptr += 2 * src->linesize[0] - width;
        cb_ptr += src->linesize[1] - width2;
        cr_ptr += src->linesize[2] - width2;
    }
    if (height) /* handle odd height */
    {
        d1 = d;
        for (w = width; w >= 2; w -= 2)
        {
            YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2(r, g, b, y1_ptr[0]); /* output 2 pixels */
            RGB_OUT(d1, r, g, b);
            YUV_TO_RGB2(r, g, b, y1_ptr[1]);
            RGB_OUT(d1 + BPP, r, g, b);
            d1 += 2 * BPP;
            y1_ptr += 2;
            cb_ptr++;
            cr_ptr++;
        }
        if (w) /* handle width */
        {
            YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2(r, g, b, y1_ptr[0]); /* output 2 pixels */
            RGB_OUT(d1, r, g, b);
            d1 += BPP;
            y1_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
    }
}
/*
 * RGB_NAME -> planar YUV 4:2:0 (video range, CCIR 601 coefficients).
 * Processes 2x2 pixel blocks: four luma samples are written per block and
 * the block's R/G/B sums are averaged into one (Cb,Cr) pair.  The last
 * divisor argument of RGB_TO_U/V_CCIR is the averaging shift (2 for 4
 * pixels, 1 for 2, 0 for 1).
 */
static void glue(RGB_NAME, _to_yuv420p)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    int wrap, wrap3, width2;
    int r, g, b, r1, g1, b1, w;
    uint8_t *lum, *cb, *cr;
    const uint8_t *p;
    lum = dst->data[0];
    cb = dst->data[1];
    cr = dst->data[2];
    width2 = (width + 1) >> 1; /* chroma plane width (rounded up) */
    wrap = dst->linesize[0];
    wrap3 = src->linesize[0];
    p = src->data[0];
    for (; height >= 2; height -= 2)
    {
        for (w = width; w >= 2; w -= 2)
        {
            /* accumulate the 2x2 block's R/G/B sums in r1/g1/b1 */
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y_CCIR(r, g, b);
            /* drop to the second row of the block */
            p += wrap3;
            lum += wrap;
            RGB_IN(r, g, b, p);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y_CCIR(r, g, b);
            /* average of 4 pixels (shift 2) -> one chroma sample */
            cb[0] = RGB_TO_U_CCIR(r1, g1, b1, 2);
            cr[0] = RGB_TO_V_CCIR(r1, g1, b1, 2);
            cb++;
            cr++;
            /* back to the first row, two pixels further right */
            p += - wrap3 + 2 * BPP;
            lum += - wrap + 2;
        }
        if (w) /* odd trailing column: average 2 pixels (shift 1) */
        {
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            p += wrap3;
            lum += wrap;
            RGB_IN(r, g, b, p);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            cb[0] = RGB_TO_U_CCIR(r1, g1, b1, 1);
            cr[0] = RGB_TO_V_CCIR(r1, g1, b1, 1);
            cb++;
            cr++;
            p += - wrap3 + BPP;
            lum += - wrap + 1;
        }
        /* advance past the second row plus each stride's padding */
        p += wrap3 + (wrap3 - width * BPP);
        lum += wrap + (wrap - width);
        cb += dst->linesize[1] - width2;
        cr += dst->linesize[2] - width2;
    }
    if (height) /* handle odd height */
    {
        for (w = width; w >= 2; w -= 2)
        {
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y_CCIR(r, g, b);
            cb[0] = RGB_TO_U_CCIR(r1, g1, b1, 1);
            cr[0] = RGB_TO_V_CCIR(r1, g1, b1, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) /* single remaining pixel */
        {
            RGB_IN(r, g, b, p);
            lum[0] = RGB_TO_Y_CCIR(r, g, b);
            cb[0] = RGB_TO_U_CCIR(r, g, b, 0);
            cr[0] = RGB_TO_V_CCIR(r, g, b, 0);
        }
    }
}
/* Convert RGB_NAME pixels to 8-bit grayscale (full-range luma). */
static void glue(RGB_NAME, _to_gray)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const unsigned char *sp;
    unsigned char *dp;
    int r, g, b, dst_pad, src_pad;
    int col, row;

    sp = src->data[0];
    src_pad = src->linesize[0] - BPP * width;
    dp = dst->data[0];
    dst_pad = dst->linesize[0] - width;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            RGB_IN(r, g, b, sp);
            dp[0] = RGB_TO_Y(r, g, b);
            dp++;
            sp += BPP;
        }
        sp += src_pad;
        dp += dst_pad;
    }
}
/* Expand 8-bit grayscale to RGB_NAME by replicating luma into R, G and B. */
static void glue(gray_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const unsigned char *sp;
    unsigned char *dp;
    int v, dst_pad, src_pad;
    int col, row;

    sp = src->data[0];
    src_pad = src->linesize[0] - width;
    dp = dst->data[0];
    dst_pad = dst->linesize[0] - BPP * width;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            v = sp[0];
            RGB_OUT(dp, v, v, v);
            dp += BPP;
            sp++;
        }
        sp += src_pad;
        dp += dst_pad;
    }
}
/*
 * 8-bit palettized -> RGB_NAME.  Each source byte indexes the 256-entry
 * ARGB palette stored in src->data[1]; the alpha byte is forwarded when
 * the destination format supports it (RGBA_OUT defined).
 */
static void glue(pal8_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    unsigned char *q;
    int r, g, b, dst_wrap, src_wrap;
    int x, y;
    uint32_t v;
    const uint32_t *palette;
    p = src->data[0];
    src_wrap = src->linesize[0] - width;
    palette = (uint32_t*)src->data[1]; /* 256 ARGB entries, native byte order */
    q = dst->data[0];
    dst_wrap = dst->linesize[0] - BPP * width;
    for (y = 0; y < height; y++)
    {
        for (x = 0; x < width; x++)
        {
            v = palette[p[0]];
            r = (v >> 16) &0xff;
            g = (v >> 8) &0xff;
            b = (v) &0xff;
#ifdef RGBA_OUT
            {
                int a;
                a = (v >> 24) &0xff;
                RGBA_OUT(q, r, g, b, a);
            }
#else
            RGB_OUT(q, r, g, b);
#endif
            q += BPP;
            p++;
        }
        p += src_wrap;
        q += dst_wrap;
    }
}
#if !defined(FMT_RGBA32) && defined(RGBA_OUT)
/* alpha support */
/* Unpack 32-bit ARGB words into RGB_NAME pixels, preserving alpha. */
static void glue(rgba32_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *sp;
    uint8_t *dp;
    int src_pad, dst_pad, col, row;
    unsigned int pix, r, g, b, a;

    sp = src->data[0];
    src_pad = src->linesize[0] - width * 4;
    dp = dst->data[0];
    dst_pad = dst->linesize[0] - width * BPP;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            /* native-order ARGB word: A in the top byte, B in the bottom */
            pix = ((const uint32_t*)(sp))[0];
            a = (pix >> 24) &0xff;
            r = (pix >> 16) &0xff;
            g = (pix >> 8) &0xff;
            b = pix &0xff;
            RGBA_OUT(dp, r, g, b, a);
            sp += 4;
            dp += BPP;
        }
        sp += src_pad;
        dp += dst_pad;
    }
}
/* Pack RGB_NAME pixels into 32-bit ARGB words (alpha in the top byte). */
static void glue(RGB_NAME, _to_rgba32)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *sp;
    uint8_t *dp;
    int src_pad, dst_pad, col, row;
    unsigned int r, g, b, a;

    sp = src->data[0];
    src_pad = src->linesize[0] - width * BPP;
    dp = dst->data[0];
    dst_pad = dst->linesize[0] - width * 4;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            RGBA_IN(r, g, b, a, sp);
            ((uint32_t*)(dp))[0] = (a << 24) | (r << 16) | (g << 8) | b;
            dp += 4;
            sp += BPP;
        }
        sp += src_pad;
        dp += dst_pad;
    }
}
#endif /* !defined(FMT_RGBA32) && defined(RGBA_IN) */
#ifndef FMT_RGB24
/* Convert packed 24-bit RGB to the RGB_NAME format. */
static void glue(rgb24_to_, RGB_NAME)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *in;
    uint8_t *out;
    int src_pad, dst_pad, col, row;
    unsigned int r, g, b;

    in = src->data[0];
    src_pad = src->linesize[0] - width * 3;
    out = dst->data[0];
    dst_pad = dst->linesize[0] - width * BPP;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            /* source bytes are R, G, B in memory order */
            r = in[0];
            g = in[1];
            b = in[2];
            RGB_OUT(out, r, g, b);
            in += 3;
            out += BPP;
        }
        in += src_pad;
        out += dst_pad;
    }
}
/*
 * Convert the RGB_NAME/BPP pixel format to packed 24-bit RGB.
 *
 * Fix: the original fused the RGB_IN macro invocation and the first output
 * store onto one line ("RGB_IN(r, g, b, s)d[0] = r;"), hiding the second
 * statement; they are now separate, properly terminated statements.
 */
static void glue(RGB_NAME, _to_rgb24)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *s;
    uint8_t *d;
    int src_wrap, dst_wrap, j, y;
    unsigned int r, g, b;
    s = src->data[0];
    src_wrap = src->linesize[0] - width * BPP;
    d = dst->data[0];
    dst_wrap = dst->linesize[0] - width * 3;
    for (y = 0; y < height; y++)
    {
        for (j = 0; j < width; j++)
        {
            /* unpack one source pixel, then emit it as 3 bytes R, G, B */
            RGB_IN(r, g, b, s);
            d[0] = r;
            d[1] = g;
            d[2] = b;
            d += 3;
            s += BPP;
        }
        s += src_wrap;
        d += dst_wrap;
    }
}
#endif /* !FMT_RGB24 */
#ifdef FMT_RGB24
/*
 * Planar YUV 4:4:4 (video range, CCIR 601) -> packed 24-bit RGB.
 * One chroma sample per pixel, so this is a simple per-pixel loop.
 */
static void yuv444p_to_rgb24(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *y1_ptr, *cb_ptr, *cr_ptr;
    uint8_t *d, *d1;
    /* y, cb, cr and *_add are scratch variables used by the YUV_TO_RGB macros */
    int w, y, cb, cr, r_add, g_add, b_add;
    uint8_t *cm = cropTbl + MAX_NEG_CROP; /* clipping table used by the macros */
    unsigned int r, g, b;
    d = dst->data[0];
    y1_ptr = src->data[0];
    cb_ptr = src->data[1];
    cr_ptr = src->data[2];
    for (; height > 0; height--)
    {
        d1 = d;
        for (w = width; w > 0; w--)
        {
            YUV_TO_RGB1_CCIR(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2_CCIR(r, g, b, y1_ptr[0]);
            RGB_OUT(d1, r, g, b);
            d1 += BPP;
            y1_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        d += dst->linesize[0];
        /* step over each plane's row padding */
        y1_ptr += src->linesize[0] - width;
        cb_ptr += src->linesize[1] - width;
        cr_ptr += src->linesize[2] - width;
    }
}
/*
 * Planar YUV 4:4:4 full range ("J" / JPEG levels) -> packed 24-bit RGB.
 * Same loop as yuv444p_to_rgb24 but with the full-range macros.
 */
static void yuvj444p_to_rgb24(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const uint8_t *y1_ptr, *cb_ptr, *cr_ptr;
    uint8_t *d, *d1;
    /* y, cb, cr and *_add are scratch variables used by the YUV_TO_RGB macros */
    int w, y, cb, cr, r_add, g_add, b_add;
    uint8_t *cm = cropTbl + MAX_NEG_CROP; /* clipping table used by the macros */
    unsigned int r, g, b;
    d = dst->data[0];
    y1_ptr = src->data[0];
    cb_ptr = src->data[1];
    cr_ptr = src->data[2];
    for (; height > 0; height--)
    {
        d1 = d;
        for (w = width; w > 0; w--)
        {
            YUV_TO_RGB1(cb_ptr[0], cr_ptr[0]);
            YUV_TO_RGB2(r, g, b, y1_ptr[0]);
            RGB_OUT(d1, r, g, b);
            d1 += BPP;
            y1_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        d += dst->linesize[0];
        /* step over each plane's row padding */
        y1_ptr += src->linesize[0] - width;
        cb_ptr += src->linesize[1] - width;
        cr_ptr += src->linesize[2] - width;
    }
}
/* Packed RGB -> planar YUV 4:4:4 (video range): one chroma pair per pixel. */
static void rgb24_to_yuv444p(AVPicture *dst, const AVPicture *src, int width, int height)
{
    int src_pad, col, row;
    int r, g, b;
    uint8_t *lum_ptr, *cb_ptr, *cr_ptr;
    const uint8_t *in;

    lum_ptr = dst->data[0];
    cb_ptr = dst->data[1];
    cr_ptr = dst->data[2];
    src_pad = src->linesize[0] - width * BPP;
    in = src->data[0];
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            RGB_IN(r, g, b, in);
            lum_ptr[0] = RGB_TO_Y_CCIR(r, g, b);
            cb_ptr[0] = RGB_TO_U_CCIR(r, g, b, 0);
            cr_ptr[0] = RGB_TO_V_CCIR(r, g, b, 0);
            in += BPP;
            lum_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        in += src_pad;
        lum_ptr += dst->linesize[0] - width;
        cb_ptr += dst->linesize[1] - width;
        cr_ptr += dst->linesize[2] - width;
    }
}
/*
 * Packed RGB -> planar YUV 4:2:0 full range ("J" / JPEG levels).
 * Same 2x2-block traversal as the CCIR variant: four luma samples are
 * written per block and the block's R/G/B sums are averaged into one
 * (Cb,Cr) pair; the last argument of RGB_TO_U/V is the averaging shift.
 */
static void rgb24_to_yuvj420p(AVPicture *dst, const AVPicture *src, int width, int height)
{
    int wrap, wrap3, width2;
    int r, g, b, r1, g1, b1, w;
    uint8_t *lum, *cb, *cr;
    const uint8_t *p;
    lum = dst->data[0];
    cb = dst->data[1];
    cr = dst->data[2];
    width2 = (width + 1) >> 1; /* chroma plane width (rounded up) */
    wrap = dst->linesize[0];
    wrap3 = src->linesize[0];
    p = src->data[0];
    for (; height >= 2; height -= 2)
    {
        for (w = width; w >= 2; w -= 2)
        {
            /* accumulate the 2x2 block's R/G/B sums in r1/g1/b1 */
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y(r, g, b);
            /* drop to the second row of the block */
            p += wrap3;
            lum += wrap;
            RGB_IN(r, g, b, p);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[0] = RGB_TO_Y(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y(r, g, b);
            /* average of 4 pixels (shift 2) -> one chroma sample */
            cb[0] = RGB_TO_U(r1, g1, b1, 2);
            cr[0] = RGB_TO_V(r1, g1, b1, 2);
            cb++;
            cr++;
            /* back to the first row, two pixels further right */
            p += - wrap3 + 2 * BPP;
            lum += - wrap + 2;
        }
        if (w) /* odd trailing column: average 2 pixels (shift 1) */
        {
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y(r, g, b);
            p += wrap3;
            lum += wrap;
            RGB_IN(r, g, b, p);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[0] = RGB_TO_Y(r, g, b);
            cb[0] = RGB_TO_U(r1, g1, b1, 1);
            cr[0] = RGB_TO_V(r1, g1, b1, 1);
            cb++;
            cr++;
            p += - wrap3 + BPP;
            lum += - wrap + 1;
        }
        /* advance past the second row plus each stride's padding */
        p += wrap3 + (wrap3 - width * BPP);
        lum += wrap + (wrap - width);
        cb += dst->linesize[1] - width2;
        cr += dst->linesize[2] - width2;
    }
    if (height) /* handle odd height */
    {
        for (w = width; w >= 2; w -= 2)
        {
            RGB_IN(r, g, b, p);
            r1 = r;
            g1 = g;
            b1 = b;
            lum[0] = RGB_TO_Y(r, g, b);
            RGB_IN(r, g, b, p + BPP);
            r1 += r;
            g1 += g;
            b1 += b;
            lum[1] = RGB_TO_Y(r, g, b);
            cb[0] = RGB_TO_U(r1, g1, b1, 1);
            cr[0] = RGB_TO_V(r1, g1, b1, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) /* single remaining pixel */
        {
            RGB_IN(r, g, b, p);
            lum[0] = RGB_TO_Y(r, g, b);
            cb[0] = RGB_TO_U(r, g, b, 0);
            cr[0] = RGB_TO_V(r, g, b, 0);
        }
    }
}
/* Packed RGB -> planar YUV 4:4:4 full range: one chroma pair per pixel. */
static void rgb24_to_yuvj444p(AVPicture *dst, const AVPicture *src, int width, int height)
{
    int src_pad, col, row;
    int r, g, b;
    uint8_t *lum_ptr, *cb_ptr, *cr_ptr;
    const uint8_t *in;

    lum_ptr = dst->data[0];
    cb_ptr = dst->data[1];
    cr_ptr = dst->data[2];
    src_pad = src->linesize[0] - width * BPP;
    in = src->data[0];
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            RGB_IN(r, g, b, in);
            lum_ptr[0] = RGB_TO_Y(r, g, b);
            cb_ptr[0] = RGB_TO_U(r, g, b, 0);
            cr_ptr[0] = RGB_TO_V(r, g, b, 0);
            in += BPP;
            lum_ptr++;
            cb_ptr++;
            cr_ptr++;
        }
        in += src_pad;
        lum_ptr += dst->linesize[0] - width;
        cb_ptr += dst->linesize[1] - width;
        cr_ptr += dst->linesize[2] - width;
    }
}
#endif /* FMT_RGB24 */
#if defined(FMT_RGB24) || defined(FMT_RGBA32)
/*
 * RGB_NAME -> 8-bit palettized, using the fixed GIF color cube.  When the
 * source has alpha (RGBA_IN defined), pixels below 50% opacity map to
 * TRANSP_INDEX and the generated palette marks that entry transparent.
 */
static void glue(RGB_NAME, _to_pal8)(AVPicture *dst, const AVPicture *src, int width, int height)
{
    const unsigned char *p;
    unsigned char *q;
    int dst_wrap, src_wrap;
    int x, y, has_alpha;
    unsigned int r, g, b;
    p = src->data[0];
    src_wrap = src->linesize[0] - BPP * width;
    q = dst->data[0];
    dst_wrap = dst->linesize[0] - width;
    has_alpha = 0;
    for (y = 0; y < height; y++)
    {
        for (x = 0; x < width; x++)
        {
#ifdef RGBA_IN
            {
                unsigned int a;
                RGBA_IN(r, g, b, a, p);
                if (a < 0x80) /* crude approximation for alpha ! */
                {
                    has_alpha = 1;
                    q[0] = TRANSP_INDEX;
                }
                else
                {
                    q[0] = gif_clut_index(r, g, b);
                }
            }
#else
            RGB_IN(r, g, b, p);
            q[0] = gif_clut_index(r, g, b);
#endif
            q++;
            p += BPP;
        }
        p += src_wrap;
        q += dst_wrap;
    }
    /* emit the palette (with transparency flag) into dst->data[1] */
    build_rgb_palette(dst->data[1], has_alpha);
}
#endif /* defined(FMT_RGB24) || defined(FMT_RGBA32) */
#ifdef RGBA_IN
#define FF_ALPHA_TRANSP 0x0001 /* image has some totally transparent pixels */
#define FF_ALPHA_SEMI_TRANSP 0x0002 /* image has some transparent pixels */
/*
 * Scan an image with alpha and report what kinds of transparency it holds:
 * FF_ALPHA_TRANSP for fully transparent pixels, FF_ALPHA_SEMI_TRANSP for
 * partially transparent ones.  Returns an OR of those flags (0 if opaque).
 */
static int glue(get_alpha_info_, RGB_NAME)(const AVPicture *src, int width, int height)
{
    const unsigned char *sp;
    int src_pad, flags, col, row;
    unsigned int r, g, b, a;

    sp = src->data[0];
    src_pad = src->linesize[0] - BPP * width;
    flags = 0;
    for (row = 0; row < height; row++)
    {
        for (col = 0; col < width; col++)
        {
            RGBA_IN(r, g, b, a, sp);
            if (a == 0x00)
            {
                flags |= FF_ALPHA_TRANSP;       /* fully transparent */
            }
            else if (a != 0xff)
            {
                flags |= FF_ALPHA_SEMI_TRANSP;  /* partially transparent */
            }
            sp += BPP;
        }
        sp += src_pad;
    }
    return flags;
}
#endif /* RGBA_IN */
#undef RGB_IN
#undef RGBA_IN
#undef RGB_OUT
#undef RGBA_OUT
#undef BPP
#undef RGB_NAME
#undef FMT_RGB24
#undef FMT_RGBA32

@ -0,0 +1,305 @@
/*
** MSRLE (Microsoft RLE) video decoder, simplified from ffmpeg.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "../libavutil/common.h"
#include "avcodec.h"
#include "dsputil.h"
#define FF_BUFFER_HINTS_VALID 0x01 // Buffer hints value is meaningful (if 0 ignore)
#define FF_BUFFER_HINTS_READABLE 0x02 // Codec will read from buffer
#define FF_BUFFER_HINTS_PRESERVE 0x04 // User must not alter buffer content
#define FF_BUFFER_HINTS_REUSABLE 0x08 // Codec will reuse the buffer (update)
/* Private context of the MS RLE video decoder. */
typedef struct MsrleContext
{
// owning codec context (AVCodecContext is the generic handle, MsrleContext the codec-specific state)
AVCodecContext *avctx;
// last decoded frame; reused across calls via reget_buffer()
AVFrame frame;
// current input packet payload and its size (set per decode call)
unsigned char *buf;
int size;
} MsrleContext;
/* Fetch the next input byte into `stream_byte`, or return from the
 * expanding decode function when the packet is exhausted. Relies on the
 * locals `stream_ptr`, `stream_byte` and context pointer `s` of the
 * function it expands in.
 * Wrapped in do { } while (0) so the macro behaves as a single statement
 * and is safe inside un-braced if/else bodies (the original expansion
 * was an if followed by a bare statement). */
#define FETCH_NEXT_STREAM_BYTE() \
    do \
    { \
        if (stream_ptr >= s->size) \
        { \
            return; \
        } \
        stream_byte = s->buf[stream_ptr++]; \
    } while (0)
/* Decode 4-bit (16-color) Microsoft RLE data into s->frame.
 * The bitmap is stored bottom-up, so row_ptr starts at the last row and
 * walks backwards by row_dec. rle_code == 0 introduces an escape code:
 * 0 = end of line, 1 = end of bitmap, 2 = position delta, >2 = run of
 * literal packed pixel pairs. Any other rle_code repeats one byte's two
 * nibbles. FETCH_NEXT_STREAM_BYTE() returns on input underrun. */
static void msrle_decode_pal4(MsrleContext *s)
{
int stream_ptr = 0;
unsigned char rle_code;
unsigned char extra_byte, odd_pixel;
unsigned char stream_byte;
int pixel_ptr = 0;
int row_dec = s->frame.linesize[0];
int row_ptr = (s->avctx->height - 1) *row_dec; /* bottom-up bitmap */
int frame_size = row_dec * s->avctx->height;
int i;
// make the palette available
memcpy(s->frame.data[1], s->avctx->palctrl->palette, AVPALETTE_SIZE);
if (s->avctx->palctrl->palette_changed)
{
// s->frame.palette_has_changed = 1;
s->avctx->palctrl->palette_changed = 0;
}
while (row_ptr >= 0)
{
FETCH_NEXT_STREAM_BYTE();
rle_code = stream_byte;
if (rle_code == 0)
{
// fetch the next byte to see how to handle escape code
FETCH_NEXT_STREAM_BYTE();
if (stream_byte == 0)
{
// line is done, goto the next one
row_ptr -= row_dec;
pixel_ptr = 0;
}
else if (stream_byte == 1)
{
// decode is done
return ;
}
else if (stream_byte == 2)
{
// reposition frame decode coordinates
FETCH_NEXT_STREAM_BYTE();
pixel_ptr += stream_byte;
FETCH_NEXT_STREAM_BYTE();
row_ptr -= stream_byte * row_dec;
}
else
{
// copy pixels from encoded stream
/* stream_byte literal pixels; two 4-bit pixels per input byte */
odd_pixel = stream_byte &1;
rle_code = (stream_byte + 1) / 2;
extra_byte = rle_code &0x01;
/* bounds check before writing into the frame */
if ((row_ptr + pixel_ptr + stream_byte > frame_size) || (row_ptr < 0))
{
return ;
}
for (i = 0; i < rle_code; i++)
{
if (pixel_ptr >= s->avctx->width)
break;
FETCH_NEXT_STREAM_BYTE();
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte >> 4;
pixel_ptr++;
/* odd run length: the last byte carries only one pixel */
if (i + 1 == rle_code && odd_pixel)
break;
if (pixel_ptr >= s->avctx->width)
break;
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte &0x0F;
pixel_ptr++;
}
// if the RLE code is odd, skip a byte in the stream
if (extra_byte)
stream_ptr++;
}
}
else
{
// decode a run of data
if ((row_ptr + pixel_ptr + stream_byte > frame_size) || (row_ptr < 0))
{
return ;
}
FETCH_NEXT_STREAM_BYTE();
/* replicate the byte's two nibbles alternately across the run */
for (i = 0; i < rle_code; i++)
{
if (pixel_ptr >= s->avctx->width)
break;
if ((i &1) == 0)
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte >> 4;
else
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte &0x0F;
pixel_ptr++;
}
}
}
// one last sanity check on the way out
if (stream_ptr < s->size)
{
// error
}
}
/* Decode 8-bit (256-color) Microsoft RLE data into s->frame.
 * Same structure as the 4-bit variant, but each input byte is one whole
 * pixel. Escape codes when rle_code == 0: 0 = end of line, 1 = end of
 * bitmap, 2 = position delta, >2 = run of literal pixels; any other
 * rle_code repeats a single byte. */
static void msrle_decode_pal8(MsrleContext *s)
{
int stream_ptr = 0;
unsigned char rle_code;
unsigned char extra_byte;
unsigned char stream_byte;
int pixel_ptr = 0;
int row_dec = s->frame.linesize[0];
int row_ptr = (s->avctx->height - 1) *row_dec; /* bottom-up bitmap */
int frame_size = row_dec * s->avctx->height;
// make the palette available
memcpy(s->frame.data[1], s->avctx->palctrl->palette, AVPALETTE_SIZE);
if (s->avctx->palctrl->palette_changed)
{
// s->frame.palette_has_changed = 1;
s->avctx->palctrl->palette_changed = 0;
}
while (row_ptr >= 0)
{
FETCH_NEXT_STREAM_BYTE();
rle_code = stream_byte;
if (rle_code == 0)
{
// fetch the next byte to see how to handle escape code
FETCH_NEXT_STREAM_BYTE();
if (stream_byte == 0)
{
// line is done, goto the next one
row_ptr -= row_dec;
pixel_ptr = 0;
}
else if (stream_byte == 1)
{
// decode is done
return ;
}
else if (stream_byte == 2)
{
// reposition frame decode coordinates
FETCH_NEXT_STREAM_BYTE();
pixel_ptr += stream_byte;
FETCH_NEXT_STREAM_BYTE();
row_ptr -= stream_byte * row_dec;
}
else
{
// copy pixels from encoded stream
/* bounds check on output before copying */
if ((row_ptr + pixel_ptr + stream_byte > frame_size) || (row_ptr < 0))
{
return ;
}
rle_code = stream_byte;
extra_byte = stream_byte &0x01; /* literal runs are word-aligned */
/* bounds check on input */
if (stream_ptr + rle_code + extra_byte > s->size)
{
return ;
}
while (rle_code--)
{
FETCH_NEXT_STREAM_BYTE();
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte;
pixel_ptr++;
}
// if the RLE code is odd, skip a byte in the stream
if (extra_byte)
stream_ptr++;
}
}
else
{
// decode a run of data
if ((row_ptr + pixel_ptr + stream_byte > frame_size) || (row_ptr < 0))
{
return ;
}
FETCH_NEXT_STREAM_BYTE();
/* repeat one byte rle_code times */
while (rle_code--)
{
s->frame.data[0][row_ptr + pixel_ptr] = stream_byte;
pixel_ptr++;
}
}
}
// one last sanity check on the way out
if (stream_ptr < s->size)
{
// error
}
}
/* Decoder init: remember the owning context and advertise palettized
 * (PAL8) output. Always succeeds. */
static int msrle_decode_init(AVCodecContext *avctx)
{
    MsrleContext *const ctx = (MsrleContext*)avctx->priv_data;

    ctx->avctx = avctx;
    avctx->pix_fmt = PIX_FMT_PAL8;
    ctx->frame.data[0] = NULL; /* no frame decoded yet */
    return 0;
}
/* Decode one MS RLE packet into s->frame and copy the frame out.
 * data/data_size: receives an AVFrame copy and its size.
 * buf/buf_size: input packet.
 * Returns the number of bytes consumed (the whole packet), or -1 when a
 * frame buffer could not be (re)acquired. */
static int msrle_decode_frame(AVCodecContext *avctx, void *data, int *data_size, uint8_t *buf, int buf_size)
{
MsrleContext *s = (MsrleContext*)avctx->priv_data;
s->buf = buf;
s->size = buf_size;
/* RLE deltas update the previous frame in place, so reuse its buffer */
if (avctx->reget_buffer(avctx, &s->frame))
return - 1;
/* the sample bit depth selects the 4-bit or 8-bit RLE variant */
switch (avctx->bits_per_sample)
{
case 8:
msrle_decode_pal8(s);
break;
case 4:
msrle_decode_pal4(s);
break;
default:
break;
}
*data_size = sizeof(AVFrame);
*(AVFrame*)data = s->frame;
// report that the buffer was completely consumed
return buf_size;
}
/* Decoder teardown: hand the last reference frame back to the buffer
 * manager. Always succeeds. */
static int msrle_decode_end(AVCodecContext *avctx)
{
    MsrleContext *const ctx = (MsrleContext*)avctx->priv_data;

    if (ctx->frame.data[0])
        avctx->release_buffer(avctx, &ctx->frame);

    return 0;
}
/* Registration entry for the MS RLE video decoder. Positional fields:
 * name, type, id, priv_data_size, init, encode, close, decode. */
AVCodec msrle_decoder =
{
"msrle",
CODEC_TYPE_VIDEO,
CODEC_ID_MSRLE,
sizeof(MsrleContext),
msrle_decode_init,
NULL, /* no encoder */
msrle_decode_end,
msrle_decode_frame
};

@ -0,0 +1,380 @@
/************************************************************************/
/* This file implements the TrueSpeech audio decoder.                   */
/************************************************************************/
#include "avcodec.h"
#include "truespeech_data.h"
// TrueSpeech decoder context: per-frame bitstream fields plus filter
// state carried across frames.
typedef struct TSContext
{
// input data (unpacked from each 32-byte frame)
int16_t vector[8]; // input vector: 5/5/4/4/4/3/3/3
int offset1[2]; // 8-bit value, used in one copying offset
int offset2[4]; // 7-bit value, encodes offsets for copying and for two-point filter
int pulseoff[4]; // 4-bit offset of pulse values block
int pulsepos[4]; // 27-bit variable, encodes 7 pulse positions
int pulseval[4]; // 7x2-bit pulse values
int flag; // 1-bit flag, shows how to choose filters
// temporary data (persists between frames)
int filtbuf[146]; // some big vector used for storing filters
int prevfilt[8]; // filter from previous frame
int16_t tmp1[8]; // coefficients for adding to out
int16_t tmp2[8]; // coefficients for adding to out
int16_t tmp3[8]; // coefficients for adding to out
int16_t cvector[8]; // correlated input vector
int filtval; // gain value for one function
int16_t newvec[60]; // tmp vector
int16_t filters[32]; // filters for every subframe (4 sets of 8 taps)
} TSContext;
/* Read a 32-bit little-endian value from a possibly unaligned byte
 * pointer (byte-by-byte, so safe on any architecture). */
#if !defined(LE_32)
#define LE_32(x) ((((uint8_t*)(x))[3] << 24)| (((uint8_t*)(x))[2] << 16) | \
(((uint8_t*)(x))[1] << 8) | ((uint8_t*)(x))[0])
#endif
/* Decoder init: TrueSpeech needs no setup here; the private context is
 * used as-is. Always succeeds. */
static int truespeech_decode_init(AVCodecContext *avctx)
{
    return 0;
}
/* Unpack one 32-byte TrueSpeech frame (read as eight little-endian
 * dwords) into the decoder context: the 8-element quantized vector
 * (looked up through ts_codebook), the copy offsets, and per-subframe
 * pulse positions/values/offsets. Several fields are split across
 * dwords and reassembled with shifts and ORs. */
static void truespeech_read_frame(TSContext *dec, uint8_t *input)
{
uint32_t t;
t = LE_32(input); // first dword
input += 4;
dec->flag = t &1;
/* vector elements use 5/5/4/4/4/3/3/3 bits, each indexing its codebook */
dec->vector[0] = ts_codebook[0][(t >> 1) &0x1F];
dec->vector[1] = ts_codebook[1][(t >> 6) &0x1F];
dec->vector[2] = ts_codebook[2][(t >> 11) &0xF];
dec->vector[3] = ts_codebook[3][(t >> 15) &0xF];
dec->vector[4] = ts_codebook[4][(t >> 19) &0xF];
dec->vector[5] = ts_codebook[5][(t >> 23) &0x7];
dec->vector[6] = ts_codebook[6][(t >> 26) &0x7];
dec->vector[7] = ts_codebook[7][(t >> 29) &0x7];
t = LE_32(input); // second dword
input += 4;
dec->offset2[0] = (t >> 0) &0x7F;
dec->offset2[1] = (t >> 7) &0x7F;
dec->offset2[2] = (t >> 14) &0x7F;
dec->offset2[3] = (t >> 21) &0x7F;
/* offset1[0] high nibble here; its low bits arrive in dwords 5-8 */
dec->offset1[0] = ((t >> 28) &0xF) << 4;
t = LE_32(input); // third dword
input += 4;
dec->pulseval[0] = (t >> 0) &0x3FFF;
dec->pulseval[1] = (t >> 14) &0x3FFF;
dec->offset1[1] = (t >> 28) &0x0F;
t = LE_32(input); // fourth dword
input += 4;
dec->pulseval[2] = (t >> 0) &0x3FFF;
dec->pulseval[3] = (t >> 14) &0x3FFF;
dec->offset1[1] |= ((t >> 28) &0x0F) << 4;
t = LE_32(input); // fifth dword
input += 4;
dec->pulsepos[0] = (t >> 4) &0x7FFFFFF;
dec->pulseoff[0] = (t >> 0) &0xF;
dec->offset1[0] |= (t >> 31) &1;
t = LE_32(input); // sixth dword
input += 4;
dec->pulsepos[1] = (t >> 4) &0x7FFFFFF;
dec->pulseoff[1] = (t >> 0) &0xF;
dec->offset1[0] |= ((t >> 31) &1) << 1;
t = LE_32(input); // seventh dword
input += 4;
dec->pulsepos[2] = (t >> 4) &0x7FFFFFF;
dec->pulseoff[2] = (t >> 0) &0xF;
dec->offset1[0] |= ((t >> 31) &1) << 2;
t = LE_32(input); // eighth dword
input += 4;
dec->pulsepos[3] = (t >> 4) &0x7FFFFFF;
dec->pulseoff[3] = (t >> 0) &0xF;
dec->offset1[0] |= ((t >> 31) &1) << 3;
}
/* Expand the quantized vector into the correlated coefficient vector
 * cvector[] via a recursive Q15 fixed-point update (each step mixes the
 * reversed prefix back in, scaled by vector[i]), then scales the result
 * by the ts_230 window. vector[0] is saved as filtval for use as a gain
 * term in synthesis. */
static void truespeech_correlate_filter(TSContext *dec)
{
int16_t tmp[8];
int i, j;
for (i = 0; i < 8; i++)
{
if (i > 0)
{
/* snapshot the prefix before overwriting it */
memcpy(tmp, dec->cvector, i *2);
for (j = 0; j < i; j++)
dec->cvector[j] = ((tmp[i - j - 1] *dec->vector[i]) + (dec->cvector[j] << 15) + 0x4000) >> 15;
}
dec->cvector[i] = (8-dec->vector[i]) >> 3;
}
for (i = 0; i < 8; i++)
dec->cvector[i] = (dec->cvector[i] *ts_230[i]) >> 15;
dec->filtval = dec->vector[0];
}
/* Build the four per-subframe filter sets (8 taps each) for this frame.
 * Subframes 0 and 1 either reuse the previous frame's filter verbatim
 * (flag clear) or blend previous and current filters with 1/3-2/3
 * weights in Q15 (flag set); subframes 2 and 3 always use the current
 * correlated filter. */
static void truespeech_filters_merge(TSContext *dec)
{
    int k;

    for (k = 0; k < 8; k++)
    {
        if (!dec->flag)
        {
            /* keep the old filter for the first two subframes */
            dec->filters[k + 0] = dec->prevfilt[k];
            dec->filters[k + 8] = dec->prevfilt[k];
        }
        else
        {
            /* 2/3-1/3 and 1/3-2/3 weighted blends with Q15 rounding */
            dec->filters[k + 0] = (dec->cvector[k] *21846+dec->prevfilt[k] *10923+16384) >> 15;
            dec->filters[k + 8] = (dec->cvector[k] *10923+dec->prevfilt[k] *21846+16384) >> 15;
        }
        dec->filters[k + 16] = dec->cvector[k];
        dec->filters[k + 24] = dec->cvector[k];
    }
}
/* Long-term ("two-point") prediction for one 60-sample subframe.
 * offset2[quart] == 127 disables prediction (newvec is zeroed).
 * Otherwise offset2 combined with offset1 selects a lag into the
 * 146-sample history and (t % 25) selects one of 25 two-tap filters
 * from ts_240; the filtered output is written to both newvec and the
 * extension of the local history copy so the lag may reach into newly
 * produced samples. */
static void truespeech_apply_twopoint_filter(TSContext *dec, int quart)
{
int16_t tmp[146+60], *ptr0, *ptr1, *filter;
int i, t, off;
t = dec->offset2[quart];
if (t == 127)
{
/* no long-term contribution for this subframe */
memset(dec->newvec, 0, 60 *2);
return ;
}
/* local copy of the history so it can be extended past index 145 */
for (i = 0; i < 146; i++)
tmp[i] = dec->filtbuf[i];
off = (t / 25) + dec->offset1[quart >> 1] + 18;
ptr0 = tmp + 145-off;
ptr1 = tmp + 146;
filter = (int16_t*)ts_240 + (t % 25) *2;
for (i = 0; i < 60; i++)
{
/* 2-tap FIR with Q14 rounding */
t = (ptr0[0] *filter[0] + ptr0[1] *filter[1] + 0x2000) >> 14;
ptr0++;
dec->newvec[i] = t;
ptr1[i] = t;
}
}
/* Place seven excitation pulses into the 60-sample subframe `out`.
 * Pulse amplitudes come from ts_562, selected by pulseoff (block) and
 * two bits of pulseval per pulse. Pulse positions are decoded from the
 * combinatorially packed pulsepos value using the ts_140 enumeration
 * tables: three pulses land in samples 0..29 (high 12 bits) and four in
 * samples 30..59 (low 15 bits). */
static void truespeech_place_pulses(TSContext *dec, int16_t *out, int quart)
{
int16_t tmp[7];
int i, j, t;
int16_t *ptr1, *ptr2;
int coef;
memset(out, 0, 60 *2);
/* decode the 7 pulse amplitudes, last pulse first */
for (i = 0; i < 7; i++)
{
t = dec->pulseval[quart] &3;
dec->pulseval[quart] >>= 2;
tmp[6-i] = ts_562[dec->pulseoff[quart] *4+t];
}
/* first half: 3 pulses among samples 0..29 */
coef = dec->pulsepos[quart] >> 15;
ptr1 = (int16_t*)ts_140 + 30;
ptr2 = tmp;
for (i = 0, j = 3; (i < 30) && (j > 0); i++)
{
t = *ptr1++;
if (coef >= t)
coef -= t;
else
{
out[i] = *ptr2++;
ptr1 += 30; /* move to the next enumeration row */
j--;
}
}
/* second half: 4 pulses among samples 30..59 */
coef = dec->pulsepos[quart] &0x7FFF;
ptr1 = (int16_t*)ts_140;
for (i = 30, j = 4; (i < 60) && (j > 0); i++)
{
t = *ptr1++;
if (coef >= t)
coef -= t;
else
{
out[i] = *ptr2++;
ptr1 += 30;
j--;
}
}
}
/* Slide the 146-sample filter history by one subframe and mix the
 * two-point-filter vector into both the history and the output:
 * the history stores out + 7/8 * newvec, the caller-visible output
 * becomes out + newvec. `quart` is unused here. */
static void truespeech_update_filters(TSContext *dec, int16_t *out, int quart)
{
    int n;

    /* drop the oldest 60 entries of the history */
    for (n = 0; n < 86; n++)
        dec->filtbuf[n] = dec->filtbuf[n + 60];

    /* append the new subframe */
    for (n = 0; n < 60; n++)
    {
        int mixed = out[n] + dec->newvec[n];
        dec->filtbuf[n + 86] = mixed - (dec->newvec[n] >> 3);
        out[n] = mixed;
    }
}
/* Synthesis for one 60-sample subframe: an 8-tap recursive filter using
 * this subframe's filter taps (state in tmp1), then two further 8-tap
 * stages whose taps are the filter scaled by ts_5E2 and ts_5F2 (state in
 * tmp2/tmp3), finishing with a gain/tilt correction driven by filtval.
 * All arithmetic is Q12/Q15 fixed point, saturated to +/-0x7FFE. */
static void truespeech_synth(TSContext *dec, int16_t *out, int quart)
{
int i, k;
int t[8];
int16_t *ptr0, *ptr1;
/* stage 1: all-pole filter through tmp1 delay line */
ptr0 = dec->tmp1;
ptr1 = dec->filters + quart * 8;
for (i = 0; i < 60; i++)
{
int sum = 0;
for (k = 0; k < 8; k++)
sum += ptr0[k] *ptr1[k];
sum = (sum + (out[i] << 12) + 0x800) >> 12;
out[i] = clip(sum, - 0x7FFE, 0x7FFE);
/* shift the delay line */
for (k = 7; k > 0; k--)
ptr0[k] = ptr0[k - 1];
ptr0[0] = out[i];
}
/* stage 2: taps scaled by ts_5E2, subtracted from the signal */
for (i = 0; i < 8; i++)
t[i] = (ts_5E2[i] *ptr1[i]) >> 15;
ptr0 = dec->tmp2;
for (i = 0; i < 60; i++)
{
int sum = 0;
for (k = 0; k < 8; k++)
sum += ptr0[k] *t[k];
for (k = 7; k > 0; k--)
ptr0[k] = ptr0[k - 1];
ptr0[0] = out[i];
out[i] = ((out[i] << 12) - sum) >> 12;
}
/* stage 3: taps scaled by ts_5F2, plus filtval-based gain correction */
for (i = 0; i < 8; i++)
t[i] = (ts_5F2[i] *ptr1[i]) >> 15;
ptr0 = dec->tmp3;
for (i = 0; i < 60; i++)
{
int sum = out[i] << 12;
for (k = 0; k < 8; k++)
sum += ptr0[k] *t[k];
for (k = 7; k > 0; k--)
ptr0[k] = ptr0[k - 1];
ptr0[0] = clip((sum + 0x800) >> 12, - 0x7FFE, 0x7FFE);
sum = ((ptr0[1]*(dec->filtval - (dec->filtval >> 2))) >> 4) + sum;
sum = sum - (sum >> 3);
out[i] = clip((sum + 0x800) >> 12, - 0x7FFE, 0x7FFE);
}
}
/* Remember this frame's correlated filter so the next frame can
 * interpolate against it in truespeech_filters_merge(). */
static void truespeech_save_prevvec(TSContext *c)
{
    int k = 8;

    while (k-- > 0)
        c->prevfilt[k] = c->cvector[k];
}
/* Decode all complete 32-byte TrueSpeech frames in the packet.
 * Each input frame expands to 240 signed 16-bit mono samples (four
 * 60-sample subframes) written to `data`; *data_size receives the
 * number of output bytes (consumed * 15, since 32 bytes -> 480 bytes).
 * Returns buf_size (the whole packet is reported consumed).
 *
 * Fix: the loop now only runs while a full 32-byte frame remains;
 * the original condition (consumed < buf_size) made
 * truespeech_read_frame() read past the end of `buf` whenever buf_size
 * was not a multiple of 32. */
static int truespeech_decode_frame(AVCodecContext *avctx, void *data, int *data_size, uint8_t *buf, int buf_size)
{
    TSContext *c = avctx->priv_data;
    int i;
    short *samples = data;
    int consumed = 0;
    int16_t out_buf[240];

    if (!buf_size)
        return 0;

    while (consumed + 32 <= buf_size)
    {
        truespeech_read_frame(c, buf + consumed); /* unpack bitstream fields */
        consumed += 32;
        truespeech_correlate_filter(c);
        truespeech_filters_merge(c);
        memset(out_buf, 0, 240 *2);
        for (i = 0; i < 4; i++)
        {
            truespeech_apply_twopoint_filter(c, i);
            truespeech_place_pulses(c, out_buf + i * 60, i);
            truespeech_update_filters(c, out_buf + i * 60, i);
            truespeech_synth(c, out_buf + i * 60, i);
        }
        truespeech_save_prevvec(c);
        for (i = 0; i < 240; i++) // finally output decoded frame
            *samples++ = out_buf[i];
    }
    *data_size = consumed * 15; /* 480 output bytes per 32 input bytes */
    return buf_size;
}
/* Registration entry for the TrueSpeech audio decoder. Positional
 * fields: name, type, id, priv_data_size, init, encode, close, decode. */
AVCodec truespeech_decoder =
{
"truespeech",
CODEC_TYPE_AUDIO,
CODEC_ID_TRUESPEECH,
sizeof(TSContext),
truespeech_decode_init,
NULL, /* no encoder */
NULL, /* no close hook: the context holds no allocations */
truespeech_decode_frame,
};

@ -0,0 +1,139 @@
#ifndef __TRUESPEECH_DATA__
#define __TRUESPEECH_DATA__
/************************************************************************/
/* Constant tables used by the TrueSpeech audio decoder.                */
/************************************************************************/
#pragma warning(disable:4305 )
/* codebooks for expanding the input filter vector */
/* vector[0], 5-bit index */
static const int16_t ts_cb_0[32] =
{
0x8240, 0x8364, 0x84CE, 0x865D, 0x8805, 0x89DE, 0x8BD7, 0x8DF4,
0x9051, 0x92E2, 0x95DE, 0x990F, 0x9C81, 0xA079, 0xA54C, 0xAAD2,
0xB18A, 0xB90A, 0xC124, 0xC9CC, 0xD339, 0xDDD3, 0xE9D6, 0xF893,
0x096F, 0x1ACA, 0x29EC, 0x381F, 0x45F9, 0x546A, 0x63C3, 0x73B5,
};
/* vector[1], 5-bit index */
static const int16_t ts_cb_1[32] =
{
0x9F65, 0xB56B, 0xC583, 0xD371, 0xE018, 0xEBB4, 0xF61C, 0xFF59,
0x085B, 0x1106, 0x1952, 0x214A, 0x28C9, 0x2FF8, 0x36E6, 0x3D92,
0x43DF, 0x49BB, 0x4F46, 0x5467, 0x5930, 0x5DA3, 0x61EC, 0x65F9,
0x69D4, 0x6D5A, 0x709E, 0x73AD, 0x766B, 0x78F0, 0x7B5A, 0x7DA5,
};
/* vector[2], 4-bit index */
static const int16_t ts_cb_2[16] =
{
0x96F8, 0xA3B4, 0xAF45, 0xBA53, 0xC4B1, 0xCECC, 0xD86F, 0xE21E,
0xEBF3, 0xF640, 0x00F7, 0x0C20, 0x1881, 0x269A, 0x376B, 0x4D60,
};
/* vector[3], 4-bit index */
static const int16_t ts_cb_3[16] =
{
0xC654, 0xDEF2, 0xEFAA, 0xFD94, 0x096A, 0x143F, 0x1E7B, 0x282C,
0x3176, 0x3A89, 0x439F, 0x4CA2, 0x557F, 0x5E50, 0x6718, 0x6F8D,
};
/* vector[4], 4-bit index */
static const int16_t ts_cb_4[16] =
{
0xABE7, 0xBBA8, 0xC81C, 0xD326, 0xDD0E, 0xE5D4, 0xEE22, 0xF618,
0xFE28, 0x064F, 0x0EB7, 0x17B8, 0x21AA, 0x2D8B, 0x3BA2, 0x4DF9,
};
/* vector[5..7], 3-bit indices */
static const int16_t ts_cb_5[8] = { 0xD51B, 0xF12E, 0x042E, 0x13C7, 0x2260, 0x311B, 0x40DE, 0x5385,};
static const int16_t ts_cb_6[8] = { 0xB550, 0xC825, 0xD980, 0xE997, 0xF883, 0x0752, 0x1811, 0x2E18,};
static const int16_t ts_cb_7[8] = { 0xCEF0, 0xE4F9, 0xF6BB, 0x0646, 0x14F5, 0x23FF, 0x356F, 0x4A8D,};
/* one codebook per element of the 8-element input vector */
static const int16_t *ts_codebook[8] = {ts_cb_0, ts_cb_1, ts_cb_2, ts_cb_3, ts_cb_4, ts_cb_5, ts_cb_6, ts_cb_7};
/* enumeration table used for decoding pulse positions
 * (four rows of 30 entries, consumed row-wise by truespeech_place_pulses) */
static const int16_t ts_140[120] =
{
0x0E46, 0x0CCC, 0x0B6D, 0x0A28, 0x08FC, 0x07E8, 0x06EB, 0x0604,
0x0532, 0x0474, 0x03C9, 0x0330, 0x02A8, 0x0230, 0x01C7, 0x016C,
0x011E, 0x00DC, 0x00A5, 0x0078, 0x0054, 0x0038, 0x0023, 0x0014,
0x000A, 0x0004, 0x0001, 0x0000, 0x0000, 0x0000,
0x0196, 0x017A, 0x015F, 0x0145, 0x012C, 0x0114, 0x00FD, 0x00E7,
0x00D2, 0x00BE, 0x00AB, 0x0099, 0x0088, 0x0078, 0x0069, 0x005B,
0x004E, 0x0042, 0x0037, 0x002D, 0x0024, 0x001C, 0x0015, 0x000F,
0x000A, 0x0006, 0x0003, 0x0001, 0x0000, 0x0000,
0x001D, 0x001C, 0x001B, 0x001A, 0x0019, 0x0018, 0x0017, 0x0016,
0x0015, 0x0014, 0x0013, 0x0012, 0x0011, 0x0010, 0x000F, 0x000E,
0x000D, 0x000C, 0x000B, 0x000A, 0x0009, 0x0008, 0x0007, 0x0006,
0x0005, 0x0004, 0x0003, 0x0002, 0x0001, 0x0000,
0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001,
0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001,
0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001,
0x0001, 0x0001, 0x0001, 0x0001, 0x0001, 0x0001
};
/* Q15 window applied to the correlated input filter */
static const int16_t ts_230[8] = { 0x7F3B, 0x7E78, 0x7DB6, 0x7CF5, 0x7C35, 0x7B76, 0x7AB8, 0x79FC };
/* two-point filters table (25 pairs of taps, selected by offset2 % 25) */
static const int16_t ts_240[25 * 2] =
{
0xED2F, 0x5239,
0x54F1, 0xE4A9,
0x2620, 0xEE3E,
0x09D6, 0x2C40,
0xEFB5, 0x2BE0,
0x3FE1, 0x3339,
0x442F, 0xE6FE,
0x4458, 0xF9DF,
0xF231, 0x43DB,
0x3DB0, 0xF705,
0x4F7B, 0xFEFB,
0x26AD, 0x0CDC,
0x33C2, 0x0739,
0x12BE, 0x43A2,
0x1BDF, 0x1F3E,
0x0211, 0x0796,
0x2AEB, 0x163F,
0x050D, 0x3A38,
0x0D1E, 0x0D78,
0x150F, 0x3346,
0x38A4, 0x0B7D,
0x2D5D, 0x1FDF,
0x19B7, 0x2822,
0x0D99, 0x1F12,
0x194C, 0x0CE6
};
/* possible pulse values (16 blocks of 4, selected by pulseoff and the
 * per-pulse 2-bit value) */
static const int16_t ts_562[64] =
{
0x0002, 0x0006, 0xFFFE, 0xFFFA,
0x0004, 0x000C, 0xFFFC, 0xFFF4,
0x0006, 0x0012, 0xFFFA, 0xFFEE,
0x000A, 0x001E, 0xFFF6, 0xFFE2,
0x0010, 0x0030, 0xFFF0, 0xFFD0,
0x0019, 0x004B, 0xFFE7, 0xFFB5,
0x0028, 0x0078, 0xFFD8, 0xFF88,
0x0040, 0x00C0, 0xFFC0, 0xFF40,
0x0065, 0x012F, 0xFF9B, 0xFED1,
0x00A1, 0x01E3, 0xFF5F, 0xFE1D,
0x0100, 0x0300, 0xFF00, 0xFD00,
0x0196, 0x04C2, 0xFE6A, 0xFB3E,
0x0285, 0x078F, 0xFD7B, 0xF871,
0x0400, 0x0C00, 0xFC00, 0xF400,
0x0659, 0x130B, 0xF9A7, 0xECF5,
0x0A14, 0x1E3C, 0xF5EC, 0xE1C4
};
/* tap-scaling filters used in the final output stages of truespeech_synth */
static const int16_t ts_5E2[8] = { 0x4666, 0x26B8, 0x154C, 0x0BB6, 0x0671, 0x038B, 0x01F3, 0x0112 };
static const int16_t ts_5F2[8] = { 0x6000, 0x4800, 0x3600, 0x2880, 0x1E60, 0x16C8, 0x1116, 0x0CD1 };
#endif

@ -0,0 +1,412 @@
/************************************************************************/
/* Helper and utility functions used by the codec library.              */
/************************************************************************/
#include <assert.h>
#include "avcodec.h"
#include "dsputil.h"
#define EDGE_WIDTH 16
#define STRIDE_ALIGN 16
#define INT_MAX 2147483647
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
/* malloc() wrapper: rejects sizes above INT_MAX, otherwise defers to the
 * C library. Returns NULL on failure or oversized request. */
void *av_malloc(unsigned int size)
{
    if (size > INT_MAX)
        return NULL;
    return malloc(size);
}
/* realloc() wrapper with the same INT_MAX size cap as av_malloc().
 * Returns NULL on failure or oversized request (ptr is then untouched). */
void *av_realloc(void *ptr, unsigned int size)
{
    return (size > INT_MAX) ? NULL : realloc(ptr, size);
}
/* free() wrapper. free(NULL) is defined as a no-op by the C standard,
 * so the original `if (ptr)` guard was redundant and has been dropped. */
void av_free(void *ptr)
{
    free(ptr);
}
/* Allocate `size` bytes of zero-initialized memory. Returns NULL on
 * failure or when size exceeds INT_MAX (same cap as av_malloc()).
 * Uses calloc() so allocation and zeroing happen in one call instead of
 * the original av_malloc() + memset() pair. */
void *av_mallocz(unsigned int size)
{
    if (size > INT_MAX)
        return NULL;
    return calloc(1, size);
}
void *av_fast_realloc(void *ptr, unsigned int *size, unsigned int min_size)
{
if (min_size < *size)
return ptr;
*size = FFMAX(17 *min_size / 16+32, min_size);
return av_realloc(ptr, *size);
}
/* Free the pointer stored at *arg and null it out, protecting callers
 * against use-after-free / double-free through that pointer. */
void av_freep(void *arg)
{
    void **slot = (void **)arg;

    free(*slot); /* av_free() inlined; free(NULL) is a no-op */
    *slot = NULL;
}
/* Head of the global codec list (all registered codecs are chained into
 * a singly linked list). */
AVCodec *first_avcodec = NULL;
/* Append a codec to the end of the global codec list. */
void register_avcodec(AVCodec *format)
{
/* walk to the tail's next-pointer, then link the new entry there */
AVCodec **p;
p = &first_avcodec;
while (*p != NULL)
p = &(*p)->next;
*p = format;
format->next = NULL;
}
/* One entry of the codec's internal frame-buffer pool: per-plane base
 * (allocation start), data (possibly offset start used by the codec)
 * and line sizes. */
typedef struct InternalBuffer
{
uint8_t *base[4];
uint8_t *data[4];
int linesize[4];
} InternalBuffer;
/* capacity of the internal buffer pool */
#define INTERNAL_BUFFER_SIZE 32
/* round x up to the next multiple of a (a must be a power of two) */
#define ALIGN(x, a) (((x)+(a)-1)&~((a)-1))
/* Round *width / *height up to the alignment required by the context's
 * pixel format (e.g. 16x16 for the planar YUV formats used by MPEG-style
 * codecs, 32x8 for 4:1:1 formats, none for paletted/RGB formats). */
void avcodec_align_dimensions(AVCodecContext *s, int *width, int *height)
{
int w_align = 1;
int h_align = 1;
switch (s->pix_fmt)
{
case PIX_FMT_YUV420P:
case PIX_FMT_YUV422:
case PIX_FMT_UYVY422:
case PIX_FMT_YUV422P:
case PIX_FMT_YUV444P:
case PIX_FMT_GRAY8:
case PIX_FMT_YUVJ420P:
case PIX_FMT_YUVJ422P:
case PIX_FMT_YUVJ444P: //FIXME check for non mpeg style codecs and use less alignment
w_align = 16;
h_align = 16;
break;
case PIX_FMT_YUV411P:
case PIX_FMT_UYVY411:
w_align = 32;
h_align = 8;
break;
case PIX_FMT_YUV410P:
case PIX_FMT_RGB555:
case PIX_FMT_PAL8:
break;
case PIX_FMT_BGR24:
break;
default:
w_align = 1;
h_align = 1;
break;
}
*width = ALIGN(*width, w_align);
*height = ALIGN(*height, h_align);
}
/* Validate picture dimensions: both must be positive and small enough
 * that (w+128)*(h+128) stays below INT_MAX/4, so later size arithmetic
 * cannot overflow. Returns 0 if acceptable, -1 otherwise.
 * av_log_ctx is accepted for API symmetry but unused. */
int avcodec_check_dimensions(void *av_log_ctx, unsigned int w, unsigned int h)
{
    int positive = (int)w > 0 && (int)h > 0;
    int bounded = (w + 128)*(uint64_t)(h + 128) < INT_MAX / 4;

    return (positive && bounded) ? 0 : - 1;
}
/* Default get_buffer() callback: hand out a picture buffer from the
 * context's internal pool, allocating the planes on first use (with
 * edge padding and stride alignment) and reusing them afterwards.
 * Returns 0 on success, -1 on bad dimensions or allocation failure. */
int avcodec_default_get_buffer(AVCodecContext *s, AVFrame *pic)
{
int i;
int w = s->width;
int h = s->height;
int align_off;
InternalBuffer *buf;
assert(pic->data[0] == NULL);
assert(INTERNAL_BUFFER_SIZE > s->internal_buffer_count);
if (avcodec_check_dimensions(s, w, h))
return - 1;
/* lazily create the pool on first request */
if (s->internal_buffer == NULL)
s->internal_buffer = av_mallocz(INTERNAL_BUFFER_SIZE *sizeof(InternalBuffer));
buf = &((InternalBuffer*)s->internal_buffer)[s->internal_buffer_count];
if (buf->base[0])
{} /* entry already has planes allocated: reuse them below */
else
{
int h_chroma_shift, v_chroma_shift;
int pixel_size, size[3];
AVPicture picture;
avcodec_get_chroma_sub_sample(s->pix_fmt, &h_chroma_shift, &v_chroma_shift);
avcodec_align_dimensions(s, &w, &h);
/* add room for edge emulation on all sides */
w+= EDGE_WIDTH*2;
h+= EDGE_WIDTH*2;
/* probe the layout (NULL base) to learn bits per pixel */
avpicture_fill(&picture, NULL, s->pix_fmt, w, h);
pixel_size = picture.linesize[0] * 8 / w;
assert(pixel_size >= 1);
/* widen w so each line meets the stride alignment requirement */
if (pixel_size == 3 *8)
w = ALIGN(w, STRIDE_ALIGN << h_chroma_shift);
else
w = ALIGN(pixel_size *w, STRIDE_ALIGN << (h_chroma_shift + 3)) / pixel_size;
/* compute per-plane sizes: [0]=luma, [1]/[2]=chroma halves */
size[1] = avpicture_fill(&picture, NULL, s->pix_fmt, w, h);
size[0] = picture.linesize[0] *h;
size[1] -= size[0];
if (picture.data[2])
size[1] = size[2] = size[1] / 2;
else
size[2] = 0;
memset(buf->base, 0, sizeof(buf->base));
memset(buf->data, 0, sizeof(buf->data));
for (i = 0; i < 3 && size[i]; i++)
{
const int h_shift = i == 0 ? 0 : h_chroma_shift;
const int v_shift = i == 0 ? 0 : v_chroma_shift;
buf->linesize[i] = picture.linesize[i];
buf->base[i] = av_malloc(size[i] + 16); //FIXME 16
if (buf->base[i] == NULL)
return - 1;
memset(buf->base[i], 128, size[i]);
/* offset data past the top/left edge region, aligned to STRIDE_ALIGN */
align_off = ALIGN((buf->linesize[i] * EDGE_WIDTH >> v_shift) + ( EDGE_WIDTH >> h_shift), STRIDE_ALIGN);
if ((s->pix_fmt == PIX_FMT_PAL8) || !size[2])
buf->data[i] = buf->base[i];
else
buf->data[i] = buf->base[i] + align_off;
}
}
/* hand the pool entry's planes to the picture */
for (i = 0; i < 4; i++)
{
pic->base[i] = buf->base[i];
pic->data[i] = buf->data[i];
pic->linesize[i] = buf->linesize[i];
}
s->internal_buffer_count++;
return 0;
}
/* Default release_buffer() callback: return pic's buffer to the internal
 * pool. The pool entry matching pic->data[0] is swapped with the last
 * live entry so live entries stay contiguous; the plane memory itself is
 * kept for reuse by the next get_buffer().
 *
 * Fix: all four plane pointers of pic are now cleared — get_buffer()
 * assigns four planes, but the original cleared only three, leaving
 * pic->data[3] pointing at pool memory. */
void avcodec_default_release_buffer(AVCodecContext *s, AVFrame *pic)
{
    int i;
    InternalBuffer *buf, *last, temp;

    assert(s->internal_buffer_count);

    /* find the pool entry backing this picture (linear scan; pool is tiny) */
    buf = NULL;
    for (i = 0; i < s->internal_buffer_count; i++)
    {
        buf = &((InternalBuffer*)s->internal_buffer)[i]; //just 3-5 checks so is not worth to optimize
        if (buf->data[0] == pic->data[0])
            break;
    }
    assert(i < s->internal_buffer_count);

    /* swap the freed entry with the last live one and shrink the count */
    s->internal_buffer_count--;
    last = &((InternalBuffer*)s->internal_buffer)[s->internal_buffer_count];
    temp = *buf;
    *buf = *last;
    *last = temp;

    /* detach the picture from the pool memory */
    for (i = 0; i < 4; i++)
    {
        pic->data[i] = NULL;
    }
}
/* Default reget_buffer() callback: keep pic's existing buffer when it
 * already has one, otherwise acquire a fresh buffer through the
 * context's get_buffer() callback. */
int avcodec_default_reget_buffer(AVCodecContext *s, AVFrame *pic)
{
    if (pic->data[0] != NULL) /* already backed by a buffer */
        return 0;
    return s->get_buffer(s, pic);
}
/* Release every allocation held by the context's internal frame-buffer
 * pool, then free the pool array itself. Safe to call when no pool was
 * ever created. */
void avcodec_default_free_buffers(AVCodecContext *s)
{
    int idx, plane;

    if (s->internal_buffer == NULL)
        return ;

    for (idx = 0; idx < INTERNAL_BUFFER_SIZE; idx++)
    {
        InternalBuffer *entry = &((InternalBuffer*)s->internal_buffer)[idx];

        for (plane = 0; plane < 4; plane++)
        {
            av_freep(&entry->base[plane]); /* base owns the allocation */
            entry->data[plane] = NULL;     /* data merely points into it */
        }
    }
    av_freep(&s->internal_buffer);
    s->internal_buffer_count = 0;
}
/* Allocate a zeroed codec context and install the default buffer
 * management callbacks. Returns NULL on allocation failure.
 *
 * Uses av_mallocz() (consistent with the rest of this file) instead of
 * the original av_malloc() + memset() pair. */
AVCodecContext *avcodec_alloc_context(void)
{
    AVCodecContext *s = av_mallocz(sizeof(AVCodecContext));

    if (s == NULL)
        return NULL;

    /* default buffer management callbacks */
    s->get_buffer = avcodec_default_get_buffer;
    s->release_buffer = avcodec_default_release_buffer;
    s->reget_buffer = avcodec_default_reget_buffer;
    s->pix_fmt = PIX_FMT_NONE;
    s->palctrl = NULL;
    return s;
}
/* Bind `codec` to `avctx`, allocate the codec's private data and run its
 * init hook. Returns 0 on success, -1 on failure (context already open,
 * OOM, or init error — in which case the partial open is undone). */
int avcodec_open(AVCodecContext *avctx, AVCodec *codec)
{
int ret = - 1;
/* refuse to open an already-bound context */
if (avctx->codec)
goto end;
if (codec->priv_data_size > 0)
{
avctx->priv_data = av_mallocz(codec->priv_data_size);
if (!avctx->priv_data)
goto end;
}
else
{
avctx->priv_data = NULL;
}
avctx->codec = codec;
avctx->codec_id = codec->id;
avctx->frame_number = 0;
ret = avctx->codec->init(avctx);
if (ret < 0)
{
/* init failed: undo the partial open */
av_freep(&avctx->priv_data);
avctx->codec = NULL;
goto end;
}
ret = 0;
end:
return ret;
}
/* Decode one video packet by dispatching to the bound codec's decode
 * callback (e.g. the MS RLE decoder). *got_picture_ptr is set non-zero
 * when `picture` holds a complete decoded frame; the context's frame
 * counter advances only for frames actually produced. Returns the
 * callback's result (bytes consumed or negative error), 0 for an empty
 * packet. */
int avcodec_decode_video(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr,
uint8_t *buf, int buf_size)
{
    int ret = 0;

    *got_picture_ptr = 0;
    if (buf_size)
    {
        ret = avctx->codec->decode(avctx, picture, got_picture_ptr, buf, buf_size);
        if (*got_picture_ptr)
            avctx->frame_number++;
    }
    return ret;
}
/* Decode one audio packet into 16-bit samples via the bound codec's
 * decode callback. *frame_size_ptr receives the number of output bytes
 * written to `samples`. Returns the callback's result (bytes consumed
 * or negative error), 0 for an empty packet. */
int avcodec_decode_audio(AVCodecContext *avctx, int16_t *samples, int *frame_size_ptr,
uint8_t *buf, int buf_size)
{
    int ret = 0;

    *frame_size_ptr = 0;
    if (buf_size)
    {
        ret = avctx->codec->decode(avctx, samples, frame_size_ptr, buf, buf_size);
        avctx->frame_number++;
    }
    return ret;
}
/* Shut down an opened codec context: run the codec's optional close
 * hook, release the internal buffer pool and the codec private data,
 * and unbind the codec. Always returns 0. */
int avcodec_close(AVCodecContext *avctx)
{
    if (avctx->codec->close)
        avctx->codec->close(avctx);

    avcodec_default_free_buffers(avctx);
    av_freep(&avctx->priv_data);
    avctx->codec = NULL;
    return 0;
}
/* Look up a registered decoder by codec id in the global codec list.
 * Returns NULL when no codec with that id provides a decode callback. */
AVCodec *avcodec_find_decoder(enum CodecID id)
{
    AVCodec *cur;

    for (cur = first_avcodec; cur != NULL; cur = cur->next)
    {
        if (cur->id == id && cur->decode != NULL)
            return cur;
    }
    return NULL;
}
/* One-time codec library init (idempotent). ffplay treats the CPU as a
 * generalized DSP: routines that can use CPU acceleration instructions
 * live in dsputil behind function pointers, and this sets them up. */
void avcodec_init(void)
{
static int inited = 0;
if (inited != 0)
return ;
inited = 1;
dsputil_static_init();
}

@ -0,0 +1,31 @@
/*
** Registers all supported formats, codecs and protocols in one place
** for convenience.
*/
#include "avformat.h"
extern URLProtocol file_protocol;
/* Register all supported demuxers, codecs and protocols (idempotent). */
void av_register_all(void)
{
static int inited = 0;
// run only once
if (inited != 0)
return ;
inited = 1;
// ffplay treats the CPU as a generalized DSP: routines that can use CPU
// acceleration instructions are kept in dsputil.h / dsputil.c behind
// function pointers mapped to per-CPU optimized implementations;
// initialize those function pointers here
avcodec_init();
// register all codecs
avcodec_register_all();
// register the supported container formats (AVI)
avidec_init();
// register protocols (file protocol, network protocols, ...)
register_protocol(&file_protocol);
}

@ -0,0 +1,271 @@
#ifndef AVFORMAT_H
#define AVFORMAT_H
/*************************************************
** Public container/demuxing API used by ffplay:
** packet, stream and format-context definitions.
*************************************************/
#ifdef __cplusplus
extern "C"
{
#endif
/* 版本号相关 */
#define LIBAVFORMAT_VERSION_INT ((50<<16)+(4<<8)+0)
#define LIBAVFORMAT_VERSION 50.4.0
#define LIBAVFORMAT_BUILD LIBAVFORMAT_VERSION_INT
#define LIBAVFORMAT_IDENT "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION)
/* 编解码头文件 */
#include "../libavcodec/avcodec.h"
/* io操作头文件 */
#include "avio.h"
/* 错误码定义 */
#define AVERROR_UNKNOWN (-1) // unknown error
#define AVERROR_IO (-2) // i/o error
#define AVERROR_NUMEXPECTED (-3) // number syntax expected in filename
#define AVERROR_INVALIDDATA (-4) // invalid data found
#define AVERROR_NOMEM (-5) // not enough memory
#define AVERROR_NOFMT (-6) // unknown format
#define AVERROR_NOTSUPP (-7) // operation not supported
/* 在文件中跳转 */
#define AVSEEK_FLAG_BACKWARD 1 // seek backward
#define AVSEEK_FLAG_BYTE 2 // seeking based on position in bytes
#define AVSEEK_FLAG_ANY 4 // seek to any frame, even non keyframes
#define AVFMT_NOFILE 0x0001 // no file should be opened
#define PKT_FLAG_KEY 0x0001
#define AVINDEX_KEYFRAME 0x0001
#define AVPROBE_SCORE_MAX 100
#define MAX_STREAMS 20
/*
** One demuxed data packet: a compressed chunk of audio or video as read
** from the container, before decoding.
*/
typedef struct AVPacket
{
int64_t pts; // presentation time stamp in time_base units (display time for video)
int64_t dts; // decompression time stamp in time_base units (less important here)
int64_t pos; // byte position in stream, -1 if unknown
uint8_t *data; // start of the packet payload buffer
int size; // payload size in bytes
int stream_index; // index of the stream this packet belongs to (distinguishes audio/video here)
int flags; // packet flags, e.g. PKT_FLAG_KEY for keyframes
void(*destruct)(struct AVPacket*); // destructor for the payload
} AVPacket;
/* Singly linked list node holding one queued packet. */
typedef struct AVPacketList
{
AVPacket pkt;
struct AVPacketList *next;
} AVPacketList;
/* Default packet destructor: release the payload and mark it empty. */
static inline void av_destruct_packet(AVPacket *pkt)
{
av_free(pkt->data);
pkt->data = NULL;
pkt->size = 0;
}
/* Free a packet by invoking its destructor, if it has one. */
static inline void av_free_packet(AVPacket *pkt)
{
if (pkt && pkt->destruct)
pkt->destruct(pkt);
}
/*
** Allocate a packet payload of `size` bytes (plus zeroed padding) and
** fill it with the next `size` bytes from the input stream.
** Returns the number of bytes actually read; on a read result <= 0 the
** packet is freed again and that result is returned.
*/
static inline int av_get_packet(ByteIOContext *s, AVPacket *pkt, int size)
{
int ret;
unsigned char *data;
/* guard against unsigned overflow when adding the padding */
if ((unsigned)size > (unsigned)size + FF_INPUT_BUFFER_PADDING_SIZE)
return AVERROR_NOMEM;
/* allocate payload + padding */
data = av_malloc(size + FF_INPUT_BUFFER_PADDING_SIZE);
if (!data)
return AVERROR_NOMEM;
/* decoders may over-read a few bytes; keep the padding zeroed */
memset(data + size, 0, FF_INPUT_BUFFER_PADDING_SIZE);
pkt->pts = AV_NOPTS_VALUE;
pkt->dts = AV_NOPTS_VALUE;
pkt->pos = - 1;
pkt->flags = 0;
pkt->stream_index = 0;
pkt->data = data;
pkt->size = size;
pkt->destruct = av_destruct_packet;
pkt->pos = url_ftell(s);
/* read the actual payload bytes */
ret = url_fread(s, pkt->data, size);
if (ret <= 0)
av_free_packet(pkt);
else
pkt->size = ret; /* short read: shrink the packet */
return ret;
}
/*
** Input handed to a demuxer's read_probe() so it can score how likely
** the data is to be in its format (ffplay uses this for AVI detection).
*/
typedef struct AVProbeData
{
// file name (its extension can aid detection)
const char *filename;
// first bytes of the file
unsigned char *buf;
// number of valid bytes in buf
int buf_size;
} AVProbeData;
/*
** One seek-index entry; flags and size share an int via bit-fields to
** keep the struct small.
*/
typedef struct AVIndexEntry
{
int64_t pos;       // byte offset of the chunk in the file
int64_t timestamp; // timestamp of the chunk, in stream time_base units
int flags: 2;      // e.g. AVINDEX_KEYFRAME
int size: 30; //yeah trying to keep the size of this small to reduce memory requirements (its 24 vs 32 byte due to possible 8byte align)
} AVIndexEntry;
/*
** One elementary stream inside a container. AVStream is the generic
** stream handle; priv_data links it to the concrete per-format stream
** (the AVI stream here; MP4 etc. would be analogous).
*/
typedef struct AVStream
{
// codec context used to decode this stream
AVCodecContext *actx; // codec context, change from AVCodecContext *codec;
// concrete per-format stream state
void *priv_data; // in this example, an AVIStream
// time base of all this stream's timestamps
AVRational time_base; // initialized by av_set_pts_info()
// seek index, used when the format does not support native seeking
AVIndexEntry *index_entries; // only used if the format does not support seeking natively
int nb_index_entries;
int index_entries_allocated_size;
double frame_last_delay; // delay of the last frame
} AVStream;
/* Demuxer open parameters (only a debug flag is kept in this example). */
typedef struct AVFormatParameters
{
int dbg; //only for debug
} AVFormatParameters;
/*
** One input container format (demuxer). AVInputFormat describes a
** concrete format such as AVI: how to probe it, open it, and read
** packets from it. All registered formats are chained in a list.
*/
typedef struct AVInputFormat
{
// format name
const char *name;
// size of the format's private context (allocated by the framework)
int priv_data_size;
// probe function: scores how likely the data is in this format
int(*read_probe)(AVProbeData*);
// read the container header and create the streams
int(*read_header)(struct AVFormatContext *, AVFormatParameters *ap);
// read one frame of data (one packet)
int(*read_packet)(struct AVFormatContext *, AVPacket *pkt);
// close the file
int(*read_close)(struct AVFormatContext*);
const char *extensions; // comma-separated file extensions
// next registered input format
struct AVInputFormat *next;
} AVInputFormat;
/*
** Top-level demuxing context: one opened input "file" (in the broad
** sense — could be a network stream) together with its format and
** streams.
*/
typedef struct AVFormatContext // format I/O context
{
// the input's format; AVFormatContext is the generic handle, AVInputFormat the concrete format
struct AVInputFormat *iformat;
// the format's private context (e.g. AVIContext)
void *priv_data;
// the input byte stream (file, network, ...)
ByteIOContext pb;
// number of streams
int nb_streams;
// the streams (audio stream, video stream)
AVStream *streams[MAX_STREAMS];
} AVFormatContext;
int avidec_init(void);
void av_register_input_format(AVInputFormat *format);
void av_register_all(void);
AVInputFormat *av_probe_input_format(AVProbeData *pd, int is_opened);
int match_ext(const char *filename, const char *extensions);
int av_open_input_stream(AVFormatContext **ic_ptr, ByteIOContext *pb, const char *filename,
AVInputFormat *fmt, AVFormatParameters *ap);
int av_open_input_file(AVFormatContext **ic_ptr, const char *filename, AVInputFormat *fmt,
int buf_size, AVFormatParameters *ap);
int av_read_frame(AVFormatContext *s, AVPacket *pkt);
int av_read_packet(AVFormatContext *s, AVPacket *pkt);
void av_close_input_file(AVFormatContext *s);
AVStream *av_new_stream(AVFormatContext *s, int id);
void av_set_pts_info(AVStream *s, int pts_wrap_bits, int pts_num, int pts_den);
int av_index_search_timestamp(AVStream *st, int64_t timestamp, int flags);
int av_add_index_entry(AVStream *st, int64_t pos, int64_t timestamp, int size, int distance, int flags);
int strstart(const char *str, const char *val, const char **ptr);
void pstrcpy(char *buf, int buf_size, const char *str);
#ifdef __cplusplus
}
#endif
#endif

@ -0,0 +1,792 @@
/*
** AVI demuxer notes:
** 1. An AVI file may carry an index chunk that records where each data
**    chunk of the avi stream is located, enabling seeking.
** 2. Each AVI index entry (AVIINDEXENTRY) stores dwChunkOffset, the
**    file offset of one data chunk.
** 3. Non-interleaved avi files are read with the help of this index.
*/
#include "avformat.h"
#include <assert.h>
#define AVIIF_INDEX 0x10
#define AVIF_HASINDEX 0x00000010 // Index at end of file?
#define AVIF_MUSTUSEINDEX 0x00000020
#define INT_MAX 2147483647
#define MKTAG(a,b,c,d) (a | (b << 8) | (c << 16) | (d << 24))
#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
static int avi_load_index(AVFormatContext *s);
static int guess_ni_flag(AVFormatContext *s);
/* Per-stream demuxer state for one stream inside an AVI file. */
typedef struct
{
int64_t frame_offset; // current frame(video) or byte(audio) counter(used to compute the pts)
int remaining;        // bytes remaining in the current chunk
int packet_size;
int scale;            // together with rate: stream time base (scale/rate)
int rate;
int sample_size; // size of one sample (or packet) (in the rate/scale sense) in bytes
int64_t cum_len; // temporary storage (used during seek)
int prefix; // normally 'd'<<8 + 'c' or 'w'<<8 + 'b'
int prefix_count;
} AVIStream;
/* Demuxer context for one AVI container (file). */
typedef struct
{
int64_t riff_end;  // end offset of the RIFF chunk
int64_t movi_list; // start offset of the "movi" list
int64_t movi_end;  // end offset of the "movi" list
int non_interleaved; // set when streams must be read via the index
int stream_index_2; // current stream index (named _2 to avoid confusion with AVPacket.stream_index)
} AVIContext;
/* Maps a container tag (video FourCC or WAV format id) to a codec id. */
typedef struct
{
    int id;           // CODEC_ID_* value
    unsigned int tag; // FourCC (video) or 16-bit WAVEFORMAT tag (audio)
} CodecTag;
/* Video FourCC -> codec id table; terminated by the CODEC_ID_NONE entry. */
const CodecTag codec_bmp_tags[] =
{
    {CODEC_ID_MSRLE, MKTAG('m', 'r', 'l', 'e')},
    {CODEC_ID_MSRLE, MKTAG(0x1, 0x0, 0x0, 0x0)},
    {CODEC_ID_NONE, 0},
};
/* WAVEFORMAT tag -> codec id table; terminated by the all-zero entry. */
const CodecTag codec_wav_tags[] =
{
    {CODEC_ID_TRUESPEECH, 0x22},
    {0, 0},
};
/* Look up a codec id by container tag, comparing each of the four tag bytes
 * case-insensitively. Returns CODEC_ID_NONE when no table entry matches. */
enum CodecID codec_get_id(const CodecTag *tags, unsigned int tag)
{
    const CodecTag *t;
    for (t = tags; t->id != CODEC_ID_NONE; t++)
    {
        int shift;
        int match = 1;
        for (shift = 0; shift < 32; shift += 8)
        {
            if (toupper((tag >> shift) & 0xFF) != toupper((t->tag >> shift) & 0xFF))
            {
                match = 0;
                break;
            }
        }
        if (match)
            return t->id;
    }
    return CODEC_ID_NONE;
}
/* Validate the leading RIFF header and record where the RIFF chunk ends.
 * Returns 0 on success, -1 if the stream is not a RIFF/AVI(X) file. */
static int get_riff(AVIContext *avi, ByteIOContext *pb)
{
    uint32_t fourcc;
    fourcc = get_le32(pb);
    if (fourcc != MKTAG('R', 'I', 'F', 'F'))
        return -1;
    /* chunk-size field plus the current position gives the absolute end */
    avi->riff_end = get_le32(pb);
    avi->riff_end += url_ftell(pb);
    fourcc = get_le32(pb);
    if (fourcc != MKTAG('A', 'V', 'I', ' ') && fourcc != MKTAG('A', 'V', 'I', 'X'))
        return -1;
    return 0;
}
/* For a non-interleaved stream whose index is one single huge entry (typical
 * for audio), split that entry into ~1KB pieces so seeking inside it works. */
static void clean_index(AVFormatContext *s)
{
    int i, j;
    for (i = 0; i < s->nb_streams; i++)
    {
        AVStream *st = s->streams[i];
        AVIStream *ast = st->priv_data;
        int n = st->nb_index_entries;
        int max = ast->sample_size;
        int64_t pos, size, ts;
        /* only rewrite streams with exactly one entry and a known sample size
         * (sample_size != 0 also keeps the doubling loop below finite) */
        if (n != 1 || ast->sample_size == 0)
            continue;
        /* grow the piece size to >= 1024 bytes, a multiple of sample_size */
        while (max < 1024)
            max += max;
        pos = st->index_entries[0].pos;
        size = st->index_entries[0].size;
        ts = st->index_entries[0].timestamp;
        for (j = 0; j < size; j += max)
        {
            av_add_index_entry(st, pos + j, ts + j / ast->sample_size, FFMIN(max, size - j), 0, AVINDEX_KEYFRAME);
        }
    }
}
/* Parse the AVI file header. The 'avih' chunk gives global parameters, each
 * 'strh'/'strf' pair describes one stream, and the 'movi' LIST marks the
 * start of media data, which terminates header parsing.
 * Returns 0 on success, -1 on malformed input (all streams freed). */
static int avi_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = &s->pb;
    uint32_t tag, tag1, handler;
    int codec_type, stream_index, frame_period, bit_rate;
    unsigned int size, nb_frames;
    int i, n;
    AVStream *st;
    AVIStream *ast;
    avi->stream_index_2 = - 1;
    /* validate the RIFF/AVI signature first */
    if (get_riff(avi, pb) < 0)
        return - 1;
    stream_index = - 1; // first list tag
    codec_type = - 1;
    frame_period = 0;
    for (;;)
    {
        if (url_feof(pb))
            goto fail;
        tag = get_le32(pb);
        size = get_le32(pb);
        switch (tag)
        {
            case MKTAG('L', 'I', 'S', 'T'): // ignored, except when start of video packets
                tag1 = get_le32(pb);
                if (tag1 == MKTAG('m', 'o', 'v', 'i'))
                {
                    avi->movi_list = url_ftell(pb) - 4;
                    if (size)
                        avi->movi_end = avi->movi_list + size;
                    else
                        avi->movi_end = url_fsize(pb);
                    /* reaching the data segment means the header is over */
                    goto end_of_header;
                }
                break;
            case MKTAG('a', 'v', 'i', 'h'): // avi header, using frame_period is bad idea
                frame_period = get_le32(pb);
                bit_rate = get_le32(pb) *8;
                get_le32(pb); // padding granularity, ignored
                avi->non_interleaved |= get_le32(pb) &AVIF_MUSTUSEINDEX;
                url_fskip(pb, 2 *4); // total frames + initial frames, ignored
                n = get_le32(pb); // declared stream count
                /* pre-create one AVStream + AVIStream per declared stream */
                for (i = 0; i < n; i++)
                {
                    AVIStream *ast;
                    st = av_new_stream(s, i);
                    if (!st)
                        goto fail;
                    ast = av_mallocz(sizeof(AVIStream));
                    if (!ast)
                        goto fail;
                    st->priv_data = ast;
                    st->actx->bit_rate = bit_rate;
                }
                url_fskip(pb, size - 7 * 4); // skip the rest of 'avih'
                break;
            case MKTAG('s', 't', 'r', 'h'): // stream header
                stream_index++;
                tag1 = get_le32(pb);
                handler = get_le32(pb);
                if (stream_index >= s->nb_streams)
                {
                    /* more 'strh' chunks than declared streams: skip */
                    url_fskip(pb, size - 8);
                    break;
                }
                st = s->streams[stream_index];
                ast = st->priv_data;
                get_le32(pb); // flags
                get_le16(pb); // priority
                get_le16(pb); // language
                get_le32(pb); // initial frame
                ast->scale = get_le32(pb);
                ast->rate = get_le32(pb);
                /* time base: prefer scale/rate from 'strh', then the global
                 * frame period from 'avih', finally a 25 fps fallback */
                if (ast->scale && ast->rate)
                {}
                else if (frame_period)
                {
                    ast->rate = 1000000;
                    ast->scale = frame_period;
                }
                else
                {
                    ast->rate = 25;
                    ast->scale = 1;
                }
                av_set_pts_info(st, 64, ast->scale, ast->rate);
                ast->cum_len = get_le32(pb); // start
                nb_frames = get_le32(pb);
                get_le32(pb); // buffer size
                get_le32(pb); // quality
                ast->sample_size = get_le32(pb); // sample ssize
                switch (tag1)
                {
                    case MKTAG('v', 'i', 'd', 's'): codec_type = CODEC_TYPE_VIDEO;
                        ast->sample_size = 0; // video: count frames, not bytes
                        break;
                    case MKTAG('a', 'u', 'd', 's'): codec_type = CODEC_TYPE_AUDIO;
                        break;
                    case MKTAG('t', 'x', 't', 's'): //FIXME
                        codec_type = CODEC_TYPE_DATA; //CODEC_TYPE_SUB ? FIXME
                        break;
                    case MKTAG('p', 'a', 'd', 's'): codec_type = CODEC_TYPE_UNKNOWN;
                        stream_index--; // padding stream: reuse this slot
                        break;
                    default:
                        goto fail;
                }
                ast->frame_offset = ast->cum_len *FFMAX(ast->sample_size, 1);
                url_fskip(pb, size - 12 * 4);
                break;
            case MKTAG('s', 't', 'r', 'f'): // stream format (BITMAPINFOHEADER / WAVEFORMATEX)
                if (stream_index >= s->nb_streams)
                {
                    url_fskip(pb, size);
                }
                else
                {
                    st = s->streams[stream_index];
                    switch (codec_type)
                    {
                        case CODEC_TYPE_VIDEO: // BITMAPINFOHEADER
                            get_le32(pb); // size
                            st->actx->width = get_le32(pb);
                            st->actx->height = get_le32(pb);
                            get_le16(pb); // panes
                            st->actx->bits_per_sample = get_le16(pb); // depth
                            tag1 = get_le32(pb);
                            get_le32(pb); // ImageSize
                            get_le32(pb); // XPelsPerMeter
                            get_le32(pb); // YPelsPerMeter
                            get_le32(pb); // ClrUsed
                            get_le32(pb); // ClrImportant
                            /* anything past the 40-byte header is extradata */
                            if (size > 10 *4 && size < (1 << 30))
                            {
                                st->actx->extradata_size = size - 10 * 4;
                                st->actx->extradata = av_malloc(st->actx->extradata_size +
                                    FF_INPUT_BUFFER_PADDING_SIZE);
                                url_fread(pb, st->actx->extradata, st->actx->extradata_size);
                            }
                            if (st->actx->extradata_size &1) // keep 2-byte chunk alignment
                                get_byte(pb);
                            /* Extract palette from extradata if bpp <= 8 */
                            /* This code assumes that extradata contains only palette */
                            /* This is true for all paletted codecs implemented in ffmpeg */
                            if (st->actx->extradata_size && (st->actx->bits_per_sample <= 8))
                            {
                                int min = FFMIN(st->actx->extradata_size, AVPALETTE_SIZE);
                                st->actx->palctrl = av_mallocz(sizeof(AVPaletteControl));
                                memcpy(st->actx->palctrl->palette, st->actx->extradata, min);
                                st->actx->palctrl->palette_changed = 1;
                            }
                            st->actx->codec_type = CODEC_TYPE_VIDEO;
                            st->actx->codec_id = codec_get_id(codec_bmp_tags, tag1);
                            st->frame_last_delay = 1.0 * ast->scale / ast->rate;
                            break;
                        case CODEC_TYPE_AUDIO:
                        {
                            /* WAVEFORMAT / WAVEFORMATEX layout */
                            AVCodecContext *actx = st->actx;
                            int id = get_le16(pb);
                            actx->codec_type = CODEC_TYPE_AUDIO;
                            actx->channels = get_le16(pb);
                            actx->sample_rate = get_le32(pb);
                            actx->bit_rate = get_le32(pb) *8;
                            actx->block_align = get_le16(pb);
                            if (size == 14) // We're dealing with plain vanilla WAVEFORMAT
                                actx->bits_per_sample = 8;
                            else
                                actx->bits_per_sample = get_le16(pb);
                            actx->codec_id = codec_get_id(codec_wav_tags, id); // wav_codec_get_id(id, codec->bits_per_sample);
                            if (size > 16)
                            {
                                actx->extradata_size = get_le16(pb); // We're obviously dealing with WAVEFORMATEX
                                if (actx->extradata_size > 0)
                                {
                                    /* clamp to what the chunk can hold */
                                    if (actx->extradata_size > size - 18)
                                        actx->extradata_size = size - 18;
                                    actx->extradata = av_mallocz(actx->extradata_size +
                                        FF_INPUT_BUFFER_PADDING_SIZE);
                                    url_fread(pb, actx->extradata, actx->extradata_size);
                                }
                                else
                                {
                                    actx->extradata_size = 0;
                                }
                                // It is possible for the chunk to contain garbage at the end
                                if (size - actx->extradata_size - 18 > 0)
                                    url_fskip(pb, size - actx->extradata_size - 18);
                            }
                        }
                        if (size % 2) // 2-aligned (fix for Stargate SG-1 - 3x18 - Shades of Grey.avi)
                            url_fskip(pb, 1);
                        break;
                        default:
                            /* unknown stream type: record it as opaque data */
                            st->actx->codec_type = CODEC_TYPE_DATA;
                            st->actx->codec_id = CODEC_ID_NONE;
                            url_fskip(pb, size);
                            break;
                    }
                }
                break;
            default: // skip tag
                size += (size &1); // chunks are 2-byte aligned
                url_fskip(pb, size);
                break;
        }
    }
end_of_header:
    if (stream_index != s->nb_streams - 1) // check stream number
    {
fail:
        /* declared and parsed stream counts disagree: tear everything down */
        for (i = 0; i < s->nb_streams; i++)
        {
            av_freep(&s->streams[i]->actx->extradata);
            av_freep(&s->streams[i]);
        }
        return - 1;
    }
    /* build the seek index, then decide whether index-driven reading and
     * index splitting are needed */
    avi_load_index(s);
    avi->non_interleaved |= guess_ni_flag(s);
    if (avi->non_interleaved)
        clean_index(s);
    return 0;
}
/* Read the next data packet.
 * Interleaved files: scan forward byte-by-byte inside 'movi' for the next
 * ##dc/##wb chunk header. Non-interleaved files: pick the stream whose next
 * chunk has the lowest timestamp and seek to it through the index. A chunk
 * may be drained across several calls (stream_index_2 / remaining track the
 * chunk in progress). Returns the payload size read, or -1 on EOF/error.
 *
 * Fixes vs. the original:
 *  - the index search result is validated BEFORE indexing index_entries
 *    (the original read index_entries[-1] when the search failed);
 *  - an empty container (nb_streams == 0) no longer dereferences NULL;
 *  - dead debug scaffolding removed. */
int avi_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = &s->pb;
    int n, d[8], size;
    offset_t i, sync;
    if (avi->non_interleaved)
    {
        /* choose the stream whose next chunk has the smallest timestamp */
        int best_stream_index = 0;
        AVStream *best_st = NULL;
        AVIStream *best_ast;
        int64_t best_ts = INT64_MAX;
        int i;
        for (i = 0; i < s->nb_streams; i++)
        {
            AVStream *st = s->streams[i];
            AVIStream *ast = st->priv_data;
            int64_t ts = ast->frame_offset;
            if (ast->sample_size)
                ts /= ast->sample_size;
            /* convert to a common AV_TIME_BASE scale for comparison */
            ts = av_rescale(ts, AV_TIME_BASE * (int64_t)st->time_base.num, st->time_base.den);
            if (ts < best_ts)
            {
                best_ts = ts;
                best_st = st;
                best_stream_index = i;
            }
        }
        if (!best_st) /* no streams at all; the original would crash here */
            return -1;
        best_ast = best_st->priv_data;
        /* back to the stream's own time base for the index lookup */
        best_ts = av_rescale(best_ts, best_st->time_base.den, AV_TIME_BASE * (int64_t)best_st->time_base.num);
        if (best_ast->remaining)
            i = av_index_search_timestamp(best_st, best_ts, AVSEEK_FLAG_ANY | AVSEEK_FLAG_BACKWARD);
        else
            i = av_index_search_timestamp(best_st, best_ts, AVSEEK_FLAG_ANY);
        if (i >= 0)
        {
            int64_t pos = best_st->index_entries[i].pos;
            /* resume mid-chunk if part of it was already consumed */
            pos += best_ast->packet_size - best_ast->remaining;
            url_fseek(&s->pb, pos + 8, SEEK_SET); /* +8 skips the chunk header */
            assert(best_ast->remaining <= best_ast->packet_size);
            avi->stream_index_2 = best_stream_index;
            if (!best_ast->remaining)
                best_ast->packet_size = best_ast->remaining = best_st->index_entries[i].size;
        }
    }
resync:
    if (avi->stream_index_2 >= 0)
    {
        /* continue draining the current chunk */
        AVStream *st = s->streams[avi->stream_index_2];
        AVIStream *ast = st->priv_data;
        int size;
        if (ast->sample_size <= 1) // minorityreport.AVI block_align=1024 sample_size=1 IMA-ADPCM
            size = INT_MAX;
        else if (ast->sample_size < 32)
            size = 64 * ast->sample_size;
        else
            size = ast->sample_size;
        if (size > ast->remaining)
            size = ast->remaining;
        av_get_packet(pb, pkt, size);
        pkt->dts = ast->frame_offset;
        if (ast->sample_size)
            pkt->dts /= ast->sample_size;
        pkt->stream_index = avi->stream_index_2;
        if (st->actx->codec_type == CODEC_TYPE_VIDEO)
        {
            if (st->index_entries)
            {
                int index = av_index_search_timestamp(st, pkt->dts, 0);
                /* BUGFIX: check the search result before dereferencing the
                 * table; av_index_search_timestamp() returns -1 on failure */
                if (index >= 0)
                {
                    AVIndexEntry *e = &st->index_entries[index];
                    if (e->timestamp == ast->frame_offset && (e->flags & AVINDEX_KEYFRAME))
                        pkt->flags |= PKT_FLAG_KEY;
                }
            }
            else
            {
                pkt->flags |= PKT_FLAG_KEY; // if no index, better to say that all frames are key frames
            }
        }
        else
        {
            pkt->flags |= PKT_FLAG_KEY; /* audio/data: every packet is a sync point */
        }
        /* advance the frame/byte counter used to compute the next dts */
        if (ast->sample_size)
            ast->frame_offset += pkt->size;
        else
            ast->frame_offset++;
        ast->remaining -= size;
        if (!ast->remaining)
        {
            /* chunk finished: fall back to scanning for the next one */
            avi->stream_index_2 = -1;
            ast->packet_size = 0;
            if (size & 1) /* chunks are 2-byte aligned; eat the pad byte */
            {
                get_byte(pb);
                size++;
            }
        }
        return size;
    }
    /* scan byte-by-byte through 'movi' for the next chunk header, keeping an
     * 8-byte sliding window d[]: d[0..3] = chunk id, d[4..7] = chunk size */
    memset(d, -1, sizeof(int) * 8);
    for (i = sync = url_ftell(pb); !url_feof(pb); i++)
    {
        int j;
        if (i >= avi->movi_end)
            break;
        for (j = 0; j < 7; j++)
            d[j] = d[j + 1];
        d[7] = get_byte(pb);
        size = d[4] + (d[5] << 8) + (d[6] << 16) + (d[7] << 24);
        /* "ix##": stream number in the last two id characters */
        if (d[2] >= '0' && d[2] <= '9' && d[3] >= '0' && d[3] <= '9')
        {
            n = (d[2] - '0') * 10 + (d[3] - '0');
        }
        else
        {
            n = 100; //invalid stream id
        }
        if (i + size > avi->movi_end || d[0] < 0)
            continue;
        /* skip embedded index ('ix..') and JUNK chunks */
        if ((d[0] == 'i' && d[1] == 'x' && n < s->nb_streams)
            || (d[0] == 'J' && d[1] == 'U' && d[2] == 'N' && d[3] == 'K'))
        {
            url_fskip(pb, size);
            goto resync;
        }
        /* "##dc"/"##wb": stream number in the first two id characters */
        if (d[0] >= '0' && d[0] <= '9' && d[1] >= '0' && d[1] <= '9')
        {
            n = (d[0] - '0') * 10 + (d[1] - '0');
        }
        else
        {
            n = 100; //invalid stream id
        }
        //parse ##dc/##wb
        if (n < s->nb_streams)
        {
            AVStream *st;
            AVIStream *ast;
            st = s->streams[n];
            ast = st->priv_data;
            /* accept the chunk when we are still in sync (few bytes skipped)
             * and the type bytes look sane, or when the two-letter type
             * matches the prefix this stream has been using */
            if (((ast->prefix_count < 5 || sync + 9 > i) && d[2] < 128 && d[3] < 128)
                || d[2] * 256 + d[3] == ast->prefix)
            {
                if (d[2] * 256 + d[3] == ast->prefix)
                    ast->prefix_count++;
                else
                {
                    ast->prefix = d[2] * 256 + d[3];
                    ast->prefix_count = 0;
                }
                avi->stream_index_2 = n;
                ast->packet_size = size + 8;
                ast->remaining = size;
                goto resync;
            }
        }
        // palette changed chunk ("##pc")
        if (d[0] >= '0' && d[0] <= '9' && d[1] >= '0' && d[1] <= '9'
            && (d[2] == 'p' && d[3] == 'c') && n < s->nb_streams && i + size <= avi->movi_end)
        {
            AVStream *st;
            int first, clr, flags, k, p;
            st = s->streams[n];
            first = get_byte(pb);
            clr = get_byte(pb);
            if (!clr) // all 256 colors used
                clr = 256;
            flags = get_le16(pb); /* palette flags, unused */
            p = 4;
            for (k = first; k < clr + first; k++)
            {
                int r, g, b;
                r = get_byte(pb);
                g = get_byte(pb);
                b = get_byte(pb);
                get_byte(pb); /* alpha/pad byte, ignored */
                st->actx->palctrl->palette[k] = b + (g << 8) + (r << 16);
            }
            st->actx->palctrl->palette_changed = 1;
            goto resync;
        }
    }
    return -1;
}
/* Parse the 'idx1' chunk: read each 16-byte entry (tag, flags, pos, len) and
 * append it to the matching stream's index. Also flags the file as
 * non-interleaved when two consecutive entries share the same position.
 * Returns 0, or -1 when the chunk holds no entries. */
static int avi_read_idx1(AVFormatContext *s, int size)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = &s->pb;
    int nb_index_entries, i;
    AVStream *st;
    AVIStream *ast;
    unsigned int index, tag, flags, pos, len;
    unsigned last_pos = - 1;
    nb_index_entries = size / 16;
    if (nb_index_entries <= 0)
        return - 1;
    for (i = 0; i < nb_index_entries; i++)// read the entries and sort them in each stream component
    {
        tag = get_le32(pb);
        flags = get_le32(pb);
        pos = get_le32(pb);
        len = get_le32(pb);
        /* some files store offsets relative to 'movi', others absolute; if
         * the first entry is already past movi_list, treat them as absolute */
        if (i == 0 && pos > avi->movi_list)
            avi->movi_list = 0;
        pos += avi->movi_list;
        /* stream number is the two leading digit characters of the tag */
        index = ((tag &0xff) - '0') *10;
        index += ((tag >> 8) &0xff) - '0';
        if (index >= s->nb_streams)
            continue;
        st = s->streams[index];
        ast = st->priv_data;
        if (last_pos == pos)
            avi->non_interleaved = 1; /* two entries point at the same chunk */
        else
            av_add_index_entry(st, pos, ast->cum_len, len, 0, (flags &AVIIF_INDEX) ? AVINDEX_KEYFRAME : 0);
        /* advance this stream's running timestamp */
        if (ast->sample_size)
            ast->cum_len += len / ast->sample_size;
        else
            ast->cum_len++;
        last_pos = pos;
    }
    return 0;
}
/* Heuristic: the file is non-interleaved when some stream's data only begins
 * after another stream's data has already ended. */
static int guess_ni_flag(AVFormatContext *s)
{
    int64_t latest_first = 0;          /* max over streams of the first entry's pos */
    int64_t earliest_last = INT64_MAX; /* min over streams of the last entry's pos */
    int i;
    for (i = 0; i < s->nb_streams; i++)
    {
        AVStream *st = s->streams[i];
        int count = st->nb_index_entries;
        if (count <= 0)
            continue;
        if (st->index_entries[0].pos > latest_first)
            latest_first = st->index_entries[0].pos;
        if (st->index_entries[count - 1].pos < earliest_last)
            earliest_last = st->index_entries[count - 1].pos;
    }
    return latest_first > earliest_last;
}
/* Locate and parse the 'idx1' index chunk that follows the 'movi' data,
 * then restore the original read position. Always returns 0. */
static int avi_load_index(AVFormatContext *s)
{
    AVIContext *avi = s->priv_data;
    ByteIOContext *pb = &s->pb;
    uint32_t tag, size;
    offset_t pos = url_ftell(pb); /* remember where we were */
    url_fseek(pb, avi->movi_end, SEEK_SET);
    for (;;)
    {
        if (url_feof(pb))
            break;
        tag = get_le32(pb);
        size = get_le32(pb);
        switch (tag)
        {
            case MKTAG('i', 'd', 'x', '1'):
                if (avi_read_idx1(s, size) < 0)
                    goto skip; /* unusable index: keep scanning */
                else
                    goto the_end;
                break;
            default:
            skip:
                size += (size &1); /* chunks are 2-byte aligned */
                url_fskip(pb, size);
                break;
        }
    }
the_end:
    url_fseek(pb, pos, SEEK_SET);
    return 0;
}
/* Release all per-stream demuxer state allocated during header parsing:
 * the AVIStream, the codec extradata and the palette control block. */
static int avi_read_close(AVFormatContext *s)
{
    int i;
    for (i = 0; i < s->nb_streams; i++)
    {
        AVStream *st = s->streams[i];
        av_free(st->priv_data); /* the AVIStream */
        av_free(st->actx->extradata);
        av_free(st->actx->palctrl);
    }
    return 0;
}
/* Content probe: score AVPROBE_SCORE_MAX when the buffer begins with a
 * "RIFF....AVI " header, 0 otherwise (including too-small buffers). */
static int avi_probe(AVProbeData *p)
{
    if (p->buf_size <= 32) // check file header
        return 0;
    if (memcmp(p->buf, "RIFF", 4) == 0 && memcmp(p->buf + 8, "AVI ", 4) == 0)
        return AVPROBE_SCORE_MAX;
    return 0;
}
/* Demuxer descriptor registered with the format layer: name, size of the
 * private AVIContext, and the probe/header/packet/close callbacks. */
AVInputFormat avi_iformat =
{
    "avi",
    sizeof(AVIContext),
    avi_probe,
    avi_read_header,
    avi_read_packet,
    avi_read_close,
};
/* Register every demuxer this build supports (only AVI here). */
int avidec_init(void)
{
    // add the AVI demuxer to the global input-format list
    av_register_input_format(&avi_iformat);
    return 0;
}
/*
AVI header flags:
AVIF_HASINDEX:      the file contains an "idx1" index chunk
AVIF_MUSTUSEINDEX:  the index must be used to determine presentation order
AVIF_ISINTERLEAVED: the file is interleaved
AVIF_WASCAPTUREFILE: the file was specially allocated for real-time capture
AVIF_COPYRIGHTED:   the file contains copyrighted material
*/

@ -0,0 +1,134 @@
/************************************************************************
** Generic URL layer. A URLProtocol abstracts one concrete data source
** (file, pipe, ...); this file dispatches open/read/seek/close calls to
** whichever registered protocol matches the URL's scheme.
************************************************************************/
#include "../berrno.h"
#include "avformat.h"
/* Head of the linked list of all registered protocols (file, socket, ...). */
URLProtocol *first_protocol = NULL;
/* Register a protocol (a data-source backend such as a file or socket) by
 * appending it to the global protocol list. Always returns 0. */
int register_protocol(URLProtocol *protocol)
{
    URLProtocol **link = &first_protocol;
    while (*link != NULL)
        link = &(*link)->next;
    protocol->next = NULL;
    *link = protocol;
    return 0;
}
/* Open a URL of the form [proto:]path. The scheme is parsed from the
 * filename (defaulting to "file" for plain paths and single-letter DOS
 * drives), looked up in the registered protocol list, and that protocol's
 * url_open callback is invoked. On success *puc holds the new context;
 * on failure *puc is NULL and a negative errno-style code is returned. */
int url_open(URLContext **puc, const char *filename, int flags)
{
    // URL context being built
    URLContext *uc;
    // protocol implementation
    URLProtocol *up;
    const char *p;
    // parsed scheme name
    char proto_str[128], *q;
    int err;
    p = filename;
    q = proto_str;
    // copy the scheme (the part before ':')
    while (*p != '\0' && *p != ':')
    {
        /* cast to unsigned char: passing a plain (possibly negative) char
         * to isalpha() is undefined behavior */
        if (!isalpha((unsigned char)*p)) // protocols can only contain alphabetic chars
            goto file_proto;
        if ((q - proto_str) < sizeof(proto_str) - 1)
            *q++ = *p;
        p++;
    }
    // if the protocol has length 1, we consider it is a dos drive
    if (*p == '\0' || (q - proto_str) <= 1)
    {
file_proto:
        strcpy(proto_str, "file");
    }
    else
    {
        *q = '\0';
    }
    up = first_protocol;
    // find the protocol implementation by name
    while (up != NULL)
    {
        if (!strcmp(proto_str, up->name))
            goto found;
        up = up->next;
    }
    err = -ENOENT;
    goto fail;
found:
    /* URLContext ends in filename[1]; adding strlen(filename) leaves room
     * for the copied name plus its terminating NUL */
    uc = av_malloc(sizeof(URLContext) + strlen(filename));
    if (!uc)
    {
        err = -ENOMEM;
        goto fail;
    }
    strcpy(uc->filename, filename);
    uc->prot = up;
    uc->flags = flags;
    uc->max_packet_size = 0; // default: stream file
    // let the protocol do the actual open
    err = up->url_open(uc, filename, flags);
    if (err < 0)
    {
        av_free(uc);
        *puc = NULL;
        return err;
    }
    *puc = uc;
    return 0;
fail:
    *puc = NULL;
    return err;
}
/* Read up to size bytes via the protocol's read callback; fails with
 * AVERROR_IO when the context was opened write-only. */
int url_read(URLContext *h, unsigned char *buf, int size)
{
    if (h->flags & URL_WRONLY)
        return AVERROR_IO;
    return h->prot->url_read(h, buf, size);
}
/* Seek within the generalized file; returns -EPIPE when the protocol does
 * not support seeking. */
offset_t url_seek(URLContext *h, offset_t pos, int whence)
{
    if (!h->prot->url_seek)
        return -EPIPE;
    return h->prot->url_seek(h, pos, whence);
}
/* Close the URL through its protocol, free the context, and return the
 * protocol's close result. */
int url_close(URLContext *h)
{
    int ret = h->prot->url_close(h);
    av_free(h);
    return ret;
}
/* Return the maximum packet size (0 for ordinary stream files). */
int url_get_max_packet_size(URLContext *h)
{
    return h->max_packet_size;
}

@ -0,0 +1,119 @@
#ifndef AVIO_H
#define AVIO_H
/*
**
*/
#define URL_EOF (-1)
typedef int64_t offset_t;
/* 文件访问的权限的定义 */
#define URL_RDONLY 0
#define URL_WRONLY 1
#define URL_RDWR 2
/*
** URL context: the per-URL handle of the generalized-file layer. One is
** allocated per opened URL; protocol-specific state hangs off priv_data.
*/
typedef struct URLContext
{
    // the protocol implementation backing this URL
    struct URLProtocol *prot;
    int flags;            // URL_RDONLY / URL_WRONLY / URL_RDWR
    int max_packet_size;  // if non zero, the stream is packetized with this max packet size
    void *priv_data;      // for "file" this holds the fd; other protocols store their own state
    char filename[1];     // specified filename (allocated with extra room past the struct)
} URLContext;
/*
** URL protocol: one descriptor per supported data source (file, pipe, udp,
** tcp, ...). Only the local "file" protocol is implemented in this build.
** All registered protocols are linked through 'next'.
*/
typedef struct URLProtocol
{
    const char *name; // scheme name, as written before ':' in a URL
    int(*url_open)(URLContext *h, const char *filename, int flags);
    int(*url_read)(URLContext *h, unsigned char *buf, int size);
    int(*url_write)(URLContext *h, unsigned char *buf, int size);
    offset_t(*url_seek)(URLContext *h, offset_t pos, int whence);
    int(*url_close)(URLContext *h);
    struct URLProtocol *next; // next registered protocol
} URLProtocol;
/*
** Buffered generalized file: wraps an opaque source (usually a URLContext)
** with an in-memory read/write buffer.
*/
typedef struct ByteIOContext
{
    // data buffer
    unsigned char *buffer;
    // buffer size in bytes
    int buffer_size;
    // current read/write position and end of valid data within buffer
    unsigned char *buf_ptr, *buf_end;
    // passed to the callbacks below (usually a URLContext*)
    void *opaque;
    // refill callback
    int (*read_buf)(void *opaque, uint8_t *buf, int buf_size);
    // flush callback
    int (*write_buf)(void *opaque, uint8_t *buf, int buf_size);
    // reposition callback on the underlying source
    offset_t(*seek)(void *opaque, offset_t offset, int whence);
    // file position corresponding to the end of the current buffer
    offset_t pos; // position in the file of the current buffer
    // whether the next seek must flush pending writes
    int must_flush; // true if the next seek should flush
    // end-of-file indicator
    int eof_reached; // true if eof reached
    // open mode
    int write_flag; // true if open for writing
    // maximum packet size (0 = plain stream)
    int max_packet_size;
    // sticky error code
    int error; // contains the error code or 0 if no error happened
} ByteIOContext;
int url_open(URLContext **h, const char *filename, int flags);
int url_read(URLContext *h, unsigned char *buf, int size);
int url_write(URLContext *h, unsigned char *buf, int size);
offset_t url_seek(URLContext *h, offset_t pos, int whence);
int url_close(URLContext *h);
int url_get_max_packet_size(URLContext *h);
int register_protocol(URLProtocol *protocol);
int init_put_byte(ByteIOContext *s,
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int(*read_buf)(void *opaque, uint8_t *buf, int buf_size),
int(*write_buf)(void *opaque, uint8_t *buf, int buf_size),
offset_t(*seek)(void *opaque, offset_t offset, int whence));
offset_t url_fseek(ByteIOContext *s, offset_t offset, int whence);
void url_fskip(ByteIOContext *s, offset_t offset);
offset_t url_ftell(ByteIOContext *s);
offset_t url_fsize(ByteIOContext *s);
int url_feof(ByteIOContext *s);
int url_ferror(ByteIOContext *s);
int url_fread(ByteIOContext *s, unsigned char *buf, int size); // get_buffer
int get_byte(ByteIOContext *s);
unsigned int get_le32(ByteIOContext *s);
unsigned int get_le16(ByteIOContext *s);
int url_setbufsize(ByteIOContext *s, int buf_size);
int url_fopen(ByteIOContext *s, const char *filename, int flags);
int url_fclose(ByteIOContext *s);
int url_open_buf(ByteIOContext *s, uint8_t *buf, int buf_size, int flags);
int url_close_buf(ByteIOContext *s);
#endif

@ -0,0 +1,324 @@
/*
** Buffered generalized file: ByteIOContext.
** Unlike URLContext, ByteIOContext is buffered — its operations work on an
** internal cache rather than directly on a file descriptor. Data produced by
** the underlying URLContext is handed to the ByteIOContext sitting above it.
*/
#include "../berrno.h"
#include "avformat.h"
#include "avio.h"
#include <stdarg.h>
/* 最大的缓存长度 */
#define IO_BUFFER_SIZE 32768
/*
** Initialize a buffered generalized file over a caller-supplied buffer and
** read/write/seek callbacks. Always returns 0.
*/
int init_put_byte(ByteIOContext *s,
    unsigned char *buffer,
    int buffer_size,
    int write_flag,
    void *opaque,
    int(*read_buf)(void *opaque, uint8_t *buf, int buf_size),
    int(*write_buf)(void *opaque, uint8_t *buf, int buf_size),
    offset_t(*seek)(void *opaque, offset_t offset, int whence))
{
    s->buffer = buffer;
    s->buffer_size = buffer_size;
    s->buf_ptr = buffer;
    s->write_flag = write_flag;
    /* a read buffer starts out empty; a write buffer is all writable space */
    if (!s->write_flag)
        s->buf_end = buffer;
    else
        s->buf_end = buffer + buffer_size;
    s->opaque = opaque;
    s->write_buf = write_buf;
    s->read_buf = read_buf;
    s->seek = seek;
    s->pos = 0;
    s->must_flush = 0;
    s->eof_reached = 0;
    s->error = 0;
    s->max_packet_size = 0;
    return 0;
}
/*
** Seek in the buffered file. Targets that fall inside the current buffer are
** satisfied by moving buf_ptr; otherwise the buffer is dropped and the
** underlying seek callback is used. Only SEEK_SET and SEEK_CUR are supported.
** Returns the new absolute offset, or a negative error code.
*/
offset_t url_fseek(ByteIOContext *s, offset_t offset, int whence)
{
    offset_t offset1;
    if (whence != SEEK_CUR && whence != SEEK_SET)
        return - EINVAL;
    if (whence == SEEK_CUR)
    {
        /* current logical position = file pos of buffer start + offset of
         * buf_ptr inside the buffer */
        offset1 = s->pos - (s->buf_end - s->buffer) + (s->buf_ptr - s->buffer);
        if (offset == 0)
            return offset1; /* pure tell() */
        offset += offset1;  /* convert to an absolute target */
    }
    /* target position relative to the start of the buffered window */
    offset1 = offset - (s->pos - (s->buf_end - s->buffer));
    if (offset1 >= 0 && offset1 <= (s->buf_end - s->buffer))
    {
        s->buf_ptr = s->buffer + offset1; // can do the seek inside the buffer
    }
    else
    {
        if (!s->seek)
            return - EPIPE;
        /* invalidate the buffer and reposition the underlying stream */
        s->buf_ptr = s->buffer;
        s->buf_end = s->buffer;
        if (s->seek(s->opaque, offset, SEEK_SET) == (offset_t) - EPIPE)
            return - EPIPE;
        s->pos = offset;
    }
    s->eof_reached = 0;
    return offset;
}
/* Skip forward by offset bytes (relative seek). */
void url_fskip(ByteIOContext *s, offset_t offset)
{
    url_fseek(s, offset, SEEK_CUR);
}
/* Return the current logical position (a zero-length relative seek). */
offset_t url_ftell(ByteIOContext *s)
{
    return url_fseek(s, 0, SEEK_CUR);
}
/* Return the total stream size by seeking to the last byte and back, or
 * -EPIPE when the stream is not seekable. */
offset_t url_fsize(ByteIOContext *s)
{
    offset_t size;
    if (!s->seek)
        return - EPIPE;
    /* position of the last byte + 1 = size; then restore the position */
    size = s->seek(s->opaque, - 1, SEEK_END) + 1;
    s->seek(s->opaque, s->pos, SEEK_SET);
    return size;
}
/* Nonzero once end-of-file has been reached. */
int url_feof(ByteIOContext *s)
{
    return s->eof_reached;
}
/* Return the sticky error code, or 0 if no error has occurred. */
int url_ferror(ByteIOContext *s)
{
    return s->error;
}
// Input stream
/* Refill the internal buffer via the read callback. On EOF or error the
 * buffer contents are left intact so a backward seek can reuse them. */
static void fill_buffer(ByteIOContext *s)
{
    int len;
    if (s->eof_reached)
        return ;
    len = s->read_buf(s->opaque, s->buffer, s->buffer_size);
    if (len <= 0)
    { // do not modify buffer if EOF reached so that a seek back can be done without rereading data
        s->eof_reached = 1;
        if (len < 0)
            s->error = len; /* negative read result doubles as the error code */
    }
    else
    {
        s->pos += len;
        s->buf_ptr = s->buffer;
        s->buf_end = s->buffer + len;
    }
}
/* Read one byte from the buffered file. NOTE: returns 0 at EOF, so callers
 * that must distinguish EOF have to check url_feof() separately. */
int get_byte(ByteIOContext *s)
{
    if (s->buf_ptr >= s->buf_end)
        fill_buffer(s); /* try one refill */
    if (s->buf_ptr < s->buf_end)
        return *s->buf_ptr++;
    return 0;
}
/* Read a 16-bit little-endian value, reusing get_byte() for each byte. */
unsigned int get_le16(ByteIOContext *s)
{
    unsigned int lo = get_byte(s);
    unsigned int hi = get_byte(s);
    return lo | (hi << 8);
}
/* Read a 32-bit little-endian value, reusing get_le16() for each half. */
unsigned int get_le32(ByteIOContext *s)
{
    unsigned int lo = get_le16(s);
    unsigned int hi = get_le16(s);
    return lo | (hi << 16);
}
/* No buffered-write support in this build. */
#define url_write_buf NULL
/* Read adapter: forwards the refill request to the underlying URLContext. */
static int url_read_buf(void *opaque, uint8_t *buf, int buf_size)
{
    URLContext *h = opaque;
    return url_read(h, buf, buf_size);
}
/* Seek adapter: forwards the request to the underlying URLContext. */
static offset_t url_seek_buf(void *opaque, offset_t offset, int whence)
{
    URLContext *h = opaque;
    return url_seek(h, offset, whence);
}
/* Replace the internal buffer with a freshly allocated one of buf_size
 * bytes. Must be called before any I/O (buffered data is discarded).
 * Returns 0 on success, -ENOMEM on allocation failure. */
int url_setbufsize(ByteIOContext *s, int buf_size)
{
    uint8_t *fresh = av_malloc(buf_size);
    if (!fresh)
        return -ENOMEM;
    av_free(s->buffer);
    s->buffer = fresh;
    s->buffer_size = buf_size;
    s->buf_ptr = fresh;
    /* read buffers start empty; write buffers expose the whole area */
    s->buf_end = s->write_flag ? fresh + buf_size : fresh;
    return 0;
}
/* Open a buffered generalized file on top of url_open(). The buffer is one
 * packet for packetized protocols, IO_BUFFER_SIZE otherwise. Returns 0 or a
 * negative error code (the URL is closed again on failure). */
int url_fopen(ByteIOContext *s, const char *filename, int flags)
{
    URLContext *h;
    uint8_t *buffer;
    int buffer_size, max_packet_size;
    int err;
    err = url_open(&h, filename, flags);
    if (err < 0)
        return err;
    max_packet_size = url_get_max_packet_size(h);
    if (max_packet_size)
    {
        buffer_size = max_packet_size; // no need to bufferize more than one packet
    }
    else
    {
        buffer_size = IO_BUFFER_SIZE;
    }
    buffer = av_malloc(buffer_size);
    if (!buffer)
    {
        url_close(h);
        return - ENOMEM;
    }
    /* write mode when opened WRONLY or RDWR */
    if (init_put_byte(s,
        buffer,
        buffer_size,
        (h->flags & URL_WRONLY || h->flags & URL_RDWR),
        h,
        url_read_buf,
        url_write_buf,
        url_seek_buf) < 0)
    {
        /* cannot happen with the current init_put_byte(), but clean up anyway */
        url_close(h);
        av_free(buffer);
        return AVERROR_IO;
    }
    s->max_packet_size = max_packet_size;
    return 0;
}
/* Close the buffered file: free the buffer, clear the context, and close
 * the URLContext stored in opaque. Returns the protocol's close result. */
int url_fclose(ByteIOContext *s)
{
    URLContext *h = s->opaque;
    av_free(s->buffer);
    memset(s, 0, sizeof(ByteIOContext));
    return url_close(h);
}
/* Read size bytes into buf. Requests larger than the internal buffer bypass
 * it and go straight to the read callback. Returns the number of bytes
 * actually read, which may be short at EOF or on error. */
int url_fread(ByteIOContext *s, unsigned char *buf, int size) // get_buffer
{
    int len, size1;
    size1 = size; /* remember the request to compute the return value */
    while (size > 0)
    {
        /* bytes currently available in the buffer */
        len = s->buf_end - s->buf_ptr;
        if (len > size)
            len = size;
        if (len == 0)
        {
            if (size > s->buffer_size)
            {
                /* big read: skip the internal buffer entirely */
                len = s->read_buf(s->opaque, buf, size);
                if (len <= 0)
                {
                    s->eof_reached = 1;
                    if (len < 0)
                        s->error = len;
                    break;
                }
                else
                {
                    s->pos += len;
                    size -= len;
                    buf += len;
                    /* buffer deliberately left empty (note the disabled "+ len") */
                    s->buf_ptr = s->buffer;
                    s->buf_end = s->buffer /* + len*/;
                }
            }
            else
            {
                fill_buffer(s);
                len = s->buf_end - s->buf_ptr;
                if (len == 0)
                    break; /* EOF or error */
            }
        }
        else
        {
            /* serve from the buffer */
            memcpy(buf, s->buf_ptr, len);
            buf += len;
            s->buf_ptr += len;
            size -= len;
        }
    }
    return size1 - size;
}

@ -0,0 +1,46 @@
/*
**
*/
#include "avformat.h"
/*
** Test whether str begins with the prefix val. Returns 1 on a match (and,
** when ptr is non-NULL, stores the position just past the prefix in *ptr),
** otherwise 0.
*/
int strstart(const char *str, const char *val, const char **ptr)
{
    size_t i;
    for (i = 0; val[i] != '\0'; i++)
    {
        if (str[i] != val[i])
            return 0;
    }
    if (ptr)
        *ptr = str + i;
    return 1;
}
/*
** Bounded string copy: copies at most buf_size-1 characters from str into
** buf and always NUL-terminates the result. Does nothing when buf_size <= 0.
*/
void pstrcpy(char *buf, int buf_size, const char *str)
{
    char *dst = buf;
    char *limit;
    if (buf_size <= 0)
        return ;
    limit = buf + buf_size - 1; /* leave room for the terminator */
    while (dst < limit && *str != '\0')
        *dst++ = *str++;
    *dst = '\0';
}

@ -0,0 +1,89 @@
/*
** ffplay can support several protocols (file, rtsp, rtp, tcp, ...); this
** build implements only the local "file:" protocol. Every protocol is
** exposed through the generalized-file URLContext layer; this file supplies
** the URLProtocol callbacks that back a file-based URLContext.
*/
#include "../berrno.h"
#include "avformat.h"
#include <fcntl.h>
#ifndef CONFIG_WIN32
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/time.h>
#else
#include <io.h>
#define open(fname,oflag,pmode) _open(fname,oflag,pmode)
#endif
/*
** Open a local file for the "file" protocol: strip an optional "file:"
** prefix, translate URL_* flags into open(2) flags, and stash the file
** descriptor in h->priv_data. Returns 0 or -ENOENT.
*/
static int file_open(URLContext *h, const char *filename, int flags)
{
    /* renamed from 'access', which shadows the POSIX access() function */
    int open_flags;
    int fd;
    /* skip a leading "file:" scheme if present */
    strstart(filename, "file:", &filename);
    if (flags &URL_RDWR)
        open_flags = O_CREAT | O_TRUNC | O_RDWR;
    else if (flags &URL_WRONLY)
        open_flags = O_CREAT | O_TRUNC | O_WRONLY;
    else
        open_flags = O_RDONLY;
#if defined(CONFIG_WIN32) || defined(CONFIG_OS2) || defined(__CYGWIN__)
    open_flags |= O_BINARY; /* no newline translation on Windows-like hosts */
#endif
    fd = open(filename, open_flags, 0666);
    if (fd < 0)
        return - ENOENT;
    /* store the descriptor directly in the pointer-sized priv_data */
    h->priv_data = (void*)(size_t)fd;
    return 0;
}
/* Read from the file descriptor stored in priv_data; read(2) semantics. */
static int file_read(URLContext *h, unsigned char *buf, int size)
{
    int fd = (size_t)h->priv_data;
    return read(fd, buf, size);
}
/* Write to the file descriptor stored in priv_data; write(2) semantics. */
static int file_write(URLContext *h, unsigned char *buf, int size)
{
    int fd = (size_t)h->priv_data;
    return write(fd, buf, size);
}
/* Reposition the file descriptor; lseek(2) semantics. */
static offset_t file_seek(URLContext *h, offset_t pos, int whence)
{
    int fd = (size_t)h->priv_data;
    return lseek(fd, pos, whence);
}
/* Close the file descriptor stored in priv_data. */
static int file_close(URLContext *h)
{
    int fd = (size_t)h->priv_data;
    return close(fd);
}
/* Protocol descriptor for local files ("file:"). */
URLProtocol file_protocol =
{
    "file",
    file_open,
    file_read,
    file_write,
    file_seek,
    file_close,
};

@ -0,0 +1,345 @@
/*
** Generic format-layer utilities: demuxer registration and probing,
** file/stream opening and closing, seek-index management, and stream
** construction helpers.
*/
#include "../berrno.h"
#include "avformat.h"
#include <assert.h>
/* Guard the redefinition: <limits.h> may already provide UINT_MAX, and an
 * unconditional #define with different spelling triggers a redefinition
 * warning (or an error under -Werror). */
#ifndef UINT_MAX
#define UINT_MAX (0xffffffff)
#endif
/* Probe window: start at 2KB and double up to 128KB until a format matches. */
#define PROBE_BUF_MIN 2048
#define PROBE_BUF_MAX 131072
/* All registered demuxers, kept in a singly-linked list. */
AVInputFormat *first_iformat = NULL;
/* Append a demuxer to the end of the registered-format list. */
void av_register_input_format(AVInputFormat *format)
{
    AVInputFormat **link = &first_iformat;
    while (*link != NULL)
        link = &(*link)->next;
    format->next = NULL;
    *link = format;
}
/* Return 1 when filename's extension (the text after the last '.') matches
 * one of the comma-separated names in extensions, comparing case-
 * insensitively; otherwise 0 (also for a NULL or extension-less filename). */
int match_ext(const char *filename, const char *extensions)
{
    const char *dot, *p;
    char token[32], *q;
    if (!filename)
        return 0;
    dot = strrchr(filename, '.');
    if (!dot)
        return 0;
    p = extensions;
    for (;;)
    {
        /* copy the next comma-separated token (truncated at 31 chars,
         * matching the original behavior) */
        q = token;
        while (*p != '\0' && *p != ',' && q - token < sizeof(token) - 1)
            *q++ = *p++;
        *q = '\0';
        if (!strcasecmp(token, dot + 1))
            return 1;
        if (*p == '\0')
            return 0;
        p++; /* skip the comma */
    }
}
/* Probe the registered demuxers against the given data and filename and
 * return the best-scoring format, or NULL when nothing matches. */
AVInputFormat *av_probe_input_format(AVProbeData *pd, int is_opened)
{
    AVInputFormat *fmt1, *fmt;
    int score, score_max;
    fmt = NULL;
    score_max = 0;
    for (fmt1 = first_iformat; fmt1 != NULL; fmt1 = fmt1->next)
    {
        /* NOTE(review): when is_opened is 0 every candidate is skipped, so
         * the function can only return NULL in that case — confirm intended */
        if (!is_opened)
            continue;
        score = 0;
        if (fmt1->read_probe)
        {
            /* content-based probe */
            score = fmt1->read_probe(pd);
        }
        else if (fmt1->extensions)
        {
            /* fall back to matching the filename extension */
            if (match_ext(pd->filename, fmt1->extensions))
                score = 50;
        }
        if (score > score_max)
        {
            score_max = score;
            fmt = fmt1;
        }
    }
    return fmt;
}
/* Allocate an AVFormatContext for an already-probed format, attach the byte
 * stream, and let the demuxer parse the file header. On success *ic_ptr
 * holds the context; on failure it is NULL and a negative AVERROR code is
 * returned. */
int av_open_input_stream(AVFormatContext **ic_ptr, ByteIOContext *pb, const char *filename,
    AVInputFormat *fmt, AVFormatParameters *ap)
{
    int err;
    AVFormatContext *ic;
    AVFormatParameters default_ap;
    if (!ap)
    {
        /* callers may pass NULL: substitute zeroed defaults */
        ap = &default_ap;
        memset(ap, 0, sizeof(default_ap));
    }
    ic = av_mallocz(sizeof(AVFormatContext));
    if (!ic)
    {
        err = AVERROR_NOMEM;
        goto fail;
    }
    ic->iformat = fmt;
    if (pb)
        ic->pb = *pb; /* struct copy: ic owns the buffered stream from here on */
    if (fmt->priv_data_size > 0)
    {
        ic->priv_data = av_mallocz(fmt->priv_data_size);
        if (!ic->priv_data)
        {
            err = AVERROR_NOMEM;
            goto fail;
        }
    }
    else
    {
        ic->priv_data = NULL;
    }
    err = ic->iformat->read_header(ic, ap);
    if (err < 0)
        goto fail;
    *ic_ptr = ic;
    return 0;
fail:
    if (ic)
        av_freep(&ic->priv_data);
    av_free(ic);
    *ic_ptr = NULL;
    return err;
}
/* Open a media file: probe the container format by reading successively
 * larger windows (PROBE_BUF_MIN doubling up to PROBE_BUF_MAX) until some
 * demuxer matches, then hand the stream to av_open_input_stream(). */
int av_open_input_file(AVFormatContext **ic_ptr, const char *filename, AVInputFormat *fmt,
    int buf_size, AVFormatParameters *ap)
{
    int err, must_open_file, file_opened, probe_size;
    AVProbeData probe_data, *pd = &probe_data;
    ByteIOContext pb1, *pb = &pb1;
    file_opened = 0;
    pd->filename = "";
    if (filename)
        pd->filename = filename;
    pd->buf = NULL;
    pd->buf_size = 0;
    must_open_file = 1; /* always read data for probing in this build */
    if (!fmt || must_open_file)
    {
        if (url_fopen(pb, filename, URL_RDONLY) < 0)
        {
            err = AVERROR_IO;
            goto fail;
        }
        file_opened = 1;
        if (buf_size > 0)
            url_setbufsize(pb, buf_size);
        for (probe_size = PROBE_BUF_MIN; probe_size <= PROBE_BUF_MAX && !fmt; probe_size <<= 1)
        {
            /* read a probe window, then rewind so the demuxer sees the start */
            pd->buf = av_realloc(pd->buf, probe_size);
            pd->buf_size = url_fread(pb, pd->buf, probe_size);
            if (url_fseek(pb, 0, SEEK_SET) == (offset_t) - EPIPE)
            {
                /* non-seekable stream: reopen instead of rewinding */
                url_fclose(pb);
                if (url_fopen(pb, filename, URL_RDONLY) < 0)
                {
                    file_opened = 0;
                    err = AVERROR_IO;
                    goto fail;
                }
            }
            fmt = av_probe_input_format(pd, 1);
        }
        av_freep(&pd->buf);
    }
    if (!fmt)
    {
        err = AVERROR_NOFMT;
        goto fail;
    }
    err = av_open_input_stream(ic_ptr, pb, filename, fmt, ap);
    if (err)
        goto fail;
    return 0;
fail:
    av_freep(&pd->buf);
    if (file_opened)
        url_fclose(pb);
    *ic_ptr = NULL;
    return err;
}
/* Read one packet by delegating to the demuxer's read_packet callback. */
int av_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    return s->iformat->read_packet(s, pkt);
}
/*
** Append or insert an entry into a stream's timestamp-sorted seek index
** (ffplay uses it to implement seeking). 'distance' is accepted for API
** compatibility but unused here. Returns the entry's table position, or -1.
*/
int av_add_index_entry(AVStream *st, int64_t pos, int64_t timestamp, int size, int distance, int flags)
{
    AVIndexEntry *entries, *ie;
    int index;
    /* overflow guard on the allocation size */
    if ((unsigned)st->nb_index_entries + 1 >= UINT_MAX / sizeof(AVIndexEntry))
        return - 1;
    entries = av_fast_realloc(st->index_entries, &st->index_entries_allocated_size,
        (st->nb_index_entries + 1) * sizeof(AVIndexEntry));
    if (!entries)
        return - 1;
    st->index_entries = entries;
    index = av_index_search_timestamp(st, timestamp, AVSEEK_FLAG_ANY);
    if (index < 0) // larger than every existing timestamp: append at the end
    {
        index = st->nb_index_entries++;
        ie = &entries[index];
        assert(index == 0 || ie[ - 1].timestamp < timestamp);
    }
    else // insert in the middle (or overwrite an entry with equal timestamp)
    {
        ie = &entries[index];
        if (ie->timestamp != timestamp)
        {
            /* never allow placing a larger timestamp before a smaller one */
            if (ie->timestamp <= timestamp)
                return - 1;
            /* shift the tail up one slot to make room at 'index' */
            memmove(entries + index + 1, entries + index,
                sizeof(AVIndexEntry)*(st->nb_index_entries - index));
            st->nb_index_entries++;
        }
    }
    ie->pos = pos;
    ie->timestamp = timestamp;
    ie->size = size;
    ie->flags = flags;
    return index;
}
/* Binary-search the sorted index for wanted_timestamp. With
 * AVSEEK_FLAG_BACKWARD, pick the last entry <= the target, otherwise the
 * first entry >= it; unless AVSEEK_FLAG_ANY is set, keep walking in that
 * direction to the nearest keyframe entry. Returns the entry index or -1. */
int av_index_search_timestamp(AVStream *st, int64_t wanted_timestamp, int flags)
{
    AVIndexEntry *entries = st->index_entries;
    int nb_entries = st->nb_index_entries;
    int a, b, m;
    int64_t timestamp;
    a = - 1;
    b = nb_entries;
    while (b - a > 1) // plain binary search over the sorted entries
    {
        m = (a + b) >> 1;
        timestamp = entries[m].timestamp;
        /* on an exact hit both bounds collapse onto m */
        if (timestamp >= wanted_timestamp)
            b = m;
        if (timestamp <= wanted_timestamp)
            a = m;
    }
    m = (flags &AVSEEK_FLAG_BACKWARD) ? a : b;
    if (!(flags &AVSEEK_FLAG_ANY))
    {
        /* walk to the nearest keyframe entry in the requested direction */
        while (m >= 0 && m < nb_entries && !(entries[m].flags &AVINDEX_KEYFRAME))
        {
            m += (flags &AVSEEK_FLAG_BACKWARD) ? - 1: 1;
        }
    }
    if (m == nb_entries)
        return - 1;
    return m; /* may be -1 when searching backward past the first entry */
}
/*
 * Close an input media file and release everything it owns: the demuxer's
 * private state, every stream (seek index, codec context, the stream struct
 * itself), the underlying byte-stream, and finally the context.
 */
void av_close_input_file(AVFormatContext *s)
{
    int idx;

    /* Let the demuxer tear down its private state first, if it has a hook. */
    if (s->iformat->read_close)
        s->iformat->read_close(s);

    for (idx = 0; idx < s->nb_streams; idx++)
    {
        AVStream *cur = s->streams[idx];
        av_free(cur->index_entries);
        av_free(cur->actx);
        av_free(cur);
    }

    url_fclose(&s->pb);
    av_freep(&s->priv_data);
    av_free(s);
}
/*
 * Allocate a new AVStream, attach a freshly allocated codec context, and
 * register it in the format context's stream table.
 *
 * Returns the new stream, or NULL when the stream table is already full or
 * an allocation fails.
 *
 * NOTE(review): the `id` parameter is accepted for API compatibility but is
 * not stored by this simplified port — confirm no caller expects st->id.
 */
AVStream *av_new_stream(AVFormatContext *s, int id)
{
    AVStream *st;

    if (s->nb_streams >= MAX_STREAMS)
        return NULL;

    st = av_mallocz(sizeof(AVStream));
    if (!st)
        return NULL;

    st->actx = avcodec_alloc_context();
    if (!st->actx)
    {
        /* Fix: previously a NULL codec context was silently stored, leaving a
           half-initialized stream for later code to trip over. */
        av_free(st);
        return NULL;
    }

    s->streams[s->nb_streams++] = st;
    return st;
}
/*
 * Set the time base (pts_num / pts_den) used to interpret the timestamps of
 * stream s.
 *
 * NOTE(review): pts_wrap_bits is accepted for API compatibility but ignored
 * here — timestamp wrap-around is not handled by this simplified port.
 */
void av_set_pts_info(AVStream *s, int pts_wrap_bits, int pts_num, int pts_den)
{
s->time_base.num = pts_num;
s->time_base.den = pts_den;
}

@ -0,0 +1,57 @@
#ifndef AVUTIL_H
#define AVUTIL_H
#ifdef __cplusplus
extern "C"
{
#endif
#include "common.h"
#include "bswap.h"
#include "mathematics.h"
#include "rational.h"
#define AV_STRINGIFY(s) AV_TOSTRING(s)
#define AV_TOSTRING(s) #s
#define LIBAVUTIL_VERSION_INT ((49<<16)+(0<<8)+0)
#define LIBAVUTIL_VERSION 49.0.0
#define LIBAVUTIL_BUILD LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_IDENT "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION)
/* Pixel formats */
enum PixelFormat
{
PIX_FMT_NONE = - 1,
PIX_FMT_YUV420P, // Planar YUV 4:2:0 (1 Cr & Cb sample per 2x2 Y samples)
PIX_FMT_YUV422, // Packed pixel, Y0 Cb Y1 Cr
PIX_FMT_RGB24, // Packed pixel, 3 bytes per pixel, RGBRGB...
PIX_FMT_BGR24, // Packed pixel, 3 bytes per pixel, BGRBGR...
PIX_FMT_YUV422P, // Planar YUV 4:2:2 (1 Cr & Cb sample per 2x1 Y samples)
PIX_FMT_YUV444P, // Planar YUV 4:4:4 (1 Cr & Cb sample per 1x1 Y samples)
PIX_FMT_RGBA32, // Packed pixel, 4 bytes per pixel, BGRABGRA..., stored in cpu endianness
PIX_FMT_YUV410P, // Planar YUV 4:1:0 (1 Cr & Cb sample per 4x4 Y samples)
PIX_FMT_YUV411P, // Planar YUV 4:1:1 (1 Cr & Cb sample per 4x1 Y samples)
PIX_FMT_RGB565, // always stored in cpu endianness
PIX_FMT_RGB555, // always stored in cpu endianness, most significant bit to 1
PIX_FMT_GRAY8, // 8 bit grayscale, 1 byte per pixel
PIX_FMT_MONOWHITE, // 0 is white
PIX_FMT_MONOBLACK, // 0 is black
PIX_FMT_PAL8, // 8 bit with RGBA palette
PIX_FMT_YUVJ420P, // Planar YUV 4:2:0 full scale (jpeg)
PIX_FMT_YUVJ422P, // Planar YUV 4:2:2 full scale (jpeg)
PIX_FMT_YUVJ444P, // Planar YUV 4:4:4 full scale (jpeg)
PIX_FMT_XVMC_MPEG2_MC, // XVideo Motion Acceleration via common packet passing(xvmc_render.h)
PIX_FMT_XVMC_MPEG2_IDCT, // XVideo Motion Acceleration, IDCT level
PIX_FMT_UYVY422, // Packed pixel, Cb Y0 Cr Y1
PIX_FMT_UYVY411, // Packed pixel, Cb Y0 Y1 Cr Y2 Y3
PIX_FMT_NB, // number of pixel formats — keep last
};
#ifdef __cplusplus
}
#endif
#endif

@ -0,0 +1,29 @@
/************************************************************************/
/* Byte-order (endianness) swapping helpers                             */
/************************************************************************/
#ifndef __BSWAP_H__
#define __BSWAP_H__
/* Swap the byte order of a 16-bit value (endianness conversion). */
static inline uint16_t bswap_16(uint16_t x)
{
    uint16_t low_to_high = (uint16_t)(x << 8);
    uint16_t high_to_low = (uint16_t)(x >> 8);
    return (uint16_t)(low_to_high | high_to_low);
}
/* Swap the byte order of a 32-bit value (endianness conversion):
   first swap the bytes within each 16-bit half, then swap the halves. */
static inline uint32_t bswap_32(uint32_t x)
{
    uint32_t pair_swapped = ((x & 0x00FF00FF) << 8) | ((x & 0xFF00FF00) >> 8);
    return (pair_swapped << 16) | (pair_swapped >> 16);
}
// be2me ... BigEndian to MachineEndian
// le2me ... LittleEndian to MachineEndian
#define be2me_16(x) bswap_16(x)
#define be2me_32(x) bswap_32(x)
#define le2me_16(x) (x)
#define le2me_32(x) (x)
#endif

@ -0,0 +1,69 @@
/************************************************************************/
/* Common data types and helper functions                               */
/************************************************************************/
#ifndef COMMON_H
#define COMMON_H
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#if defined(WIN32) && !defined(__MINGW32__) && !defined(__CYGWIN__)
#define CONFIG_WIN32
#endif
#ifdef CONFIG_WIN32
#define inline __inline
#endif
typedef signed char int8_t;
typedef signed short int16_t;
typedef signed int int32_t;
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
#ifdef CONFIG_WIN32
typedef signed __int64 int64_t;
typedef unsigned __int64 uint64_t;
#else
typedef signed long long int64_t;
typedef unsigned long long uint64_t;
#endif
#ifdef CONFIG_WIN32
#define int64_t_C(c) (c ## i64)
#define uint64_t_C(c) (c ## i64)
#else
#define int64_t_C(c) (c ## LL)
#define uint64_t_C(c) (c ## ULL)
#endif
#ifndef INT64_MAX
#define INT64_MAX int64_t_C(9223372036854775807)
#endif
/*
 * Case-insensitive string comparison (replacement for POSIX strcasecmp,
 * which MSVC does not provide).
 *
 * Returns 0 when the strings are equal ignoring case; otherwise the
 * difference between the first mismatching pair of characters after
 * upper-casing (negative if s1 sorts before s2, positive otherwise).
 *
 * Fixes vs the original: parameters are const-correct (string literals can
 * be passed without discarding const), and the function is `inline` like its
 * header-mates so including this header in several translation units does
 * not provoke unused-static-function warnings.
 */
static inline int strcasecmp(const char *s1, const char *s2)
{
    while (toupper((unsigned char) *s1) == toupper((unsigned char) *s2))
    {
        if (*s1 == '\0')
            return 0;
        s1++;
        s2++;
    }
    return (toupper((unsigned char) *s1) - toupper((unsigned char) *s2));
}
/* Clamp a to the inclusive range [amin, amax]. */
static inline int clip(int a, int amin, int amax)
{
    return (a < amin) ? amin : ((a > amax) ? amax : a);
}
#endif

@ -0,0 +1,10 @@
#ifndef MATHEMATICS_H
#define MATHEMATICS_H
/* Rescale a 64-bit integer by the ratio b / c, i.e. compute a * b / c.
   NOTE(review): the product a * b can overflow int64_t for large operands —
   the full FFmpeg av_rescale guards against that; this simplified port
   does not. */
static inline int64_t av_rescale(int64_t a, int64_t b, int64_t c)
{
    int64_t scaled = a * b;
    return scaled / c;
}
#endif

@ -0,0 +1,20 @@
/************************************************************************/
/* Rational number (fraction) definition                                */
/************************************************************************/
#ifndef RATIONAL_H
#define RATIONAL_H
/* Rational number (fraction). */
typedef struct AVRational
{
    int num; // numerator
    int den; // denominator
} AVRational;

/* Convert a rational to its floating-point value (num / den). */
static inline double av_q2d(AVRational a)
{
    double numerator = a.num;
    return numerator / a.den;
}
#endif

@ -0,0 +1,2 @@
欢迎光临 mcodec.cnblogs.com
联系作者 tslking@tom.com
Loading…
Cancel
Save