• Complete steps for video encoding/decoding with the open-source FFmpeg library on Windows


    Final decoding result:

    1. UI design

    2. Enter the default values in the control's property window

    3. Copy the compiled FFmpeg libraries to the directory at the same level as the project

    4. Reference the FFmpeg libraries and header files in the project (see the .pro sketch after step 5)

     

    5. Link the specified FFmpeg libraries
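
    The library-reference screenshots from the original post are not reproduced here. As a rough sketch only, a qmake .pro file for this setup would point at the copied FFmpeg package roughly as follows; the ffmpeg directory name and the exact library list are assumptions and must match your own build:

    # assumed layout: the prebuilt FFmpeg package sits next to the project directory
    INCLUDEPATH += $$PWD/../ffmpeg/include

    LIBS += -L$$PWD/../ffmpeg/lib \
            -lavformat -lavcodec -lavdevice -lavutil -lswscale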

     

    6. Use the FFmpeg library

    Include the FFmpeg header files. Because FFmpeg is a C library, the headers must be wrapped in extern "C" when included from C++:

    extern "C"
    {
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavcodec/avcodec.h"
    #include "libavcodec/bsf.h"
    #include "libavformat/avformat.h"
    #include "libavutil/avutil.h"
    #include "libavutil/imgutils.h"
    #include "libavutil/log.h"
    #include "libavutil/time.h"
    }

    Create the video codec manager class
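
    The original post does not show the class declaration itself. Below is a minimal sketch of what ffmpegmananger.h might contain, inferred from the implementation in the next section; only the members that actually appear in the .cpp are listed, and everything else about the header is an assumption:

    #ifndef FFMPEGMANANGER_H
    #define FFMPEGMANANGER_H

    #include <QObject>
    #include <QString>
    #include <QImage>

    extern "C"
    {
    #include "libavformat/avformat.h"
    #include "libavcodec/avcodec.h"
    }

    class ffmpegMananger : public QObject
    {
        Q_OBJECT
    public:
        explicit ffmpegMananger(QObject *parent = nullptr);
        ~ffmpegMananger();

        void getRtspURL(QString strRtspURL);   // set the RTSP input URL
        void getOutURL(QString strRute);       // set the output file path
        int  ffmepgInput();                    // decode, re-encode and mux the stream
        void writeTail();                      // write the output file trailer

    signals:
        void Sig_GetOneFrame(QImage image);    // emitted for every decoded frame

    private:
        void setOutputCtx(AVCodecContext *encCtx, AVFormatContext **pTsFmtCtx, int &nVideoIdx_out);
        void setDecoderPts(int idx, int count, AVFrame *pFrame);
        void setEncoderPts(int nVideo_indx, int frame_index, int videoindex_out, AVPacket *newpkt);
        void openEncoder(int width, int height, AVCodecContext **enc_ctx);

        AVFormatContext *m_pInFmtCtx;          // input (RTSP) format context
        AVFormatContext *m_pTsFmtCtx;          // output (TS file) format context
        QString m_qstrRtspURL;                 // RTSP URL
        QString m_qstrOutPutFile;              // output file path
    };

    #endif // FFMPEGMANANGER_H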

    Implement the video codec manager class (ffmpegmananger.cpp):

    #include "ffmpegmananger.h"
    #include <iostream>

    ffmpegMananger::ffmpegMananger(QObject *parent):
        QObject(parent)
    {
        m_pInFmtCtx = nullptr;
        m_pTsFmtCtx = nullptr;
        m_qstrRtspURL = "";
        m_qstrOutPutFile = "";
    }

    ffmpegMananger::~ffmpegMananger()
    {
        if (m_pInFmtCtx)
        {
            avformat_close_input(&m_pInFmtCtx);   // closes and frees the input context
        }
        if (m_pTsFmtCtx)
        {
            avio_closep(&m_pTsFmtCtx->pb);        // close the output file
            avformat_free_context(m_pTsFmtCtx);
        }
    }

    void ffmpegMananger::getRtspURL(QString strRtspURL)
    {
        this->m_qstrRtspURL = strRtspURL;
    }

    void ffmpegMananger::getOutURL(QString strRute)
    {
        this->m_qstrOutPutFile = strRute;
        printf("===========%s\n", m_qstrOutPutFile.toStdString().c_str());
    }

    void ffmpegMananger::setOutputCtx(AVCodecContext *encCtx, AVFormatContext **pTsFmtCtx, int &nVideoIdx_out)
    {
        avformat_alloc_output_context2(pTsFmtCtx, nullptr, nullptr, m_qstrOutPutFile.toStdString().c_str());
        if (!*pTsFmtCtx)
        {
            printf("Could not create output context\n");
            return;
        }
        if (avio_open(&((*pTsFmtCtx)->pb), m_qstrOutPutFile.toStdString().c_str(), AVIO_FLAG_READ_WRITE) < 0)
        {
            avformat_free_context(*pTsFmtCtx);
            printf("avio_open fail.");
            return;
        }
        AVStream *out_stream = avformat_new_stream(*pTsFmtCtx, encCtx->codec);
        nVideoIdx_out = out_stream->index;
        avcodec_parameters_from_context(out_stream->codecpar, encCtx);
        printf("==========Output Information==========\n");
        av_dump_format(*pTsFmtCtx, 0, m_qstrOutPutFile.toStdString().c_str(), 1);
        printf("======================================\n");
    }
    int ffmpegMananger::ffmepgInput()
    {
        int nRet = 0;
        AVCodecContext *encCtx = nullptr;   // encoder context
        //const char *pUrl = "D:/videos/264.dat";
        std::string temp = m_qstrRtspURL.toStdString();
        const char *pUrl = temp.c_str();
        printf("===========%s\n", pUrl);

        AVDictionary *options = nullptr;
        av_dict_set(&options, "rtsp_transport", "tcp", 0);
        av_dict_set(&options, "stimeout", "10000000", 0);
        // set the "buffer_size" receive buffer
        av_dict_set(&options, "buffer_size", "1024000", 0);
        nRet = avformat_open_input(&m_pInFmtCtx, pUrl, nullptr, &options);
        av_dict_free(&options);
        if (nRet < 0)
        {
            printf("Could not open input file, =========== keep trying\n");
            return nRet;
        }
        avformat_find_stream_info(m_pInFmtCtx, nullptr);
        printf("===========Input Information==========\n");
        av_dump_format(m_pInFmtCtx, 0, pUrl, 0);
        printf("======================================\n");

        // 1. find the video stream index
        int nVideo_indx = av_find_best_stream(m_pInFmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
        if (nVideo_indx < 0)
        {
            avformat_close_input(&m_pInFmtCtx);
            printf("av_find_best_stream fail: no video stream found\n");
            return -1;
        }
        // 2. find the decoder
        AVCodec *pInCodec = avcodec_find_decoder(m_pInFmtCtx->streams[nVideo_indx]->codecpar->codec_id);
        if (nullptr == pInCodec)
        {
            printf("avcodec_find_decoder fail.");
            return -1;
        }
        // allocate the decoder context
        AVCodecContext *pInCodecCtx = avcodec_alloc_context3(pInCodec);
        // copy the decoder parameters from the stream
        nRet = avcodec_parameters_to_context(pInCodecCtx, m_pInFmtCtx->streams[nVideo_indx]->codecpar);
        if (nRet < 0)
        {
            avcodec_free_context(&pInCodecCtx);
            printf("avcodec_parameters_to_context fail.");
            return -1;
        }
        // open the decoder
        if (avcodec_open2(pInCodecCtx, pInCodec, nullptr) < 0)
        {
            avcodec_free_context(&pInCodecCtx);
            printf("Error: Can't open codec!\n");
            return -1;
        }
        printf("width = %d\n", pInCodecCtx->width);
        printf("height = %d\n", pInCodecCtx->height);

        int frame_index = 0;
        int got_picture = 0;
        AVStream *in_stream = nullptr;
        AVStream *out_stream = nullptr;
        AVFrame *pFrame = av_frame_alloc();
        AVPacket *newpkt = av_packet_alloc();   // av_packet_alloc() already initializes the packet
        AVPacket *packet = av_packet_alloc();
        // alloc AVFrame
        AVFrame *pFrameRGB = av_frame_alloc();
        // color-space conversion / scaling context
        SwsContext *m_SwsContext = sws_getContext(pInCodecCtx->width, pInCodecCtx->height,
                                                  pInCodecCtx->pix_fmt, pInCodecCtx->width, pInCodecCtx->height,
                                                  AV_PIX_FMT_RGB32, SWS_BICUBIC, nullptr, nullptr, nullptr);
        int bytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, pInCodecCtx->width, pInCodecCtx->height, 4);
        uint8_t *m_OutBuffer = (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
        // attach the allocated buffer to pFrameRGB
        av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, m_OutBuffer,
                             AV_PIX_FMT_RGB32, pInCodecCtx->width, pInCodecCtx->height, 4);
        if (encCtx == nullptr)
        {
            // open the encoder
            openEncoder(pInCodecCtx->width, pInCodecCtx->height, &encCtx);
        }
        int videoindex_out = 0;
        // set up the output file context
        setOutputCtx(encCtx, &m_pTsFmtCtx, videoindex_out);
        // Write file header
        if (avformat_write_header(m_pTsFmtCtx, nullptr) < 0)
        {
            avformat_free_context(m_pTsFmtCtx);
            m_pTsFmtCtx = nullptr;
            printf("Error occurred when opening output file\n");
            return -1;
        }
        printf("==============writer trail===================.\n");
        int count = 0;
        nRet = 0;
        while (av_read_frame(m_pInFmtCtx, packet) >= 0)   // read compressed H.264 data from m_pInFmtCtx into packet
        {
            if (packet->stream_index != nVideo_indx)      // keep only the video stream
            {
                av_packet_unref(packet);
                continue;
            }
            if (avcodec_send_packet(pInCodecCtx, packet) < 0)   // send the H.264 data in packet to the decoder
            {
                break;
            }
            av_packet_unref(packet);
            got_picture = avcodec_receive_frame(pInCodecCtx, pFrame);   // fetch the decoded YUV frame into pFrame
            if (0 == got_picture)   // one frame decoded
            {
                // scale / convert the decoded frame and emit it for display
                sws_scale(m_SwsContext, (uint8_t const * const *)pFrame->data,
                          pFrame->linesize, 0, pInCodecCtx->height,
                          pFrameRGB->data, pFrameRGB->linesize);
                // wrap the RGB buffer in a QImage
                QImage tmmImage((uchar *)m_OutBuffer, pInCodecCtx->width, pInCodecCtx->height, QImage::Format_RGB32);
                QImage image = tmmImage.copy();
                // emit the QImage
                emit Sig_GetOneFrame(image);

                setDecoderPts(nVideo_indx, count, pFrame);
                count++;
                // send the raw frame to the encoder
                nRet = avcodec_send_frame(encCtx, pFrame);
                if (nRet < 0)
                {
                    continue;
                }
                // fetch the encoded packets from the encoder
                while (nRet >= 0)
                {
                    nRet = avcodec_receive_packet(encCtx, newpkt);
                    if (nRet < 0)
                    {
                        break;
                    }
                    setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
                    printf("Write packet. size:%5d\tpts:%lld\n", newpkt->size, newpkt->pts);
                    if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
                    {
                        printf("Error muxing packet\n");
                        goto end;
                    }
                    av_packet_unref(newpkt);
                }
            }
        }
        // drain the decoder: send a NULL packet, then read out the remaining frames
        avcodec_send_packet(pInCodecCtx, nullptr);
        while (avcodec_receive_frame(pInCodecCtx, pFrame) == 0)
        {
            setDecoderPts(nVideo_indx, count, pFrame);
            count++;
            // send the remaining raw frames to the encoder
            if (avcodec_send_frame(encCtx, pFrame) < 0)
            {
                break;
            }
            while (avcodec_receive_packet(encCtx, newpkt) == 0)
            {
                setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
                printf("Write packet. size:%5d\tpts:%lld\n", newpkt->size, newpkt->pts);
                if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
                {
                    printf("Error muxing packet\n");
                    goto end;
                }
                av_packet_unref(newpkt);
            }
        }
        // drain the encoder: send a NULL frame, then write out the remaining packets
        avcodec_send_frame(encCtx, nullptr);
        while (avcodec_receive_packet(encCtx, newpkt) == 0)
        {
            setEncoderPts(nVideo_indx, frame_index, videoindex_out, newpkt);
            if (av_interleaved_write_frame(m_pTsFmtCtx, newpkt) < 0)
            {
                printf("Error muxing packet\n");
                goto end;
            }
            av_packet_unref(newpkt);
        }
        // Write file trailer
        av_write_trailer(m_pTsFmtCtx);
    end:
        av_frame_free(&pFrame);
        av_frame_free(&pFrameRGB);
        av_packet_free(&newpkt);
        av_packet_free(&packet);
        sws_freeContext(m_SwsContext);
        av_free(m_OutBuffer);
        avcodec_free_context(&pInCodecCtx);
        avcodec_free_context(&encCtx);
        std::cout << "rtsp's h264 to ts end";
        return 0;
    }
    void ffmpegMananger::setDecoderPts(int idx, int count, AVFrame *pFrame)
    {
        AVStream *in_stream = m_pInFmtCtx->streams[idx];
        AVRational in_time_base1 = in_stream->time_base;
        // Duration between 2 frames (us)
        int64_t in_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
        pFrame->pts = (double)(count * in_duration) / (double)(av_q2d(in_time_base1) * AV_TIME_BASE);
    }

    void ffmpegMananger::setEncoderPts(int nVideo_indx, int frame_index, int videoindex_out, AVPacket *newpkt)
    {
        AVStream *in_stream = m_pInFmtCtx->streams[nVideo_indx];
        AVStream *out_stream = m_pTsFmtCtx->streams[videoindex_out];
        if (newpkt->stream_index == nVideo_indx)
        {
            // FIX: No PTS (Example: Raw H.264)
            // Simple Write PTS
            if (newpkt->pts == AV_NOPTS_VALUE)
            {
                // Write PTS
                AVRational time_base1 = in_stream->time_base;
                // Duration between 2 frames (us)
                int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(in_stream->r_frame_rate);
                // Parameters
                newpkt->pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                newpkt->dts = newpkt->pts;
                newpkt->duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
                frame_index++;
            }
        }
        // Convert PTS/DTS from the input stream time base to the output stream time base
        newpkt->pts = av_rescale_q_rnd(newpkt->pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        newpkt->dts = av_rescale_q_rnd(newpkt->dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        newpkt->duration = av_rescale_q(newpkt->duration, in_stream->time_base, out_stream->time_base);
        newpkt->pos = -1;
        newpkt->stream_index = videoindex_out;
    }

    void ffmpegMananger::writeTail()
    {
        // Write file trailer
        av_write_trailer(m_pTsFmtCtx);
    }
    void ffmpegMananger::openEncoder(int width, int height, AVCodecContext **enc_ctx)
    {
        // use the libx264 encoder
        AVCodec *pCodec = avcodec_find_encoder_by_name("libx264");
        if (nullptr == pCodec)
        {
            printf("avcodec_find_encoder_by_name fail.\n");
            return;
        }
        // allocate the encoder context
        *enc_ctx = avcodec_alloc_context3(pCodec);
        if (nullptr == *enc_ctx)
        {
            printf("avcodec_alloc_context3(pCodec) fail.\n");
            return;
        }
        // sps/pps
        (*enc_ctx)->profile = FF_PROFILE_H264_MAIN;
        (*enc_ctx)->level = 30;                    // level 3.0
        // resolution
        (*enc_ctx)->width = width;
        (*enc_ctx)->height = height;
        // gop
        (*enc_ctx)->gop_size = 25;                 // I-frame interval
        (*enc_ctx)->keyint_min = 20;               // minimum interval for auto-inserted I-frames (optional)
        // B-frames
        (*enc_ctx)->max_b_frames = 0;              // no B-frames
        (*enc_ctx)->has_b_frames = 0;
        // reference frames
        (*enc_ctx)->refs = 3;                      // optional
        // input pixel format
        (*enc_ctx)->pix_fmt = AV_PIX_FMT_YUV420P;
        // bit rate
        (*enc_ctx)->bit_rate = 3000000;
        // frame rate / time base
        //(*enc_ctx)->time_base = (AVRational){1,25};   // interval between two frames
        (*enc_ctx)->time_base.num = 1;
        (*enc_ctx)->time_base.den = 25;
        //(*enc_ctx)->framerate = (AVRational){25,1};   // 25 frames per second
        (*enc_ctx)->framerate.num = 25;
        (*enc_ctx)->framerate.den = 1;
        if (avcodec_open2((*enc_ctx), pCodec, nullptr) < 0)
        {
            printf("avcodec_open2 fail.\n");
        }
        return;
    }
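
    The original post does not show how the class is driven from the UI. The following is a minimal usage sketch under the assumption of a QWidget-based window with a QLabel named label and two line edits for the URLs; all widget, slot and member names here are hypothetical:

    // hypothetical caller code; widget, slot and member names are assumptions
    // requires "QT += concurrent" in the .pro file
    #include <QtConcurrent>
    #include "ffmpegmananger.h"

    void Widget::on_btnStart_clicked()
    {
        m_pFFmpeg = new ffmpegMananger(this);
        m_pFFmpeg->getRtspURL(ui->lineEditRtsp->text());   // e.g. rtsp://user:pwd@192.168.1.10:554/stream
        m_pFFmpeg->getOutURL(ui->lineEditOut->text());     // e.g. D:/videos/out.ts

        // show every decoded frame in the QLabel; the cross-thread connection is queued,
        // so the lambda runs in the GUI thread
        connect(m_pFFmpeg, &ffmpegMananger::Sig_GetOneFrame, this, [this](QImage image) {
            ui->label->setPixmap(QPixmap::fromImage(image).scaled(ui->label->size(), Qt::KeepAspectRatio));
        });

        // ffmepgInput() blocks until the stream ends, so run it off the GUI thread
        QtConcurrent::run([this]() { m_pFFmpeg->ffmepgInput(); });
    }

    Because ffmepgInput() loops until the stream ends, it is run through QtConcurrent so the GUI thread stays responsive; the queued signal/slot connection then delivers each decoded QImage back to the GUI thread for display.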

     

  • Original article: https://blog.csdn.net/fittec/article/details/134042078