VS2017 Environment Configuration
Download both the dev and shared FFmpeg builds.
Add the include and lib folders from the dev package to "Include Directories" and "Library Directories" under VC++ Directories in the project's property pages.
Copy the eight DLL files from the bin folder of the shared package into the project's Debug output folder, i.e. the folder that contains the exe.
Then, in the property pages under Linker > Input > Additional Dependencies, add the eight import libraries (the .lib files matching the DLLs you just copied); do not delete the entries that are already there:
avcodec.lib;avdevice.lib;avfilter.lib;avformat.lib;avutil.lib;postproc.lib;swresample.lib;swscale.lib;
The SDL environment for VS is configured in the same way.
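If you prefer to keep the linker's Additional Dependencies list in source rather than in the property pages, MSVC's #pragma comment(lib, ...) directive does the same job; a minimal sketch (the SDL2 import library names SDL2.lib and SDL2main.lib are an assumption about your setup):

// MSVC-only: ask the linker to pull in these import libraries,
// equivalent to listing them under Linker > Input > Additional Dependencies.
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avdevice.lib")
#pragma comment(lib, "avfilter.lib")
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "postproc.lib")
#pragma comment(lib, "swresample.lib")
#pragma comment(lib, "swscale.lib")
#pragma comment(lib, "SDL2.lib")       // assumed SDL2 library names
#pragma comment(lib, "SDL2main.lib")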
Points to note:
The FFmpeg build must match your build target: pick the 64-bit build for an x64 target and the 32-bit build for an x86 target.
If the project references any 32-bit DLLs (built from C++), you can only target the 32-bit platform, otherwise it will also fail to build; the whole project must stay consistent.
To check whether the environment is configured correctly, simply include the headers below: if nothing is underlined in red, the configuration succeeded (a runtime check is sketched after the include block).
extern "C" {
#include "libavutil/imgutils.h"
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include "libswresample/swresample.h"
#include <SDL.h>
#include <SDL_video.h>
#include <SDL_render.h>
#include <SDL_rect.h>
}
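The missing red underlines only prove that the headers are found. To confirm that the import libraries and the copied DLLs are picked up as well, a minimal runtime check such as the sketch below can be built and run (it only prints version strings; nothing in it is specific to this project):

#include <stdio.h>
extern "C" {
#include <libavutil/avutil.h>   // av_version_info()
#include <SDL.h>                // SDL_GetVersion()
}

int main(int argc, char *argv[])
{
    SDL_version v;
    SDL_GetVersion(&v);
    // If this links and runs, the .lib files and the DLLs next to the exe are both in place.
    printf("FFmpeg: %s\n", av_version_info());
    printf("SDL:    %d.%d.%d\n", v.major, v.minor, v.patch);
    return 0;
}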
The FFmpeg Decoding Flow
(The decoding flow chart here is borrowed from Lei Xiaohua ("雷神").)
1. Initialize the demuxer
av_register_all(); // deprecated and unnecessary since FFmpeg 4.0, harmless with older builds
2. Open the video file
avformat_open_input()
3. Read the stream information
avformat_find_stream_info()
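A minimal sketch of steps 1-3 with error checks (the file name "Test.ts" is simply the sample used by the full program later in this post):

AVFormatContext *p_fmt_ctx = NULL;

av_register_all();                                               // 1. initialize the demuxers
if (avformat_open_input(&p_fmt_ctx, "Test.ts", NULL, NULL) != 0) // 2. open the file, read the header
    return -1;
if (avformat_find_stream_info(p_fmt_ctx, NULL) < 0)              // 3. read the stream information
    return -1;
av_dump_format(p_fmt_ctx, 0, "Test.ts", 0);                      // optional: print a stream summary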
4. Find the first video stream and obtain the decoder parameters from it
v_idx = -1;
for (i = 0; i < p_fmt_ctx->nb_streams; i++)
{
    if (p_fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
    {
        v_idx = i;
        printf("Find a video stream, index %d\n", v_idx);
        frame_rate = p_fmt_ctx->streams[i]->avg_frame_rate.num /
                     p_fmt_ctx->streams[i]->avg_frame_rate.den;
        break;
    }
}
Alternatively, you can call the following directly (a sketch follows):
av_find_best_stream()
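A minimal sketch of that call, assuming p_fmt_ctx has already been opened as in steps 2-3 (note that recent FFmpeg versions declare the decoder argument as const AVCodec **):

AVCodec *p_codec = NULL;
// Ask FFmpeg for the "best" video stream and, optionally, its decoder in one call.
int v_idx = av_find_best_stream(p_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &p_codec, 0);
if (v_idx < 0)
    printf("Can't find a video stream\n");      // v_idx is a negative AVERROR code on failure
else
    printf("Find a video stream, index %d\n", v_idx);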
5. Find the decoder
avcodec_find_decoder()
6. Open the decoder
avcodec_open2()
7. Allocate an AVFrame to hold the decoded output
av_frame_alloc() // replaces the deprecated avcodec_alloc_frame()
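Between steps 5 and 6, the codecpar-based API also needs an AVCodecContext to be allocated and filled from the stream parameters, which the numbered list glosses over. A minimal sketch of steps 5-7 (error checks omitted here; the full program below checks every return value):

AVCodecParameters *p_codec_par = p_fmt_ctx->streams[v_idx]->codecpar;
AVCodec *p_codec = avcodec_find_decoder(p_codec_par->codec_id);   // 5. find the decoder
AVCodecContext *p_codec_ctx = avcodec_alloc_context3(p_codec);    //    allocate a codec context
avcodec_parameters_to_context(p_codec_ctx, p_codec_par);          //    copy the stream parameters into it
avcodec_open2(p_codec_ctx, p_codec, NULL);                        // 6. open the decoder
AVFrame *p_frm_raw = av_frame_alloc();                            // 7. frame that will receive decoded pictures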
8. Keep pulling frame data (packets) out of the stream
av_read_frame()
9. Send the data to the decoder
avcodec_send_packet() // corresponds to avcodec_decode_video2() in earlier versions
10. Receive the decoder output and process the video frame; each call decodes one frame
avcodec_receive_frame() // corresponds to avcodec_decode_video2() in earlier versions
The old decoding function avcodec_decode_video2() has been split into two functions: avcodec_send_packet() and avcodec_receive_frame().
That is the end of the decoding flow; what follows is handling of the decoded data. You can write the encoded video stream to a file with av_write_frame(), or play the video with SDL and similar libraries.
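Boiled down, steps 8-10 form the send/receive loop sketched below; handle_frame() is a hypothetical placeholder for whatever you do with each decoded frame (sws_scale() plus SDL display, writing it out, and so on):

while (av_read_frame(p_fmt_ctx, p_packet) >= 0)                   // 8. read one packet
{
    if (p_packet->stream_index == v_idx)
    {
        ret = avcodec_send_packet(p_codec_ctx, p_packet);         // 9. feed the packet to the decoder
        while (ret >= 0)
        {
            ret = avcodec_receive_frame(p_codec_ctx, p_frm_raw);  // 10. pull out decoded frames
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
                break;                      // needs more input / decoder fully flushed
            handle_frame(p_frm_raw);        // placeholder: process one decoded frame
        }
    }
    av_packet_unref(p_packet);
}
avcodec_send_packet(p_codec_ctx, NULL);                           // a NULL packet enters draining mode
while (avcodec_receive_frame(p_codec_ctx, p_frm_raw) == 0)
    handle_frame(p_frm_raw);                                      // frames still buffered in the decoder

The complete player below does the same thing, driven by SDL refresh events instead of a tight loop.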
#include <iostream>
#include <stdio.h>

#define __STDC_CONSTANT_MACROS

extern "C" {
#include "libavutil/imgutils.h"
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include "libswresample/swresample.h"
#include <SDL.h>
#include <SDL_video.h>
#include <SDL_render.h>
#include <SDL_rect.h>
}

#define SDL_USEREVENT_REFRESH (SDL_USEREVENT + 1)

static bool s_playing_exit = false;
static bool s_playing_pause = false;

// Pushes a refresh event at a fixed interval derived from the frame rate passed in via opaque.
int sdl_thread_handle_refreshing(void *opaque)
{
    SDL_Event sdl_event;
    int frame_rate = *((int *)opaque);
    int interval = (frame_rate > 0) ? 1000 / frame_rate : 40;
    printf("frame rate %d FPS, refresh interval %d ms\n", frame_rate, interval);
    while (!s_playing_exit)
    {
        if (!s_playing_pause)
        {
            sdl_event.type = SDL_USEREVENT_REFRESH;
            SDL_PushEvent(&sdl_event);
        }
        SDL_Delay(interval);
    }
    return 0;
}

int main(int argc, char *argv[])
{
    AVFormatContext*   p_fmt_ctx = NULL;
    AVCodecContext*    p_codec_ctx = NULL;
    AVCodecParameters* p_codec_par = NULL;
    AVCodec*           p_codec = NULL;
    AVFrame*           p_frm_raw = NULL;
    AVFrame*           p_frm_yuv = NULL;
    AVPacket*          p_packet = NULL;
    struct SwsContext* sws_ctx = NULL;
    int                buf_size;
    uint8_t*           buffer = NULL;
    unsigned int       i;
    int                v_idx;
    int                ret;
    int                res;
    int                frame_rate;
    SDL_Window*        screen;
    SDL_Renderer*      sdl_renderer;
    SDL_Texture*       sdl_texture;
    SDL_Rect           sdl_rect;
    SDL_Thread*        sdl_thread;
    SDL_Event          sdl_event;

    res = 0;
    const char *filename = "Test.ts";           // the video file to play

    av_register_all();                          // initialize the demuxers (no-op since FFmpeg 4.0)
    //avformat_network_init();                  // initialize networking (only needed for URLs)

    ret = avformat_open_input(&p_fmt_ctx, filename, NULL, NULL);
    if (ret != 0)
    {
        printf("avformat_open_input() failed %d\n", ret);
        res = -1;
        goto exit0;
    }

    ret = avformat_find_stream_info(p_fmt_ctx, NULL);
    if (ret < 0)
    {
        printf("avformat_find_stream_info() failed %d\n", ret);
        res = -1;
        goto exit1;
    }

    av_dump_format(p_fmt_ctx, 0, filename, 0);  // print a summary of the container and its streams

    // Find the first video stream
    v_idx = -1;
    for (i = 0; i < p_fmt_ctx->nb_streams; i++)
    {
        if (p_fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            v_idx = i;
            printf("Find a video stream, index %d\n", v_idx);
            frame_rate = p_fmt_ctx->streams[i]->avg_frame_rate.num /
                         p_fmt_ctx->streams[i]->avg_frame_rate.den;
            break;
        }
    }
    if (v_idx == -1)
    {
        printf("Can't find a video stream\n");
        res = -1;
        goto exit1;
    }
    /* Equivalent: find the video stream in one call
    v_idx = av_find_best_stream(p_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &p_codec, 0);
    printf("Find a video stream, index %d\n", v_idx);
    */

    // Build the decoder AVCodecContext for the video stream
    // a1. Get the decoder parameters (AVCodecParameters)
    p_codec_par = p_fmt_ctx->streams[v_idx]->codecpar;

    // a2. Find the decoder
    p_codec = avcodec_find_decoder(p_codec_par->codec_id);
    if (p_codec == NULL)
    {
        printf("Can't find codec\n");
        res = -1;
        goto exit1;
    }

    // a3. Build the decoder AVCodecContext
    // a3.1 Allocate the context; p_codec initializes the relevant members to default values
    p_codec_ctx = avcodec_alloc_context3(p_codec);
    if (p_codec_ctx == NULL)
    {
        printf("avcodec_alloc_context3() failed\n");
        res = -1;
        goto exit1;
    }
    // a3.2 Copy the stream parameters into the context: p_codec_par ==> p_codec_ctx
    ret = avcodec_parameters_to_context(p_codec_ctx, p_codec_par);
    if (ret < 0)
    {
        printf("avcodec_parameters_to_context() failed %d\n", ret);
        res = -1;
        goto exit2;
    }
    // a3.3 Open the decoder with p_codec; initialization of p_codec_ctx is complete
    ret = avcodec_open2(p_codec_ctx, p_codec, NULL);
    if (ret < 0)
    {
        printf("avcodec_open2() failed %d\n", ret);
        res = -1;
        goto exit2;
    }

    // a4. Allocate AVFrames
    // a4.1 Allocate the AVFrame structures
    p_frm_raw = av_frame_alloc();
    if (p_frm_raw == NULL)
    {
        printf("av_frame_alloc() for p_frm_raw failed\n");
        res = -1;
        goto exit2;
    }
    p_frm_yuv = av_frame_alloc();
    if (p_frm_yuv == NULL)
    {
        printf("av_frame_alloc() for p_frm_yuv failed\n");
        res = -1;
        goto exit3;
    }

    // a4.2 Manually allocate the buffer behind AVFrame.data[], used to store the destination
    //      frame of sws_scale(). p_frm_raw's data buffers are allocated during av_read_frame()/
    //      decoding, so no manual allocation is needed; p_frm_yuv has nowhere else to get one,
    //      so it is allocated here.
    buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                        p_codec_ctx->width,
                                        p_codec_ctx->height,
                                        1);
    // buffer will serve as the video data buffer of p_frm_yuv
    buffer = (uint8_t *)av_malloc(buf_size);
    if (buffer == NULL)
    {
        printf("av_malloc() for buffer failed\n");
        res = -1;
        goto exit4;
    }
    // Fill p_frm_yuv->data and p_frm_yuv->linesize according to the given parameters
    ret = av_image_fill_arrays(p_frm_yuv->data, p_frm_yuv->linesize,
                               buffer,
                               AV_PIX_FMT_YUV420P,
                               p_codec_ctx->width,
                               p_codec_ctx->height,
                               1);
    if (ret < 0)
    {
        printf("av_image_fill_arrays() failed %d\n", ret);
        res = -1;
        goto exit5;
    }

    // a5. Initialize the sws context used for the image conversion below.
    //     The sixth parameter is an FFmpeg pixel format; FFmpeg's AV_PIX_FMT_YUV420P corresponds
    //     to SDL's SDL_PIXELFORMAT_IYUV. If the decoded image format is not supported by SDL,
    //     SDL cannot display it correctly without this conversion.
    sws_ctx = sws_getContext(p_codec_ctx->width,
                             p_codec_ctx->height,
                             p_codec_ctx->pix_fmt,
                             p_codec_ctx->width,
                             p_codec_ctx->height,
                             AV_PIX_FMT_YUV420P,
                             SWS_BICUBIC,       // #define SWS_BICUBIC 4
                             NULL, NULL, NULL);
    if (sws_ctx == NULL)
    {
        printf("sws_getContext() failed\n");
        res = -1;
        goto exit6;
    }

    // Image display module
    // b1. Initialize the SDL subsystems
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER))
    {
        printf("SDL_Init() failed: %s\n", SDL_GetError());
        res = -1;
        goto exit6;
    }

    // b2. Create the SDL window; SDL_Window is the video window that pops up when the program runs
    screen = SDL_CreateWindow("simple ffplayer",
                              SDL_WINDOWPOS_UNDEFINED,
                              SDL_WINDOWPOS_UNDEFINED,
                              p_codec_ctx->width,
                              p_codec_ctx->height,
                              SDL_WINDOW_OPENGL);
    if (screen == NULL)
    {
        printf("SDL_CreateWindow() failed: %s\n", SDL_GetError());
        res = -1;
        goto exit7;
    }

    // b3. Create the SDL_Renderer
    sdl_renderer = SDL_CreateRenderer(screen, -1, 0);
    if (sdl_renderer == NULL)
    {
        printf("SDL_CreateRenderer() failed: %s\n", SDL_GetError());
        res = -1;
        goto exit7;
    }

    // b4. Create the SDL_Texture; one SDL_Texture corresponds to one frame of YUV data.
    //     The second parameter is an SDL pixel format.
    sdl_texture = SDL_CreateTexture(sdl_renderer,
                                    SDL_PIXELFORMAT_IYUV,
                                    SDL_TEXTUREACCESS_STREAMING,
                                    p_codec_ctx->width,
                                    p_codec_ctx->height);
    if (sdl_texture == NULL)
    {
        printf("SDL_CreateTexture() failed: %s\n", SDL_GetError());
        res = -1;
        goto exit7;
    }

    sdl_rect.x = 0;
    sdl_rect.y = 0;
    sdl_rect.w = p_codec_ctx->width;
    sdl_rect.h = p_codec_ctx->height;

    p_packet = av_packet_alloc();               // also initializes the packet fields
    if (p_packet == NULL)
    {
        printf("av_packet_alloc() failed\n");
        res = -1;
        goto exit7;
    }

    // b5. Create the timed refresh thread, which generates refresh events at the preset frame rate
    sdl_thread = SDL_CreateThread(sdl_thread_handle_refreshing, NULL, (void *)&frame_rate);
    if (sdl_thread == NULL)
    {
        printf("SDL_CreateThread() failed: %s\n", SDL_GetError());
        res = -1;
        goto exit8;
    }

    while (1)
    {
        // b6. Wait for a refresh event
        SDL_WaitEvent(&sdl_event);
        if (sdl_event.type == SDL_USEREVENT_REFRESH)
        {
            // a6. Read one packet from the video file.
            //     For video, one packet contains exactly one frame.
            while (av_read_frame(p_fmt_ctx, p_packet) == 0)
            {
                if (p_packet->stream_index == v_idx)    // got a video packet, stop reading
                {
                    break;
                }
            }

            // a7. Video decoding: packet ==> frame
            // a7.1 Send data to the decoder
            ret = avcodec_send_packet(p_codec_ctx, p_packet);
            if (ret != 0)
            {
                printf("avcodec_send_packet() failed %d\n", ret);
                res = -1;
                goto exit8;
            }
            // a7.2 Receive the decoder output; only video frames are handled here,
            //      each packet received is decoded into one frame
            ret = avcodec_receive_frame(p_codec_ctx, p_frm_raw);
            if (ret != 0)
            {
                if (ret == AVERROR_EOF)
                {
                    printf("avcodec_receive_frame(): the decoder has been fully flushed\n");
                }
                else if (ret == AVERROR(EAGAIN))
                {
                    printf("avcodec_receive_frame(): output is not available in this state, user must try to send new input\n");
                    continue;
                }
                else if (ret == AVERROR(EINVAL))
                {
                    printf("avcodec_receive_frame(): codec not opened, or it is an encoder\n");
                }
                else
                {
                    printf("avcodec_receive_frame(): legitimate decoding errors\n");
                }
                res = -1;
                goto exit8;
            }

            // a8. Image conversion: p_frm_raw->data ==> p_frm_yuv->data
            sws_scale(sws_ctx,
                      (const uint8_t *const *)p_frm_raw->data,
                      p_frm_raw->linesize,
                      0,
                      p_codec_ctx->height,
                      p_frm_yuv->data,
                      p_frm_yuv->linesize);

            // b7. Update the SDL_Texture with the new YUV pixel data
            SDL_UpdateYUVTexture(sdl_texture, &sdl_rect,
                                 p_frm_yuv->data[0], p_frm_yuv->linesize[0],
                                 p_frm_yuv->data[1], p_frm_yuv->linesize[1],
                                 p_frm_yuv->data[2], p_frm_yuv->linesize[2]);

            // b8. Clear the current render target with the draw color
            SDL_RenderClear(sdl_renderer);
            // b9. Copy (part of) the texture to the current render target
            SDL_RenderCopy(sdl_renderer, sdl_texture, NULL, &sdl_rect);
            // b10. Perform the rendering and update the screen
            SDL_RenderPresent(sdl_renderer);

            av_packet_unref(p_packet);
        }
        else if (sdl_event.type == SDL_KEYDOWN)
        {
            if (sdl_event.key.keysym.sym == SDLK_SPACE)     // space key toggles pause/continue
            {
                s_playing_pause = !s_playing_pause;
                printf("player %s\n", s_playing_pause ? "pause" : "continue");
            }
        }
        else if (sdl_event.type == SDL_QUIT)                // window closed
        {
            printf("SDL-event QUIT\n");
            s_playing_exit = true;
            break;
        }
        else
        {
        }
    }

    // fix: flush the frames remaining in the decoder. Sending a NULL packet puts the decoder
    // into draining mode; avcodec_receive_frame() then returns the buffered frames until AVERROR_EOF.
    ret = avcodec_send_packet(p_codec_ctx, NULL);
    if (ret != 0)
    {
        printf("avcodec_send_packet() for flushing failed %d\n", ret);
        res = -1;
        goto exit8;
    }
    while (1)
    {
        ret = avcodec_receive_frame(p_codec_ctx, p_frm_raw);
        if (ret == AVERROR_EOF)
        {
            printf("avcodec_receive_frame(): the decoder has been fully flushed\n");
            break;                          // all buffered frames have been drained
        }
        else if (ret != 0)
        {
            printf("avcodec_receive_frame() failed %d while flushing\n", ret);
            res = -1;
            goto exit8;
        }

        // Convert and display the drained frame exactly like a normal frame
        sws_scale(sws_ctx,
                  (const uint8_t *const *)p_frm_raw->data,
                  p_frm_raw->linesize,
                  0,
                  p_codec_ctx->height,
                  p_frm_yuv->data,
                  p_frm_yuv->linesize);
        SDL_UpdateYUVTexture(sdl_texture, &sdl_rect,
                             p_frm_yuv->data[0], p_frm_yuv->linesize[0],
                             p_frm_yuv->data[1], p_frm_yuv->linesize[1],
                             p_frm_yuv->data[2], p_frm_yuv->linesize[2]);
        SDL_RenderClear(sdl_renderer);
        SDL_RenderCopy(sdl_renderer, sdl_texture, NULL, &sdl_rect);
        SDL_RenderPresent(sdl_renderer);
    }

exit8:
    SDL_Quit();
exit7:
    av_packet_free(&p_packet);
exit6:
    sws_freeContext(sws_ctx);
exit5:
    av_free(buffer);
exit4:
    av_frame_free(&p_frm_yuv);
exit3:
    av_frame_free(&p_frm_raw);
exit2:
    avcodec_free_context(&p_codec_ctx);
exit1:
    avformat_close_input(&p_fmt_ctx);
exit0:
    return res;
}